Mirror of https://github.com/TwiN/gatus.git (synced 2024-11-07 08:34:15 +01:00)
chore(deps): Update TwiN/gocache to v2.2.0
This commit is contained in:
parent f1ce83c211
commit c172e733be
go.mod (2 changed lines)

@@ -4,7 +4,7 @@ go 1.19

 require (
     github.com/TwiN/g8 v1.4.0
-    github.com/TwiN/gocache/v2 v2.1.1
+    github.com/TwiN/gocache/v2 v2.2.0
     github.com/TwiN/health v1.5.0
     github.com/TwiN/whois v1.1.0
     github.com/coreos/go-oidc/v3 v3.1.0
go.sum (4 changed lines)

@@ -35,8 +35,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/TwiN/g8 v1.4.0 h1:RUk5xTtxKCdMo0GGSbBVyjtAAfi2nqVbA9E0C4u5Cxo=
 github.com/TwiN/g8 v1.4.0/go.mod h1:ECyGJsoIb99klUfvVQoS1StgRLte9yvvPigGrHdy284=
-github.com/TwiN/gocache/v2 v2.1.1 h1:W/GLImqa+pZVIH9pcWEn1cBgy1KU66fUcBjOnPhjuno=
-github.com/TwiN/gocache/v2 v2.1.1/go.mod h1:SnUuBsrwGQeNcDG6vhkOMJnqErZM0JGjgIkuKryokYA=
+github.com/TwiN/gocache/v2 v2.2.0 h1:M3B36KyH24BntxLrLaUb2kgTdq8DzCnfod0IekLG57w=
+github.com/TwiN/gocache/v2 v2.2.0/go.mod h1:SnUuBsrwGQeNcDG6vhkOMJnqErZM0JGjgIkuKryokYA=
 github.com/TwiN/health v1.5.0 h1:ETTtbQfUbiiIiVTSpAiNzesHQvm8qarV/8ctlZsVhwA=
 github.com/TwiN/health v1.5.0/go.mod h1:Z6TszwQPMvtSiVx1QMidVRgvVr4KZGfiwqcD7/Z+3iw=
 github.com/TwiN/whois v1.1.0 h1:lhyrC/9yIXntEnbJ+0IBy9Z5NBcreieYyamlvniwq88=
vendor/github.com/TwiN/gocache/v2/README.md (118 changed lines, generated, vendored)
@@ -70,12 +70,14 @@ cache.StartJanitor()
 | WithMaxSize | Sets the max size of the cache. `gocache.NoMaxSize` means there is no limit. If not set, the default max size is `gocache.DefaultMaxSize`. |
 | WithMaxMemoryUsage | Sets the max memory usage of the cache. `gocache.NoMaxMemoryUsage` means there is no limit. The default behavior is to not evict based on memory usage. |
 | WithEvictionPolicy | Sets the eviction algorithm to be used when the cache reaches the max size. If not set, the default eviction policy is `gocache.FirstInFirstOut` (FIFO). |
+| WithDefaultTTL | Sets the default TTL for each entry. |
 | WithForceNilInterfaceOnNilPointer | Configures whether values with a nil pointer passed to write functions should be forcefully set to nil. Defaults to true. |
 | StartJanitor | Starts the janitor, which is in charge of deleting expired cache entries in the background. |
 | StopJanitor | Stops the janitor. |
-| Set | Same as `SetWithTTL`, but with no expiration (`gocache.NoExpiration`) |
-| SetAll | Same as `Set`, but in bulk |
+| Set | Same as `SetWithTTL`, but using the default TTL (which is `gocache.NoExpiration`, unless configured otherwise). |
 | SetWithTTL | Creates or updates a cache entry with the given key, value and expiration time. If the max size after the aforementioned operation is above the configured max size, the tail will be evicted. Depending on the eviction policy, the tail is defined as the oldest |
+| SetAll | Same as `Set`, but in bulk. |
+| SetAllWithTTL | Same as `SetWithTTL`, but in bulk. |
 | Get | Gets a cache entry by its key. |
 | GetByKeys | Gets a map of entries by their keys. The resulting map will contain all keys, even if some of the keys in the slice passed as parameter were not present in the cache. |
 | GetAll | Gets all cache entries. |
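The new `WithDefaultTTL` option and the bulk `SetAllWithTTL` function are the user-facing additions in this upgrade. A minimal sketch of how the updated API fits together, based only on the functions documented in the table above (the key names, sizes and durations are made up for illustration):

```go
package main

import (
	"fmt"
	"time"

	"github.com/TwiN/gocache/v2"
)

func main() {
	// Entries written with Set/SetAll now inherit the default TTL instead of
	// always being stored with gocache.NoExpiration.
	cache := gocache.NewCache().
		WithMaxSize(10000).
		WithEvictionPolicy(gocache.LeastRecentlyUsed).
		WithDefaultTTL(10 * time.Minute)
	cache.StartJanitor() // deletes expired entries in the background
	defer cache.StopJanitor()

	cache.Set("key", "value")                                  // expires after the default TTL (10m)
	cache.SetWithTTL("key-with-ttl", "value", time.Hour)       // an explicit TTL overrides the default
	cache.SetAllWithTTL(map[string]any{"k1": "v1"}, time.Hour) // bulk variant added in v2.2.0

	if value, exists := cache.Get("key"); exists {
		fmt.Println("key =", value)
	}
	fmt.Println("cache size:", cache.Count())
}
```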
@@ -131,7 +133,7 @@ func main() {

     cache.Set("key", "value")
     cache.SetWithTTL("key-with-ttl", "value", 60*time.Minute)
-    cache.SetAll(map[string]interface{}{"k1": "v1", "k2": "v2", "k3": "v3"})
+    cache.SetAll(map[string]any{"k1": "v1", "k2": "v2", "k3": "v3"})

     fmt.Println("[Count] Cache size:", cache.Count())

@@ -247,7 +249,7 @@ but if you're looking into using a library like gocache, odds are, you want more

 ### Results
 | key | value |
-|:------ |:-------- |
+|:-------|:---------|
 | goos | windows |
 | goarch | amd64 |
 | cpu | i7-9700K |
@@ -255,66 +257,54 @@ but if you're looking into using a library like gocache, odds are, you want more

 ```
 // Normal map
-BenchmarkMap_Get
-BenchmarkMap_Get-8  46087372  26.7 ns/op
-BenchmarkMap_Set
-BenchmarkMap_Set/small_value-8  3841911  389 ns/op
-BenchmarkMap_Set/medium_value-8  3887074  391 ns/op
-BenchmarkMap_Set/large_value-8  3921956  393 ns/op
-// Gocache
-BenchmarkCache_Get
-BenchmarkCache_Get/FirstInFirstOut-8  27273036  46.4 ns/op
-BenchmarkCache_Get/LeastRecentlyUsed-8  26648248  46.3 ns/op
-BenchmarkCache_Set
-BenchmarkCache_Set/FirstInFirstOut_small_value-8  2919584  405 ns/op
-BenchmarkCache_Set/FirstInFirstOut_medium_value-8  2990841  391 ns/op
-BenchmarkCache_Set/FirstInFirstOut_large_value-8  2970513  391 ns/op
-BenchmarkCache_Set/LeastRecentlyUsed_small_value-8  2962939  402 ns/op
-BenchmarkCache_Set/LeastRecentlyUsed_medium_value-8  2962963  390 ns/op
-BenchmarkCache_Set/LeastRecentlyUsed_large_value-8  2962928  394 ns/op
-BenchmarkCache_SetUsingMaxMemoryUsage
-BenchmarkCache_SetUsingMaxMemoryUsage/small_value-8  2683356  447 ns/op
-BenchmarkCache_SetUsingMaxMemoryUsage/medium_value-8  2637578  441 ns/op
-BenchmarkCache_SetUsingMaxMemoryUsage/large_value-8  2672434  443 ns/op
-BenchmarkCache_SetWithMaxSize
-BenchmarkCache_SetWithMaxSize/100_small_value-8  4782966  252 ns/op
-BenchmarkCache_SetWithMaxSize/10000_small_value-8  4067967  296 ns/op
-BenchmarkCache_SetWithMaxSize/100000_small_value-8  3762055  328 ns/op
-BenchmarkCache_SetWithMaxSize/100_medium_value-8  4760479  252 ns/op
-BenchmarkCache_SetWithMaxSize/10000_medium_value-8  4081050  295 ns/op
-BenchmarkCache_SetWithMaxSize/100000_medium_value-8  3785050  330 ns/op
-BenchmarkCache_SetWithMaxSize/100_large_value-8  4732909  254 ns/op
-BenchmarkCache_SetWithMaxSize/10000_large_value-8  4079533  297 ns/op
-BenchmarkCache_SetWithMaxSize/100000_large_value-8  3712820  331 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU
-BenchmarkCache_SetWithMaxSizeAndLRU/100_small_value-8  4761732  254 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/10000_small_value-8  4084474  296 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/100000_small_value-8  3761402  329 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/100_medium_value-8  4783075  254 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/10000_medium_value-8  4103980  296 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/100000_medium_value-8  3646023  331 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/100_large_value-8  4779025  254 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/10000_large_value-8  4096192  296 ns/op
-BenchmarkCache_SetWithMaxSizeAndLRU/100000_large_value-8  3726823  331 ns/op
-BenchmarkCache_GetSetMultipleConcurrent
-BenchmarkCache_GetSetMultipleConcurrent-8  707142  1698 ns/op
-BenchmarkCache_GetSetConcurrentWithFrequentEviction
-BenchmarkCache_GetSetConcurrentWithFrequentEviction/FirstInFirstOut-8  3616256  334 ns/op
-BenchmarkCache_GetSetConcurrentWithFrequentEviction/LeastRecentlyUsed-8  3636367  331 ns/op
-BenchmarkCache_GetConcurrentWithLRU
-BenchmarkCache_GetConcurrentWithLRU/FirstInFirstOut-8  4405557  268 ns/op
-BenchmarkCache_GetConcurrentWithLRU/LeastRecentlyUsed-8  4445475  269 ns/op
-BenchmarkCache_WithForceNilInterfaceOnNilPointer
-BenchmarkCache_WithForceNilInterfaceOnNilPointer/true_with_nil_struct_pointer-8  6184591  191 ns/op
-BenchmarkCache_WithForceNilInterfaceOnNilPointer/true-8  6090482  191 ns/op
-BenchmarkCache_WithForceNilInterfaceOnNilPointer/false_with_nil_struct_pointer-8  6184629  187 ns/op
-BenchmarkCache_WithForceNilInterfaceOnNilPointer/false-8  6281781  186 ns/op
-(Trimmed "BenchmarkCache_" for readability)
-WithForceNilInterfaceOnNilPointerWithConcurrency
-WithForceNilInterfaceOnNilPointerWithConcurrency/true_with_nil_struct_pointer-8  4379564  268 ns/op
-WithForceNilInterfaceOnNilPointerWithConcurrency/true-8  4379558  265 ns/op
-WithForceNilInterfaceOnNilPointerWithConcurrency/false_with_nil_struct_pointer-8  4444456  261 ns/op
-WithForceNilInterfaceOnNilPointerWithConcurrency/false-8  4493896  262 ns/op
+BenchmarkMap_Get-8  49944228  24.2 ns/op  7 B/op  0 allocs/op
+BenchmarkMap_Set/small_value-8  3939964  394.1 ns/op  188 B/op  2 allocs/op
+BenchmarkMap_Set/medium_value-8  3868586  395.5 ns/op  191 B/op  2 allocs/op
+BenchmarkMap_Set/large_value-8  3992138  385.3 ns/op  186 B/op  2 allocs/op
+// Gocache
+BenchmarkCache_Get/FirstInFirstOut-8  27907950  44.3 ns/op  7 B/op  0 allocs/op
+BenchmarkCache_Get/LeastRecentlyUsed-8  28211396  44.2 ns/op  7 B/op  0 allocs/op
+BenchmarkCache_Set/FirstInFirstOut_small_value-8  3139538  373.5 ns/op  185 B/op  3 allocs/op
+BenchmarkCache_Set/FirstInFirstOut_medium_value-8  3099516  378.6 ns/op  186 B/op  3 allocs/op
+BenchmarkCache_Set/FirstInFirstOut_large_value-8  3086776  386.7 ns/op  186 B/op  3 allocs/op
+BenchmarkCache_Set/LeastRecentlyUsed_small_value-8  3070555  379.0 ns/op  187 B/op  3 allocs/op
+BenchmarkCache_Set/LeastRecentlyUsed_medium_value-8  3056928  383.8 ns/op  187 B/op  3 allocs/op
+BenchmarkCache_Set/LeastRecentlyUsed_large_value-8  3108250  383.8 ns/op  186 B/op  3 allocs/op
+BenchmarkCache_SetUsingMaxMemoryUsage/medium_value-8  2773315  449.0 ns/op  210 B/op  4 allocs/op
+BenchmarkCache_SetUsingMaxMemoryUsage/large_value-8  2731818  440.0 ns/op  211 B/op  4 allocs/op
+BenchmarkCache_SetUsingMaxMemoryUsage/small_value-8  2659296  446.8 ns/op  213 B/op  4 allocs/op
+BenchmarkCache_SetWithMaxSize/100_small_value-8  4848658  248.8 ns/op  114 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/10000_small_value-8  4117632  293.7 ns/op  106 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/100000_small_value-8  3867402  313.0 ns/op  110 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/100_medium_value-8  4750057  250.1 ns/op  113 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/10000_medium_value-8  4143772  294.5 ns/op  106 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/100000_medium_value-8  3768883  313.2 ns/op  111 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/100_large_value-8  4822646  251.1 ns/op  114 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/10000_large_value-8  4154428  291.6 ns/op  106 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSize/100000_large_value-8  3897358  313.7 ns/op  110 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/100_small_value-8  4784180  254.2 ns/op  114 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/10000_small_value-8  4067042  292.0 ns/op  106 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/100000_small_value-8  3832760  313.8 ns/op  111 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/100_medium_value-8  4846706  252.2 ns/op  114 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/10000_medium_value-8  4103817  292.5 ns/op  106 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/100000_medium_value-8  3845623  315.1 ns/op  111 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/100_large_value-8  4744513  257.9 ns/op  114 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/10000_large_value-8  3956316  299.5 ns/op  106 B/op  3 allocs/op
+BenchmarkCache_SetWithMaxSizeAndLRU/100000_large_value-8  3876843  351.3 ns/op  110 B/op  3 allocs/op
+BenchmarkCache_GetSetMultipleConcurrent-8  750088  1566.0 ns/op  128 B/op  8 allocs/op
+BenchmarkCache_GetSetConcurrentWithFrequentEviction/FirstInFirstOut-8  3836961  316.2 ns/op  80 B/op  1 allocs/op
+BenchmarkCache_GetSetConcurrentWithFrequentEviction/LeastRecentlyUsed-8  3846165  315.6 ns/op  80 B/op  1 allocs/op
+BenchmarkCache_GetConcurrently/FirstInFirstOut-8  4830342  239.8 ns/op  8 B/op  1 allocs/op
+BenchmarkCache_GetConcurrently/LeastRecentlyUsed-8  4895587  243.2 ns/op  8 B/op  1 allocs/op
+(Trimmed "BenchmarkCache_" for readability)
+WithForceNilInterfaceOnNilPointer/true_with_nil_struct_pointer-8  6901461  178.5 ns/op  7 B/op  1 allocs/op
+WithForceNilInterfaceOnNilPointer/true-8  6629566  180.7 ns/op  7 B/op  1 allocs/op
+WithForceNilInterfaceOnNilPointer/false_with_nil_struct_pointer-8  6282798  170.1 ns/op  7 B/op  1 allocs/op
+WithForceNilInterfaceOnNilPointer/false-8  6741382  172.6 ns/op  7 B/op  1 allocs/op
+WithForceNilInterfaceOnNilPointerWithConcurrency/true_with_nil_struct_pointer-8  4432951  258.0 ns/op  8 B/op  1 allocs/op
+WithForceNilInterfaceOnNilPointerWithConcurrency/true-8  4676943  244.4 ns/op  8 B/op  1 allocs/op
+WithForceNilInterfaceOnNilPointerWithConcurrency/false_with_nil_struct_pointer-8  4818418  239.6 ns/op  8 B/op  1 allocs/op
+WithForceNilInterfaceOnNilPointerWithConcurrency/false-8  5025937  238.2 ns/op  8 B/op  1 allocs/op
 ```
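Results in this shape come from `go test` benchmarks run with memory statistics enabled, which is what produces the `B/op` and `allocs/op` columns that are new in this version of the README. A rough, illustrative benchmark in that style, not taken from the gocache test suite:

```go
package gocache_test

import (
	"strconv"
	"testing"

	"github.com/TwiN/gocache/v2"
)

// Illustrative only; run with: go test -bench=BenchmarkIllustrativeGet -benchmem
func BenchmarkIllustrativeGet(b *testing.B) {
	cache := gocache.NewCache().WithMaxSize(gocache.DefaultMaxSize)
	for i := 0; i < 1000; i++ {
		cache.Set("key-"+strconv.Itoa(i), i)
	}
	b.ReportAllocs() // same effect as the -benchmem flag
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		cache.Get("key-" + strconv.Itoa(n%1000))
	}
}
```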
vendor/github.com/TwiN/gocache/v2/entry.go (8 changed lines, generated, vendored)
@@ -12,7 +12,7 @@ type Entry struct {
     Key string

     // Value is the value of the cache entry
-    Value interface{}
+    Value any

     // RelevantTimestamp is the variable used to store either:
     // - creation timestamp, if the Cache's EvictionPolicy is FirstInFirstOut

@@ -48,7 +48,7 @@ func (entry *Entry) SizeInBytes() int {
     return toBytes(entry.Key) + toBytes(entry.Value) + 32
 }

-func toBytes(value interface{}) int {
+func toBytes(value any) int {
     switch value.(type) {
     case string:
         return int(unsafe.Sizeof(value)) + len(value.(string))

@@ -60,9 +60,9 @@ func toBytes(value interface{}) int {
         return int(unsafe.Sizeof(value)) + 4
     case int64, uint64, int, uint, float64, complex128:
         return int(unsafe.Sizeof(value)) + 8
-    case []interface{}:
+    case []any:
         size := 0
-        for _, v := range value.([]interface{}) {
+        for _, v := range value.([]any) {
             size += toBytes(v)
         }
         return int(unsafe.Sizeof(value)) + size
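The entry.go changes are purely the `interface{}` to `any` rename; the size-estimation logic is untouched. `SizeInBytes` remains a rough approximation (interface header plus string length, fixed sizes per numeric kind, a flat 32-byte overhead per entry), which is what memory-based eviction via `WithMaxMemoryUsage` relies on. A small sketch using the exported `Entry` type from the hunk above (values invented):

```go
package main

import (
	"fmt"

	"github.com/TwiN/gocache/v2"
)

func main() {
	// A string entry: interface headers + key/value lengths + 32 bytes of overhead.
	entry := &gocache.Entry{Key: "name", Value: "john.doe"}
	fmt.Println(entry.SizeInBytes())

	// A slice value is summed element by element by toBytes.
	list := &gocache.Entry{Key: "ids", Value: []any{int64(1), int64(2), int64(3)}}
	fmt.Println(list.SizeInBytes())
}
```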
vendor/github.com/TwiN/gocache/v2/gocache.go (84 changed lines, generated, vendored)
@@ -37,6 +37,8 @@ var (
 )

 // Cache is the core struct of gocache which contains the data as well as all relevant configuration fields
+//
+// Do not instantiate this struct directly, use NewCache instead
 type Cache struct {
     // maxSize is the maximum amount of entries that can be in the cache at any given time
     // By default, this is set to DefaultMaxSize
@@ -50,6 +52,10 @@ type Cache struct {
     // evictionPolicy is the eviction policy
     evictionPolicy EvictionPolicy

+    // defaultTTL is the default TTL for each entry
+    // Defaults to NoExpiration
+    defaultTTL time.Duration
+
     // stats is the object that contains cache statistics/metrics
     stats *Statistics

@@ -143,12 +149,23 @@ func (cache *Cache) WithMaxMemoryUsage(maxMemoryUsageInBytes int) *Cache {
 }

 // WithEvictionPolicy sets eviction algorithm.
+//
 // Defaults to FirstInFirstOut (FIFO)
 func (cache *Cache) WithEvictionPolicy(policy EvictionPolicy) *Cache {
     cache.evictionPolicy = policy
     return cache
 }

+// WithDefaultTTL sets the default TTL for each entry (unless a different TTL is specified using SetWithTTL or SetAllWithTTL)
+//
+// Defaults to NoExpiration (-1)
+func (cache *Cache) WithDefaultTTL(ttl time.Duration) *Cache {
+    if ttl > 1 {
+        cache.defaultTTL = ttl
+    }
+    return cache
+}
+
 // WithForceNilInterfaceOnNilPointer sets whether all Set-like functions should set a value as nil if the
 // interface passed has a nil value but not a nil type.
 //
@@ -165,25 +182,27 @@ func (cache *Cache) WithEvictionPolicy(policy EvictionPolicy) *Cache {
 // is nil or not.
 //
 // If set to true (default):
-// cache := gocache.NewCache().WithForceNilInterfaceOnNilPointer(true)
-// cache.Set("key", (*Struct)(nil))
-// value, _ := cache.Get("key")
-// // the following returns true, because the interface{} was forcefully set to nil
-// if value == nil {}
-// // the following will panic, because the value has been casted to its type (which is nil)
-// if value.(*Struct) == nil {}
+//
+//    cache := gocache.NewCache().WithForceNilInterfaceOnNilPointer(true)
+//    cache.Set("key", (*Struct)(nil))
+//    value, _ := cache.Get("key")
+//    // the following returns true, because the interface{} (any) was forcefully set to nil
+//    if value == nil {}
+//    // the following will panic, because the value has been casted to its type (which is nil)
+//    if value.(*Struct) == nil {}
 //
 // If set to false:
-// cache := gocache.NewCache().WithForceNilInterfaceOnNilPointer(false)
-// cache.Set("key", (*Struct)(nil))
-// value, _ := cache.Get("key")
-// // the following returns false, because the interface{} returned has a non-nil type (*Struct)
-// if value == nil {}
-// // the following returns true, because the value has been casted to its type
-// if value.(*Struct) == nil {}
+//
+//    cache := gocache.NewCache().WithForceNilInterfaceOnNilPointer(false)
+//    cache.Set("key", (*Struct)(nil))
+//    value, _ := cache.Get("key")
+//    // the following returns false, because the interface{} (any) returned has a non-nil type (*Struct)
+//    if value == nil {}
+//    // the following returns true, because the value has been cast to its type
+//    if value.(*Struct) == nil {}
 //
 // In other words, if set to true, you do not need to cast the value returned from the cache to
-// to check if the value is nil.
+// check if the value is nil.
 //
 // Defaults to true
 func (cache *Cache) WithForceNilInterfaceOnNilPointer(forceNilInterfaceOnNilPointer bool) *Cache {
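The doc comment above only changes wording and formatting, but the behavior it describes is easy to get wrong, so here is the comment's own example as a runnable sketch (the `Struct` type is a placeholder):

```go
package main

import (
	"fmt"

	"github.com/TwiN/gocache/v2"
)

type Struct struct{ Name string }

func main() {
	// Default behavior (true): a typed nil pointer is stored as a plain nil interface.
	forced := gocache.NewCache().WithForceNilInterfaceOnNilPointer(true)
	forced.Set("key", (*Struct)(nil))
	value, _ := forced.Get("key")
	fmt.Println(value == nil) // true; asserting value.(*Struct) here would panic

	// Disabled (false): the interface keeps its non-nil type (*Struct),
	// so the untyped nil check fails and a type assertion is needed.
	raw := gocache.NewCache().WithForceNilInterfaceOnNilPointer(false)
	raw.Set("key", (*Struct)(nil))
	value, _ = raw.Get("key")
	fmt.Println(value == nil)           // false
	fmt.Println(value.(*Struct) == nil) // true
}
```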
@@ -194,12 +213,13 @@ func (cache *Cache) WithForceNilInterfaceOnNilPointer(forceNilInterfaceOnNilPoin
 // NewCache creates a new Cache
 //
 // Should be used in conjunction with Cache.WithMaxSize, Cache.WithMaxMemoryUsage and/or Cache.WithEvictionPolicy
-// gocache.NewCache().WithMaxSize(10000).WithEvictionPolicy(gocache.LeastRecentlyUsed)
 //
+//    gocache.NewCache().WithMaxSize(10000).WithEvictionPolicy(gocache.LeastRecentlyUsed)
 func NewCache() *Cache {
     return &Cache{
         maxSize:        DefaultMaxSize,
         evictionPolicy: FirstInFirstOut,
+        defaultTTL:     NoExpiration,
         stats:          &Statistics{},
         entries:        make(map[string]*Entry),
         mutex:          sync.RWMutex{},
@@ -209,15 +229,15 @@ func NewCache() *Cache {
 }

 // Set creates or updates a key with a given value
-func (cache *Cache) Set(key string, value interface{}) {
-    cache.SetWithTTL(key, value, NoExpiration)
+func (cache *Cache) Set(key string, value any) {
+    cache.SetWithTTL(key, value, cache.defaultTTL)
 }

 // SetWithTTL creates or updates a key with a given value and sets an expiration time (-1 is NoExpiration)
 //
 // The TTL provided must be greater than 0, or NoExpiration (-1). If a negative value that isn't -1 (NoExpiration) is
 // provided, the entry will not be created if the key doesn't exist
-func (cache *Cache) SetWithTTL(key string, value interface{}, ttl time.Duration) {
+func (cache *Cache) SetWithTTL(key string, value any, ttl time.Duration) {
     // An interface is only nil if both its value and its type are nil, however, passing a nil pointer as an interface{}
     // means that the interface itself is not nil, because the interface value is nil but not the type.
     if cache.forceNilInterfaceOnNilPointer {
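The substantive change in this hunk: `Set` now passes `cache.defaultTTL` instead of a hardcoded `NoExpiration`. Since `NewCache` initializes `defaultTTL` to `NoExpiration`, callers that never use `WithDefaultTTL` see no difference. A sketch of both cases (durations arbitrary):

```go
package main

import (
	"time"

	"github.com/TwiN/gocache/v2"
)

func main() {
	// No WithDefaultTTL: defaultTTL stays NoExpiration, so Set behaves exactly as in v2.1.1.
	permanent := gocache.NewCache()
	permanent.Set("config", "never-expires")

	// With WithDefaultTTL: every Set call now produces an entry that expires,
	// without having to migrate call sites to SetWithTTL.
	expiring := gocache.NewCache().WithDefaultTTL(30 * time.Second)
	expiring.Set("session", "expires-in-30s")
	expiring.SetWithTTL("session-long", "explicit-ttl-wins", 10*time.Minute)
}
```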
@@ -298,16 +318,21 @@ func (cache *Cache) SetWithTTL(key string, value interface{}, ttl time.Duration)
 }

 // SetAll creates or updates multiple values
-func (cache *Cache) SetAll(entries map[string]interface{}) {
+func (cache *Cache) SetAll(entries map[string]any) {
+    cache.SetAllWithTTL(entries, cache.defaultTTL)
+}
+
+// SetAllWithTTL creates or updates multiple values
+func (cache *Cache) SetAllWithTTL(entries map[string]any, ttl time.Duration) {
     for key, value := range entries {
-        cache.SetWithTTL(key, value, NoExpiration)
+        cache.SetWithTTL(key, value, ttl)
     }
 }

 // Get retrieves an entry using the key passed as parameter
 // If there is no such entry, the value returned will be nil and the boolean will be false
 // If there is an entry, the value returned will be the value cached and the boolean will be true
-func (cache *Cache) Get(key string) (interface{}, bool) {
+func (cache *Cache) Get(key string) (any, bool) {
     cache.mutex.Lock()
     entry, ok := cache.get(key)
     if !ok {
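`SetAll` is now a thin wrapper over the new `SetAllWithTTL`, so bulk writes pick up the default TTL as well. A short usage sketch (keys and values invented):

```go
package main

import (
	"fmt"
	"time"

	"github.com/TwiN/gocache/v2"
)

func main() {
	cache := gocache.NewCache().WithDefaultTTL(5 * time.Minute)

	// Bulk write using the default TTL (SetAll delegates to SetAllWithTTL).
	cache.SetAll(map[string]any{"k1": "v1", "k2": "v2"})

	// Bulk write with an explicit TTL for this batch only.
	cache.SetAllWithTTL(map[string]any{"t1": "v1", "t2": "v2"}, 30*time.Second)

	if value, exists := cache.Get("k1"); exists {
		fmt.Println("k1 =", value)
	}
}
```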
@@ -337,7 +362,7 @@ func (cache *Cache) Get(key string) (interface{}, bool) {

 // GetValue retrieves an entry using the key passed as parameter
 // Unlike Get, this function only returns the value
-func (cache *Cache) GetValue(key string) interface{} {
+func (cache *Cache) GetValue(key string) any {
     value, _ := cache.Get(key)
     return value
 }
@@ -346,8 +371,8 @@ func (cache *Cache) GetValue(key string) interface{} {
 // All keys are returned in the map, regardless of whether they exist or not, however, entries that do not exist in the
 // cache will return nil, meaning that there is no way of determining whether a key genuinely has the value nil, or
 // whether it doesn't exist in the cache using only this function.
-func (cache *Cache) GetByKeys(keys []string) map[string]interface{} {
-    entries := make(map[string]interface{})
+func (cache *Cache) GetByKeys(keys []string) map[string]any {
+    entries := make(map[string]any)
     for _, key := range keys {
         entries[key], _ = cache.Get(key)
     }
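As the doc comment above notes, `GetByKeys` returns an entry for every requested key and maps missing keys to nil, so nil alone cannot distinguish "absent" from "cached as nil". A small sketch of that behavior and the usual workaround via `Get`:

```go
package main

import (
	"fmt"

	"github.com/TwiN/gocache/v2"
)

func main() {
	cache := gocache.NewCache()
	cache.Set("present", 42)

	// One entry per requested key; "missing" maps to nil, which is indistinguishable
	// from a key that was genuinely cached with a nil value.
	entries := cache.GetByKeys([]string{"present", "missing"})
	for key, value := range entries {
		fmt.Printf("%s -> %v\n", key, value)
	}

	// When the distinction matters, fall back to Get's boolean.
	if _, exists := cache.Get("missing"); !exists {
		fmt.Println("missing is not in the cache")
	}
}
```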
@@ -365,8 +390,8 @@ func (cache *Cache) GetByKeys(keys []string) map[string]interface{} {
 // GetKeysByPattern is a good alternative if you want to retrieve entries that you do not have the key for, as it only
 // retrieves the keys and does not trigger active eviction and has a parameter for setting a limit to the number of keys
 // you wish to retrieve.
-func (cache *Cache) GetAll() map[string]interface{} {
-    entries := make(map[string]interface{})
+func (cache *Cache) GetAll() map[string]any {
+    entries := make(map[string]any)
     cache.mutex.Lock()
     for key, entry := range cache.entries {
         if entry.Expired() {
@@ -385,8 +410,9 @@ func (cache *Cache) GetAll() map[string]interface{} {
 // If the limit is above 0, the search will stop once the specified number of matching keys have been found.
 //
 // e.g.
-// cache.GetKeysByPattern("*some*", 0) will return all keys containing "some" in them
-// cache.GetKeysByPattern("*some*", 5) will return 5 keys (or less) containing "some" in them
+//
+//    cache.GetKeysByPattern("*some*", 0) will return all keys containing "some" in them
+//    cache.GetKeysByPattern("*some*", 5) will return 5 keys (or less) containing "some" in them
 //
 // Note that GetKeysByPattern does not trigger active evictions, nor does it count as accessing the entry (if LRU).
 // The reason for that behavior is that these two (active eviction and access) only applies when you access the value
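A hedged sketch of `GetKeysByPattern`, mirroring the examples in the doc comment above; the method's signature is not shown in this hunk, so the assumption here is that it returns the matching keys as a `[]string`:

```go
package main

import (
	"fmt"

	"github.com/TwiN/gocache/v2"
)

func main() {
	cache := gocache.NewCache()
	cache.SetAll(map[string]any{
		"user_john":    1,
		"user_jane":    2,
		"session_john": 3,
	})

	// Mirrors the doc comment's examples: 0 means no limit, a positive limit
	// stops the search after that many matches. Assumed return type: []string.
	allUsers := cache.GetKeysByPattern("*user*", 0)
	oneUser := cache.GetKeysByPattern("*user*", 1)
	fmt.Println(allUsers, oneUser)

	// Per the comment above, this neither triggers active eviction nor
	// counts as an access for LRU purposes.
	fmt.Println(len(cache.GetKeysByPattern("*john*", 0)))
}
```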
vendor/github.com/TwiN/gocache/v2/policy.go (2 changed lines, generated, vendored)
@@ -3,7 +3,7 @@ package gocache
 // EvictionPolicy is what dictates how evictions are handled
 type EvictionPolicy string

-var (
+const (
     // LeastRecentlyUsed is an eviction policy that causes the most recently accessed cache entry to be moved to the
     // head of the cache. Effectively, this causes the cache entries that have not been accessed for some time to
     // gradually move closer and closer to the tail, and since the tail is the entry that gets deleted when an eviction
vendor/modules.txt (2 changed lines, vendored)
@@ -1,7 +1,7 @@
 # github.com/TwiN/g8 v1.4.0
 ## explicit; go 1.19
 github.com/TwiN/g8
-# github.com/TwiN/gocache/v2 v2.1.1
+# github.com/TwiN/gocache/v2 v2.2.0
 ## explicit; go 1.19
 github.com/TwiN/gocache/v2
 # github.com/TwiN/health v1.5.0