const (
	// Default2QRecentRatio is the ratio of the 2Q cache dedicated
	// to recently added entries that have only been accessed once.
	Default2QRecentRatio = 0.25

	// Default2QGhostEntries is the default ratio of ghost
	// entries kept to track entries recently evicted
	Default2QGhostEntries = 0.50
)
const (
	// DefaultEvictedBufferSize defines the default buffer size to store evicted key/val
	DefaultEvictedBufferSize = 16
)
ARCCache is a thread-safe fixed size Adaptive Replacement Cache (ARC). ARC is an enhancement over the standard LRU cache in that it tracks both frequency and recency of use. This avoids a burst in access to new entries from evicting the frequently used older entries. It adds some additional tracking overhead to a standard LRU cache; computationally it is roughly 2x the cost, and the extra memory overhead is linear with the size of the cache. ARC has been patented by IBM, but is similar to the TwoQueueCache (2Q), which requires setting parameters.
type ARCCache struct {
// contains filtered or unexported fields
}
func NewARC(size int) (*ARCCache, error)
NewARC creates an ARC of the given size.
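A minimal usage sketch, not taken from the package docs, assuming the package is imported as lru (e.g. from github.com/hashicorp/golang-lru); it only exercises the methods listed below:

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// Create an ARC that holds at most 128 entries.
	cache, err := lru.NewARC(128)
	if err != nil {
		panic(err)
	}

	cache.Add("alpha", 1)
	cache.Add("beta", 2)

	// Get updates both recency and frequency tracking.
	if v, ok := cache.Get("alpha"); ok {
		fmt.Println("alpha =", v) // alpha = 1
	}

	// Contains checks membership without touching recency or frequency.
	fmt.Println(cache.Contains("beta"), cache.Len()) // true 2
}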
func (c *ARCCache) Add(key, value interface{})
Add adds a value to the cache.
func (c *ARCCache) Contains(key interface{}) bool
Contains is used to check if the cache contains a key without updating recency or frequency.
func (c *ARCCache) Get(key interface{}) (value interface{}, ok bool)
Get looks up a key's value from the cache.
func (c *ARCCache) Keys() []interface{}
Keys returns all the cached keys.
func (c *ARCCache) Len() int
Len returns the number of cached entries.
func (c *ARCCache) Peek(key interface{}) (value interface{}, ok bool)
Peek is used to inspect the cache value of a key without updating recency or frequency.
func (c *ARCCache) Purge()
Purge is used to clear the cache.
func (c *ARCCache) Remove(key interface{})
Remove is used to purge a key from the cache.
Cache is a thread-safe fixed size LRU cache.
type Cache struct {
// contains filtered or unexported fields
}
func New(size int) (*Cache, error)
New creates an LRU of the given size.
func NewWithEvict(size int, onEvicted func(key, value interface{})) (c *Cache, err error)
NewWithEvict constructs a fixed size cache with the given eviction callback.
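A short sketch of the eviction callback, assuming the same lru import as above; the size of 2 is chosen only to force an eviction quickly:

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// The callback fires for every entry that is evicted to make room.
	onEvicted := func(key, value interface{}) {
		fmt.Printf("evicted %v=%v\n", key, value)
	}
	cache, err := lru.NewWithEvict(2, onEvicted)
	if err != nil {
		panic(err)
	}

	cache.Add("a", 1)
	cache.Add("b", 2)
	evicted := cache.Add("c", 3) // "a" is the least recently used, so it is evicted
	fmt.Println(evicted)         // true
}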
func (c *Cache) Add(key, value interface{}) (evicted bool)
Add adds a value to the cache. Returns true if an eviction occurred.
func (c *Cache) Contains(key interface{}) bool
Contains checks if a key is in the cache, without updating the recent-ness or deleting it for being stale.
func (c *Cache) ContainsOrAdd(key, value interface{}) (ok, evicted bool)
ContainsOrAdd checks if a key is in the cache without updating the recent-ness or deleting it for being stale, and if not, adds the value. Returns whether found and whether an eviction occurred.
func (c *Cache) Get(key interface{}) (value interface{}, ok bool)
Get looks up a key's value from the cache.
func (c *Cache) GetOldest() (key, value interface{}, ok bool)
GetOldest returns the oldest entry.
func (c *Cache) Keys() []interface{}
Keys returns a slice of the keys in the cache, from oldest to newest.
func (c *Cache) Len() int
Len returns the number of items in the cache.
func (c *Cache) Peek(key interface{}) (value interface{}, ok bool)
Peek returns the key value (or undefined if not found) without updating the "recently used"-ness of the key.
func (c *Cache) PeekOrAdd(key, value interface{}) (previous interface{}, ok, evicted bool)
PeekOrAdd checks if a key is in the cache without updating the recent-ness or deleting it for being stale, and if not, adds the value. Returns the previous value (if any), whether the key was found, and whether an eviction occurred.
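To illustrate the difference between the two *OrAdd variants, a hedged sketch (keys and values are arbitrary), again assuming the lru import above:

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	cache, _ := lru.New(2)

	// "a" is absent, so ContainsOrAdd inserts it; nothing is evicted yet.
	found, evicted := cache.ContainsOrAdd("a", 1)
	fmt.Println(found, evicted) // false false

	// "a" is now present, so PeekOrAdd returns the stored value,
	// adds nothing, and leaves the recency order untouched.
	prev, ok, evicted := cache.PeekOrAdd("a", 99)
	fmt.Println(prev, ok, evicted) // 1 true false
}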
func (c *Cache) Purge()
Purge is used to completely clear the cache.
func (c *Cache) Remove(key interface{}) (present bool)
Remove removes the provided key from the cache.
func (c *Cache) RemoveOldest() (key, value interface{}, ok bool)
RemoveOldest removes the oldest item from the cache.
func (c *Cache) Resize(size int) (evicted int)
Resize changes the cache size.
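A small sketch tying together Keys ordering, GetOldest, and Resize, assuming the lru import above; the printed slices reflect the oldest-to-newest ordering documented for Keys:

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	cache, _ := lru.New(4)
	for i := 0; i < 4; i++ {
		cache.Add(i, i*i)
	}

	fmt.Println(cache.Keys()) // [0 1 2 3] (oldest first)

	if k, v, ok := cache.GetOldest(); ok {
		fmt.Println("oldest:", k, v) // oldest: 0 0
	}

	// Shrinking the cache evicts the oldest entries and reports how many were dropped.
	evicted := cache.Resize(2)
	fmt.Println(evicted, cache.Keys()) // 2 [2 3]
}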
TwoQueueCache is a thread-safe fixed size 2Q cache. 2Q is an enhancement over the standard LRU cache in that it tracks both frequently and recently used entries separately. This avoids a burst in access to new entries from evicting frequently used entries. It adds some additional tracking overhead to the standard LRU cache; computationally it is roughly 2x the cost, and it adds some metadata overhead. The ARCCache is similar, but does not require setting any parameters.
type TwoQueueCache struct {
// contains filtered or unexported fields
}
func New2Q(size int) (*TwoQueueCache, error)
New2Q creates a new TwoQueueCache using the default values for the parameters.
func New2QParams(size int, recentRatio, ghostRatio float64) (*TwoQueueCache, error)
New2QParams creates a new TwoQueueCache using the provided parameter values.
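A sketch contrasting the two constructors, assuming the lru import above; the explicit ratios simply mirror the package defaults shown at the top of this page:

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// New2Q uses Default2QRecentRatio (0.25) and Default2QGhostEntries (0.50).
	c1, err := lru.New2Q(128)
	if err != nil {
		panic(err)
	}

	// New2QParams exposes the same knobs so they can be tuned per workload.
	c2, err := lru.New2QParams(128, lru.Default2QRecentRatio, lru.Default2QGhostEntries)
	if err != nil {
		panic(err)
	}

	c1.Add("k", "v")
	c2.Add("k", "v")
	fmt.Println(c1.Len(), c2.Len()) // 1 1
}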
func (c *TwoQueueCache) Add(key, value interface{})
Add adds a value to the cache.
func (c *TwoQueueCache) Contains(key interface{}) bool
Contains is used to check if the cache contains a key without updating recency or frequency.
func (c *TwoQueueCache) Get(key interface{}) (value interface{}, ok bool)
Get looks up a key's value from the cache.
func (c *TwoQueueCache) Keys() []interface{}
Keys returns a slice of the keys in the cache. The frequently used keys are first in the returned slice.
func (c *TwoQueueCache) Len() int
Len returns the number of items in the cache.
func (c *TwoQueueCache) Peek(key interface{}) (value interface{}, ok bool)
Peek is used to inspect the cache value of a key without updating recency or frequency.
func (c *TwoQueueCache) Purge()
Purge is used to completely clear the cache.
func (c *TwoQueueCache) Remove(key interface{})
Remove removes the provided key from the cache.