package cache

import (
	"container/list"
	"sync"
	"time"

	"forge.lclr.dev/AI/xdebug-mcp/internal/cachegrind"
)

// DefaultCapacity is the number of entries a Cache holds when New is
// given a non-positive capacity.
const DefaultCapacity = 2

// entry is one cache slot. elem points at this entry's node in the
// recency list so promotion and eviction are O(1).
type entry struct {
	key     string
	profile *cachegrind.Profile
	modTime time.Time
	elem    *list.Element
}

// Cache is a thread-safe LRU cache keyed by absolute file path.
// Entries are additionally invalidated when the stored modification
// time no longer matches the one presented to Get.
type Cache struct {
	mu       sync.Mutex
	capacity int
	items    map[string]*entry
	order    list.List // front = most recently used, back = LRU
}

// New returns an empty Cache holding at most capacity entries.
// A non-positive capacity is replaced with DefaultCapacity; otherwise
// Set's eviction loop would clear the cache on every insert, leaving
// the cache effectively useless.
func New(capacity int) *Cache {
	if capacity <= 0 {
		capacity = DefaultCapacity
	}
	return &Cache{
		capacity: capacity,
		items:    make(map[string]*entry),
	}
}

// Get returns the cached Profile for key if it exists and was stored
// with the same modTime. A stale entry (modTime mismatch) is evicted
// and reported as a miss; a hit is promoted to most recently used.
func (c *Cache) Get(key string, modTime time.Time) (*cachegrind.Profile, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()

	e, ok := c.items[key]
	if !ok {
		return nil, false
	}
	if !e.modTime.Equal(modTime) {
		// The underlying file changed since this entry was stored;
		// the cached parse is stale, so drop it.
		c.evict(e)
		return nil, false
	}
	c.order.MoveToFront(e.elem)
	return e.profile, true
}

// Set stores profile under key, replacing any existing entry for the
// same key and evicting least-recently-used entries until the cache
// is within capacity. The new entry becomes most recently used.
func (c *Cache) Set(key string, profile *cachegrind.Profile, modTime time.Time) {
	c.mu.Lock()
	defer c.mu.Unlock()

	if e, ok := c.items[key]; ok {
		c.evict(e)
	}
	for len(c.items) >= c.capacity {
		back := c.order.Back()
		if back == nil {
			break // defensive: map and list disagree; avoid spinning
		}
		c.evict(back.Value.(*entry))
	}
	e := &entry{key: key, profile: profile, modTime: modTime}
	e.elem = c.order.PushFront(e)
	c.items[key] = e
}

// evict removes e from both the recency list and the lookup map.
// Callers must hold c.mu.
func (c *Cache) evict(e *entry) {
	c.order.Remove(e.elem)
	delete(c.items, e.key)
}