Difficulty: 🟡 Intermediate | Time: 25-35 min | Key Concepts: Data Structures, HashMap + Doubly Linked List
Design and implement an LRU (Least Recently Used) Cache with O(1) time complexity for both get and put operations. This is a classic data structure problem frequently asked in interviews.
```python
from typing import Dict, Generic, Optional, TypeVar

K = TypeVar("K")
V = TypeVar("V")


class LRUCache(Generic[K, V]):
    """
    LRU Cache with O(1) get and put operations.

    Uses a HashMap for O(1) key lookup and a Doubly Linked List
    for O(1) ordering.
    """

    def __init__(self, capacity: int):
        if capacity <= 0:
            raise ValueError("Capacity must be positive")
        self.capacity = capacity
        self.cache: Dict[K, "Node[K, V]"] = {}
        self.list = DoublyLinkedList[K, V]()

    def get(self, key: K) -> Optional[V]:
        """
        Get value by key.

        Returns: Value if key exists, None otherwise.
        Side effect: Moves accessed key to front (most recently used).
        """
        if key not in self.cache:
            return None
        node = self.cache[key]
        # Move to front (mark as recently used)
        self.list.move_to_front(node)
        return node.value

    def put(self, key: K, value: V) -> None:
        """
        Insert or update a key-value pair.

        If key exists: update value and move to front.
        If key is new:
          - If at capacity: evict the least recently used entry
          - Add the new node to the front
        """
        if key in self.cache:
            # Update existing key
            node = self.cache[key]
            node.value = value
            self.list.move_to_front(node)
        else:
            # Check capacity
            if len(self.list) >= self.capacity:
                # Evict least recently used
                evicted = self.list.remove_last()
                if evicted:
                    del self.cache[evicted.key]
            # Add new node
            node = Node(key, value)
            self.cache[key] = node
            self.list.add_to_front(node)

    def delete(self, key: K) -> bool:
        """
        Explicitly remove a key from the cache.

        Returns: True if key was present and removed, False otherwise.
        """
        if key not in self.cache:
            return False
        node = self.cache[key]
        self.list.remove(node)
        del self.cache[key]
        return True

    def clear(self) -> None:
        """Remove all entries from the cache."""
        self.cache.clear()
        self.list = DoublyLinkedList[K, V]()

    def __len__(self) -> int:
        return len(self.list)

    def __contains__(self, key: K) -> bool:
        return key in self.cache

    def __repr__(self) -> str:
        items = []
        current = self.list.head.next
        while current != self.list.tail:
            items.append(f"{current.key}: {current.value}")
            current = current.next
        return f"LRUCache([{', '.join(items)}])"
```
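The class above references `Node` and `DoublyLinkedList` helpers that aren't shown. Here is a minimal sketch consistent with how they are used (in a single file these definitions would sit above `LRUCache`); the sentinel head/tail design and the `_size` counter are implementation choices, not from the original:

```python
class Node(Generic[K, V]):
    """List node. Stores the key so that evicting from the tail can
    also delete the matching HashMap entry in O(1)."""

    def __init__(self, key: K, value: V):
        self.key = key
        self.value = value
        self.prev: Optional["Node[K, V]"] = None
        self.next: Optional["Node[K, V]"] = None


class DoublyLinkedList(Generic[K, V]):
    """Doubly linked list with sentinel head/tail nodes.
    head.next is the most recently used node, tail.prev the least."""

    def __init__(self):
        self.head = Node(None, None)  # sentinel (key/value unused)
        self.tail = Node(None, None)  # sentinel (key/value unused)
        self.head.next = self.tail
        self.tail.prev = self.head
        self._size = 0

    def add_to_front(self, node: "Node[K, V]") -> None:
        node.prev = self.head
        node.next = self.head.next
        self.head.next.prev = node
        self.head.next = node
        self._size += 1

    def remove(self, node: "Node[K, V]") -> None:
        # O(1): the node carries direct references to both neighbors
        node.prev.next = node.next
        node.next.prev = node.prev
        self._size -= 1

    def move_to_front(self, node: "Node[K, V]") -> None:
        self.remove(node)
        self.add_to_front(node)

    def remove_last(self) -> Optional["Node[K, V]"]:
        if self._size == 0:
            return None
        last = self.tail.prev
        self.remove(last)
        return last

    def __len__(self) -> int:
        return self._size
```

The sentinels mean `add_to_front` and `remove` never have to special-case an empty list or a node at either end.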
Python’s OrderedDict can simplify the LRU cache implementation:
```python
from collections import OrderedDict


class LRUCacheSimple:
    """
    Simplified LRU Cache using OrderedDict.

    OrderedDict maintains insertion order and supports move_to_end()
    for O(1) reordering.
    """

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = OrderedDict()

    def get(self, key):
        if key not in self.cache:
            return -1
        # Move to end (most recently used)
        self.cache.move_to_end(key)
        return self.cache[key]

    def put(self, key, value):
        if key in self.cache:
            # Update and move to end
            self.cache.move_to_end(key)
        elif len(self.cache) >= self.capacity:
            # Remove first item (least recently used)
            self.cache.popitem(last=False)
        self.cache[key] = value
```
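A quick trace of the eviction behavior (the keys and capacity here are arbitrary):

```python
cache = LRUCacheSimple(2)
cache.put(1, "a")
cache.put(2, "b")
cache.get(1)         # returns "a"; key 1 is now most recently used
cache.put(3, "c")    # over capacity: evicts key 2, the LRU entry
print(cache.get(2))  # -1 (evicted)
print(cache.get(1))  # a
```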
Interview Note: Using OrderedDict is acceptable for a quick solution, but interviewers often want to see the HashMap + DLL implementation to verify you understand the underlying data structures.
A common follow-up is the LFU (Least Frequently Used) cache, which evicts by access frequency rather than recency:

```python
from collections import OrderedDict, defaultdict


class LFUCache:
    """
    Least Frequently Used cache.

    Evicts the key with the lowest access frequency.
    On a tie, evicts the least recently used among them.
    """

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.min_freq = 0
        # key -> (value, frequency)
        self.key_val = {}
        # frequency -> OrderedDict of keys (for LRU within same frequency)
        self.freq_keys = defaultdict(OrderedDict)

    def get(self, key: int) -> int:
        if key not in self.key_val:
            return -1
        val, freq = self.key_val[key]
        # Remove from current frequency bucket
        del self.freq_keys[freq][key]
        if not self.freq_keys[freq]:
            del self.freq_keys[freq]
            if self.min_freq == freq:
                self.min_freq += 1
        # Add to next frequency bucket
        self.key_val[key] = (val, freq + 1)
        self.freq_keys[freq + 1][key] = None
        return val

    def put(self, key: int, value: int) -> None:
        if self.capacity <= 0:
            return
        if key in self.key_val:
            self.key_val[key] = (value, self.key_val[key][1])
            self.get(key)  # Update frequency
            return
        if len(self.key_val) >= self.capacity:
            # Evict LFU (least recently used among min frequency)
            evict_key, _ = self.freq_keys[self.min_freq].popitem(last=False)
            del self.key_val[evict_key]
        self.key_val[key] = (value, 1)
        self.freq_keys[1][key] = None
        self.min_freq = 1
```
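A short trace showing frequency-based eviction with the LRU tie-break (keys and values are arbitrary):

```python
lfu = LFUCache(2)
lfu.put(1, 10)
lfu.put(2, 20)
lfu.get(1)         # key 1 now has frequency 2
lfu.put(3, 30)     # evicts key 2 (lowest frequency)
print(lfu.get(2))  # -1 (evicted)
print(lfu.get(1))  # 10
print(lfu.get(3))  # 30
```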
Why a doubly linked list instead of a singly linked one?
- Singly linked list: O(n) to remove a node (you must walk from the head to find its predecessor)
- Doubly linked list: O(1) to remove (the node holds direct references to both prev and next)
Why store key in the Node?
- When evicting from the tail, we also need to remove the entry from the HashMap.
- Without the key stored in the node, finding which key maps to that node would take O(n).
What about cache invalidation?
- Add a TTL (time-to-live) per entry
- Lazy invalidation: check the TTL on access (see the sketch below)
- Active invalidation: a background thread cleans up expired entries
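A minimal sketch of the lazy approach, built on the OrderedDict version above; the class name, the `ttl_seconds` parameter, and the `time.monotonic()` expiry scheme are illustrative choices, not part of the original solution:

```python
import time
from collections import OrderedDict


class LRUCacheWithTTL:
    """LRU cache with lazy TTL invalidation: expired entries are
    dropped when they are next accessed, not proactively."""

    def __init__(self, capacity: int, ttl_seconds: float):
        self.capacity = capacity
        self.ttl = ttl_seconds
        # key -> (value, expiry timestamp)
        self.cache = OrderedDict()

    def get(self, key):
        if key not in self.cache:
            return -1
        value, expires_at = self.cache[key]
        if time.monotonic() >= expires_at:
            # Lazy invalidation: entry expired, remove on access
            del self.cache[key]
            return -1
        self.cache.move_to_end(key)
        return value

    def put(self, key, value):
        if key in self.cache:
            self.cache.move_to_end(key)
        elif len(self.cache) >= self.capacity:
            self.cache.popitem(last=False)
        self.cache[key] = (value, time.monotonic() + self.ttl)
```

The trade-off: lazy invalidation needs no background machinery, but an expired entry keeps occupying a slot until it is read or evicted; the active approach reclaims memory promptly at the cost of a cleanup thread and the locking that comes with it.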