class Node:
    """A single entry in the doubly-linked list: a key/value pair plus links."""

    def __init__(self, key, value):
        self.key = key        # kept on the node so eviction can delete the dict entry
        self.value = value    # cached value
        self.next = None      # toward the tail (less recently used)
        self.previous = None  # toward the head (more recently used)


class DoublyLinkedList:
    """Manages insertion, removal, and reordering of nodes in a doubly-linked list.

    Head is the most recently used end; tail is the least recently used end.
    """

    def __init__(self):
        self.head = None
        self.tail = None

    def add_to_head(self, node):
        """Add a node to the head of the list."""
        # Reset links so a node recycled from elsewhere starts clean.
        node.next = None
        node.previous = None

        if self.head is None:
            # Empty list: node is both head and tail.
            self.head = node
            self.tail = node
        else:
            node.next = self.head
            self.head.previous = node
            self.head = node

    def move_to_head(self, node):
        """Move an existing node to the head of the list."""
        if node is self.head:
            return  # already most recent; nothing to do

        self.remove_node(node)
        self.add_to_head(node)

    def remove_node(self, node):
        """Unlink a node from the list.

        Safe no-op for a node that is already detached (both links None and
        not the head or tail) — previously this case raised AttributeError.
        """
        # `is` (identity), not `==`: we mean this exact node object.
        if node is self.head and node is self.tail:
            self.head = None
            self.tail = None
        elif node is self.head:
            self.head = node.next
            self.head.previous = None
        elif node is self.tail:
            self.tail = node.previous
            self.tail.next = None
        elif node.previous is not None and node.next is not None:
            # Interior node: splice neighbours together.
            node.previous.next = node.next
            node.next.previous = node.previous

        node.next = None
        node.previous = None

    def get_tail(self):
        """Return the tail node (least recently used), or None when empty."""
        return self.tail


class LruCache:
    """Implements an LRU cache with O(1) operations.

    A dict gives O(1) key lookup; a DoublyLinkedList tracks recency order so
    the least recently used entry can be evicted in O(1).
    """

    def __init__(self, limit):
        """Create a cache holding at most `limit` entries.

        Raises ValueError if `limit` is not positive.
        """
        if limit <= 0:
            raise ValueError("Cache limit must be greater than 0")

        self.limit = limit
        self.cache = {}                  # key -> Node
        self.order = DoublyLinkedList()  # recency order: head = most recent

    def get(self, key):
        """Return the value for `key` and mark it most recently used.

        Returns None on a miss (note: indistinguishable from a stored None).
        """
        if key not in self.cache:
            return None

        node = self.cache[key]
        self.order.move_to_head(node)
        return node.value

    def set(self, key, value):
        """Store a key-value pair, evicting the LRU entry if at capacity."""
        if key in self.cache:
            # Update in place and refresh recency.
            node = self.cache[key]
            node.value = value
            self.order.move_to_head(node)
        else:
            # Evict before inserting so we never exceed the limit.
            if len(self.cache) >= self.limit:
                self._evict_lru()

            new_node = Node(key, value)
            self.cache[key] = new_node
            self.order.add_to_head(new_node)

    def _evict_lru(self):
        """Remove the least recently used item from both dict and list."""
        lru_node = self.order.get_tail()
        if lru_node is not None:
            del self.cache[lru_node.key]
            self.order.remove_node(lru_node)