Skip to content

London | March-2025 | Elhadj Abdoul Diallo | implement_lru_cache #17

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 54 additions & 0 deletions Sprint-2/implement_lru_cache/lru_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
class Node:
    """A single entry in the doubly linked list backing the LRU cache."""

    def __init__(self, key, value):
        """Hold one key/value pair; neighbour links start detached."""
        self.key, self.value = key, value
        # The owning list wires these up when the node is spliced in.
        self.prev = self.next = None

class LruCache:
    """Fixed-capacity cache that evicts the least recently used entry.

    Entries are stored twice: in a dict for O(1) key lookup, and in a
    doubly linked list ordered by recency (most recent nearest the head)
    so that recency updates and eviction are also O(1).
    """

    def __init__(self, limit):
        """Create a cache holding at most *limit* entries.

        Raises:
            ValueError: if *limit* is zero or negative.
        """
        if limit <= 0:
            raise ValueError("Limit must be greater than 0")
        self.limit = limit
        self.cache = {}  # key -> node
        # Sentinel endpoints: real nodes always sit strictly between
        # head and tail, so link/unlink never needs None checks.
        self.head = Node(None, None)
        self.tail = Node(None, None)
        self.head.next = self.tail
        self.tail.prev = self.head

    def _remove(self, node):
        """Unlink *node* from the list, joining its two neighbours."""
        before, after = node.prev, node.next
        before.next, after.prev = after, before

    def _add_to_front(self, node):
        """Splice *node* in just after head (the most-recently-used slot)."""
        first = self.head.next
        node.prev, node.next = self.head, first
        self.head.next = first.prev = node

    def get(self, key):
        """Return the value for *key*, or None if absent.

        A hit also promotes the entry to most recently used.
        """
        node = self.cache.get(key)
        if node is None:
            return None
        self._remove(node)
        self._add_to_front(node)
        return node.value

    def set(self, key, value):
        """Insert or update *key* -> *value*.

        Updating an existing key promotes it to most recently used.
        Inserting into a full cache first evicts the LRU entry.
        """
        existing = self.cache.get(key)
        if existing is not None:
            existing.value = value
            self._remove(existing)
            self._add_to_front(existing)
            return
        if len(self.cache) >= self.limit:
            # The node just before the tail sentinel is least recently used.
            oldest = self.tail.prev
            self._remove(oldest)
            del self.cache[oldest.key]
        fresh = Node(key, value)
        self.cache[key] = fresh
        self._add_to_front(fresh)