100 changes: 100 additions & 0 deletions Algorithms/datastructs/hash_table/cache.py
@@ -0,0 +1,100 @@
import functools

class Node:
    """Doubly linked list node holding a cache key/value pair."""
    def __init__(self, key, value):
        self.key = key
        self.value = value
        self.prev = None
        self.next = None

class NodeList:
    """Doubly linked list: least recently used at the start, most recent at the end."""
    def __init__(self):
        self.start = None
        self.end = None

    def add(self, key: int, value: int) -> Node:
        """Append a node at the end (most recently used position) and return it."""
        temp = Node(key, value)
        if self.end:
            # Appending after the current end covers both the one-node
            # and many-node cases, so no special-casing is needed.
            temp.prev = self.end
            self.end.next = temp
            self.end = temp
        else:
            self.start = temp
            self.end = temp
        return temp
    def remove(self, node: Node):
        """Unlink a node, patching its neighbours and the start/end pointers."""
        prev_node = node.prev
        next_node = node.next
        if prev_node:
            prev_node.next = next_node
        else:
            self.start = next_node
        if next_node:
            next_node.prev = prev_node
        else:
            self.end = prev_node
    def move_to_end(self, node: Node):
        """Mark a node as most recently used by moving it to the end."""
        if node == self.end:
            return
        self.remove(node)
        node.prev = self.end
        node.next = None
        self.end.next = node
        self.end = node
    def evict(self):
        """Detach and return the least recently used node, or None if empty."""
        start = self.start
        if not start:
            return None
        next_node = start.next
        if next_node:
            next_node.prev = None
            self.start = next_node
        else:
            self.start = None
            self.end = None
        return start
    def __str__(self):
        """Render the list start-to-end, showing neighbour keys rather than full reprs."""
        parts = []
        head = self.start
        while head:
            prev_key = head.prev.key if head.prev else None
            next_key = head.next.key if head.next else None
            parts.append(f"Node(key: {head.key}, value: {head.value}, prev: {prev_key}, next: {next_key})")
            head = head.next
        return " -> ".join(parts)

class LRUCache:
    """LRU cache: a dict gives O(1) lookup, the NodeList gives O(1) recency updates."""
    def __init__(self, capacity: int):
        self.capacity = capacity
        self.table = dict()
        self.cache = NodeList()

    def get(self, key: int) -> int:
        """Return the value for key and mark it most recently used, or -1 if absent."""
        if key in self.table:
            node = self.table[key]
            self.cache.move_to_end(node)
            return node.value
        return -1

    def put(self, key: int, value: int) -> None:
        """Insert or update key, evicting the least recently used entry when full."""
        if key in self.table:
            old_node = self.table[key]
            old_node.value = value
            self.cache.move_to_end(old_node)
            return
        if self.capacity == 0:
            return
        if len(self.table) == self.capacity:
            lru = self.cache.evict()
            del self.table[lru.key]
        node = self.cache.add(key, value)
        self.table[key] = node

# The same idea via the standard library: functools.lru_cache memoizes the
# recursive calls, so each distinct n is computed at most once.
@functools.lru_cache
def factorial(n: int) -> int:
    return n * factorial(n - 1) if n > 1 else 1
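
# A minimal usage sketch (not part of the committed tests): exercises the cache
# and the memoized factorial when the module is run directly.
if __name__ == "__main__":
    demo = LRUCache(2)
    demo.put(1, 10)
    demo.put(2, 20)
    demo.get(1)                    # key 1 is now most recently used
    demo.put(3, 30)                # evicts key 2, the least recently used
    print(demo.get(2))             # -1: key 2 was evicted
    print(demo.get(1))             # 10
    print(factorial(10))           # 3628800
    print(factorial.cache_info())  # hit/miss statistics from functools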
127 changes: 127 additions & 0 deletions tests/test_cache.py
@@ -0,0 +1,127 @@
import time

from Algorithms.datastructs.hash_table.cache import LRUCache, factorial

# -------------------------
# Correctness tests: LRUCache
# -------------------------

def test_basic_put_get():
cache = LRUCache(2)
cache.put(1, 10)
cache.put(2, 20)

assert cache.get(1) == 10
assert cache.get(2) == 20
assert cache.get(3) == -1


def test_eviction_order():
cache = LRUCache(2)
cache.put(1, 1)
cache.put(2, 2)

# Access key 1 -> makes key 2 LRU
assert cache.get(1) == 1

# This should evict key 2
cache.put(3, 3)

assert cache.get(2) == -1
assert cache.get(1) == 1
assert cache.get(3) == 3


def test_update_existing_key():
cache = LRUCache(2)
cache.put(1, 1)
cache.put(2, 2)

cache.put(1, 100) # update value + recency

assert cache.get(1) == 100

# key 2 should now be LRU
cache.put(3, 3)

assert cache.get(2) == -1
assert cache.get(1) == 100
assert cache.get(3) == 3


def test_capacity_one():
cache = LRUCache(1)
cache.put(1, 1)
cache.put(2, 2)

assert cache.get(1) == -1
assert cache.get(2) == 2


def test_capacity_zero():
cache = LRUCache(0)
cache.put(1, 1)

assert cache.get(1) == -1


def test_repeated_get_does_not_break_order():
cache = LRUCache(2)
cache.put(1, 1)
cache.put(2, 2)

# Repeated gets
assert cache.get(1) == 1
assert cache.get(1) == 1

cache.put(3, 3)

# key 2 should still be evicted
assert cache.get(2) == -1
assert cache.get(1) == 1
assert cache.get(3) == 3


# -------------------------
# Timing tests: functools.lru_cache
# -------------------------

def uncached_factorial(n: int) -> int:
return n * uncached_factorial(n - 1) if n > 1 else 1


def test_lru_cache_speedup():
"""
This test checks that functools.lru_cache provides
a measurable speedup over uncached recursion.

We avoid flaky microbenchmarks by:
- warming the cache
- using multiple calls
- asserting relative (order-of-magnitude) improvement
"""

n = 300

# Warm-up cached version
factorial.cache_clear()
factorial(n)

# Measure cached calls
start = time.perf_counter()
for _ in range(1000):
factorial(n)
cached_time = time.perf_counter() - start

# Measure uncached calls (much fewer iterations)
start = time.perf_counter()
for _ in range(10):
uncached_factorial(n)
uncached_time = time.perf_counter() - start

# Cached should be significantly faster per call
cached_per_call = cached_time / 1000
uncached_per_call = uncached_time / 10

assert cached_per_call < uncached_per_call / 50
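

# A further sketch beyond the original suite: functools.lru_cache exposes
# cache_info(), so hit/miss behaviour can be asserted directly instead of timed.
def test_lru_cache_hit_counting():
    factorial.cache_clear()
    factorial(10)  # populates entries for n = 1..10
    info = factorial.cache_info()
    assert info.misses == 10
    assert info.currsize == 10

    factorial(10)  # now served entirely from the cache
    info = factorial.cache_info()
    assert info.hits == 1
    assert info.misses == 10  # no new misses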
2 changes: 1 addition & 1 deletion tests/test_hash_table.py
@@ -1,4 +1,4 @@
from Algorithms.datastructs.hash_table import RandomizedSet
from Algorithms.datastructs.hash_table.hash_table import RandomizedSet

def test_randomized_set():
rs = RandomizedSet()