Commit e4f2ccd

Add an example of the LRU (Least Recently Used) Cache implementation (trekhleb#980)

* Add an example of the LRU Cache implementation.
* Promote the node on set() as well.
* Add LRU Cache images.

1 parent 6c335c5 · commit e4f2ccd

6 files changed (339 additions, 0 deletions)

README.md (1 addition, 0 deletions)

```diff
@@ -64,6 +64,7 @@ the data.
 * `A` [Graph](src/data-structures/graph) (both directed and undirected)
 * `A` [Disjoint Set](src/data-structures/disjoint-set)
 * `A` [Bloom Filter](src/data-structures/bloom-filter)
+* `A` [LRU Cache](src/data-structures/lru-cache/) - Least Recently Used (LRU) cache

 ## Algorithms
```

src/data-structures/lru-cache/LRUCache.js (new file, 134 additions)

```javascript
/* eslint-disable no-param-reassign */
import LinkedListNode from './LinkedListNode';

/**
 * Implementation of the LRU (Least Recently Used) Cache
 * based on the HashMap and Doubly Linked List data-structures.
 *
 * The current implementation provides fast (O(1)) read and write operations.
 *
 * At any moment in time the LRU Cache holds no more than "capacity" items.
 */
class LRUCache {
  /**
   * Creates a cache instance of a specific capacity.
   * @param {number} capacity
   */
  constructor(capacity) {
    this.capacity = capacity; // The maximum number of items to store in the cache.
    this.nodesMap = {}; // Quick links to each linked list node in the cache.
    this.size = 0; // The number of items currently stored in the cache.
    this.head = new LinkedListNode(); // The sentinel Head (first) node of the linked list.
    this.tail = new LinkedListNode(); // The sentinel Tail (last) node of the linked list.
  }

  /**
   * Returns the cached value by its key.
   * Time complexity: O(1).
   * @param {string} key
   * @returns {any}
   */
  get(key) {
    if (this.nodesMap[key] === undefined) return undefined;
    const node = this.nodesMap[key];
    this.promote(node);
    return node.val;
  }

  /**
   * Sets the value to cache by its key.
   * Time complexity: O(1).
   * @param {string} key
   * @param {any} val
   */
  set(key, val) {
    if (this.nodesMap[key]) {
      const node = this.nodesMap[key];
      node.val = val;
      this.promote(node);
    } else {
      const node = new LinkedListNode(key, val);
      this.append(node);
    }
  }

  /**
   * Promotes the node to the end of the linked list.
   * This marks the node as the most recently used one
   * and reduces its chance of being evicted from the cache.
   * @param {LinkedListNode} node
   */
  promote(node) {
    this.evict(node);
    this.append(node);
  }

  /**
   * Appends a new node to the end of the cache linked list.
   * @param {LinkedListNode} node
   */
  append(node) {
    this.nodesMap[node.key] = node;

    if (!this.head.next) {
      // First node to append.
      this.head.next = node;
      this.tail.prev = node;
      node.prev = this.head;
      node.next = this.tail;
    } else {
      // Append to an existing tail.
      const oldTail = this.tail.prev;
      oldTail.next = node;
      node.prev = oldTail;
      node.next = this.tail;
      this.tail.prev = node;
    }

    this.size += 1;

    if (this.size > this.capacity) {
      this.evict(this.head.next);
    }
  }

  /**
   * Evicts (removes) the node from the cache linked list.
   * @param {LinkedListNode} node
   */
  evict(node) {
    delete this.nodesMap[node.key];
    this.size -= 1;

    const prevNode = node.prev;
    const nextNode = node.next;

    // If this is the one and only node.
    if (prevNode === this.head && nextNode === this.tail) {
      this.head.next = null;
      this.tail.prev = null;
      this.size = 0;
      return;
    }

    // If this is a Head node.
    if (prevNode === this.head) {
      nextNode.prev = this.head;
      this.head.next = nextNode;
      return;
    }

    // If this is a Tail node.
    if (nextNode === this.tail) {
      prevNode.next = this.tail;
      this.tail.prev = prevNode;
      return;
    }

    // If the node is in the middle.
    prevNode.next = nextNode;
    nextNode.prev = prevNode;
  }
}

export default LRUCache;
```
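A brief usage sketch of the class above (not part of the committed file; the import path assumes the caller sits next to `LRUCache.js`). It highlights the behaviour the commit message calls out: `set()` on an existing key also promotes it, so that key survives the next eviction:

```javascript
import LRUCache from './LRUCache';

const cache = new LRUCache(2);

cache.set('a', 1);
cache.set('b', 2);
cache.set('a', 10); // Existing key: value is updated AND 'a' is promoted to most recently used.

cache.set('c', 3);  // Capacity exceeded: 'b' (the least recently used key) is evicted, not 'a'.
cache.get('b');     // undefined
cache.get('a');     // 10
cache.get('c');     // 3
```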
src/data-structures/lru-cache/LinkedListNode.js (new file, 17 additions)

```javascript
class LinkedListNode {
  /**
   * Creates a doubly-linked list node.
   * @param {string} key
   * @param {any} val
   * @param {LinkedListNode} prev
   * @param {LinkedListNode} next
   */
  constructor(key, val, prev = null, next = null) {
    this.key = key;
    this.val = val;
    this.prev = prev;
    this.next = next;
  }
}

export default LinkedListNode;
```

src/data-structures/lru-cache/README.md (new file, 37 additions)

# Least Recently Used (LRU) Cache

A **Least Recently Used (LRU) Cache** organizes items in order of use, allowing you to quickly identify which item hasn't been used for the longest amount of time.

Picture a clothes rack, where clothes are always hung up on one side. To find the least-recently used item, look at the item on the other end of the rack.

## The problem statement

Implement the LRUCache class:

- `LRUCache(int capacity)` Initialize the LRU cache with **positive** size `capacity`.
- `int get(int key)` Return the value of the `key` if the `key` exists, otherwise return `undefined`.
- `void set(int key, int value)` Update the value of the `key` if the `key` exists. Otherwise, add the `key-value` pair to the cache. If the number of keys exceeds the `capacity` from this operation, **evict** the least recently used key.

The functions `get()` and `set()` must each run in `O(1)` average time complexity.
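For illustration (a hypothetical call sequence, not part of the committed README), a cache of capacity `2` is expected to behave as follows:

```javascript
const cache = new LRUCache(2);

cache.set('a', 1); // cache: a=1
cache.set('b', 2); // cache: a=1, b=2
cache.get('a');    // returns 1; 'a' is now the most recently used key
cache.set('c', 3); // capacity exceeded: 'b' (least recently used) is evicted
cache.get('b');    // returns undefined
cache.get('c');    // returns 3
```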
## Implementation

See the `LRUCache` implementation example in [LRUCache.js](./LRUCache.js). The solution uses a `HashMap` for fast `O(1)` access to cache items, and a `DoublyLinkedList` for fast `O(1)` item promotion and eviction (to keep the cache within its maximum allowed capacity).

![Linked List](./images/lru-cache.jpg)

*Made with [okso.app](https://okso.app)*
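As a side note that is not part of this commit: JavaScript's built-in `Map` preserves insertion order, so the same eviction policy can also be sketched without an explicit doubly linked list by re-inserting a key on every access. The class name below is hypothetical:

```javascript
// Alternative sketch: the first key in a Map's iteration order is the oldest
// insertion, so re-inserting a key on every access keeps the least recently
// used key at the front, ready to be evicted.
class MapBasedLRUCache {
  constructor(capacity) {
    this.capacity = capacity;
    this.map = new Map();
  }

  get(key) {
    if (!this.map.has(key)) return undefined;
    const val = this.map.get(key);
    this.map.delete(key); // Re-insert to mark the key as most recently used.
    this.map.set(key, val);
    return val;
  }

  set(key, val) {
    if (this.map.has(key)) this.map.delete(key);
    this.map.set(key, val);
    if (this.map.size > this.capacity) {
      // Evict the least recently used key (first in insertion order).
      this.map.delete(this.map.keys().next().value);
    }
  }
}
```

The committed version keeps the HashMap and doubly-linked-list bookkeeping explicit, which makes promotions and evictions visible as plain `O(1)` pointer operations.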
## Costs

|          | Worst Case |
|----------|------------|
| Space    | `O(n)`     |
| Get item | `O(1)`     |
| Set item | `O(1)`     |

## References

- [LRU Cache on LeetCode](https://leetcode.com/problems/lru-cache/solutions/244744/lru-cache/)
- [LRU Cache on InterviewCake](https://www.interviewcake.com/concept/java/lru-cache)
- [LRU Cache on Wiki](https://en.wikipedia.org/wiki/Cache_replacement_policies)
LRUCache Jest test suite (new file, 150 additions)

```javascript
import LRUCache from '../LRUCache';

describe('LRUCache', () => {
  it('should set and get values to and from the cache', () => {
    const cache = new LRUCache(100);
    expect(cache.get('key-1')).toBeUndefined();

    cache.set('key-1', 15);
    cache.set('key-2', 16);
    cache.set('key-3', 17);
    expect(cache.get('key-1')).toBe(15);
    expect(cache.get('key-2')).toBe(16);
    expect(cache.get('key-3')).toBe(17);
    expect(cache.get('key-3')).toBe(17);
    expect(cache.get('key-2')).toBe(16);
    expect(cache.get('key-1')).toBe(15);

    cache.set('key-1', 5);
    cache.set('key-2', 6);
    cache.set('key-3', 7);
    expect(cache.get('key-1')).toBe(5);
    expect(cache.get('key-2')).toBe(6);
    expect(cache.get('key-3')).toBe(7);
  });

  it('should evict least recently used items from cache with cache size of 1', () => {
    const cache = new LRUCache(1);
    expect(cache.get('key-1')).toBeUndefined();

    cache.set('key-1', 15);
    expect(cache.get('key-1')).toBe(15);

    cache.set('key-2', 16);
    expect(cache.get('key-1')).toBeUndefined();
    expect(cache.get('key-2')).toBe(16);

    cache.set('key-2', 17);
    expect(cache.get('key-2')).toBe(17);

    cache.set('key-3', 18);
    cache.set('key-4', 19);
    expect(cache.get('key-2')).toBeUndefined();
    expect(cache.get('key-3')).toBeUndefined();
    expect(cache.get('key-4')).toBe(19);
  });

  it('should evict least recently used items from cache with cache size of 2', () => {
    const cache = new LRUCache(2);
    expect(cache.get('key-21')).toBeUndefined();

    cache.set('key-21', 15);
    expect(cache.get('key-21')).toBe(15);

    cache.set('key-22', 16);
    expect(cache.get('key-21')).toBe(15);
    expect(cache.get('key-22')).toBe(16);

    cache.set('key-22', 17);
    expect(cache.get('key-22')).toBe(17);

    cache.set('key-23', 18);
    expect(cache.size).toBe(2);
    expect(cache.get('key-21')).toBeUndefined();
    expect(cache.get('key-22')).toBe(17);
    expect(cache.get('key-23')).toBe(18);

    cache.set('key-24', 19);
    expect(cache.size).toBe(2);
    expect(cache.get('key-21')).toBeUndefined();
    expect(cache.get('key-22')).toBeUndefined();
    expect(cache.get('key-23')).toBe(18);
    expect(cache.get('key-24')).toBe(19);
  });

  it('should evict least recently used items from cache with cache size of 3', () => {
    const cache = new LRUCache(3);

    cache.set('key-1', 1);
    cache.set('key-2', 2);
    cache.set('key-3', 3);
    expect(cache.get('key-1')).toBe(1);
    expect(cache.get('key-2')).toBe(2);
    expect(cache.get('key-3')).toBe(3);

    cache.set('key-3', 4);
    expect(cache.get('key-1')).toBe(1);
    expect(cache.get('key-2')).toBe(2);
    expect(cache.get('key-3')).toBe(4);

    cache.set('key-4', 5);
    expect(cache.get('key-1')).toBeUndefined();
    expect(cache.get('key-2')).toBe(2);
    expect(cache.get('key-3')).toBe(4);
    expect(cache.get('key-4')).toBe(5);
  });

  it('should promote the node while calling set() method', () => {
    const cache = new LRUCache(2);

    cache.set('2', 1);
    cache.set('1', 1);
    cache.set('2', 3);
    cache.set('4', 1);
    expect(cache.get('1')).toBeUndefined();
    expect(cache.get('2')).toBe(3);
  });

  it('should promote the recently accessed item with cache size of 3', () => {
    const cache = new LRUCache(3);

    cache.set('key-1', 1);
    cache.set('key-2', 2);
    cache.set('key-3', 3);
    expect(cache.get('key-1')).toBe(1);

    cache.set('key-4', 4);
    expect(cache.get('key-1')).toBe(1);
    expect(cache.get('key-3')).toBe(3);
    expect(cache.get('key-4')).toBe(4);
    expect(cache.get('key-2')).toBeUndefined();
  });

  it('should promote the recently accessed item with cache size of 4', () => {
    const cache = new LRUCache(4);

    cache.set('key-1', 1);
    cache.set('key-2', 2);
    cache.set('key-3', 3);
    cache.set('key-4', 4);
    expect(cache.get('key-4')).toBe(4);
    expect(cache.get('key-3')).toBe(3);
    expect(cache.get('key-2')).toBe(2);
    expect(cache.get('key-1')).toBe(1);

    cache.set('key-5', 5);
    expect(cache.get('key-1')).toBe(1);
    expect(cache.get('key-2')).toBe(2);
    expect(cache.get('key-3')).toBe(3);
    expect(cache.get('key-4')).toBeUndefined();
    expect(cache.get('key-5')).toBe(5);

    cache.set('key-6', 6);
    expect(cache.get('key-1')).toBeUndefined();
    expect(cache.get('key-2')).toBe(2);
    expect(cache.get('key-3')).toBe(3);
    expect(cache.get('key-4')).toBeUndefined();
    expect(cache.get('key-5')).toBe(5);
    expect(cache.get('key-6')).toBe(6);
  });
});
```
src/data-structures/lru-cache/images/lru-cache.jpg (binary file added, 662 KB)

0 commit comments
