Least Recently Used Cache
/* Implement Least Recently Used Cache
 * Improvements to make
 * 1. Invalidate the whole cache when there is no read/write for a certain duration (done in the second file below).
 * 2. Invalidate only a specific node when there is no read/write on that node for a certain duration.
 */
/* Node that represents an item of a doubly linked list */
class Node {
  constructor(key, value, previous = null, next = null) {
    this.key = key;
    this.value = value;
    this.previous = previous;
    this.next = next;
  }
}
/* Least Recently Used Cache */
class LRU {
  constructor(limit = 10) {
    this.size = 0;
    this.limit = limit;
    this.head = null;
    this.tail = null;
    this.cacheMap = {};
  }
  /* Write to the LRU
   * If the node already exists, make it the most recently used aka bring it to the top of the queue
   * If the size limit of the LRU is reached, remove the least recently used entry, then create a new node with the key and value and make it the most recently used
   * If the LRU is empty, construct a new entry node and assign it to both the head pointer and the tail pointer
   * If the LRU is not empty, link the new node in front of the current head (the new node's next is the old head, the old head's previous is the new node) and make the new node the head pointer of the LRU
   * In either case, create an entry in the cacheMap for the passed in key with the new head pointer as the value and increment the size of the LRU
   */
  write(key, value) {
    const existingNode = this.cacheMap[key];
    /* If the node exists in the cacheMap, and we wish to write the same node again, then
     * detach the node from the LRU
     * decrease the size of the LRU
     */
    if(existingNode) {
      this.detach(existingNode);
      this.size--;
    }
    /* If the size of the LRU is equal to the limit and we wish to write the node, then
     * delete the entry in the cacheMap for the node
     * detach the tail, since we want to remove the least recently used entry
     * decrease the size of the LRU
     */
    else if(this.size === this.limit) {
      delete this.cacheMap[this.tail.key];
      this.detach(this.tail);
      this.size--;
    }
    /* START READING CODE HERE
     * If the LRU doesn't have a head (empty), we instantiate a new node and assign it to both the head and the tail of the LRU
     */
    if(!this.head) {
      this.head = this.tail = new Node(key, value);
    }
    /* else we instantiate a new node, point it at the current head, make it the current head's previous node and make the new node the head */
    else {
      const node = new Node(key, value);
      node.next = this.head;
      this.head.previous = node;
      this.head = node;
    }
    /* Store the new head node in the cacheMap */
    this.cacheMap[key] = this.head;
    /* Increase the size of the LRU so it can be used to remove the tail when the limit is met */
    this.size++;
  }
  /* Read a value corresponding to the passed in key from the LRU
   * We only care about existing keys
   * Get the node for the key from the cacheMap, so we have the key (passed in) and the value (retrieved from the cacheMap)
   * Write the (key, value) back to the LRU, which updates the head to be the retrieved (recently used) item, then return the value
   */
  read(key) {
    /* Get the node for the key from the cacheMap */
    const existingNode = this.cacheMap[key];
    if(existingNode) {
      /* Get the value of the node */
      const value = existingNode.value;
      /* We don't have to re-queue the node (bring the node corresponding to the key to the top) if it's already the head of the LRU */
      if(this.head !== existingNode) {
        this.write(key, value);
      }
      return value;
    }
    console.log(`Item not available in cache for key ${key}`);
  }
  /* Detach the node from the LRU
   * Skip the current node and reconnect the surrounding LRU nodes
   * 1. When the passed in node is not the head (its previous pointer is not null) - assign the node's next pointer to the previous node's next pointer
   * 2. When the passed in node is the head (no previous pointer) - assign the node's next pointer to the head pointer, i.e. the node's next node becomes the head
   * 3. When the passed in node is not the tail (its next pointer is not null) - assign the node's previous pointer to the next node's previous pointer
   * 4. When the passed in node is the tail (no next pointer) - assign the node's previous pointer to the tail pointer, i.e. the node's previous node becomes the tail
   */
  detach(node) {
    /* If the node is not the head
     * skip the node by assigning the previous node's next pointer to the node's next pointer
     */
    if(node.previous !== null) {
      node.previous.next = node.next;
    }
    /* If we want to detach the head i.e. the input node is the head
     * make the node's next node the new head of the LRU
     */
    else {
      this.head = node.next;
    }
    /* If the node is not the tail
     * skip the node by assigning the next node's previous pointer to the node's previous pointer
     */
    if(node.next !== null) {
      node.next.previous = node.previous;
    }
    /* If we want to detach the tail i.e. the input node is the tail
     * make the node's previous node the new tail of the LRU
     */
    else {
      this.tail = node.previous;
    }
  }
}
const lruCache = new LRU(3); // Instantiate an LRU with a limit of 3
lruCache.write('a', 1);
lruCache.write('b', 2);
lruCache.write('c', 3);
// At this point, the least recently used item in the cache is 'a'
// 'c' -> 'b' -> 'a'
console.log('head : ' + lruCache.head.key, 'tail : ' + lruCache.tail.key);
console.log('reading a');
lruCache.read('a');
// At this point, the least recently used item in the cache is 'b'
// 'a' -> 'c' -> 'b'
console.log('head : ' + lruCache.head.key, 'tail : ' + lruCache.tail.key);
console.log('writing d');
lruCache.write('d', 4);
// At this point, the least recently used item in the cache is 'c'; 'b' was removed when 'd' was written
// 'd' -> 'a' -> 'c'
console.log('head : ' + lruCache.head.key, 'tail : ' + lruCache.tail.key);
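
The head-to-tail order of the list is easy to inspect with a small helper. The sketch below is not part of the original gist; it assumes the LRU class above and simply walks the next pointers from head (most recently used) to tail (least recently used), which makes the eviction order visible at a glance. The second file below extends the same cache with time-based invalidation.

/* Hypothetical inspection helper: collect the keys by walking the doubly linked list
 * from head to tail via the next pointers.
 */
const toKeyOrder = (lru) => {
  const keys = [];
  let current = lru.head;
  while(current !== null) {
    keys.push(current.key);
    current = current.next;
  }
  return keys;
};
console.log('order : ' + toKeyOrder(lruCache).join(' -> ')); // d -> a -> c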
/* Implement Least Recently Used Cache
 * Invalidate the cache when no read/write happens for a certain duration
 */
/* Node that represents an item of a doubly linked list */
class Node {
  constructor(key, value, previous = null, next = null) {
    this.key = key;
    this.value = value;
    this.previous = previous;
    this.next = next;
  }
}
/* Least Recently Used Cache */
class LRU {
  constructor(limit = 10, timeLimit = 500) {
    this.size = 0;
    this.limit = limit;
    this.head = null;
    this.tail = null;
    this.cacheMap = {};
    this.timeLimit = timeLimit;
    /* Start the access window as soon as the cache is constructed; invalidate if nothing happens within timeLimit */
    this.timer = setTimeout(() => this.invalidate(), this.timeLimit);
  }
  /* Write to the LRU
   * If the node already exists, make it the most recently used aka bring it to the top of the queue
   * If the size limit of the LRU is reached, remove the least recently used entry, then create a new node with the key and value and make it the most recently used
   * If the LRU is empty, construct a new entry node and assign it to both the head pointer and the tail pointer
   * If the LRU is not empty, link the new node in front of the current head (the new node's next is the old head, the old head's previous is the new node) and make the new node the head pointer of the LRU
   * In either case, create an entry in the cacheMap for the passed in key with the new head pointer as the value and increment the size of the LRU
   */
  write(key, value) {
    const existingNode = this.cacheMap[key];
    /* If the node exists in the cacheMap, and we wish to write the same node again, then
     * detach the node from the LRU
     * decrease the size of the LRU
     */
    if(existingNode) {
      this.detach(existingNode);
      this.size--;
    }
    /* If the size of the LRU is equal to the limit and we wish to write the node, then
     * delete the entry in the cacheMap for the node
     * detach the tail, since we want to remove the least recently used entry
     * decrease the size of the LRU
     */
    else if(this.size === this.limit) {
      delete this.cacheMap[this.tail.key];
      this.detach(this.tail);
      this.size--;
    }
    /* START READING CODE HERE
     * If the LRU doesn't have a head (empty), we instantiate a new node and assign it to both the head and the tail of the LRU
     */
    if(!this.head) {
      this.head = this.tail = new Node(key, value);
    }
    /* else we instantiate a new node, point it at the current head, make it the current head's previous node and make the new node the head */
    else {
      const node = new Node(key, value);
      node.next = this.head;
      this.head.previous = node;
      this.head = node;
    }
    /* A write counts as activity, so reset the access window */
    this.resetRWAccessWindow();
    /* Store the new head node in the cacheMap */
    this.cacheMap[key] = this.head;
    /* Increase the size of the LRU so it can be used to remove the tail when the limit is met */
    this.size++;
  }
  /* Read a value corresponding to the passed in key from the LRU
   * We only care about existing keys
   * Get the node for the key from the cacheMap, so we have the key (passed in) and the value (retrieved from the cacheMap)
   * Write the (key, value) back to the LRU, which updates the head to be the retrieved (recently used) item, then return the value
   * A read counts as activity, so the access window is reset either by the write or directly when the node is already the head
   */
  read(key) {
    /* Get the node for the key from the cacheMap */
    const existingNode = this.cacheMap[key];
    if(existingNode) {
      /* Get the value of the node */
      const value = existingNode.value;
      /* We don't have to re-queue the node (bring the node corresponding to the key to the top) if it's already the head of the LRU */
      if(this.head !== existingNode) {
        this.write(key, value);
      }
      /* The write above resets the access window; when the node is already the head, reset it here so a read still counts as activity */
      else {
        this.resetRWAccessWindow();
      }
      return value;
    }
    console.log(`Item not available in cache for key ${key}`);
  }
  /* Detach the node from the LRU
   * Skip the current node and reconnect the surrounding LRU nodes
   * 1. When the passed in node is not the head (its previous pointer is not null) - assign the node's next pointer to the previous node's next pointer
   * 2. When the passed in node is the head (no previous pointer) - assign the node's next pointer to the head pointer, i.e. the node's next node becomes the head
   * 3. When the passed in node is not the tail (its next pointer is not null) - assign the node's previous pointer to the next node's previous pointer
   * 4. When the passed in node is the tail (no next pointer) - assign the node's previous pointer to the tail pointer, i.e. the node's previous node becomes the tail
   */
  detach(node) {
    /* If the node is not the head
     * skip the node by assigning the previous node's next pointer to the node's next pointer
     */
    if(node.previous !== null) {
      node.previous.next = node.next;
    }
    /* If we want to detach the head i.e. the input node is the head
     * make the node's next node the new head of the LRU
     */
    else {
      this.head = node.next;
    }
    /* If the node is not the tail
     * skip the node by assigning the next node's previous pointer to the node's previous pointer
     */
    if(node.next !== null) {
      node.next.previous = node.previous;
    }
    /* If we want to detach the tail i.e. the input node is the tail
     * make the node's previous node the new tail of the LRU
     */
    else {
      this.tail = node.previous;
    }
  }
  /* Reset the access time limit
   * If there is a timer already running, clear the timeout
   * Start a timer and invalidate the cache when it runs out
   */
  resetRWAccessWindow() {
    console.log('there was a read/write to the LRU cache, resetting the access time limit');
    if(this.timer) {
      clearTimeout(this.timer);
    }
    this.timer = setTimeout(() => this.invalidate(), this.timeLimit);
  }
  /* Invalidate the LRU cache
   * Assign an empty object to cacheMap
   * Set the head and tail pointers to null
   * Set the size of the LRU cache to 0
   */
  invalidate() {
    console.log(`there was no read/write for over ${this.timeLimit}ms, invalidating the LRU cache`);
    this.cacheMap = {};
    this.head = null;
    this.tail = null;
    this.size = 0;
  }
}
const lruCache = new LRU(3); // Instantiate an LRU with a limit of 3 and the default timeLimit of 500ms
lruCache.write('a', 1);
lruCache.write('b', 2);
lruCache.write('c', 3);
// At this point, the least recently used item in the cache is 'a'
// 'c' -> 'b' -> 'a'
console.log('head : ' + lruCache.head.key, 'tail : ' + lruCache.tail.key);
console.log('reading a');
lruCache.read('a');
// At this point, the least recently used item in the cache is 'b'
// 'a' -> 'c' -> 'b'
console.log('head : ' + lruCache.head.key, 'tail : ' + lruCache.tail.key);
const delay = ms => new Promise(res => setTimeout(res, ms));
const wait = async () => {
  console.log(`if the delay is greater than ${lruCache.timeLimit}ms, the cache will be invalidated`);
  await delay(600);
  console.log('writing d');
  lruCache.write('d', 4);
  console.log('since writing d happened after the delay, the only item in the cache is "d"');
  // At this point, the least recently used item in the cache is 'd'
  // 'd'
  console.log('head : ' + lruCache?.head?.key, 'tail : ' + lruCache?.tail?.key);
};
wait();
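
To show the other side of the timer, here is a minimal keep-alive sketch that is not part of the original gist. It creates its own LRU instance and reuses the delay helper above; the 400ms window and the 200ms/450ms pauses are arbitrary figures chosen for illustration. Activity inside the window keeps resetting the timer, so the cache survives the first two pauses, and only the final stretch of silence triggers invalidate().

const keepAlive = async () => {
  const cache = new LRU(3, 400); // hypothetical second instance with a 400ms access window
  cache.write('x', 1);
  await delay(200);
  cache.write('y', 2); // within the 400ms window, the timer is reset
  await delay(200);
  cache.read('x');     // still within the window, the timer is reset again
  await delay(450);    // no read/write for longer than 400ms, so the cache is invalidated
  console.log('size after idle period : ' + cache.size); // 0
};
keepAlive();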