LRU Cache

Problem: Design an LRU (Least Recently Used) cache with a fixed capacity that supports get(key) and set(key, value). When the cache is full, inserting a new key should evict the least recently used one.

We can use a queue to maintain the keys in order of use: once a key is accessed, we move it to the back of the queue, so the least recently used key is always at the front and is the first to be evicted. Pairing the queue with a dict for value storage gives better running time than scanning a single list for every lookup.

Code in Python:

from collections import deque

class LRUCache(object):

    def __init__(self, capacity):
        """
        :type capacity: int
        """
        self.queue = deque()  # keys ordered by use: least recent at the left
        self.dict = {}        # key -> value storage for O(1) lookups
        self.capacity = capacity


    def get(self, key):
        """
        :rtype: int
        """
        if key in self.dict:
            # Move the key to the back of the queue to mark it as
            # most recently used. Note: deque.remove scans the queue.
            self.queue.remove(key)
            self.queue.append(key)
            return self.dict[key]
        return -1


    def set(self, key, value):
        """
        :type key: int
        :type value: int
        :rtype: None
        """
        if key not in self.dict:
            # New key: if the cache is full, evict the least recently
            # used key, which sits at the front of the queue.
            if len(self.queue) == self.capacity:
                remove = self.queue.popleft()
                del self.dict[remove]
            self.queue.append(key)
        else:
            # Existing key: move it to the back of the queue to mark
            # it as most recently used.
            self.queue.remove(key)
            self.queue.append(key)
        self.dict[key] = value
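
A quick usage sketch (capacity and values chosen arbitrarily for illustration):

cache = LRUCache(2)
cache.set(1, 1)
cache.set(2, 2)
print(cache.get(1))   # 1; key 1 is now the most recently used
cache.set(3, 3)       # cache is full, so key 2 (least recently used) is evicted
print(cache.get(2))   # -1; key 2 was evicted
print(cache.get(3))   # 3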

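One caveat on running time: deque.remove(key) scans the queue, so both get and set are O(n) in the worst case. For O(1) per operation, the queue and the dict can be merged into a single collections.OrderedDict, whose move_to_end and popitem(last=False) handle reordering and eviction in constant time. Below is a minimal sketch of that variant (the class name LRUCacheOD is just for illustration; it assumes Python 3, where OrderedDict has move_to_end):

from collections import OrderedDict

class LRUCacheOD(object):

    def __init__(self, capacity):
        self.cache = OrderedDict()  # front = least recent, back = most recent
        self.capacity = capacity

    def get(self, key):
        if key not in self.cache:
            return -1
        self.cache.move_to_end(key)  # mark as most recently used, O(1)
        return self.cache[key]

    def set(self, key, value):
        if key in self.cache:
            self.cache.move_to_end(key)
        elif len(self.cache) == self.capacity:
            self.cache.popitem(last=False)  # evict least recently used, O(1)
        self.cache[key] = value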