LRU

  1. 从 get/set 可以自然地联想到用 HashMap
  2. 需要按 recency（最近使用时间）排序，且便于 O(1) 添加/删除，可以联想到双向链表（doubly linked list）
public class LRUCache {
    // Doubly linked list + HashMap.
    // List order encodes recency: head.next is the least recently used
    // entry, tail.prev is the most recently used one.

    /** Intrusive doubly-linked-list node holding one cache entry. */
    class Node {
        Node prev;
        Node next;
        int key;
        int value;
        public Node (int key, int val) {
            this.key = key;
            this.value = val;
        }
    }

    private int capacity;
    // Diamond operator instead of the original raw HashMap (unchecked warning).
    private Map<Integer,Node> map = new HashMap<>();
    // Sentinel nodes: real entries always live strictly between head and tail,
    // so link/unlink never needs null checks.
    private Node head = new Node(-1,-1);
    private Node tail = new Node(-1,-1);

    /**
     * @param capacity maximum number of entries held; a non-positive
     *                 capacity yields a cache that stores nothing
     *                 (every get returns -1).
     */
    public LRUCache(int capacity) {
        this.capacity = capacity;
        tail.prev = head;
        head.next = tail;
    }

    /**
     * Returns the value mapped to {@code key}, or -1 if absent.
     * A hit also marks the entry as most recently used.
     */
    public int get(int key) {
        // Single lookup replaces the original containsKey + two get() calls.
        Node cur = map.get(key);
        if (cur == null) {
            return -1;
        }

        // Unlink, then re-insert just before tail (the MRU position).
        cur.prev.next = cur.next;
        cur.next.prev = cur.prev;

        moveToTail(cur);
        return cur.value;
    }

    /**
     * Inserts or updates {@code key}. Inserting at capacity first evicts
     * the least recently used entry (head.next).
     */
    public void set(int key, int value) {
        // Fix: the original NPE'd for capacity == 0 — map.size() == capacity
        // held immediately, so it "evicted" the tail sentinel and then
        // dereferenced tail.next (null). Non-positive capacity stores nothing.
        if (capacity <= 0) {
            return;
        }
        // get() both tests presence and refreshes recency on a hit.
        if (get(key) != -1) {
            map.get(key).value = value;
            return;
        }
        // Key absent: evict the LRU entry if we are at capacity.
        if (map.size() == capacity) {
            map.remove(head.next.key);
            head.next = head.next.next;
            head.next.prev = head;
        }

        Node insert = new Node(key, value);
        map.put(key, insert);
        moveToTail(insert);
    }

    /** Links a detached node in just before the tail sentinel (MRU slot). */
    private void moveToTail(Node cur) {
        cur.prev = tail.prev;
        cur.next = tail;
        tail.prev = cur;
        cur.prev.next = cur;
    }
}

LFU

Why use TreeSet instead of PriorityQueue?

Removing an arbitrary element costs O(log n) in a TreeSet, but O(n) in a PriorityQueue (it must scan to find the element first).

We use another HashMap + TreeSet to track the order of each Cache object. (LRU uses the linked list instead.)

Implementation details:

  1. TreeSet locates elements for remove() via its Comparator, not equals(); still, if equals() is overridden, hashCode() must be overridden consistently to honor the equals/hashCode contract.
  2. the Comparator used by TreeSet should compare frequency first and then recency.

  3. use an integer id as the timestamp

public class LFUCache {

    /** One cache entry: key plus its frequency (f) and last-touch timestamp (r). */
     class Cache {
        int key, f, r;
        public Cache(int key, int f, int r) {
            this.key = key;
            this.f = f;
            this.r = r;

        }
        // Fix: the original equals had no instanceof/null guard (it threw
        // ClassCastException for foreign types) and no matching hashCode,
        // breaking the equals/hashCode contract. Note the TreeSet below is
        // ordered/removed via its Comparator, so it does not rely on this.
        @Override
        public boolean equals(Object object) {
            if (this == object) return true;
            if (!(object instanceof Cache)) return false;
            return key == ((Cache) object).key;
        }
        @Override
        public int hashCode() {
            return key;
        }
    }

    int capacity;
    int id; // monotonically increasing timestamp; larger == more recent
    HashMap<Integer, Integer> hashMap; // key -> value
    HashMap<Integer, Cache> caches;    // key -> its current Cache record
    TreeSet<Cache> treeSet;            // ordered by (frequency, recency); first() is the eviction victim

    public LFUCache(int capacity) {
        this.capacity=capacity;
        id=0;
        hashMap=new HashMap<>();
        caches=new HashMap<>();
        // Least frequent first; ties broken by least recently used first.
        // Fix: use Integer.compare instead of subtraction (c1.r - c2.r,
        // c1.f - c2.f), which can overflow and invert the ordering.
        treeSet=new TreeSet<>(new Comparator<Cache>() {
            public int compare(Cache c1, Cache c2) {
                return c1.f == c2.f
                        ? Integer.compare(c1.r, c2.r)
                        : Integer.compare(c1.f, c2.f);
            }
        });
    }

    /** Returns the value for {@code key} (bumping its frequency), or -1 if absent. */
    public int get(int key) {
        id++;
        if (hashMap.containsKey(key)) {
            update(key);
            return hashMap.get(key);
        }
        return -1;
    }

    /**
     * Inserts or updates {@code key}. Inserting at capacity first evicts the
     * least frequently used entry (ties: least recently used).
     */
    public void set(int key, int value) {
        if (capacity == 0) return;
        id++;
        if (hashMap.containsKey(key)) {
            update(key);
            hashMap.put(key, value);
            return;
        }

        if (hashMap.size()==capacity) {
            // first() is the (lowest-frequency, oldest) entry.
            Cache first=treeSet.pollFirst();
            hashMap.remove(first.key);
            caches.remove(first.key);
        }
        hashMap.put(key, value);
        Cache cache = new Cache(key, 1, id);
        caches.put(key, cache);
        treeSet.add(cache);
    }

    /** Replaces {@code key}'s record with frequency+1 and the current timestamp. */
    private void update(int key) {
        // Must remove BEFORE building the new record: the TreeSet finds the
        // element through the comparator using its old (f, r) values.
        Cache old = caches.get(key);
        treeSet.remove(old);
        Cache cache = new Cache(key, old.f + 1, id);
        caches.put(key, cache);
        treeSet.add(cache);
    }
}

results matching ""

    No results matching ""