P24. Implement an LRU Cache

https://algocasts.io/episodes/rLmP8moY

head always points to the least recently used node, and a node that was just accessed should be placed behind head. At 3'02 the video says "in front of head"? Then at 3'23 it says to move it behind head again.

:+1:

Good catch. Saying "place it in front of head" at 3'02 was a slip. In this problem head moves in the counterclockwise direction, so the position node C is moved to should be regarded as behind head.

Thanks for pointing it out; I'll add an erratum to the video later.

P.S. This is one of the earlier videos, and looking at it now that diagram is really ugly. When I have time I plan to redo the video.
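For context, none of the Java snippets quoted in this thread show the Node class or the fields they rely on. Reconstructed from the calls that do appear (new Node(-1, -1, cur, null), map.get(key), head.val != -1), the declarations presumably look roughly like the sketch below; the exact code in the video may differ.

    import java.util.HashMap;
    import java.util.Map;

    class Node {
      int key, val;
      Node prev, next;
      Node(int key, int val, Node prev, Node next) {
        this.key = key;
        this.val = val;
        this.prev = prev;
        this.next = next;
      }
    }

    class LRUCache {
      // head marks the least recently used slot of a fixed-size circular list.
      // It has to exist before the constructor quoted below runs, so it is
      // presumably initialized at the field declaration with sentinel values.
      private Node head = new Node(-1, -1, null, null);
      private final Map<Integer, Node> map = new HashMap<>();

      // constructor, get and put as quoted elsewhere in the thread
    }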

public void put(int key, int value) {
  if (map.containsKey(key)) {
    // key already cached: just update the value and mark it most recently used
    Node cur = map.get(key);
    cur.val = value;
    move2Head(cur);
  } else {
    // head is the least recently used slot: evict its old entry (the val != -1
    // check skips slots that are still empty), write the new entry into it,
    // then step head back so it points at the next least recently used node
    if (head.val != -1) map.remove(head.key);
    head.key = key;
    head.val = value;
    map.put(key, head);
    head = head.prev;
  }
}

So should head = head.prev be changed to head = head.next here?

@791783391
No, the code is correct as written.
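To make that concrete, here is a rough trace of the else branch on a hypothetical 3-slot ring, assuming the cache is already full (the slot names A/B/C are made up for illustration; next runs clockwise A → B → C → A, and head sits on the least recently used slot):

    // before put: head = A (least recently used),
    //             A.next = B (most recently used),
    //             A.prev = C (second least recently used)
    // put(k, v) with a brand-new key:
    //   map.remove(A.key);      // evict the entry stored in the LRU slot
    //   A.key = k; A.val = v;   // A now holds the freshest entry
    //   head = head.prev;       // head steps counterclockwise onto C,
    //                           // the oldest remaining entry
    // With head = head.next instead, head would land on B, a recently used
    // entry, and the very next insertion of a new key would evict it by mistake.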

So prev follows the counterclockwise arrow and next follows the clockwise arrow, right? In that case the ABCD diagram you drew probably has to be drawn clockwise to be correct? Like this:

public LRUCache(int capacity) {
  Node cur = head;
  for (int i = 0; i < capacity-1; ++i) {
    cur.next = new Node(-1, -1, cur, null);
    cur = cur.next; // walking clockwise along next
  }
  cur.next = head;
  head.prev = cur;
}
    public LRUCache(int capacity) {
      Node cur = head;
      for (int i = 0; i < capacity-1; ++i) {
        cur.next = new Node(-1, -1, cur, null);
        cur = cur.next;
      }
      cur.next = head;
      head.prev = cur;
    }

Why can cur point to head here? Isn't cur null?

Sharing a Python version.

# -*- coding: utf-8 -*-

class Node(object):
    def __init__(self, key, val, prev, next_):
        self.key = key
        self.val = val
        self.prev = prev
        self.next_ = next_

    def __add__(self, node):
        # link node in right after self (self -> node) and return self,
        # so the caller still holds the left end of the chain
        self.next_ = node
        node.prev = self
        return self

class LRUCache(object):

    def __init__(self, capacity):
        self.capacity = capacity
        self.v2node = {}
        head = Node(-1, -1, None, None)
        p = head
        for i in range(self.capacity-1):
            cur = Node(-1, -1, None, None)
            p = p + cur
            p = p.next_
        p.next_ = head
        head.prev = p
        self.head = head

    def get(self, key):
        if key not in self.v2node: return -1
        self.move2head(self.v2node[key])
        return self.v2node[key].val

    def put(self, key, value):
        if key in self.v2node:
            self.v2node[key].val = value
            self.move2head(self.v2node[key])
        else:
            if self.head.val != -1: del self.v2node[self.head.key]
            self.head.key = key
            self.head.val = value
            self.v2node[key] = self.head
            self.head = self.head.next_

    def move2head(self, node):
        # if the accessed node is the head (the LRU slot), step head forward
        # to the next-oldest entry first
        if node is self.head: self.head = self.head.next_

        # unlink node from its current position
        prev = node.prev
        next_ = node.next_
        prev.next_ = next_
        next_.prev = prev

        # splice node back in just before head; in this version head.prev is
        # the most recently used position (the mirror image of the Java code)
        prev = self.head.prev
        next_ = prev.next_
        prev.next_ = node
        next_.prev = node
        node.prev = prev
        node.next_ = next_

It's basically the same, though. The key point is that Python doesn't allow something like null.next (there is no null, and accessing an attribute on None raises an error).

@kevin

Yes, to match the constructor the diagram should indeed be drawn clockwise.

@punut

cur is not null here. See the comments below:

    public LRUCache(int capacity) {
      Node cur = head;
      for (int i = 0; i < capacity-1; ++i) {
        cur.next = new Node(-1, -1, cur, null); // create a new Node and point cur's next at it
        cur = cur.next; // move cur to cur.next, so cur now refers to the Node created on the previous line
      }
      cur.next = head; // which is why cur cannot be null here
      head.prev = cur;
    }

╮( ̄▽ ̄)╭ I see now!!! It turns out I had mis-copied the code while reading it. I then wrote it out again following this approach.

At 2:20 you said head needs to stay at the least recently accessed position, so why is head moved over to C at 3:30? Moving it to C makes that the most recently accessed position, and then if a new key is put in, wouldn't it overwrite the most recently accessed slot instead of the least recently accessed one? For example, if I put a new key (6, F), the C that head points to would be updated, but it should be B that gets updated.

@kevin

You've misunderstood. head does not move to C. It is C that moves to behind head, and head is still at node B. The relationship between head and C is:

head.next = C;
C.prev = head;

not head = C;
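move2Head itself is never shown in the thread, but based on this explanation it presumably does something like the sketch below: unlink the accessed node and splice it in right after head, the most recently used position (the cur == head branch mirrors what the Python version above does). Treat this as a reconstruction, not the video's exact code; the same goes for the get method at the end.

    private void move2Head(Node cur) {
      if (cur == head) {    // the LRU slot itself was accessed:
        head = head.prev;   // stepping head back leaves cur sitting at head.next
        return;
      }
      if (cur == head.next) return; // already the most recently used node

      // unlink cur from its current position
      cur.prev.next = cur.next;
      cur.next.prev = cur.prev;

      // splice cur in right behind head (this is where head.next = C; C.prev = head comes from)
      cur.next = head.next;
      head.next.prev = cur;
      head.next = cur;
      cur.prev = head;
    }

    public int get(int key) {
      if (!map.containsKey(key)) return -1;
      Node cur = map.get(key);
      move2Head(cur); // a cache hit makes the node the most recently used
      return cur.val;
    }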

Here's a generic Scala version:

  import java.util.Objects
  import scala.collection.mutable

  class LRUCache[K, V](capacity: Int) {
    private class Node(val key: K, val value: V, var prev: Node, var next: Node)

    private val map = mutable.Map.empty[K, Node]
    private var head: Node = _
    private var _size = 0

    def top: Option[V] = if (head == null) None else Some(head.value)

    def get(key: K): Option[V] = {
      map.get(key).map { node =>
        move2Head(node)
        node.value
      }
    }

    def put(key: K, value: V): Option[V] = {
      if (!map.contains(key) && _size == capacity) { // capacity is full
        None
      } else {
        val node = new Node(key, value, null, null)
        val old = map.put(key, node).map { node =>
          detach(node)
          node.value
        }
        move2Head(node)
        if (old.isEmpty) {
          _size += 1
        }
        old
      }
    }

    def remove(key: K): Option[V] = {
      map.get(key).foreach { node =>
        _size -= 1
        if (_size == 0) {
          head = null
        } else if (node == head) {
          head = node.next
        }
        detach(node)
      }
      map.remove(key).map(_.value)
    }

    def size: Int = _size

    private def move2Head(node: Node): Unit = {
      if (Objects.isNull(head)) {
        head = node
        head.prev = node
        head.next = node
      } else {
        detach(node)
        attach(node)
      }
    }

    private def attach(node: Node): Unit = {
      if (Objects.nonNull(head.prev)) {
        head.prev.next = node
        node.prev = head.prev
      }
      if (Objects.nonNull(head.next)) {
        head.next.prev = node
        node.next = head.next
      }
      head = node
    }

    private def detach(node: Node): Unit = {
      if (Objects.nonNull(node.prev)) {
        node.prev.next = node.next
      }
      if (Objects.nonNull(node.next)) {
        node.next.prev = node.prev
      }
    }
  }

  test("LRUCache") {
    val cache = new LRUCache[Int, Int](4)
    cache.top should ===(None)

    cache.put(1, 1) should ===(None)
    cache.top should ===(Some(1))

    cache.put(2, 2) should ===(None)
    cache.top should ===(Some(2))
    cache.size should ===(2)

    cache.put(3, 3) should ===(None)
    cache.top should ===(Some(3))
    cache.top should ===(Some(3))

    cache.put(4, 4) should ===(None)
    cache.top should ===(Some(4))
    cache.size should ===(4)

    cache.put(5, 5) should ===(None)
    cache.size should ===(4)

    cache.put(3, 33) should ===(Some(3))
    cache.size should ===(4)
    cache.top should ===(Some(33))

    cache.remove(5) should ===(None)
    cache.size should ===(4)

    cache.remove(3) should ===(Some(33))
    cache.size should ===(3)
    cache.top should ===(Some(4))

    cache.get(1) should ===(Some(1))
    cache.top should ===(Some(1))

    cache.remove(1)
    cache.size should ===(2)
    cache.top should ===(Some(4))

    cache.remove(2)
    cache.size should ===(1)
    cache.top should ===(Some(4))

    cache.remove(4)
    cache.top should ===(None)
    cache.size should ===(0)

    cache.remove(9)
    cache.top should ===(None)
    cache.size should ===(0)
  }