From 6b45a4ea06279313b52a3ea11ddb39107a3b5c8a Mon Sep 17 00:00:00 2001
From: Hans-Werner Hilse
Date: Wed, 12 Nov 2014 15:15:55 +0100
Subject: [PATCH] fix cache implementation

the cache would behave badly when the same item was insert()ed twice:
it would add the size twice to memory consumption, but would never
subtract it twice when purging the (actually single) object from
cache. So the cache would seem to fill up while in fact it wasn't.
---
 frontend/cache.lua | 35 +++++++++++++++++++++++++++--------
 1 file changed, 27 insertions(+), 8 deletions(-)

diff --git a/frontend/cache.lua b/frontend/cache.lua
index fc5993cc3..fda092f8e 100644
--- a/frontend/cache.lua
+++ b/frontend/cache.lua
@@ -66,7 +66,32 @@ function Cache:new(o)
     return o
 end
 
+-- internal: remove reference in cache_order list
+function Cache:_unref(key)
+    for i = #self.cache_order, 1, -1 do
+        if self.cache_order[i] == key then
+            table.remove(self.cache_order, i)
+        end
+    end
+end
+
+-- internal: free cache item
+function Cache:_free(key)
+    if not self.cache[key] then return end
+    self.current_memsize = self.current_memsize - self.cache[key].size
+    self.cache[key]:onFree()
+    self.cache[key] = nil
+end
+
+-- drop an item named via key from the cache
+function Cache:drop(key)
+    self:_unref(key)
+    self:_free(key)
+end
+
 function Cache:insert(key, object)
+    -- make sure that one key only exists once: delete existing
+    self:drop(key)
     -- guarantee that we have enough memory in cache
     if(object.size > self.max_memsize) then
         DEBUG("too much memory claimed for", key)
@@ -76,9 +101,7 @@
     -- (they are at the end of the cache_order array)
     while self.current_memsize + object.size > self.max_memsize do
         local removed_key = table.remove(self.cache_order)
-        self.current_memsize = self.current_memsize - self.cache[removed_key].size
-        self.cache[removed_key]:onFree()
-        self.cache[removed_key] = nil
+        self:_free(removed_key)
     end
     -- insert new object in front of the LRU order
     table.insert(self.cache_order, 1, key)
@@ -94,11 +117,7 @@
     if self.cache[key] then
         if self.cache_order[1] ~= key then
             -- put key in front of the LRU list
-            for k, v in ipairs(self.cache_order) do
-                if v == key then
-                    table.remove(self.cache_order, k)
-                end
-            end
+            self:_unref(key)
             table.insert(self.cache_order, 1, key)
         end
         return self.cache[key]
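
For illustration only (this snippet is not part of the patch and the size/key
values are made up), here is a minimal standalone Lua sketch of the accounting
drift described in the commit message: before the fix, inserting the same key
twice added the object's size to current_memsize twice, while evicting the
single stored object subtracted it only once; with the fix, insert() first
drop()s any existing entry, so the counter stays in step with what the cache
actually holds.

-- illustrative sketch only, not part of the patch: simulate the
-- current_memsize bookkeeping for one object of size 60 that is
-- inserted twice under the same key and later evicted

local size = 60

-- old behaviour: insert() always adds the size, even for an existing key
local old = 0
old = old + size           -- first insert                  -> 60
old = old + size           -- second insert, same key       -> 120
old = old - size           -- eviction of the single object -> 60
print("old accounting after eviction:", old)   -- 60 "leaked" in the counter

-- new behaviour: insert() calls drop() first, which frees the existing
-- entry and subtracts its size before adding it again
local new = 0
new = new + size           -- first insert                  -> 60
new = new - size + size    -- drop() + re-insert            -> 60
new = new - size           -- eviction                      -> 0
print("new accounting after eviction:", new)   -- 0, matches reality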