author     Sascha Silbe <sascha@silbe.org>         2010-02-08 18:15:33 (GMT)
committer  Sascha Silbe <sascha-pgp@silbe.org>     2010-02-08 18:15:33 (GMT)
commit     ab68ecfbcd67e4f9d1cb1a1146f8e88e9b7274b4 (patch)
tree       4c4e74b84b8dd258dbbbd594f5fb1027778c70b6 /src/jarabe
parent     fef8f822f21ab19b64f2369691c8840227f58017 (diff)
simplify jarabe.journal.model._Cache (#1651)
cache.remove_all(cache) doesn't work as expected because we're iterating over the object we're modifying. cache won't be empty after this call, causing the caching algorithm to break.

Signed-off-by: Sascha Silbe <sascha@silbe.org>
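To make the failure mode concrete, here is a minimal, standalone sketch (plain Python lists and made-up uids, not the actual Journal code) of what goes wrong when a container is emptied by removing entries while iterating over it, and why deleting the whole slice is the safe replacement:

# Sketch of the pitfall (made-up uids, not Journal code).
entries = ['uid1', 'uid2', 'uid3', 'uid4']

for entry in entries:        # the iterator advances while the list shrinks
    entries.remove(entry)    # every removal shifts the remaining items left

print(entries)               # ['uid2', 'uid4'] -- half the entries survive

# Deleting the full slice empties the list in place, which is what the
# patch switches the cache-flush path to (del self._cache[:]).
entries = ['uid1', 'uid2', 'uid3', 'uid4']
del entries[:]
print(entries)               # []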
Diffstat (limited to 'src/jarabe')
-rw-r--r--   src/jarabe/journal/model.py   30
1 file changed, 10 insertions, 20 deletions
diff --git a/src/jarabe/journal/model.py b/src/jarabe/journal/model.py
index 85b4f46..4183deb 100644
--- a/src/jarabe/journal/model.py
+++ b/src/jarabe/journal/model.py
@@ -49,34 +49,24 @@ class _Cache(object):
 
     def __init__(self, entries=None):
         self._array = []
-        self._dict = {}
         if entries is not None:
             self.append_all(entries)
 
     def prepend_all(self, entries):
-        for entry in entries[::-1]:
-            self._array.insert(0, entry)
-            self._dict[entry['uid']] = entry
+        self._array[0:0] = entries
 
     def append_all(self, entries):
-        for entry in entries:
-            self._array.append(entry)
-            self._dict[entry['uid']] = entry
-
-    def remove_all(self, entries):
-        for uid in [entry['uid'] for entry in entries]:
-            obj = self._dict[uid]
-            self._array.remove(obj)
-            del self._dict[uid]
+        self._array += entries
 
     def __len__(self):
         return len(self._array)
 
     def __getitem__(self, key):
-        if isinstance(key, basestring):
-            return self._dict[key]
-        else:
-            return self._array[key]
+        return self._array[key]
+
+    def __delitem__(self, key):
+        del self._array[key]
+
 
 class BaseResultSet(object):
     """Encapsulates the result of a query
@@ -148,7 +138,7 @@ class BaseResultSet(object):
             query['offset'] = offset
             entries, self._total_count = self.find(query)
 
-            self._cache.remove_all(self._cache)
+            del self._cache[:]
             self._cache.append_all(entries)
 
             self._offset = offset
@@ -170,7 +160,7 @@ class BaseResultSet(object):
             objects_excess = len(self._cache) - cache_limit
             if objects_excess > 0:
                 self._offset += objects_excess
-                self._cache.remove_all(self._cache[:objects_excess])
+                del self._cache[:objects_excess]
 
         elif remaining_forward_entries > 0 and \
                 remaining_backwards_entries <= 0 and self._offset > 0:
@@ -193,7 +183,7 @@ class BaseResultSet(object):
             cache_limit = self._page_size * MAX_PAGES_TO_CACHE
             objects_excess = len(self._cache) - cache_limit
             if objects_excess > 0:
-                self._cache.remove_all(self._cache[-objects_excess:])
+                del self._cache[-objects_excess:]
         else:
             logging.debug('cache hit and no need to grow the cache')
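And a short usage sketch (hypothetical entries, not the actual result-set logic) showing the three slice-deletion patterns the BaseResultSet hunks switch to:

cache = _Cache([{'uid': 'uid%d' % i} for i in range(6)])

del cache[:]                        # flush everything, replaces cache.remove_all(cache)
cache.append_all([{'uid': 'uid%d' % i} for i in range(6)])

objects_excess = 2
del cache[:objects_excess]          # drop excess entries at the front of the cache
del cache[-objects_excess:]         # drop excess entries at the end of the cache

print(len(cache))                   # 2

Because __delitem__ simply forwards to the underlying list, all three forms behave exactly like the corresponding operations on a bare list, which is what makes the removal logic safe to drop.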