author     Tomeu Vizoso <tomeu@sugarlabs.org>    2009-02-25 13:58:31 (GMT)
committer  Tomeu Vizoso <tomeu@sugarlabs.org>    2009-02-25 13:58:31 (GMT)
commit     921e5df97b3239db1ae6ee93748def336805f938 (patch)
tree       4f42358f0b3208e4aa04696898de8bceecbae76c /src
parent     ac3c460ab57fdd4c4451eff0901c1fda75ed3c8b (diff)
Don't hardcode the maximum number of entries to cache in the journal #72
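
The change replaces the hardcoded BaseResultSet._CACHE_LIMIT of 80 entries with a limit derived from the page size the list view passes in. A minimal standalone sketch of that arithmetic, using only the PAGES_TO_CACHE constant and page_size parameter introduced by this commit; the cache_limit_for() helper and the demo page sizes are hypothetical, for illustration only:

    PAGES_TO_CACHE = 5

    def cache_limit_for(page_size):
        # Cache a few pages' worth of entries instead of a fixed 80, so the
        # cache scales with however many rows the list view actually shows.
        return page_size * PAGES_TO_CACHE

    if __name__ == '__main__':
        for page_size in (10, 16, 25):            # hypothetical page sizes
            print page_size, '->', cache_limit_for(page_size)
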
Diffstat (limited to 'src')
-rw-r--r--   src/jarabe/journal/listview.py    4
-rw-r--r--   src/jarabe/journal/model.py      38
2 files changed, 22 insertions(+), 20 deletions(-)
diff --git a/src/jarabe/journal/listview.py b/src/jarabe/journal/listview.py
index 34cd7a0..9358036 100644
--- a/src/jarabe/journal/listview.py
+++ b/src/jarabe/journal/listview.py
@@ -202,7 +202,7 @@ class BaseListView(gtk.HBox):
         if self._result_set is not None:
             self._result_set.stop()
-        self._result_set = model.find(self._query)
+        self._result_set = model.find(self._query, self._page_size)
         self._result_set.ready.connect(self.__result_set_ready_cb)
         self._result_set.progress.connect(self.__result_set_progress_cb)
         self._result_set.setup()
@@ -336,7 +336,7 @@ class BaseListView(gtk.HBox):
         self._vadjustment.changed()
         if self._result_set is None:
-            self._result_set = model.find(self._query)
+            self._result_set = model.find(self._query, self._page_size)
         max_value = max(0, self._result_set.length - self._page_size)
         if self._vadjustment.props.value > max_value:
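
Both BaseListView call sites now forward the view's own _page_size, so a one-page read can never exceed the cache. A small worked check of the read() guard shown in the model.py hunks below, with a hypothetical page size of 16:

    PAGES_TO_CACHE = 5
    page_size = 16                               # hypothetical; whatever the view computes
    cache_limit = page_size * PAGES_TO_CACHE     # what model.find() now derives (80)
    max_count = page_size                        # a one-page read from the view
    # The guard 'max_count * PAGES_TO_CACHE > self._cache_limit' never fires
    # for a one-page read, regardless of the page size chosen.
    assert not max_count * PAGES_TO_CACHE > cache_limit
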
diff --git a/src/jarabe/journal/model.py b/src/jarabe/journal/model.py
index 663cb12..054de7d 100644
--- a/src/jarabe/journal/model.py
+++ b/src/jarabe/journal/model.py
@@ -41,6 +41,8 @@ PROPERTIES = ['uid', 'title', 'mtime', 'timestamp', 'keep', 'buddies',
               'icon-color', 'mime_type', 'progress', 'activity', 'mountpoint',
               'activity_id']
+PAGES_TO_CACHE = 5
+
 class _Cache(object):
     __gtype_name__ = 'model_Cache'
@@ -81,12 +83,11 @@ class BaseResultSet(object):
"""Encapsulates the result of a query
"""
- _CACHE_LIMIT = 80
-
- def __init__(self, query):
+ def __init__(self, query, cache_limit):
self._total_count = -1
self._position = -1
self._query = query
+ self._cache_limit = cache_limit
self._offset = 0
self._cache = _Cache()
@@ -103,7 +104,7 @@ class BaseResultSet(object):
     def get_length(self):
         if self._total_count == -1:
             query = self._query.copy()
-            query['limit'] = BaseResultSet._CACHE_LIMIT
+            query['limit'] = self._cache_limit
             entries, self._total_count = self.find(query)
             self._cache.append_all(entries)
             self._offset = 0
@@ -120,10 +121,10 @@ class BaseResultSet(object):
     def read(self, max_count):
         logging.debug('ResultSet.read position: %r' % self._position)
-        if max_count * 5 > BaseResultSet._CACHE_LIMIT:
+        if max_count * PAGES_TO_CACHE > self._cache_limit:
             raise RuntimeError(
-                    'max_count (%i) too big for BaseResultSet._CACHE_LIMIT'
-                    ' (%i).' % (max_count, BaseResultSet._CACHE_LIMIT))
+                    'max_count (%i) too big for self._cache_limit'
+                    ' (%i).' % (max_count, self._cache_limit))
         if self._position == -1:
             self.seek(0)
@@ -143,14 +144,14 @@ class BaseResultSet(object):
         if (remaining_forward_entries <= 0 and
                 remaining_backwards_entries <= 0) or \
-                max_count > BaseResultSet._CACHE_LIMIT:
+                max_count > self._cache_limit:
             # Total cache miss: remake it
             offset = max(0, self._position - max_count)
             logging.debug('remaking cache, offset: %r limit: %r' % \
                     (offset, max_count * 2))
             query = self._query.copy()
-            query['limit'] = BaseResultSet._CACHE_LIMIT
+            query['limit'] = self._cache_limit
             query['offset'] = offset
             entries, self._total_count = self.find(query)
@@ -173,7 +174,7 @@ class BaseResultSet(object):
             self._cache.append_all(entries)
             # apply the cache limit
-            objects_excess = len(self._cache) - BaseResultSet._CACHE_LIMIT
+            objects_excess = len(self._cache) - self._cache_limit
             if objects_excess > 0:
                 self._offset += objects_excess
                 self._cache.remove_all(self._cache[:objects_excess])
@@ -195,7 +196,7 @@ class BaseResultSet(object):
             self._cache.prepend_all(entries)
             # apply the cache limit
-            objects_excess = len(self._cache) - BaseResultSet._CACHE_LIMIT
+            objects_excess = len(self._cache) - self._cache_limit
             if objects_excess > 0:
                 self._cache.remove_all(self._cache[-objects_excess:])
         else:
@@ -208,7 +209,7 @@ class BaseResultSet(object):
 class DatastoreResultSet(BaseResultSet):
     """Encapsulates the result of a query on the datastore
     """
-    def __init__(self, query):
+    def __init__(self, query, cache_limit):
         if query.get('query', '') and not query['query'].startswith('"'):
             query_text = ''
@@ -221,7 +222,7 @@ class DatastoreResultSet(BaseResultSet):
             query['query'] = query_text
-        BaseResultSet.__init__(self, query)
+        BaseResultSet.__init__(self, query, cache_limit)
     def find(self, query):
         entries, total_count = _get_datastore().find(query, PROPERTIES,
@@ -235,8 +236,8 @@ class DatastoreResultSet(BaseResultSet):
 class InplaceResultSet(BaseResultSet):
     """Encapsulates the result of a query on a mount point
     """
-    def __init__(self, query, mount_point):
-        BaseResultSet.__init__(self, query)
+    def __init__(self, query, cache_limit, mount_point):
+        BaseResultSet.__init__(self, query, cache_limit)
         self._mount_point = mount_point
         self._file_list = None
         self._pending_directories = 0
@@ -378,7 +379,7 @@ def _datastore_updated_cb(object_id):
 def _datastore_deleted_cb(object_id):
     deleted.send(None, object_id=object_id)
-def find(query):
+def find(query, page_size):
     """Returns a ResultSet
     """
     if 'order_by' not in query:
@@ -388,10 +389,11 @@ def find(query):
     if mount_points is None or len(mount_points) != 1:
         raise ValueError('Exactly one mount point must be specified')
+    cache_limit = page_size * PAGES_TO_CACHE
     if mount_points[0] == '/':
-        return DatastoreResultSet(query)
+        return DatastoreResultSet(query, cache_limit)
     else:
-        return InplaceResultSet(query, mount_points[0])
+        return InplaceResultSet(query, cache_limit, mount_points[0])
 def _get_mount_point(path):
     dir_path = os.path.dirname(path)
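
Taken together, a hedged end-to-end sketch of the patched API, assuming a running Sugar datastore; only find(), setup(), length and read() appear in the diff above, while the query keys and values here are illustrative guesses:

    from jarabe.journal import model

    query = {'mountpoints': ['/']}     # hypothetical query; '/' selects the datastore
    page_size = 15                     # e.g. the number of rows visible in the list view

    result_set = model.find(query, page_size)    # cache limit becomes 15 * 5 = 75
    result_set.setup()

    print 'total entries:', result_set.length
    for entry in result_set.read(page_size):     # one page, well under the cache limit
        print entry['title']
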