Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
path: root/gdatastore
diff options
context:
space:
mode:
authorSascha Silbe <sascha-pgp@silbe.org>2011-04-10 18:49:14 (GMT)
committer Sascha Silbe <sascha-pgp@silbe.org>2011-04-10 18:49:14 (GMT)
commit8e35c43b3318f92018b4b2a43d267aa832cad37a (patch)
tree33c1fed5e6841da2c799d29a8e318811f87e4d03 /gdatastore
parentd2d26fdabd2b2ac27bf86d37f6b0beca865718ac (diff)
move src/gdatastore to top level (distutils-extra convention)
Diffstat (limited to 'gdatastore')
-rw-r--r--gdatastore/__init__.py0
-rw-r--r--gdatastore/datastore.py473
-rw-r--r--gdatastore/index.py395
3 files changed, 868 insertions, 0 deletions
diff --git a/gdatastore/__init__.py b/gdatastore/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gdatastore/__init__.py
diff --git a/gdatastore/datastore.py b/gdatastore/datastore.py
new file mode 100644
index 0000000..1cb8555
--- /dev/null
+++ b/gdatastore/datastore.py
@@ -0,0 +1,473 @@
+#
+# Author: Sascha Silbe <sascha-pgp@silbe.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 3
+# as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+Gdatastore D-Bus service API
+"""
+
+import hashlib
+import logging
+import os
+import shutil
+from subprocess import Popen, PIPE
+import tempfile
+import time
+import uuid
+
+import dbus
+import dbus.service
+import gconf
+
+from gdatastore.index import Index
+
+
# D-Bus identifiers for the native Gdatastore API (version 1).
DBUS_SERVICE_NATIVE_V1 = 'org.silbe.GDataStore'
DBUS_INTERFACE_NATIVE_V1 = 'org.silbe.GDataStore1'
DBUS_PATH_NATIVE_V1 = '/org/silbe/GDataStore1'

# D-Bus identifiers for the Sugar 0.84+ ("v2") data store API emulated
# by DBusApiSugarV2 below.
DBUS_SERVICE_SUGAR_V2 = 'org.laptop.sugar.DataStore'
DBUS_INTERFACE_SUGAR_V2 = 'org.laptop.sugar.DataStore'
DBUS_PATH_SUGAR_V2 = '/org/laptop/sugar/DataStore'

# D-Bus identifiers for the version-aware Sugar "v3" data store API.
# NOTE: shares the bus (service) name with the v2 API above.
DBUS_SERVICE_SUGAR_V3 = 'org.laptop.sugar.DataStore'
DBUS_INTERFACE_SUGAR_V3 = 'org.laptop.sugar.DataStore2'
DBUS_PATH_SUGAR_V3 = '/org/laptop/sugar/DataStore2'
+
+
class DataStoreError(Exception):
    """Base class for all errors raised by the Gdatastore service."""
+
+
class GitError(DataStoreError):
    """Raised when an invocation of the git binary exits unsuccessfully.

    Attributes:
        returncode: non-zero exit status of the git process
        stderr: standard error output of the git process (as unicode)
    """

    def __init__(self, returncode, stderr):
        self.returncode = returncode
        # Keep stderr as unicode so __unicode__() can embed it directly.
        self.stderr = unicode(stderr)
        # NOTE(review): calling Exception.__init__ without arguments
        # leaves Exception.args empty; __str__/__unicode__ below carry
        # the message instead.
        Exception.__init__(self)

    def __unicode__(self):
        return u'Git returned with exit code #%d: %s' % (self.returncode,
                                                         self.stderr)

    def __str__(self):
        # Delegate so both string protocols produce the same message.
        return self.__unicode__()
+
+
class DBusApiSugarV2(dbus.service.Object):
    """Compatibility layer for the Sugar 0.84+ data store D-Bus API

    Maps the non-versioned Sugar API onto the versioned internal API:
    a Sugar "uid" corresponds to a gdatastore tree_id, and only the
    latest version of each tree is exposed through this interface.
    """

    def __init__(self, internal_api):
        # InternalApi instance doing the actual storage work.
        self._internal_api = internal_api
        # Claim the well-known Sugar bus name exclusively (neither
        # replacing an existing owner nor allowing later replacement).
        bus_name = dbus.service.BusName(DBUS_SERVICE_SUGAR_V2,
                                        bus=dbus.SessionBus(),
                                        replace_existing=False,
                                        allow_replacement=False)
        dbus.service.Object.__init__(self, bus_name, DBUS_PATH_SUGAR_V2)

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='a{sv}sb', out_signature='s',
                         async_callbacks=('async_cb', 'async_err_cb'),
                         byte_arrays=True)
    def create(self, props, file_path, transfer_ownership,
               async_cb, async_err_cb):
        """Create a new entry (a new tree); reply with its uid (= tree_id)."""
        def success_cb(tree_id, child_id):
            # Emit the legacy signal before answering the D-Bus call.
            self.Created(tree_id)
            async_cb(tree_id)

        # Empty tree_id/parent_id make InternalApi.save() generate
        # fresh identifiers.
        self._internal_api.save(tree_id='', parent_id='', metadata=props,
                                path=file_path,
                                delete_after=transfer_ownership,
                                async_cb=success_cb,
                                async_err_cb=async_err_cb)

    @dbus.service.signal(DBUS_INTERFACE_SUGAR_V2, signature='s')
    def Created(self, uid):
        # pylint: disable-msg=C0103
        pass

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='sa{sv}sb', out_signature='',
                         async_callbacks=('async_cb', 'async_err_cb'),
                         byte_arrays=True)
    def update(self, uid, props, file_path, transfer_ownership,
               async_cb, async_err_cb):
        """Store new metadata (and possibly data) as a new version of *uid*."""
        def success_cb(tree_id, child_id):
            self.Updated(tree_id)
            async_cb()

        latest_versions = self._get_latest(uid)
        if not latest_versions:
            raise ValueError('Trying to update non-existant entry %s - wanted'
                             ' to use create()?' % (uid, ))

        parent = latest_versions[0]
        object_id = parent['tree_id'], parent['version_id']
        if self._compare_checksums(parent, file_path):
            # Data unchanged: update metadata in place instead of
            # creating a new version.
            self._internal_api.change_metadata(object_id, props)
            return success_cb(uid, None)

        self._internal_api.save(tree_id=uid,
                                parent_id=parent['version_id'], metadata=props,
                                path=file_path, delete_after=transfer_ownership,
                                async_cb=success_cb, async_err_cb=async_err_cb)

    @dbus.service.signal(DBUS_INTERFACE_SUGAR_V2, signature='s')
    def Updated(self, uid):
        # pylint: disable-msg=C0103
        pass

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='a{sv}as', out_signature='aa{sv}u')
    def find(self, query, properties):
        """Return (entries, total_count), translating uid <-> tree_id."""
        # Callers ask for 'uid'; internally that property is 'tree_id'.
        if 'uid' in properties:
            properties.append('tree_id')
            properties.remove('uid')

        options = {'metadata': properties}
        # Pagination/ordering arguments travel inside the query dict in
        # this API version; split them out for the internal API.
        for name in ['offset', 'limit', 'order_by']:
            if name in query:
                options[name] = query.pop(name)

        if 'uid' in query:
            query['tree_id'] = query.pop('uid')

        results, count = self._internal_api.find(query, options,
                                                 query.pop('query', None))

        # Translate back; an empty property list means "all properties".
        if not properties or 'tree_id' in properties:
            for entry in results:
                entry['uid'] = entry.pop('tree_id')

        return results, count

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='s', out_signature='s',
                         sender_keyword='sender')
    def get_filename(self, uid, sender=None):
        """Check out the data of the latest version of *uid*; return its path."""
        latest_versions = self._get_latest(uid)
        if not latest_versions:
            raise ValueError('Entry %s does not exist' % (uid, ))

        object_id = (uid, latest_versions[0]['version_id'])
        return self._internal_api.get_data_path(object_id, sender=sender)

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='s', out_signature='a{sv}')
    def get_properties(self, uid):
        """Return the metadata of the latest version of *uid*."""
        latest_versions = self._get_latest(uid)
        if not latest_versions:
            raise ValueError('Entry %s does not exist' % (uid, ))

        # Present tree_id as 'uid' and hide the version concept entirely.
        latest_versions[0]['uid'] = latest_versions[0].pop('tree_id')
        del latest_versions[0]['version_id']
        return latest_versions[0]

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='sa{sv}', out_signature='as')
    def get_uniquevaluesfor(self, propertyname, query=None):
        """Return all unique values of *propertyname* (backend is stubbed)."""
        return self._internal_api.find_unique_values(query, propertyname)

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='s', out_signature='')
    def delete(self, uid):
        """Delete the latest version of *uid* and emit Deleted."""
        latest_versions = self._get_latest(uid)
        if not latest_versions:
            raise ValueError('Entry %s does not exist' % (uid, ))

        # NOTE(review): only the latest version is removed; older
        # versions of the tree (if any) remain in the store.
        self._internal_api.delete((uid, latest_versions[0]['version_id']))
        self.Deleted(uid)

    @dbus.service.signal(DBUS_INTERFACE_SUGAR_V2, signature='s')
    def Deleted(self, uid):
        # pylint: disable-msg=C0103
        pass

    @dbus.service.method(DBUS_INTERFACE_SUGAR_V2,
                         in_signature='', out_signature='aa{sv}')
    def mounts(self):
        # Single-volume store: report one fake mount point for callers
        # that enumerate mounts.
        return [{'id': 1}]

    @dbus.service.signal(DBUS_INTERFACE_SUGAR_V2, signature='a{sv}')
    def Mounted(self, descriptior):
        # pylint: disable-msg=C0103
        # NOTE(review): parameter name is a typo for 'descriptor'; kept
        # as-is since this is a never-emitted signal stub.
        pass

    @dbus.service.signal(DBUS_INTERFACE_SUGAR_V2, signature='a{sv}')
    def Unmounted(self, descriptor):
        # pylint: disable-msg=C0103
        pass

    def _get_latest(self, uid):
        # Newest version of the tree: '+timestamp' maps to reverse=True
        # (i.e. newest first) in Index.find(), and limit=1 keeps only it.
        return self._internal_api.find({'tree_id': uid},
                                       {'limit': 1, 'order_by': ['+timestamp']})[0]

    def _compare_checksums(self, parent, child_data_path):
        """Return True if *parent*'s data matches the file at
        *child_data_path* (so a metadata-only update suffices)."""
        parent_object_id = (parent['tree_id'], parent['version_id'])
        parent_data_path = self._internal_api.get_data_path(parent_object_id)
        # Exactly one side has data => definitely different.
        if bool(child_data_path) ^ bool(parent_data_path):
            return False
        elif not child_data_path:
            # Neither side has data => trivially equal.
            return True

        # NOTE(review): this unconditional return makes the checksum
        # comparison below unreachable (dead code) - entries with data
        # always get a new version even when the content is identical.
        # Looks like a temporary short-circuit; confirm before removing.
        return False
        parent_checksum = self._internal_api.get_data_checksum(
            parent_object_id)
        child_checksum = calculate_checksum(child_data_path)
        return parent_checksum == child_checksum
+
+
class InternalApi(object):
    """Storage backend: metadata in a Xapian index, data in a bare git repo.

    Each entry version is a git commit referenced by
    refs/gdatastore/<tree_id>/<version_id>; entry data is stored as
    either a single blob named 'data' or a full tree.
    """

    def __init__(self, base_dir):
        self._base_dir = base_dir
        # Directory for temporary checkouts handed out by get_data_path().
        self._checkouts_dir = os.path.join(base_dir, 'checkouts')
        if not os.path.exists(self._checkouts_dir):
            os.makedirs(self._checkouts_dir)
        self._git_dir = os.path.join(base_dir, 'git')
        # Extra environment variables for git invocations (see _git_call()).
        self._git_env = {}
        gconf_client = gconf.client_get_default()
        # 0 (gconf default for an unset key) disables version expiry in
        # _check_max_versions().
        self._max_versions = gconf_client.get_int(
            '/desktop/sugar/datastore/max_versions')
        logging.debug('max_versions=%r', self._max_versions)
        self._index = Index(os.path.join(self._base_dir, 'index'))
        self._migrate()
        # In-memory metadata cache, keyed by (tree_id, version_id).
        self._metadata = {}

    def change_metadata(self, object_id, metadata):
        """Replace the metadata of an existing version in place."""
        logging.debug('change_metadata(%r, %r)', object_id, metadata)
        metadata['tree_id'], metadata['version_id'] = object_id
        if 'creation_time' not in metadata:
            # Carry over the original creation time if the caller
            # didn't supply one.
            old_metadata = self._metadata[object_id]
            metadata['creation_time'] = old_metadata['creation_time']

        self._index.store(object_id, metadata)
        self._metadata[object_id] = metadata

    def delete(self, object_id):
        """Remove a single version from index, cache and git."""
        logging.debug('delete(%r)', object_id)
        self._index.delete(object_id)
        del self._metadata[object_id]
        # Dropping the ref makes the commit unreachable; disk space is
        # only reclaimed once git garbage-collects.
        self._git_call('update-ref', ['-d', _format_ref(*object_id)])

    def get_data_path(self, (tree_id, version_id), sender=None):
        """Check out the data of a version and return its filesystem path.

        Returns a temp file path if the version stores a single 'data'
        blob, otherwise a directory checkout (currently unimplemented).
        NOTE: uses Python 2 tuple parameter unpacking in the signature.
        """
        logging.debug('get_data_path((%r, %r), %r)', tree_id, version_id,
                      sender)
        ref_name = _format_ref(tree_id, version_id)
        top_level_entries = self._git_call('ls-tree',
                                           [ref_name]).splitlines()
        # ls-tree output format: "<mode> <type> <hash>\t<name>".
        if len(top_level_entries) == 1 and \
           top_level_entries[0].endswith('\tdata'):
            blob_hash = top_level_entries[0].split('\t')[0].split(' ')[2]
            return self._checkout_file(blob_hash)

        return self._checkout_dir(ref_name)

    def find(self, query_dict, options, query_string=None):
        """Query the index; return (entries, total_count).

        Restricts each entry to options['metadata'] (if given) and wraps
        raw strings in dbus.ByteArray for D-Bus transport.
        """
        logging.debug('find(%r, %r, %r)', query_dict, options, query_string)
        entries, total_count = self._index.find(query_dict, query_string,
                                                options)
        #logging.debug('object_ids=%r', object_ids)
        property_names = options.pop('metadata', None)
        for entry in entries:
            # Python 2: entry.keys() returns a list, so deleting while
            # iterating is safe here.
            for name in entry.keys():
                if property_names and name not in property_names:
                    del entry[name]
                elif isinstance(entry[name], str):
                    entry[name] = dbus.ByteArray(entry[name])

        return entries, total_count

    def find_unique_values(self, query, name):
        # FIXME: hard-coded stub pending a real implementation.
        logging.debug('find_unique_values(%r, %r)', query, name)
        return ['org.sugarlabs.DataStoreTest1', 'org.sugarlabs.DataStoreTest2']

    def save(self, tree_id, parent_id, metadata, path, delete_after, async_cb,
             async_err_cb):
        """Store a new version; call async_cb(tree_id, child_id) on success.

        Empty tree_id => new tree; empty parent_id => first version.
        Raises ValueError for inconsistent id combinations or an
        unreadable path.
        """
        logging.debug('save(%r, %r, %r, %r, %r)', tree_id, parent_id,
                      metadata, path, delete_after)

        if path:
            path = os.path.realpath(path)
            if not os.access(path, os.R_OK):
                raise ValueError('Invalid path given.')

            if delete_after and not os.access(os.path.dirname(path), os.W_OK):
                raise ValueError('Deletion requested for read-only directory')

        if (not tree_id) and parent_id:
            raise ValueError('tree_id is empty but parent_id is not')

        if tree_id and not parent_id:
            # First version of an explicitly named tree: the tree must
            # not exist yet.
            if self.find({'tree_id': tree_id}, {'limit': 1})[1]:
                raise ValueError('No parent_id given but tree_id already '
                                 'exists')

        elif parent_id:
            if not self._index.contains((tree_id, parent_id)):
                raise ValueError('Given parent does not exist')

        if not tree_id:
            tree_id = self._gen_uuid()

        child_id = metadata.get('version_id')
        if not child_id:
            child_id = self._gen_uuid()
        elif not tree_id:
            # NOTE(review): unreachable - tree_id was filled in with a
            # generated uuid just above, so this branch can never fire.
            raise ValueError('No tree_id given but metadata contains'
                             ' version_id')
        elif self._index.contains((tree_id, child_id)):
            raise ValueError('There is an existing entry with the same tree_id'
                             ' and version_id')

        if 'timestamp' not in metadata:
            metadata['timestamp'] = time.time()

        if 'creation_time' not in metadata:
            metadata['creation_time'] = metadata['timestamp']

        # NOTE(review): directories fall through both branches with no
        # 'filesize' set - confirm intended.
        if os.path.isfile(path):
            metadata['filesize'] = str(os.stat(path).st_size)
        elif not path:
            metadata['filesize'] = '0'

        tree_id = str(tree_id)
        parent_id = str(parent_id)
        child_id = str(child_id)

        metadata['tree_id'] = tree_id
        metadata['version_id'] = child_id

        # TODO: check metadata for validity first (index?)
        self._store_entry(tree_id, child_id, parent_id, path, metadata)
        self._metadata[(tree_id, child_id)] = metadata
        self._index.store((tree_id, child_id), metadata)
        async_cb(tree_id, child_id)

    def stop(self):
        """Flush and close the index; call before process exit."""
        logging.debug('stop()')
        self._index.close()

    def _add_to_index(self, index_path, path):
        # Stage *path* (file or directory) into the temporary git index
        # at *index_path*.
        if os.path.isdir(path):
            self._git_call('add', ['-A'], work_dir=path, index_path=index_path)
        elif os.path.isfile(path):
            # Single files are stored under the fixed name 'data'.
            object_hash = self._git_call('hash-object', ['-w', path]).strip()
            mode = os.stat(path).st_mode
            self._git_call('update-index',
                           ['--add',
                            '--cacheinfo', oct(mode), object_hash, 'data'],
                           index_path=index_path)
        else:
            raise DataStoreError('Refusing to store special object %r' % (path, ))

    def _check_max_versions(self, tree_id):
        # Expire the oldest versions of *tree_id* beyond the configured
        # maximum; no-op if max_versions is 0/unset.
        if not self._max_versions:
            return

        options = {'all_versions': True, 'offset': self._max_versions,
                   'metadata': ['tree_id', 'version_id', 'timestamp'],
                   'order_by': ['+timestamp']}
        old_versions = self.find({'tree_id': tree_id}, options)[0]
        logging.info('Deleting old versions: %r', old_versions)
        for entry in old_versions:
            self.delete((entry['tree_id'], entry['version_id']))

    def _checkout_file(self, blob_hash):
        # Write the blob into a fresh temp file below checkouts/ and
        # return its name; the caller becomes responsible for the file.
        fd, file_name = tempfile.mkstemp(dir=self._checkouts_dir)
        try:
            self._git_call('cat-file', ['blob', blob_hash], stdout_fd=fd)
        finally:
            os.close(fd)
        return file_name

    def _checkout_dir(self, ref_name):
        # FIXME
        return ''

    def _create_repo(self):
        # Initialise the bare repository used for data storage.
        os.makedirs(self._git_dir)
        self._git_call('init', ['-q', '--bare'])

    def _find_git_parent(self, tree_id, parent_id):
        # Resolve the parent version's commit hash; None for the first
        # version of a tree.
        if not parent_id:
            return None

        return self._git_call('rev-parse',
                              [_format_ref(tree_id, parent_id)]).strip()

    def _format_commit_message(self, metadata):
        # The commit message doubles as a plain-text metadata record.
        return repr(metadata)

    def _gen_uuid(self):
        return str(uuid.uuid4())

    def _git_call(self, command, args=None, input=None, input_fd=None,
                  stdout_fd=None, work_dir=None, index_path=None):
        """Run 'git <command> <args>' in the repository; return stdout.

        Raises GitError if git exits with a non-zero status.
        """
        env = dict(self._git_env)
        if work_dir:
            env['GIT_WORK_TREE'] = work_dir
        if index_path:
            env['GIT_INDEX_FILE'] = index_path
        logging.debug('calling git %s, env=%r', ['git', command] + (args or []), env)
        # cwd=self._git_dir points git at the bare repository.
        pipe = Popen(['git', command] + (args or []), stdin=input_fd or PIPE,
                     stdout=stdout_fd or PIPE, stderr=PIPE, close_fds=True,
                     cwd=self._git_dir, env=env)
        stdout, stderr = pipe.communicate(input)
        if pipe.returncode:
            raise GitError(pipe.returncode, stderr)
        return stdout

    def _migrate(self):
        # Create the repository on first run; future on-disk format
        # migrations go here.
        if not os.path.exists(self._git_dir):
            return self._create_repo()

    def _store_entry(self, tree_id, version_id, parent_id, path, metadata):
        # Commit *path* (if any) and point the version ref at the commit.
        parent_hash = self._find_git_parent(tree_id, parent_id)
        commit_message = self._format_commit_message(metadata)
        tree_hash = self._write_tree(path)
        commit_options = [tree_hash]
        if parent_hash:
            commit_options += ['-p', parent_hash]
        commit_hash = self._git_call('commit-tree', commit_options,
                                     input=commit_message).strip()
        self._git_call('update-ref', [_format_ref(tree_id, version_id),
                                      commit_hash])

    def _write_tree(self, path):
        # Store *path* as a git tree object and return its hash; an
        # empty path yields the empty tree.
        if not path:
            return self._git_call('hash-object',
                                  ['-w', '-t', 'tree', '--stdin'],
                                  input='').strip()

        # Use a throw-away index file so saves don't clash with each other.
        index_dir = tempfile.mkdtemp(prefix='gdatastore-')
        index_path = os.path.join(index_dir, 'index')
        try:
            self._add_to_index(index_path, path)
            return self._git_call('write-tree', index_path=index_path).strip()
        finally:
            shutil.rmtree(index_dir)
+
+
def calculate_checksum(path):
    """Return the SHA-1 hex digest of the file at *path*.

    Reads in fixed-size chunks so arbitrarily large files can be
    hashed without loading them into memory at once.
    """
    checksum = hashlib.sha1()
    # Binary mode hashes the exact bytes on disk (text mode would
    # translate line endings on some platforms).  open() + with-block
    # replaces the deprecated file() builtin and fixes the original's
    # leaked file handle (it was never closed).
    with open(path, 'rb') as f:
        while True:
            chunk = f.read(65536)
            if not chunk:
                return checksum.hexdigest()
            checksum.update(chunk)
+
+
+def _format_ref(tree_id, version_id):
+ return 'refs/gdatastore/%s/%s' % (tree_id, version_id)
diff --git a/gdatastore/index.py b/gdatastore/index.py
new file mode 100644
index 0000000..c93f5b3
--- /dev/null
+++ b/gdatastore/index.py
@@ -0,0 +1,395 @@
+#
+# Author: Sascha Silbe <sascha-pgp@silbe.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 3
+# as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+Gdatastore metadata index interface
+"""
+
import ast
import logging
import os
import sys

import xapian
from xapian import Document, Enquire, Query, WritableDatabase
+
+
# On-disk index format version (stored in the database metadata).
_CURRENT_VERSION = 1
# Properties indexed as prefixed terms (searchable by exact match and
# full text); 'prefix' is the Xapian term prefix, 'type' the canonical
# Python (2) type.
_STANDARD_TERMS = {
    'activity': {'prefix': 'Xactname', 'type': str},
    'activity_id': {'prefix': 'Xactid', 'type': str},
    'description': {'prefix': 'Xdesc', 'type': unicode},
    'keep': {'prefix': 'Xkeep', 'type': str},
    'mime_type': {'prefix': 'T', 'type': str},
    'tags': {'prefix': 'K', 'type': unicode},
    'title': {'prefix': 'S', 'type': unicode},
    'tree_id': {'prefix': 'Xtree', 'type': str},
    'version_id': {'prefix': 'Xversion', 'type': str},
}
# Xapian value slot numbers (used for sorting, ranges and collapsing).
_VALUE_TREE_ID = 0
_VALUE_VERSION_ID = 1
_VALUE_MTIME = 2
_VALUE_SIZE = 3
_VALUE_CTIME = 4
# Properties stored in value slots; see _serialise_value().
_STANDARD_VALUES = {
    'creation_time': {'number': _VALUE_CTIME, 'type': float},
    'filesize': {'number': _VALUE_SIZE, 'type': int},
    'timestamp': {'number': _VALUE_MTIME, 'type': float},
    'tree_id': {'number': _VALUE_TREE_ID, 'type': str},
    'version_id': {'number': _VALUE_VERSION_ID, 'type': str},
}
# Properties excluded from indexing entirely.
_IGNORE_PROPERTIES = ['preview']
# Term prefix marking a full (unsplit) property value.
_PREFIX_FULL_VALUE = 'Xf'
# Term prefix of the unique per-document id term.
_PREFIX_OBJECT_ID = 'Q'
# Largest count we pass to Xapian; used as "no limit".
_LIMIT_MAX = 2 ** 31 - 1
# Maps the sort_reverse flag to the matching Xapian docid order.
_DOCID_REVERSE_MAP = {True: Enquire.DESCENDING, False: Enquire.ASCENDING}
+
+
class DSIndexError(Exception):
    """Raised for errors specific to the metadata index (for example an
    on-disk index written by a newer, unsupported gdatastore version).
    """
+
+
class TermGenerator(xapian.TermGenerator):
    """Index entry metadata into a Xapian document.

    Standard properties are stored as serialised values (for sorting and
    range queries) and/or prefixed terms; all remaining properties are
    indexed under a per-property generated prefix.
    """

    def __init__(self):
        self._document = None
        xapian.TermGenerator.__init__(self)

    def index_document(self, document, properties):
        """Add terms and values for *properties* to *document*."""
        # Store value-mapped properties as serialised Xapian values so
        # they can be used for sorting and range queries.
        for name, info in _STANDARD_VALUES.items():
            if name not in properties:
                continue

            document.add_value(info['number'],
                               _serialise_value(info, properties[name]))

        self._document = document
        self.set_document(document)

        # Work on a copy: _index_known() pops the properties it handles.
        properties = dict(properties)
        self._index_known(properties)
        self._index_unknown(properties)

    def _index_known(self, properties):
        """Index standard properties and remove them from the input."""
        for name, info in _STANDARD_TERMS.items():
            if name not in properties:
                continue

            self._index_property(properties.pop(name), info['prefix'])

    def _index_unknown(self, properties):
        """
        Index all given properties.

        Expects not to get passed any standard term-stored property.
        """
        for name, value in properties.items():
            if name in _IGNORE_PROPERTIES or name in _STANDARD_VALUES:
                continue

            if isinstance(value, unicode):
                value = value.encode('utf-8')
            elif not isinstance(value, str):
                value = str(value)

            # BUG FIX: _index_property() takes (value, prefix); the
            # arguments used to be passed in the opposite order,
            # producing malformed terms (value+prefix instead of
            # prefix+value) for every non-standard property.
            self._index_property(value, _prefix_for_unknown(name))

    def _index_property(self, value, prefix):
        # We need to add the full value (i.e. not split into words), too, so
        # we can enumerate unique values. It also simplifies setting up
        # dictionary-based queries.
        self._document.add_term(_PREFIX_FULL_VALUE + prefix + value)
        self.index_text(value, 1, prefix)
        self.increase_termpos()
+
+
class QueryParser(xapian.QueryParser):
    """
    QueryParser that understands dictionaries and Xapian query strings.

    The dictionary may contain property names as keys and basic types
    (exact match), 2-tuples (range, only valid for value-stored
    standard properties) and lists (multiple exact matches joined with
    OR) as values.
    An empty dictionary matches everything. Queries from different keys
    (i.e. different property names) are joined with AND.

    Full text search (Xapian query string) is only supported for standard
    properties.
    """

    _FLAGS = (xapian.QueryParser.FLAG_PHRASE |
              xapian.QueryParser.FLAG_BOOLEAN |
              xapian.QueryParser.FLAG_LOVEHATE |
              xapian.QueryParser.FLAG_WILDCARD)

    def __init__(self):
        xapian.QueryParser.__init__(self)
        for name, info in _STANDARD_TERMS.items():
            # Register both "name:foo" and bare-word searches against
            # the property's term prefix.
            self.add_prefix(name, info['prefix'])
            self.add_prefix('', info['prefix'])

    def _parse_query_term(self, prefix, value):
        # Exact match on a term-stored property; lists are OR-ed.
        if isinstance(value, list):
            subqueries = [self._parse_query_term(prefix, word)
                          for word in value]
            return Query(Query.OP_OR, subqueries)

        return Query(_PREFIX_FULL_VALUE + prefix + str(value))

    def _parse_query_value_range(self, info, value):
        # Inclusive range match on a value-stored property.
        if len(value) != 2:
            raise TypeError('Only tuples of size 2 have a defined meaning.'
                            ' Did you mean to pass a list instead?')

        start, end = value
        return Query(Query.OP_VALUE_RANGE, info['number'],
                     _serialise_value(info, start),
                     _serialise_value(info, end))

    def _parse_query_value(self, info, value):
        # Value-stored property: list => OR of matches, tuple => range,
        # dict => legacy {'start':, 'end':} range, scalar => exact match
        # expressed as a degenerate range.
        if isinstance(value, list):
            subqueries = [self._parse_query_value(info, word)
                          for word in value]
            return Query(Query.OP_OR, subqueries)

        elif isinstance(value, tuple):
            return self._parse_query_value_range(info, value)

        elif isinstance(value, dict):
            # compatibility option for timestamp: {'start': 0, 'end': 1}
            start = value.get('start', 0)
            end = value.get('end', sys.maxint)
            return self._parse_query_value_range(info, (start, end))

        else:
            return self._parse_query_value_range(info, (value, value))

    def _parse_query_xapian(self, query_str):
        # Free-text part; '' = no default term prefix restriction.
        return xapian.QueryParser.parse_query(self, query_str,
                                              QueryParser._FLAGS, '')

    def parse_datastore_query(self, query_dict, query_string):
        """Combine *query_dict* and *query_string* into a single Query."""
        logging.debug('query_dict=%r, query_string=%r', query_dict,
                      query_string)
        queries = []
        # Copy so we don't depend on (or mutate) the caller's mapping.
        query_dict = dict(query_dict or {})

        if query_string is not None:
            queries.append(self._parse_query_xapian(str(query_string)))

        for name, value in query_dict.items():
            if name in _STANDARD_TERMS:
                prefix = _STANDARD_TERMS[name]['prefix']
                query = self._parse_query_term(prefix, value)
            elif name in _STANDARD_VALUES:
                info = _STANDARD_VALUES[name]
                query = self._parse_query_value(info, value)
            else:
                # Unknown properties are logged and ignored rather than
                # failing the whole query.
                logging.warning('Unknown term: %r=%r', name, value)
                continue

            queries.append(query)

        if not queries:
            # An empty query matches every document.
            queries.append(Query(''))

        logging.debug('queries: %r', [str(query) for query in queries])
        return Query(Query.OP_AND, queries)
+
+
class Index(object):
    """Xapian-backed metadata index for gdatastore entries."""

    def __init__(self, base_dir):
        self._base_dir = base_dir
        self._database = None

        if not os.path.exists(self._base_dir):
            os.makedirs(self._base_dir)
            self._create_database()

        self._migrate()
        self._query_parser = QueryParser()
        self._query_parser.set_database(self._database)

    def close(self):
        """Close index database if it is open."""
        if not self._database:
            return

        self._database.close()
        self._database = None

    def contains(self, object_id):
        """Return whether (tree_id, version_id) is present in the index."""
        postings = self._database.postlist(_object_id_term(object_id))
        # The posting list of the unique id term is non-empty iff the
        # document exists.
        try:
            _ = postings.next()
        except StopIteration:
            return False
        return True

    def delete(self, object_id):
        """Remove the document for (tree_id, version_id) from the index."""
        object_id_term = _object_id_term(object_id)
        if __debug__:
            # Costly sanity check (skipped under -O): the id term must
            # match exactly one document.
            enquire = Enquire(self._database)
            enquire.set_query(Query(object_id_term))
            documents = [hit.document for hit in enquire.get_mset(0, 2, 2)]
            assert len(documents) == 1

        self._database.delete_document(object_id_term)

    def find(self, query_dict, query_string, options):
        """Run a query; return (entries, total_count).

        options (all optional): offset, limit, order_by (list; only the
        first element is used, '+'/'-' prefix selects direction),
        all_versions (if False, collapse to one entry per tree_id),
        check_at_least (match-count accuracy hint).
        """
        offset = options.pop('offset', 0)
        limit = options.pop('limit', _LIMIT_MAX)
        # Only a single sort key is supported.
        order_by = options.pop('order_by', ['+timestamp'])[0]
        all_versions = options.pop('all_versions', False)
        check_at_least = options.pop('check_at_least', offset + limit + 1)

        enquire = Enquire(self._database)
        query = self._query_parser.parse_datastore_query(query_dict,
                                                         query_string)
        enquire.set_query(query)

        # '+' sorts reversed (descending) on the chosen value slot.
        sort_reverse = {'+': True, '-': False}[order_by[0]]
        try:
            sort_value_nr = _STANDARD_VALUES[order_by[1:]]['number']
        except KeyError:
            logging.warning('Trying to order by unknown property: %r',
                            order_by[1:])
            sort_value_nr = _VALUE_MTIME

        enquire.set_sort_by_value(sort_value_nr, reverse=sort_reverse)
        enquire.set_docid_order(_DOCID_REVERSE_MAP[sort_reverse])

        if not all_versions:
            # Keep only one (the best-ranked) version per tree.
            enquire.set_collapse_key(_VALUE_TREE_ID)

        if not all_versions and order_by != '+timestamp':
            # Xapian doesn't support using a different sort order while
            # collapsing (which needs to be timestamp in our case), so
            # we need to query everything and sort+limit ourselves.
            enquire.set_sort_by_value(_VALUE_MTIME, True)
            enquire.set_docid_order(enquire.ASCENDING)
            query_result = enquire.get_mset(0, _LIMIT_MAX, _LIMIT_MAX)
        else:
            logging.debug('Offset/limit using Xapian: %d %d %d', offset, limit, check_at_least)
            query_result = enquire.get_mset(offset, limit, check_at_least)

        total_count = query_result.get_matches_lower_bound()
        documents = [hit.document for hit in query_result]

        if (not all_versions) and (order_by != '+timestamp'):
            # Manual sort + pagination for the collapse case above.
            _sort_documents(documents, sort_value_nr, sort_reverse)
            del documents[offset + limit:]

        #object_ids = [(document.get_value(_VALUE_TREE_ID),
        #               document.get_value(_VALUE_VERSION_ID))
        #              for document in documents]
        entries = [deserialise_metadata(document.get_data())
                   for document in documents]

        return entries, total_count

    def store(self, object_id, properties):
        """Add or replace the index document for (tree_id, version_id)."""
        logging.debug('store(%r, %r)', object_id, properties)
        assert (properties['tree_id'], properties['version_id']) == object_id
        id_term = _object_id_term(object_id)
        document = Document()
        logging.debug('serialised=%r', serialiase_metadata(properties))
        document.set_data(serialiase_metadata(properties))
        document.add_term(id_term)
        term_generator = TermGenerator()
        term_generator.index_document(document, properties)
        assert (document.get_value(_VALUE_TREE_ID), document.get_value(_VALUE_VERSION_ID)) == object_id
        # replace_document() is add-or-update, keyed on the unique term.
        self._database.replace_document(id_term, document)

    def _create_database(self):
        # Create the database and stamp it with the current format
        # version so _migrate() can detect incompatible indexes.
        database = WritableDatabase(self._base_dir, xapian.DB_CREATE_OR_OPEN)
        database.set_metadata('gdatastore_version', str(_CURRENT_VERSION))
        database.close()

    def _migrate(self):
        """Open the database, upgrading older on-disk formats if needed."""
        self._database = WritableDatabase(self._base_dir,
                                          xapian.DB_CREATE_OR_OPEN)
        # NOTE(review): int() raises ValueError if the version metadata
        # is missing (e.g. the directory existed but was never stamped
        # by _create_database()) - confirm that's acceptable.
        version = int(self._database.get_metadata('gdatastore_version'))

        if version > _CURRENT_VERSION:
            raise DSIndexError('Unsupported index version: %d > %d' %
                               (version, _CURRENT_VERSION))
+
+
def deserialise_metadata(serialised):
    """Deserialise a string generated by serialise_metadata().

    Uses ast.literal_eval() instead of eval(): only Python literals (as
    produced by serialiase_metadata(), which reduces everything to
    built-in types first) are accepted, so no code can be executed even
    if the stored string was tampered with.
    """
    return ast.literal_eval(serialised)
+
+
def serialiase_metadata(metadata):
    """Serialise *metadata* into a string deserialise_metadata() can parse.

    Converts all values to plain built-in types first (see _to_native())
    so the repr() output contains only literals.
    NOTE(review): the name contains a typo ("serialiase"); kept as-is
    because this module calls it by that name.
    """
    return repr(_to_native(metadata))
+
+
def _object_id_term(object_id):
    """Return the unique Xapian term identifying (tree_id, version_id)."""
    tree_id, version_id = object_id
    return '%s%s%s-%s' % (_PREFIX_FULL_VALUE, _PREFIX_OBJECT_ID,
                          tree_id, version_id)
+
+
def _prefix_for_unknown(name):
    # Derive a collision-free Xapian term prefix for a non-standard
    # property: the length component prevents one property name from
    # being a prefix of another (e.g. 'foo' vs. 'foobar').
    return 'Xu%d:%s' % (len(name), unicode(name).encode('utf-8'))
+
+
def _serialise_value(info, value):
    """Serialise *value* for storage in the Xapian value slot *info*.

    Numeric types go through xapian.sortable_serialise() so that
    lexicographic comparison of the stored bytes matches numeric order
    (needed for sorting and OP_VALUE_RANGE queries).
    """
    if info['type'] in (float, int, long):
        return xapian.sortable_serialise(info['type'](value))
    elif info['type'] == unicode:
        return unicode(value).encode('utf-8')

    return str(info['type'](value))
+
+
def _sort_documents(documents, sort_value_nr, sort_reverse):
    """Sort *documents* in place by value slot *sort_value_nr*.

    Document id is used as a tie-breaker so the order is total and
    mirrors what Xapian itself would produce.
    NOTE: Python 2 only (uses the cmp= parameter of list.sort()).
    """
    def _cmp(document_a, document_b):
        # Primary key: the serialised sort value.
        value_a = document_a.get_value(sort_value_nr)
        value_b = document_b.get_value(sort_value_nr)
        if value_a < value_b:
            return -1
        elif value_a > value_b:
            return 1

        # Secondary key: document id.
        docid_a = document_a.get_docid()
        docid_b = document_b.get_docid()
        if docid_a < docid_b:
            return -1
        elif docid_a > docid_b:
            return 1
        return 0

    documents.sort(cmp=_cmp, reverse=sort_reverse)
+
+
def _to_native(value):
    """Recursively convert *value* to plain built-in types.

    Calling the base type strips subclass wrappers (presumably D-Bus
    types such as dbus.String - confirm against callers), so that
    repr() of the result contains only literals that
    deserialise_metadata() can parse back.
    """
    if isinstance(value, list):
        return [_to_native(e) for e in value]
    elif isinstance(value, dict):
        return dict([(_to_native(k), _to_native(v)) for k, v in value.items()])
    elif isinstance(value, unicode):
        return unicode(value)
    elif isinstance(value, str):
        return str(value)
    elif isinstance(value, int):
        return int(value)
    elif isinstance(value, float):
        return float(value)
    return value