From 0e7387ab6dfcf6ddd1a663a2700f0007fed03a8e Mon Sep 17 00:00:00 2001
From: Jonas Smedegaard
Date: Fri, 20 Jun 2008 23:03:17 +0000
Subject: Merge branch 'upstream'

---
diff --git a/configure b/configure
index 34606d4..dad3d25 100755
--- a/configure
+++ b/configure
@@ -1,6 +1,6 @@
 #! /bin/sh
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.61 for sugar-datastore 0.8.1.
+# Generated by GNU Autoconf 2.61 for sugar-datastore 0.8.2.
 #
 # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
 # 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
@@ -572,8 +572,8 @@ SHELL=${CONFIG_SHELL-/bin/sh}
 # Identity of this package.
 PACKAGE_NAME='sugar-datastore'
 PACKAGE_TARNAME='sugar-datastore'
-PACKAGE_VERSION='0.8.1'
-PACKAGE_STRING='sugar-datastore 0.8.1'
+PACKAGE_VERSION='0.8.2'
+PACKAGE_STRING='sugar-datastore 0.8.2'
 PACKAGE_BUGREPORT=''
 
 ac_unique_file="configure.ac"
@@ -1152,7 +1152,7 @@ if test "$ac_init_help" = "long"; then
   # Omit some internal or obsolete options to make the list less imposing.
   # This message is too long to be a string in the A/UX 3.1 sh.
   cat <<_ACEOF
-\`configure' configures sugar-datastore 0.8.1 to adapt to many kinds of systems.
+\`configure' configures sugar-datastore 0.8.2 to adapt to many kinds of systems.
 
 Usage: $0 [OPTION]... [VAR=VALUE]...
 
@@ -1218,7 +1218,7 @@ fi
 
 if test -n "$ac_init_help"; then
   case $ac_init_help in
-     short | recursive ) echo "Configuration of sugar-datastore 0.8.1:";;
+     short | recursive ) echo "Configuration of sugar-datastore 0.8.2:";;
    esac
   cat <<\_ACEOF
 
@@ -1282,7 +1282,7 @@ fi
 test -n "$ac_init_help" && exit $ac_status
 if $ac_init_version; then
   cat <<\_ACEOF
-sugar-datastore configure 0.8.1
+sugar-datastore configure 0.8.2
 generated by GNU Autoconf 2.61
 
 Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
@@ -1296,7 +1296,7 @@ cat >config.log <<_ACEOF
 This file contains any messages produced by compilers while
 running configure, to aid debugging if configure makes a mistake.
 
-It was created by sugar-datastore $as_me 0.8.1, which was
+It was created by sugar-datastore $as_me 0.8.2, which was
 generated by GNU Autoconf 2.61. Invocation command line was
 
   $ $0 $@
@@ -1971,7 +1971,7 @@ fi
 
 # Define the identity of the package.
  PACKAGE='sugar-datastore'
- VERSION='0.8.1'
+ VERSION='0.8.2'
 
 
 cat >>confdefs.h <<_ACEOF
@@ -2681,7 +2681,7 @@ exec 6>&1
 # report actual input values of CONFIG_FILES etc. instead of their
 # values after options handling.
 ac_log="
-This file was extended by sugar-datastore $as_me 0.8.1, which was
+This file was extended by sugar-datastore $as_me 0.8.2, which was
 generated by GNU Autoconf 2.61. Invocation command line was
 
   CONFIG_FILES = $CONFIG_FILES
@@ -2724,7 +2724,7 @@ Report bugs to ."
 _ACEOF
 
 cat >>$CONFIG_STATUS <<_ACEOF
 ac_cs_version="\\
-sugar-datastore config.status 0.8.1
+sugar-datastore config.status 0.8.2
 configured by $0, generated by GNU Autoconf 2.61,
   with options \\"`echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`\\"
diff --git a/configure.ac b/configure.ac
index bd353b1..07d00a7 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1,4 +1,4 @@
-AC_INIT([sugar-datastore],[0.8.1],[],[sugar-datastore])
+AC_INIT([sugar-datastore],[0.8.2],[],[sugar-datastore])
 
 AC_PREREQ([2.59])
 
diff --git a/src/olpc/datastore/backingstore.py b/src/olpc/datastore/backingstore.py
index fc3c05f..cd23680 100644
--- a/src/olpc/datastore/backingstore.py
+++ b/src/olpc/datastore/backingstore.py
@@ -29,6 +29,13 @@ import dbus
 import xapian
 import gobject
 
+try:
+    import cjson
+    has_cjson = True
+except ImportError:
+    import simplejson
+    has_cjson = False
+
 from olpc.datastore.xapianindex import IndexManager
 from olpc.datastore import bin_copy
 from olpc.datastore import utils
@@ -215,7 +222,11 @@ class FileBackingStore(BackingStore):
         instead of a method parameter because this is less invasive for Update 1.
         """
         self.current_user_id = None
-        
+
+        # source for an idle callback that exports to the file system the
+        # metadata from the index
+        self._export_metadata_source = None
+
     # Informational
     def descriptor(self):
         """return a dict with atleast the following keys
@@ -327,7 +338,28 @@ class FileBackingStore(BackingStore):
 
         im.connect(index_name)
         self.indexmanager = im
-        
+
+        # Check that all entries have their metadata in the file system.
+        if not os.path.exists(os.path.join(self.base, '.metadata.exported')):
+            uids_to_export = []
+            uids = self.indexmanager.get_all_ids()
+
+            for uid in uids:
+                if not os.path.exists(os.path.join(self.base, uid + '.metadata')):
+                    uids_to_export.append(uid)
+
+            if uids_to_export:
+                self._export_metadata_source = gobject.idle_add(
+                        self._export_metadata, uids_to_export)
+            else:
+                open(os.path.join(self.base, '.metadata.exported'), 'w').close()
+
+    def _export_metadata(self, uids_to_export):
+        uid = uids_to_export.pop()
+        props = self.indexmanager.get(uid).properties
+        self._store_metadata(uid, props)
+        return len(uids_to_export) > 0
+
     def bind_to(self, datastore):
         ## signal from datastore that we are being bound to it
         self.datastore = datastore
@@ -500,8 +532,33 @@ class FileBackingStore(BackingStore):
             c.update(line)
         fp.close()
         return c.hexdigest()
-    
+
     # File Management API
+    def _encode_json(self, metadata, file_path):
+        if has_cjson:
+            f = open(file_path, 'w')
+            f.write(cjson.encode(metadata))
+            f.close()
+        else:
+            simplejson.dump(metadata, open(file_path, 'w'))
+
+    def _store_metadata(self, uid, props):
+        t = time.time()
+        temp_path = os.path.join(self.base, '.temp_metadata')
+        props = props.copy()
+        for property_name in model.defaultModel.get_external_properties():
+            if property_name in props:
+                del props[property_name]
+        self._encode_json(props, temp_path)
+        path = os.path.join(self.base, uid + '.metadata')
+        os.rename(temp_path, path)
+        logging.debug('exported metadata: %r s.' % (time.time() - t))
+
+    def _delete_metadata(self, uid):
+        path = os.path.join(self.base, uid + '.metadata')
+        if os.path.exists(path):
+            os.unlink(path)
+
     def _create_completion(self, uid, props, completion, exc=None, path=None):
         if exc:
             completion(exc)
@@ -517,6 +574,7 @@ class FileBackingStore(BackingStore):
         if completion is None:
             raise RuntimeError("Completion must be valid for async create")
         uid = self.indexmanager.index(props)
+        self._store_metadata(uid, props)
         props['uid'] = uid
         if filelike:
             if isinstance(filelike, basestring):
@@ -531,6 +589,7 @@ class FileBackingStore(BackingStore):
     def create(self, props, filelike, can_move=False):
         if filelike:
             uid = self.indexmanager.index(props)
+            self._store_metadata(uid, props)
             props['uid'] = uid
             if isinstance(filelike, basestring):
                 # lets treat it as a filename
@@ -540,7 +599,9 @@ class FileBackingStore(BackingStore):
                 self.indexmanager.index(props, path)
             return uid
         else:
-            return self.indexmanager.index(props)
+            uid = self.indexmanager.index(props)
+            self._store_metadata(uid, props)
+            return uid
 
     def get(self, uid, env=None, allowMissing=False, includeFile=False):
         content = self.indexmanager.get(uid)
@@ -575,6 +636,7 @@ class FileBackingStore(BackingStore):
             raise RuntimeError("Completion must be valid for async update")
 
         props['uid'] = uid
+        self._store_metadata(uid, props)
         if filelike:
             uid = self.indexmanager.index(props, filelike)
             props['uid'] = uid
@@ -590,6 +652,7 @@ class FileBackingStore(BackingStore):
 
     def update(self, uid, props, filelike=None, can_move=False):
         props['uid'] = uid
+        self._store_metadata(uid, props)
         if filelike:
             if isinstance(filelike, basestring):
                 # lets treat it as a filename
@@ -610,6 +673,7 @@ class FileBackingStore(BackingStore):
 
     def delete(self, uid, allowMissing=True):
         self._delete_external_properties(uid)
+        self._delete_metadata(uid)
         self.indexmanager.delete(uid)
 
         path = self._translatePath(uid)
@@ -617,7 +681,7 @@ class FileBackingStore(BackingStore):
             os.unlink(path)
         else:
             if not allowMissing:
-                raise KeyError("object for uid:%s missing" % uid)
+                raise KeyError("object for uid:%s missing" % uid)
 
     def get_uniquevaluesfor(self, propertyname):
         return self.indexmanager.get_uniquevaluesfor(propertyname)
@@ -651,6 +715,8 @@ class FileBackingStore(BackingStore):
         return self.indexmanager.get_all_ids()
 
     def stop(self):
+        if self._export_metadata_source is not None:
+            gobject.source_remove(self._export_metadata_source)
         self.indexmanager.stop()
 
     def complete_indexing(self):
diff --git a/src/olpc/datastore/datastore.py b/src/olpc/datastore/datastore.py
index 67ddca9..a15d5cf 100644
--- a/src/olpc/datastore/datastore.py
+++ b/src/olpc/datastore/datastore.py
@@ -128,28 +128,10 @@ class DataStore(dbus.service.Object):
 
     ### Backup support
    def pause(self, mountpoints=None):
-        """pause the datastore, during this time it will not process
-        requests. this allows the underlying stores to be backup up via
-        traditional mechanisms
-        """
-        if mountpoints:
-            mps = [self.mountpoints[mp] for mp in mountpoints]
-        else:
-            mps = self.mountpoints.values()
-
-        for mp in mps:
-            mp.stop()
+        """ Deprecated. """
 
     def unpause(self, mountpoints=None):
-        """resume the operation of a set of paused mountpoints"""
-        if mountpoints:
-            mps = [self.mountpoints[mp] for mp in mountpoints]
-        else:
-            mps = self.mountpoints.values()
-
-        for mp in mps:
-            mp.initialize_and_load()
-
+        """ Deprecated. """
     ### End Backups
 
     def connect_backingstore(self, uri, **kwargs):
--
cgit v0.9.1
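
The substantive change in this merge is the metadata export path added to backingstore.py: every create/update now mirrors an entry's indexed properties into a <uid>.metadata JSON file next to the data, written via a temp file plus os.rename() so readers never see a partially written file, encoded with cjson when available and simplejson otherwise. The sketch below is not part of the patch; it only illustrates that encode-and-rename pattern in isolation, with illustrative helper names and a stdlib-json fallback added so it runs outside an OLPC build (the real code takes the list of excluded keys from model.defaultModel.get_external_properties()).

import os
import time
import logging

try:
    import cjson                      # python-cjson, as preferred by the patch
    def _write_json(obj, path):
        f = open(path, 'w')
        f.write(cjson.encode(obj))
        f.close()
except ImportError:
    try:
        import simplejson as _json    # the patch's fallback for older Pythons
    except ImportError:
        import json as _json          # stdlib json keeps this sketch runnable
    def _write_json(obj, path):
        f = open(path, 'w')
        _json.dump(obj, f)
        f.close()

def store_metadata(base, uid, props, external_properties=()):
    # Drop properties the store keeps outside the index (the real code asks
    # the model for these), then write atomically: dump to a temp file in the
    # same directory and rename it into place.
    t = time.time()
    props = dict(props)
    for name in external_properties:
        props.pop(name, None)
    temp_path = os.path.join(base, '.temp_metadata')
    _write_json(props, temp_path)
    os.rename(temp_path, os.path.join(base, uid + '.metadata'))
    logging.debug('exported metadata: %r s.', time.time() - t)

if __name__ == '__main__':
    import tempfile
    logging.basicConfig(level=logging.DEBUG)
    base = tempfile.mkdtemp()
    # 'preview' here stands in for whatever the model reports as external.
    store_metadata(base, 'some-uid', {'title': 'example', 'preview': 'blob'},
                   external_properties=('preview',))
    print(open(os.path.join(base, 'some-uid.metadata')).read())

The one-time migration in the patch drives the same helper from a gobject.idle_add() callback (_export_metadata) that pops a single uid per iteration and returns True while uids remain, so existing entries are exported without blocking the D-Bus service; stop() cancels the pending callback with gobject.source_remove().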