From 94574ebd1c05783f455712d5ab13f537e2ab9739 Mon Sep 17 00:00:00 2001 From: James Cameron Date: Fri, 10 Dec 2010 22:47:23 +0000 Subject: Merge branch 'sucrose-0.84' of git.sugarlabs.org:sugar/mainline into sucrose-0.84 Conflicts: src/jarabe/journal/model.py --- diff --git a/configure.ac b/configure.ac index 02cc850..0a44d7e 100644 --- a/configure.ac +++ b/configure.ac @@ -1,11 +1,11 @@ -AC_INIT([Sugar],[0.84.25],[],[sugar]) +AC_INIT([Sugar],[0.84.27],[],[sugar]) AC_PREREQ([2.59]) AC_CONFIG_MACRO_DIR([m4]) AC_CONFIG_SRCDIR([configure.ac]) -SUCROSE_VERSION="0.84.24" +SUCROSE_VERSION="0.84.27" AC_SUBST(SUCROSE_VERSION) AM_INIT_AUTOMAKE([1.9 foreign dist-bzip2 no-dist-gzip]) diff --git a/extensions/cpsection/aboutcomputer/view.py b/extensions/cpsection/aboutcomputer/view.py index dd4f8f3..4b638ff 100644 --- a/extensions/cpsection/aboutcomputer/view.py +++ b/extensions/cpsection/aboutcomputer/view.py @@ -174,7 +174,7 @@ class AboutComputer(SectionView): vbox_copyright.set_border_width(style.DEFAULT_SPACING * 2) vbox_copyright.set_spacing(style.DEFAULT_SPACING) - label_copyright = gtk.Label("© 2006-2009 One Laptop per Child " + label_copyright = gtk.Label("© 2006-2010 One Laptop per Child " "Association Inc; Red Hat Inc; Collabora Ltd; " "and Contributors.") label_copyright.set_alignment(0, 0) diff --git a/src/jarabe/journal/model.py b/src/jarabe/journal/model.py index 112f097..24f6140 100644 --- a/src/jarabe/journal/model.py +++ b/src/jarabe/journal/model.py @@ -20,13 +20,14 @@ import errno from datetime import datetime import time import shutil +import tempfile from stat import S_IFLNK, S_IFMT, S_IFDIR, S_IFREG import traceback import re +import json import gobject import dbus -import gconf import gio from sugar import dispatch @@ -44,6 +45,8 @@ PROPERTIES = ['uid', 'title', 'mtime', 'timestamp', 'keep', 'buddies', PAGES_TO_CACHE = 5 +JOURNAL_METADATA_DIR = '.Sugar-Metadata' + class _Cache(object): __gtype_name__ = 'model_Cache' @@ -314,8 +317,10 @@ class InplaceResultSet(BaseResultSet): files = self._file_list[offset:offset + limit] entries = [] - for file_path, stat, mtime_ in files: - metadata = _get_file_metadata(file_path, stat) + for file_path, stat, mtime_, metadata in files: + if metadata is None: + # FIXME: the find should fetch metadata + metadata = _get_file_metadata(file_path, stat) metadata['mountpoint'] = self._mount_point entries.append(metadata) @@ -386,6 +391,16 @@ class InplaceResultSet(BaseResultSet): not self._regex.match(full_path): return + metadata_matched = False + metadata = _get_file_metadata_from_json(dir_path, entry, preview=False) + if metadata is not None: + for f in ['fulltext', 'title', 'description', 'tags']: + if f in metadata and self._regex.match(metadata[f]): + metadata_matched = True + break + if not metadata_matched: + return + if self._date_start is not None and self.st_mtime < self._date_start: return @@ -397,7 +412,7 @@ class InplaceResultSet(BaseResultSet): if mime_type not in self._mime_types: return - file_info = (full_path, stat, int(stat.st_mtime)) + file_info = (full_path, stat, int(stat.st_mtime), metadata) self._file_list.append(file_info) return @@ -419,16 +434,56 @@ class InplaceResultSet(BaseResultSet): return def _get_file_metadata(path, stat): - client = gconf.client_get_default() + """Returns the metadata from the corresponding file + on the external device or does create the metadata + based on the file properties. 
+ + """ + filename = os.path.basename(path) + dir_path = os.path.dirname(path) + metadata = _get_file_metadata_from_json(dir_path, filename, preview=True) + if metadata: + return metadata + return {'uid': path, 'title': os.path.basename(path), 'timestamp': stat.st_mtime, 'mime_type': gio.content_type_guess(filename=path), 'activity': '', 'activity_id': '', - 'icon-color': client.get_string('/desktop/sugar/user/color'), + 'icon-color': '', 'description': path} +def _get_file_metadata_from_json(dir_path, filename, preview=False): + """Returns the metadata from the json file and the preview + stored on the external device. + + """ + metadata = None + metadata_path = os.path.join(dir_path, JOURNAL_METADATA_DIR, + filename + '.metadata') + if os.path.exists(metadata_path): + try: + metadata = json.load(open(metadata_path)) + except ValueError: + logging.debug("Could not read metadata for file %r on" \ + "external device.", filename) + else: + metadata['uid'] = os.path.join(dir_path, filename) + if preview: + preview_path = os.path.join(dir_path, JOURNAL_METADATA_DIR, + filename + '.preview') + if os.path.exists(preview_path): + try: + metadata['preview'] = dbus.ByteArray(open(preview_path).read()) + except: + logging.debug("Could not read preview for file %r on" \ + "external device.", filename) + else: + if metadata and 'preview' in metadata: + del(metadata['preview']) + return metadata + _datastore = None def _get_datastore(): global _datastore @@ -515,6 +570,18 @@ def delete(object_id): """ if os.path.exists(object_id): os.unlink(object_id) + dir_path = os.path.dirname(object_id) + filename = os.path.basename(object_id) + old_files = [os.path.join(dir_path, JOURNAL_METADATA_DIR, + filename + '.metadata'), + os.path.join(dir_path, JOURNAL_METADATA_DIR, + filename + '.preview')] + for old_file in old_files: + if os.path.exists(old_file): + try: + os.unlink(old_file) + except: + pass deleted.send(None, object_id=object_id) else: _get_datastore().delete(object_id) @@ -550,26 +617,102 @@ def write(metadata, file_path='', update_mtime=True, transfer_ownership=True): file_path, transfer_ownership) else: - if not os.path.exists(file_path): - raise ValueError('Entries without a file cannot be copied to ' - 'removable devices') + object_id = _write_entry_on_external_device(metadata, file_path) - file_name = _get_file_name(metadata['title'], metadata['mime_type']) - file_name = _get_unique_file_name(metadata['mountpoint'], file_name) + return object_id + +def _write_entry_on_external_device(metadata, file_path): + """This creates and updates an entry copied from the + DS to external storage device. Besides copying the + associated file a hidden file for the preview and one + for the metadata are stored. We make sure that the + metadata and preview file are in the same directory + as the data file. + + This function handles renames of an entry on the + external device and avoids name collisions. Renames are + handled failsafe. 
+ + """ + if 'uid' in metadata and os.path.exists(metadata['uid']): + file_path = metadata['uid'] + if not file_path or not os.path.exists(file_path): + raise ValueError('Entries without a file cannot be copied to ' + 'removable devices') + + file_name = _get_file_name(metadata['title'], metadata['mime_type']) + + destination_path = os.path.join(metadata['mountpoint'], file_name) + if destination_path != file_path: + file_name = _get_unique_file_name(metadata['mountpoint'], file_name) destination_path = os.path.join(metadata['mountpoint'], file_name) + clean_name, extension_ = os.path.splitext(file_name) + metadata['title'] = clean_name + + metadata_copy = metadata.copy() + del metadata_copy['mountpoint'] + if 'uid' in metadata_copy: + del metadata_copy['uid'] + + metadata_dir_path = os.path.join(metadata['mountpoint'], + JOURNAL_METADATA_DIR) + if not os.path.exists(metadata_dir_path): + os.mkdir(metadata_dir_path) + + if 'preview' in metadata_copy: + preview = metadata_copy['preview'] + preview_fname = file_name + '.preview' + preview_path = os.path.join(metadata['mountpoint'], + JOURNAL_METADATA_DIR, preview_fname) + metadata_copy['preview'] = preview_fname + + (fh, fn) = tempfile.mkstemp(dir=metadata['mountpoint']) + os.write(fh, preview) + os.close(fh) + os.rename(fn, preview_path) + + metadata_path = os.path.join(metadata['mountpoint'], + JOURNAL_METADATA_DIR, + file_name + '.metadata') + (fh, fn) = tempfile.mkstemp(dir=metadata['mountpoint']) + os.write(fh, json.dumps(metadata_copy)) + os.close(fh) + os.rename(fn, metadata_path) + + if os.path.dirname(destination_path) == os.path.dirname(file_path): + old_file_path = file_path + if old_file_path != destination_path: + os.rename(file_path, destination_path) + old_fname = os.path.basename(file_path) + old_files = [os.path.join(metadata['mountpoint'], + JOURNAL_METADATA_DIR, + old_fname + '.metadata'), + os.path.join(metadata['mountpoint'], + JOURNAL_METADATA_DIR, + old_fname + '.preview')] + for ofile in old_files: + if os.path.exists(ofile): + try: + os.unlink(ofile) + except: + pass + else: shutil.copy(file_path, destination_path) - object_id = destination_path - created.send(None, object_id=object_id) + + object_id = destination_path + created.send(None, object_id=object_id) return object_id def _get_file_name(title, mime_type): file_name = title - extension = '.' + mime.get_primary_extension(mime_type) - if not file_name.endswith(extension): - file_name += extension + mime_extension = mime.get_primary_extension(mime_type) + if mime_extension: + extension = '.' + mime_extension + if not file_name.endswith(extension): + file_name += extension # Invalid characters in VFAT filenames. 
From # http://en.wikipedia.org/wiki/File_Allocation_Table @@ -589,8 +732,8 @@ def _get_file_name(title, mime_type): def _get_unique_file_name(mount_point, file_name): if os.path.exists(os.path.join(mount_point, file_name)): i = 1 + name, extension = os.path.splitext(file_name) while len(file_name) <= 255: - name, extension = os.path.splitext(file_name) file_name = name + '_' + str(i) + extension if not os.path.exists(os.path.join(mount_point, file_name)): break diff --git a/src/jarabe/journal/volumestoolbar.py b/src/jarabe/journal/volumestoolbar.py index 978028c..99f1725 100644 --- a/src/jarabe/journal/volumestoolbar.py +++ b/src/jarabe/journal/volumestoolbar.py @@ -17,6 +17,11 @@ import logging import os from gettext import gettext as _ +import cPickle +import xapian +import json +import tempfile +import shutil import gobject import gio @@ -30,6 +35,105 @@ from sugar.graphics.xocolor import XoColor from jarabe.journal import model from jarabe.view.palettes import VolumePalette +_JOURNAL_0_METADATA_DIR = '.olpc.store' + +def _get_id(document): + """Get the ID for the document in the xapian database.""" + tl = document.termlist() + try: + term = tl.skip_to('Q').term + if len(term) == 0 or term[0] != 'Q': + return None + return term[1:] + except StopIteration: + return None + +def _convert_entries(root): + """Converts the entries written by the datastore version 0. + The metadata and the preview will be written using the new + scheme for writing Journal entries to removable storage + devices. + + - entries that do not have an associated file are not + converted. + - when done we write the file converted to the old metadat + directory, that we do not convert several times + + """ + try: + database = xapian.Database(os.path.join(root, _JOURNAL_0_METADATA_DIR, + 'index')) + except xapian.DatabaseError, e: + logging.error('Convert DS-0 Journal entry. Error reading db: %s', + os.path.join(root, _JOURNAL_0_METADATA_DIR, 'index')) + return + + metadata_dir_path = os.path.join(root, model.JOURNAL_METADATA_DIR) + if not os.path.exists(metadata_dir_path): + os.mkdir(metadata_dir_path) + + for i in range(1, database.get_lastdocid() + 1): + try: + document = database.get_document(i) + except xapian.DocNotFoundError, e: + logging.debug('Convert DS-0 Journal entry. ' \ + 'Error getting document %s: %s', i, e) + continue + + try: + metadata_loaded = cPickle.loads(document.get_data()) + except cPickle.PickleError, e: + logging.debug('Convert DS-0 Journal entry. 
' \ + 'Error converting metadata: %s', e) + continue + + if 'activity_id' in metadata_loaded and \ + 'mime_type' in metadata_loaded and \ + 'title' in metadata_loaded: + metadata = {} + + uid = _get_id(document) + if uid is None: + continue + + for key, value in metadata_loaded.items(): + metadata[str(key)] = str(value[0]) + + if 'uid' not in metadata: + metadata['uid'] = uid + + if 'filename' in metadata: + filename = metadata['filename'] + else: + continue + if not os.path.exists(os.path.join(root, filename)): + continue + + preview_path = os.path.join(root, _JOURNAL_0_METADATA_DIR, + 'preview', uid) + if os.path.exists(preview_path): + preview_fname = filename + '.preview' + new_preview_path = os.path.join(root, + model.JOURNAL_METADATA_DIR, + preview_fname) + if not os.path.exists(new_preview_path): + metadata['preview'] = preview_fname + shutil.copy(preview_path, new_preview_path) + + metadata_fname = filename + '.metadata' + metadata_path = os.path.join(root, model.JOURNAL_METADATA_DIR, + metadata_fname) + if not os.path.exists(metadata_path): + (fh, fn) = tempfile.mkstemp(dir=root) + os.write(fh, json.dumps(metadata)) + os.close(fh) + os.rename(fn, metadata_path) + + logging.debug('Convert DS-0 Journal entry. Entry converted: ' \ + 'File=%s Metadata=%s', + os.path.join(root, filename), metadata) + + class VolumesToolbar(gtk.Toolbar): __gtype_name__ = 'VolumesToolbar' @@ -82,6 +186,11 @@ class VolumesToolbar(gtk.Toolbar): def _add_button(self, mount): logging.debug('VolumeToolbar._add_button: %r' % mount.get_name()) + if os.path.exists(os.path.join(mount.get_root().get_path(), + _JOURNAL_0_METADATA_DIR)): + logging.debug('Convert DS-0 Journal entries.') + gobject.idle_add(_convert_entries, mount.get_root().get_path()) + button = VolumeButton(mount) button.props.group = self._volume_buttons[0] button.connect('toggled', self._button_toggled_cb) diff --git a/src/jarabe/model/bundleregistry.py b/src/jarabe/model/bundleregistry.py index 1060aff..d48c071 100644 --- a/src/jarabe/model/bundleregistry.py +++ b/src/jarabe/model/bundleregistry.py @@ -159,22 +159,12 @@ class BundleRegistry(gobject.GObject): self._write_favorites_file() def get_bundle(self, bundle_id): - """Returns a bundle given service name or substring, - returns None if there is either no match, or more than one - match by substring.""" - result = [] - key = bundle_id.lower() - + """Returns an bundle given his service name""" for bundle in self._bundles: - name = bundle.get_bundle_id() - if name == bundle_id: + if bundle.get_bundle_id() == bundle_id: return bundle - if key in name.lower(): - result.append(bundle) - if len(result) == 1: - return result[0] return None - + def __iter__(self): return self._bundles.__iter__() -- cgit v0.9.1
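
The model.py hunks above establish a simple on-disk convention for removable media: for a data file FOO on the mount point, its metadata is stored as JSON in .Sugar-Metadata/FOO.metadata (with an optional .Sugar-Metadata/FOO.preview), and both files are written via tempfile.mkstemp() followed by os.rename() so that pulling the device mid-write cannot leave a truncated metadata file. Below is a minimal sketch of that convention on its own, outside the Journal code. It assumes Python 2 (like the code above), and the helper names read_sidecar/write_sidecar are invented here for illustration; they are not part of Sugar.

    import json
    import os
    import tempfile

    JOURNAL_METADATA_DIR = '.Sugar-Metadata'

    def read_sidecar(file_path):
        """Return the sidecar metadata stored next to file_path, or None."""
        dir_path = os.path.dirname(file_path)
        filename = os.path.basename(file_path)
        metadata_path = os.path.join(dir_path, JOURNAL_METADATA_DIR,
                                     filename + '.metadata')
        if not os.path.exists(metadata_path):
            return None
        with open(metadata_path) as f:
            return json.load(f)

    def write_sidecar(file_path, metadata):
        """Store metadata as JSON next to file_path, atomically."""
        dir_path = os.path.dirname(file_path)
        filename = os.path.basename(file_path)
        metadata_dir = os.path.join(dir_path, JOURNAL_METADATA_DIR)
        if not os.path.exists(metadata_dir):
            os.mkdir(metadata_dir)
        # Creating the temporary file on the same mount point keeps
        # os.rename() an atomic move; a device removed mid-write leaves
        # at worst a stray temp file, never a half-written .metadata file.
        fh, temp_path = tempfile.mkstemp(dir=dir_path)
        try:
            os.write(fh, json.dumps(metadata))
        finally:
            os.close(fh)
        os.rename(temp_path, os.path.join(metadata_dir,
                                          filename + '.metadata'))

For example, write_sidecar('/media/stick/report.pdf', {'title': 'Report', 'mime_type': 'application/pdf'}) would produce /media/stick/.Sugar-Metadata/report.pdf.metadata, and read_sidecar('/media/stick/report.pdf') would return that dictionary again, which is the same lookup _get_file_metadata_from_json() performs when the Journal lists a removable device.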