Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorSascha Silbe <sascha-pgp@silbe.org>2010-10-22 17:35:19 (GMT)
committer Sascha Silbe <sascha-pgp@silbe.org>2010-10-22 17:35:19 (GMT)
commit38e80176d4de7b8633e6a7b8b50a4aef38d625cd (patch)
tree12342f9f683dbb9c51aeaa04004576ff0dec5630
parent7314430fb3dfe88eff626c773d32cd3fd329561d (diff)
parent1a06a5e35cebb6384293cf666f298be85d588366 (diff)
Merge commit 'refs/top-bases/t/rainbow-0.8' into t/rainbow-0.8
-rw-r--r--.gitignore3
-rw-r--r--HACKING15
-rwxr-xr-xbin/copy-from-journal56
-rwxr-xr-xbin/copy-to-journal22
-rwxr-xr-xbin/datastore-service18
-rwxr-xr-xmaint-helper.py14
-rw-r--r--src/carquinyol/datastore.py12
-rw-r--r--src/carquinyol/filestore.py3
-rw-r--r--src/carquinyol/indexstore.py4
-rw-r--r--src/carquinyol/layoutmanager.py3
-rw-r--r--src/carquinyol/metadatareader.c198
-rw-r--r--src/carquinyol/migration.py1
-rw-r--r--src/carquinyol/optimizer.py4
-rw-r--r--sweets.recipe33
14 files changed, 220 insertions, 166 deletions
diff --git a/.gitignore b/.gitignore
index 12965e2..5abf3f2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,6 +8,8 @@
*.lo
*.loT
*.service
+*.so
+*.tar.*
# Absolute
Makefile
@@ -33,3 +35,4 @@ m4/ltoptions.m4
m4/ltsugar.m4
m4/ltversion.m4
m4/lt~obsolete.m4
+.sweets
diff --git a/HACKING b/HACKING
new file mode 100644
index 0000000..5c3a436
--- /dev/null
+++ b/HACKING
@@ -0,0 +1,15 @@
+Before committing
+~~~~~~~~~~~~~~~~~
+All source files need to be passed through the sugar-lint[1] command.
+Follow sugar-lint home page instructions and especially
+"Lint files before committing" section.
+
+
+Send patches
+~~~~~~~~~~~~
Create your patches using the "git format-patch" command and send them to all maintainers
+from AUTHORS file with CCing to sugar-devel@lists.sugarlabs.org.
The easiest way is to use the "git send-email" command.
+
+
+[1] http://wiki.sugarlabs.org/go/Activity_Team/Sugar_Lint
diff --git a/bin/copy-from-journal b/bin/copy-from-journal
index 7a10bfd..c8d2d84 100755
--- a/bin/copy-from-journal
+++ b/bin/copy-from-journal
@@ -4,7 +4,6 @@
# Reinier Heeres, <reinier@heeres.eu>, 2007-12-24
# Phil Bordelon <phil@thenexusproject.org>
-import sys
import os
import shutil
import optparse
@@ -20,81 +19,84 @@ import sugar.mime
# for quicker operation.
RETURN_LIMIT = 2
+
def build_option_parser():
usage = "Usage: %prog [-o OBJECT_ID] [-q SEARCH_STR] [-m] OUTFILE"
parser = optparse.OptionParser(usage=usage)
parser.add_option("-o", "--object_id", action="store", dest="object_id",
- help="Retrieve object with explicit ID OBJECT_ID",
+ help="Retrieve object with explicit ID OBJECT_ID",
metavar="OBJECT_ID", default=None)
parser.add_option("-q", "--query", action="store", dest="query",
- help="Full-text-search the metadata for SEARCH_STR",
+ help="Full-text-search the metadata for SEARCH_STR",
metavar="SEARCH_STR", default=None)
- parser.add_option("-m", "--metadata", action="store_true", dest="show_meta",
- help="Show all non-preview metadata [default: hide]",
- default=False)
+ parser.add_option("-m", "--metadata", action="store_true",
+ dest="show_meta",
+ help="Show all non-preview metadata [default: hide]",
+ default=False)
return parser
if __name__ == "__main__":
- parser = build_option_parser()
- options, args = parser.parse_args()
+ option_parser = build_option_parser()
+ options, args = option_parser.parse_args()
if len(args) < 1:
- parser.print_help()
+ option_parser.print_help()
exit(0)
try:
dsentry = None
-
+
# Get object directly if we were given an explicit object ID.
if options.object_id is not None:
dsentry = datastore.get(options.object_id)
-
+
# Compose the query based on the options provided.
if dsentry is None:
query = {}
-
+
if options.query is not None:
query['query'] = options.query
-
+
# We only want a single file at a time; limit the number of objects
- # returned to two, as anything more than one means the criteria were
- # not limited enough.
- objects, count = datastore.find(query, limit=RETURN_LIMIT, sorting='-mtime')
+ # returned to two, as anything more than one means the criteria
+ # were not limited enough.
+ objects, count = \
+ datastore.find(query, limit=RETURN_LIMIT, sorting='-mtime')
if count > 1:
- print 'WARNING: %d objects found; retrieving most recent.' % (count)
+ print 'WARNING: %d objects found; getting most recent.' % count
for i in xrange(1, RETURN_LIMIT):
objects[i].destroy()
-
+
if count > 0:
dsentry = objects[0]
-
+
# If neither an explicit object ID nor a query gave us data, fail.
if dsentry is None:
print 'ERROR: unable to determine journal object to copy.'
- parser.print_help()
+ option_parser.print_help()
exit(0)
-
+
# Print metadata if that is what the user asked for.
if options.show_meta:
print 'Metadata:'
for key, val in dsentry.metadata.get_dictionary().iteritems():
if key != 'preview':
print '%20s -> %s' % (key, val)
-
+
# If no file is associated with this object, we can't save it out.
if dsentry.get_file_path() == "":
print 'ERROR: no file associated with object, just metadata.'
dsentry.destroy()
exit(0)
-
+
outname = args[0]
outroot, outext = os.path.splitext(outname)
-
+
# Do our best to determine the output file extension, based on Sugar's
# MIME-type-to-extension mappings.
if outext == "":
@@ -102,13 +104,13 @@ if __name__ == "__main__":
outext = sugar.mime.get_primary_extension(mimetype)
if outext == None:
outext = "dsobject"
- outext = '.' + outext
-
+ outext = '.' + outext
+
# Lastly, actually copy the file out of the datastore and onto the
# filesystem.
shutil.copyfile(dsentry.get_file_path(), outroot + outext)
print '%s -> %s' % (dsentry.get_file_path(), outroot + outext)
-
+
# Cleanup.
dsentry.destroy()
diff --git a/bin/copy-to-journal b/bin/copy-to-journal
index ca6f872..e675441 100755
--- a/bin/copy-to-journal
+++ b/bin/copy-to-journal
@@ -7,7 +7,6 @@
# to support adding metadata. Note that the MIME-type is required,
# as otherwise the datastore will not accept the file.
-import sys
import os
import optparse
from gettext import gettext as _
@@ -19,9 +18,11 @@ if os.path.exists("/tmp/olpc-session-bus"):
from sugar.datastore import datastore
from sugar import mime
+
def build_option_parser():
- usage = "Usage: %prog <file> [-m MIMETYPE] [-t TITLE] [-d DESC] [-T tag1 [-T tag2 ...]]"
+ usage = "Usage: %prog <file> [-m MIMETYPE] [-t TITLE] [-d DESC] " \
+ "[-T tag1 [-T tag2 ...]]"
parser = optparse.OptionParser(usage=usage)
parser.add_option("-t", "--title", action="store", dest="title",
@@ -36,23 +37,24 @@ def build_option_parser():
help="Set the file's MIME-type to MIMETYPE",
default=None)
parser.add_option("-T", "--tag", action="append", dest="tag_list",
- help="Add tag TAG to the journal entry's tags; this option can be repeated",
+ help="Add tag TAG to the journal entry's tags; " \
+ "this option can be repeated",
metavar="TAG")
return parser
if __name__ == "__main__":
- parser = build_option_parser()
- options, args = parser.parse_args()
+ option_parser = build_option_parser()
+ options, args = option_parser.parse_args()
if len(args) < 1:
- parser.print_help()
+ option_parser.print_help()
exit(0)
-
+
fname = args[0]
absname = os.path.abspath(fname)
if not os.path.exists(absname):
print 'Error: File does not exist.'
- parser.print_help()
+ option_parser.print_help()
exit(0)
try:
@@ -85,9 +87,9 @@ if __name__ == "__main__":
datastore.write(entry)
print 'Created as %s' % (entry.object_id)
-
+
entry.destroy()
-
+
except dbus.DBusException:
print 'ERROR: Unable to connect to the datastore.\n'\
'Check that you are running in the same environment as the '\
diff --git a/bin/datastore-service b/bin/datastore-service
index 06b6517..008b66d 100755
--- a/bin/datastore-service
+++ b/bin/datastore-service
@@ -1,5 +1,8 @@
#!/usr/bin/env python
-import sys, os, signal, logging
+import sys
+import os
+import signal
+import logging
import gobject
import dbus.service
import dbus.mainloop.glib
@@ -11,7 +14,8 @@ from sugar import logger
profile = os.environ.get('SUGAR_PROFILE', 'default')
base_dir = os.path.join(os.path.expanduser('~'), '.sugar', profile)
log_dir = os.path.join(base_dir, "logs")
-if not os.path.exists(log_dir): os.makedirs(log_dir)
+if not os.path.exists(log_dir):
+ os.makedirs(log_dir)
# setup logger
logger.start('datastore')
@@ -23,16 +27,16 @@ connected = True
ds = DataStore()
-# and run it
+# and run it
mainloop = gobject.MainLoop()
+
def handle_disconnect():
- global mainloop
mainloop.quit()
logging.debug("Datastore disconnected from the bus.")
+
def handle_shutdown(signum, frame):
- global mainloop
mainloop.quit()
raise SystemExit("Shutting down on signal %s" % signum)
@@ -44,6 +48,7 @@ bus.add_signal_receiver(handle_disconnect,
signal.signal(signal.SIGHUP, handle_shutdown)
signal.signal(signal.SIGTERM, handle_shutdown)
+
def main():
try:
mainloop.run()
@@ -51,8 +56,7 @@ def main():
logging.info("DataStore shutdown by user")
except:
logging.error("Datastore shutdown with error", exc_info=sys.exc_info())
-
+
main()
ds.stop()
-
diff --git a/maint-helper.py b/maint-helper.py
index 5ffd7e0..2d0537e 100755
--- a/maint-helper.py
+++ b/maint-helper.py
@@ -126,21 +126,21 @@ def cmd_build_snapshot():
print 'Done.'
-def check_licenses(path, license, missing):
+def check_licenses(path, license_text, missing):
matchers = {'LGPL': 'GNU Lesser General Public',
'GPL': 'GNU General Public License'}
license_file = os.path.join(path, '.license')
if os.path.isfile(license_file):
f = open(license_file, 'r')
- license = f.readline().strip()
+ license_text = f.readline().strip()
f.close()
for item in os.listdir(path):
full_path = os.path.join(path, item)
if os.path.isdir(full_path):
- check_licenses(full_path, license, missing)
+ check_licenses(full_path, license_text, missing)
else:
check_source = is_source(item)
@@ -155,7 +155,7 @@ def check_licenses(path, license, missing):
f.close()
miss_license = True
- if source.find(matchers[license]) > 0:
+ if source.find(matchers[license_text]) > 0:
miss_license = False
# Special cases.
@@ -163,9 +163,9 @@ def check_licenses(path, license, missing):
miss_license = False
if miss_license:
- if license not in missing:
- missing[license] = []
- missing[license].append(full_path)
+ if license_text not in missing:
+ missing[license_text] = []
+ missing[license_text].append(full_path)
def cmd_check_licenses():
diff --git a/src/carquinyol/datastore.py b/src/carquinyol/datastore.py
index 86d5b02..837de35 100644
--- a/src/carquinyol/datastore.py
+++ b/src/carquinyol/datastore.py
@@ -15,11 +15,13 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+# pylint fails on @debian's arguments
+# pylint: disable=C0322
+
import logging
import uuid
import time
import os
-import traceback
import dbus
import dbus.service
@@ -208,10 +210,10 @@ class DataStore(dbus.service.Object):
self._metadata_store.store(uid, props)
self._index_store.store(uid, props)
self._file_store.store(uid, file_path, transfer_ownership,
- lambda *args: self._create_completion_cb(async_cb,
+ lambda * args: self._create_completion_cb(async_cb,
async_err_cb,
uid,
- *args))
+ * args))
@dbus.service.signal(DS_DBUS_INTERFACE, signature="s")
def Created(self, uid):
@@ -268,10 +270,10 @@ class DataStore(dbus.service.Object):
(not file_path or os.path.exists(file_path)):
self._optimizer.remove(uid)
self._file_store.store(uid, file_path, transfer_ownership,
- lambda *args: self._update_completion_cb(async_cb,
+ lambda * args: self._update_completion_cb(async_cb,
async_err_cb,
uid,
- *args))
+ * args))
@dbus.service.signal(DS_DBUS_INTERFACE, signature="s")
def Updated(self, uid):
diff --git a/src/carquinyol/filestore.py b/src/carquinyol/filestore.py
index f9cd724..592c41a 100644
--- a/src/carquinyol/filestore.py
+++ b/src/carquinyol/filestore.py
@@ -155,7 +155,8 @@ class FileStore(object):
os.remove(file_path)
def hard_link_entry(self, new_uid, existing_uid):
- existing_file = layoutmanager.get_instance().get_data_path(existing_uid)
+ existing_file = layoutmanager.get_instance().get_data_path(
+ existing_uid)
new_file = layoutmanager.get_instance().get_data_path(new_uid)
logging.debug('removing %r', new_file)
diff --git a/src/carquinyol/indexstore.py b/src/carquinyol/indexstore.py
index 8a3de30..dc721e7 100644
--- a/src/carquinyol/indexstore.py
+++ b/src/carquinyol/indexstore.py
@@ -87,7 +87,6 @@ class TermGenerator (xapian.TermGenerator):
logging.debug('Invalid value for creation_time property: %s',
properties['creation_time'])
-
self.set_document(document)
properties = dict(properties)
@@ -202,6 +201,7 @@ class QueryParser (xapian.QueryParser):
logging.warning('Invalid query string: ' + exception.get_msg())
return Query()
+ # pylint: disable=W0221
def parse_query(self, query_dict, query_string):
logging.debug('parse_query %r %r', query_dict, query_string)
queries = []
@@ -261,7 +261,7 @@ class IndexStore(object):
postings = self._database.postlist(_PREFIX_FULL_VALUE + \
_PREFIX_UID + uid)
try:
- postlist_item = postings.next()
+ __ = postings.next()
except StopIteration:
return False
return True
diff --git a/src/carquinyol/layoutmanager.py b/src/carquinyol/layoutmanager.py
index 335614f..3179a98 100644
--- a/src/carquinyol/layoutmanager.py
+++ b/src/carquinyol/layoutmanager.py
@@ -20,6 +20,7 @@ import logging
MAX_QUERY_LIMIT = 40960
CURRENT_LAYOUT_VERSION = 6
+
class LayoutManager(object):
"""Provide the logic about how entries are stored inside the datastore
directory
@@ -105,6 +106,8 @@ class LayoutManager(object):
return True
_instance = None
+
+
def get_instance():
global _instance
if _instance is None:
diff --git a/src/carquinyol/metadatareader.c b/src/carquinyol/metadatareader.c
index 454c8c3..d053a22 100644
--- a/src/carquinyol/metadatareader.c
+++ b/src/carquinyol/metadatareader.c
@@ -8,40 +8,38 @@
static PyObject *byte_array_type = NULL;
int
-add_property(const char *metadata_path, char *property_name, PyObject *dict,
- int must_exist)
-{
+add_property (const char *metadata_path, char *property_name, PyObject * dict,
+ int must_exist) {
int file_path_size;
char *file_path = NULL;
FILE *file = NULL;
long file_size;
- char *value_buf = NULL;
+ char *value_buf = NULL;
PyObject *value = NULL;
struct stat file_stat;
// Build path of the property file
- file_path_size = strlen(metadata_path) + 1 + strlen(property_name) + 1;
- file_path = PyMem_Malloc(file_path_size);
- if (file_path == NULL) {
- PyErr_NoMemory();
+ file_path_size = strlen (metadata_path) + 1 + strlen (property_name) + 1;
+ file_path = PyMem_Malloc (file_path_size);
+ if (file_path == NULL) {
+ PyErr_NoMemory ();
return 0;
- }
+ }
snprintf (file_path, file_path_size, "%s/%s", metadata_path, property_name);
- if ((!must_exist) && (stat(file_path, &file_stat) != 0)) {
- PyMem_Free(file_path);
+ if ((!must_exist) && (stat (file_path, &file_stat) != 0)) {
+ PyMem_Free (file_path);
return 1;
}
- file = fopen(file_path, "r");
+ file = fopen (file_path, "r");
if (file == NULL) {
- char buf[256];
- snprintf(buf, sizeof(buf), "Cannot open property file %s: %s",
- file_path, strerror(errno));
- PyErr_SetString(PyExc_IOError, buf);
+ char buf[256];
+ snprintf (buf, sizeof (buf), "Cannot open property file %s: %s",
+ file_path, strerror (errno));
+ PyErr_SetString (PyExc_IOError, buf);
goto cleanup;
}
-
// Get file size
fseek (file, 0, SEEK_END);
file_size = ftell (file);
@@ -49,166 +47,160 @@ add_property(const char *metadata_path, char *property_name, PyObject *dict,
if (file_size == 0) {
// Empty property
- fclose(file);
+ fclose (file);
file = NULL;
- value = PyString_FromString("");
+ value = PyString_FromString ("");
if (value == NULL) {
- PyErr_SetString(PyExc_ValueError,
- "Failed to convert value to python string");
+ PyErr_SetString (PyExc_ValueError,
+ "Failed to convert value to python string");
goto cleanup;
}
} else {
if (file_size > MAX_PROPERTY_LENGTH) {
- PyErr_SetString(PyExc_ValueError, "Property file too big");
+ PyErr_SetString (PyExc_ValueError, "Property file too big");
goto cleanup;
}
-
// Read the whole file
- value_buf = PyMem_Malloc(file_size);
- if (value_buf == NULL) {
- PyErr_NoMemory();
+ value_buf = PyMem_Malloc (file_size);
+ if (value_buf == NULL) {
+ PyErr_NoMemory ();
goto cleanup;
- }
- long read_size = fread(value_buf, 1, file_size, file);
+ }
+ long read_size = fread (value_buf, 1, file_size, file);
if (read_size < file_size) {
- char buf[256];
- snprintf(buf, sizeof(buf),
- "Error while reading property file %s", file_path);
- PyErr_SetString(PyExc_IOError, buf);
+ char buf[256];
+ snprintf (buf, sizeof (buf),
+ "Error while reading property file %s", file_path);
+ PyErr_SetString (PyExc_IOError, buf);
goto cleanup;
}
- fclose(file);
+ fclose (file);
file = NULL;
// Convert value to dbus.ByteArray
- PyObject *args = Py_BuildValue("(s#)", value_buf, file_size);
+ PyObject *args = Py_BuildValue ("(s#)", value_buf, file_size);
- PyMem_Free(value_buf);
+ PyMem_Free (value_buf);
value_buf = NULL;
- value = PyObject_CallObject(byte_array_type, args);
- Py_DECREF(args);
+ value = PyObject_CallObject (byte_array_type, args);
+ Py_DECREF (args);
if (value == NULL) {
- PyErr_SetString(PyExc_ValueError,
- "Failed to convert value to dbus.ByteArray");
+ PyErr_SetString (PyExc_ValueError,
+ "Failed to convert value to dbus.ByteArray");
goto cleanup;
}
}
// Add property to the metadata dict
- if (PyDict_SetItemString(dict, property_name, value) == -1) {
- PyErr_SetString(PyExc_ValueError,
- "Failed to add property to dictionary");
+ if (PyDict_SetItemString (dict, property_name, value) == -1) {
+ PyErr_SetString (PyExc_ValueError,
+ "Failed to add property to dictionary");
goto cleanup;
}
- Py_DECREF(value);
- PyMem_Free(file_path);
+ Py_DECREF (value);
+ PyMem_Free (file_path);
return 1;
-cleanup:
+ cleanup:
if (file_path) {
- PyMem_Free(file_path);
+ PyMem_Free (file_path);
}
if (value_buf) {
- PyMem_Free(value_buf);
+ PyMem_Free (value_buf);
}
if (file) {
- fclose(file);
+ fclose (file);
}
if (value) {
- Py_DECREF(value);
+ Py_DECREF (value);
}
- return 0;
+ return 0;
}
-static PyObject *
-read_from_properties_list (const char *metadata_path, PyObject *properties)
-{
- PyObject *dict = PyDict_New();
+static PyObject *read_from_properties_list (const char *metadata_path,
+ PyObject * properties) {
+ PyObject *dict = PyDict_New ();
int i;
- for (i = 0; i < PyList_Size(properties); i++) {
- PyObject *property = PyList_GetItem(properties, i);
+ for (i = 0; i < PyList_Size (properties); i++) {
+ PyObject *property = PyList_GetItem (properties, i);
char *property_name = PyString_AsString (property);
- if (add_property(metadata_path, property_name, dict, 0) == 0)
+ if (add_property (metadata_path, property_name, dict, 0) == 0)
goto cleanup;
}
return dict;
-cleanup:
+ cleanup:
if (dict) {
- Py_DECREF(dict);
+ Py_DECREF (dict);
}
- return NULL;
+ return NULL;
}
-static PyObject *
-read_all_properties (const char *metadata_path)
-{
- PyObject *dict = PyDict_New();
- DIR *dir_stream = NULL;
- struct dirent *dir_entry = NULL;
+static PyObject *read_all_properties (const char *metadata_path) {
+ PyObject *dict = PyDict_New ();
+ DIR *dir_stream = NULL;
+ struct dirent *dir_entry = NULL;
dir_stream = opendir (metadata_path);
- if (dir_stream == NULL) {
- char buf[256];
- snprintf(buf, sizeof(buf), "Couldn't open metadata directory %s",
- metadata_path);
- PyErr_SetString(PyExc_IOError, buf);
+ if (dir_stream == NULL) {
+ char buf[256];
+ snprintf (buf, sizeof (buf), "Couldn't open metadata directory %s",
+ metadata_path);
+ PyErr_SetString (PyExc_IOError, buf);
goto cleanup;
- }
+ }
- dir_entry = readdir(dir_stream);
+ dir_entry = readdir (dir_stream);
while (dir_entry != NULL) {
// Skip . and ..
if (dir_entry->d_name[0] == '.' &&
- (strlen(dir_entry->d_name) == 1 ||
- (dir_entry->d_name[1] == '.' &&
- strlen(dir_entry->d_name) == 2)))
- goto next_property;
+ (strlen (dir_entry->d_name) == 1 ||
+ (dir_entry->d_name[1] == '.' &&
+ strlen (dir_entry->d_name) == 2)))
+ goto next_property;
- if (add_property(metadata_path, dir_entry->d_name, dict, 1) == 0)
+ if (add_property (metadata_path, dir_entry->d_name, dict, 1) == 0)
goto cleanup;
- next_property:
- dir_entry = readdir(dir_stream);
+ next_property:
+ dir_entry = readdir (dir_stream);
}
- closedir(dir_stream);
+ closedir (dir_stream);
return dict;
-cleanup:
+ cleanup:
if (dict) {
- Py_DECREF(dict);
+ Py_DECREF (dict);
}
if (dir_stream) {
- closedir(dir_stream);
- }
- return NULL;
+ closedir (dir_stream);
+ }
+ return NULL;
}
-static PyObject *
-metadatareader_retrieve(PyObject *unused, PyObject *args)
-{
+static PyObject *metadatareader_retrieve (PyObject * unused, PyObject * args) {
PyObject *dict = NULL;
PyObject *properties = NULL;
const char *metadata_path = NULL;
- if (!PyArg_ParseTuple(args, "sO:retrieve", &metadata_path, &properties))
+ if (!PyArg_ParseTuple (args, "sO:retrieve", &metadata_path, &properties))
return NULL;
- if ((properties != Py_None) && (PyList_Size(properties) > 0)) {
- dict = read_from_properties_list(metadata_path, properties);
+ if ((properties != Py_None) && (PyList_Size (properties) > 0)) {
+ dict = read_from_properties_list (metadata_path, properties);
} else {
- dict = read_all_properties(metadata_path);
+ dict = read_all_properties (metadata_path);
}
return dict;
@@ -216,18 +208,16 @@ metadatareader_retrieve(PyObject *unused, PyObject *args)
static PyMethodDef metadatareader_functions[] = {
{"retrieve", metadatareader_retrieve, METH_VARARGS,
- PyDoc_STR("Read a dictionary from a directory with a single file " \
- "(containing the content) per key")},
+ PyDoc_STR
+ ("Read a dictionary from a directory with a single file "
+ "(containing the content) per key")},
{NULL, NULL, 0, NULL}
};
-PyMODINIT_FUNC
-initmetadatareader(void)
-{
- PyObject* mod;
- mod = Py_InitModule("metadatareader", metadatareader_functions);
-
- PyObject *dbus_module = PyImport_ImportModule("dbus");
- byte_array_type = PyObject_GetAttrString(dbus_module, "ByteArray");
-}
+PyMODINIT_FUNC initmetadatareader (void) {
+ PyObject *mod;
+ mod = Py_InitModule ("metadatareader", metadatareader_functions);
+ PyObject *dbus_module = PyImport_ImportModule ("dbus");
+ byte_array_type = PyObject_GetAttrString (dbus_module, "ByteArray");
+}
diff --git a/src/carquinyol/migration.py b/src/carquinyol/migration.py
index 686902f..1745f2c 100644
--- a/src/carquinyol/migration.py
+++ b/src/carquinyol/migration.py
@@ -21,7 +21,6 @@ import os
import logging
import shutil
import time
-import traceback
import cjson
diff --git a/src/carquinyol/optimizer.py b/src/carquinyol/optimizer.py
index 2b6ce29..c038c2b 100644
--- a/src/carquinyol/optimizer.py
+++ b/src/carquinyol/optimizer.py
@@ -163,5 +163,5 @@ class Optimizer(object):
"""
popen = subprocess.Popen(['md5sum', path], stdout=subprocess.PIPE)
- stdout, stderr_ = popen.communicate()
- return stdout.split(' ', 1)[0]
+ stdout, __ = popen.communicate()
+ return str(stdout).split(' ', 1)[0]
diff --git a/sweets.recipe b/sweets.recipe
new file mode 100644
index 0000000..9616cb9
--- /dev/null
+++ b/sweets.recipe
@@ -0,0 +1,33 @@
+[DEFAULT]
+sweet = sugar-datastore
+summary = Backend of the Sugar Journal
+license = LGPLv2.1+
+homepage = http://git.sugarlabs.org/projects/sugar-datastore
+
+version = 0.90.0
+stability = testing
+
+[Component]
+requires = sugar-toolkit; python-cjson; xapian-bindings-python
+binding = PATH bin; PYTHONPATH python; XDG_DATA_DIRS share
+arch = any
+
+[Build]
+requires = pkg-config; intltool >= 0.33; make; gcc-c; python
+cleanup = make distclean; ./autogen.sh
+configure = ./configure
+ --prefix=%(PREFIX)s
+ am_cv_python_pythondir=%(PREFIX)s/python
+ am_cv_python_pyexecdir=%(PREFIX)s/python
+ CFLAGS="%(CFLAGS)s"
+make = make
+install = make DESTDIR=%(DESTDIR)s install
+implement = %(install)s &&
+ rm -rf %(DESTDIR)s/%(PREFIX)s/bin &&
+ rm -rf %(DESTDIR)s/%(PREFIX)s/python &&
+ ln -s %(BUILDDIR)s/bin %(DESTDIR)s/%(PREFIX)s/ &&
+ ln -s %(BUILDDIR)s/src %(DESTDIR)s/%(PREFIX)s/python &&
+ ln -fs .libs/metadatareader.so src/carquinyol/
+
+[Source]
+exec = ./autogen.sh && make distcheck