author    Sascha Silbe <sascha-pgp@silbe.org>    2013-07-23 11:38:10 (GMT)
committer Sascha Silbe <sascha-pgp@silbe.org>    2013-07-23 11:38:10 (GMT)
commit    eea772d7e5fed34fd06b34d3604d6b404f57daa3 (patch)
tree      08ac455d0c6be52089f332f379d371fc43faed72
parent    a55e209df374f8cceaaf0dab3b6a18d88f080a46 (diff)
fsemulation: guard all data store accesses with lock
Despite having internal locking, python-dbus / libdbus apparently isn't thread-safe. All current data store implementations are single-threaded anyway, so simply guard all data store accesses (which happen via D-Bus) with a single lock to prevent multiple D-Bus calls from being invoked in parallel.
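As an aside for readers unfamiliar with the pattern: the sketch below is a minimal, standalone illustration of the locking decorator this patch introduces. The class name Store and its methods put/keys/size are made up for the example and are not part of fsemulation.py. It also illustrates why the patch presumably uses an RLock rather than a plain Lock: several synchronised methods in the diff call other synchronised methods on the same instance (e.g. list_tree_ids calls list_object_ids), so the lock must be re-entrant.

# Illustrative sketch only -- "Store", "put", "keys" and "size" are
# hypothetical names, not part of fsemulation.py. Shows the same
# synchronised/RLock pattern the patch adds to DataStore.
import functools
import threading


def synchronised(func):
    """Serialise calls to the wrapped method using the instance's _lock."""
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        with self._lock:
            return func(self, *args, **kwargs)
    return wrapper


class Store(object):

    def __init__(self):
        # RLock rather than Lock: a synchronised method may call another
        # synchronised method on the same instance (see size() below), so
        # the lock has to be re-entrant to avoid self-deadlock.
        self._lock = threading.RLock()
        self._entries = {}

    @synchronised
    def put(self, key, value):
        self._entries[key] = value

    @synchronised
    def keys(self):
        return list(self._entries)

    @synchronised
    def size(self):
        # Re-acquires the lock already held by this thread.
        return len(self.keys())


if __name__ == '__main__':
    store = Store()
    store.put('a', 1)
    print(store.size())

In the actual patch the decorated methods wrap D-Bus calls, so the net effect is that at most one D-Bus call is in flight per DataStore instance at any time, as described in the commit message above.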
-rw-r--r--    fsemulation.py    30
1 file changed, 30 insertions(+), 0 deletions(-)
diff --git a/fsemulation.py b/fsemulation.py
index 31f9b47..1de01fd 100644
--- a/fsemulation.py
+++ b/fsemulation.py
@@ -14,9 +14,11 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
import errno
+import functools
import logging
import os
import stat
+import threading
import time
import dbus
@@ -38,6 +40,14 @@ _USEFUL_PROPS = ['mime_type', 'tags', 'timestamp', 'title']
"""Metadata properties used for determining the file name of an entry"""
+def synchronised(func):
+    @functools.wraps(func)
+    def wrapper(self, *args, **kwargs):
+        with self._lock:
+            return func(self, *args, **kwargs)
+    return wrapper
+
+
class _LRU(collections.MutableMapping):
"""Simple, but reasonably fast Least Recently Used (LRU) cache"""
@@ -83,6 +93,7 @@ class _LRU(collections.MutableMapping):
class DataStore(object):
def __init__(self):
self.supports_versions = False
+ self._lock = threading.RLock()
self._data_store_version = 0
bus = dbus.SessionBus()
try:
@@ -113,6 +124,7 @@ class DataStore(object):
logging.info('0.84+ data store without version support found')
self._data_store_version = 84
+ @synchronised
def list_object_ids(self, query=None):
"""Retrieve the object_ids of all (matching) data store entries
@@ -139,6 +151,7 @@ class DataStore(object):
for entry in self._data_store.find(query, ['uid'],
byte_arrays=True, timeout=DBUS_TIMEOUT_MAX)[0]]
+ @synchronised
def list_metadata(self, query=None):
"""Retrieve object_id and selected metadata of matching entries
@@ -173,6 +186,7 @@ class DataStore(object):
for entry in self._data_store.find(query, properties,
timeout=DBUS_TIMEOUT_MAX, byte_arrays=True)[0]]
+ @synchronised
def list_versions(self, tree_id):
"""Retrieve all version_ids of the given data store entry"""
options = {'all_versions': True, 'order_by': ['-timestamp']}
@@ -180,10 +194,12 @@ class DataStore(object):
for entry in self._data_store.find({'tree_id': tree_id},
options, timeout=DBUS_TIMEOUT_MAX, byte_arrays=True)[0]]
+ @synchronised
def list_tree_ids(self, query=None):
"""Retrieve the tree_ids of all (matching) data store entries"""
return [unicode(entry[0]) for entry in self.list_object_ids(query)]
+ @synchronised
def list_property_values(self, name, query=None):
"""Return all unique values of the given property"""
assert isinstance(name, unicode)
@@ -204,6 +220,7 @@ class DataStore(object):
return dict.fromkeys([entry.get(name) for entry in entries]).keys()
+ @synchronised
def check_object_id(self, object_id):
"""Return True if the given object_id identifies a data store entry"""
try:
@@ -215,6 +232,7 @@ class DataStore(object):
return True
+ @synchronised
def check_tree_id(self, tree_id):
"""Return True if the given tree_id identifies a data store entry"""
assert isinstance(tree_id, unicode)
@@ -223,6 +241,7 @@ class DataStore(object):
byte_arrays=True)[0]
return bool(results)
+ @synchronised
def check_property_contains(self, name, word):
"""Return True if there is at least one entry containing word in the
given property
@@ -244,6 +263,7 @@ class DataStore(object):
return bool(results)
+ @synchronised
def get_properties(self, object_id, names=None):
"""Read given properties for data store entry identified by object_id
@@ -279,6 +299,7 @@ class DataStore(object):
return self._convert_metadata(metadata)
+ @synchronised
def list_properties(self, object_id):
"""List the names of all properties for this entry
@@ -286,6 +307,7 @@ class DataStore(object):
"""
return self.get_properties(object_id).keys()
+ @synchronised
def create_property(self, object_id, name, value):
"""Set the given property, raising an error if it already exists"""
assert isinstance(name, unicode)
@@ -296,6 +318,7 @@ class DataStore(object):
metadata[name] = value
self._change_metadata(object_id, metadata)
+ @synchronised
def replace_property(self, object_id, name, value):
"""Modify the given, already existing property"""
assert isinstance(name, unicode)
@@ -308,6 +331,7 @@ class DataStore(object):
metadata[name] = value
self._change_metadata(object_id, metadata)
+ @synchronised
def set_properties(self, object_id, properties):
"""Write the given (sub)set of properties
@@ -322,6 +346,7 @@ class DataStore(object):
metadata.update(properties)
self._change_metadata(object_id, metadata)
+ @synchronised
def remove_properties(self, object_id, names):
"""Remove the given (sub)set of properties
@@ -338,6 +363,7 @@ class DataStore(object):
self._change_metadata(object_id, metadata)
+ @synchronised
def remove_entry(self, object_id):
"""Remove a single (version of a) data store entry"""
if self._data_store.dbus_interface == DS_DBUS_INTERFACE2:
@@ -352,6 +378,7 @@ class DataStore(object):
assert isinstance(object_id, unicode)
self._data_store.delete(object_id, timeout=DBUS_TIMEOUT_MAX)
+ @synchronised
def create_new(self, properties):
"""Create a new data store entry
@@ -369,6 +396,7 @@ class DataStore(object):
else:
return self._data_store.create(properties, '', False)
+ @synchronised
def get_data(self, object_id):
"""Return path to data for data store entry identified by object_id."""
if self._data_store.dbus_interface == DS_DBUS_INTERFACE2:
@@ -382,6 +410,7 @@ class DataStore(object):
assert isinstance(object_id, unicode)
return self._data_store.get_filename(object_id, byte_arrays=True)
+ @synchronised
def get_size(self, object_id):
# FIXME: make use of filesize property if available
path = self.get_data(object_id)
@@ -392,6 +421,7 @@ class DataStore(object):
os.remove(path)
return size
+ @synchronised
def write_data(self, object_id, path):
"""Update data for data store entry identified by object_id.