Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
author    Sascha Silbe <sascha-pgp@silbe.org>  2013-07-23 11:38:10 (GMT)
committer Sascha Silbe <sascha-pgp@silbe.org>  2013-11-16 20:15:30 (GMT)
commit    ee24b353a3d276aa16d6c2470a44926298b7989b (patch)
tree      176093422a72930e192db0096a72d9b50b0dc015
parent    95f705afe324f1f3e5b1d07a234c4026c7dd867c (diff)
fsemulation: guard all data store accesses with lock
Despite having internal locking, python-dbus / libdbus apparently isn't thread-safe. All current data store implementations are single-threaded anyway, so just guard all data store accesses (which happen via D-Bus) with a (single) lock to prevent multiple D-Bus calls being invoked in parallel.
-rw-r--r--  fsemulation.py  30
1 file changed, 30 insertions, 0 deletions
diff --git a/fsemulation.py b/fsemulation.py
index 13a5d27..939f9c9 100644
--- a/fsemulation.py
+++ b/fsemulation.py
@@ -14,9 +14,11 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
import errno
+import functools
import logging
import os
import stat
+import threading
import time
import dbus
@@ -39,6 +41,14 @@ _USEFUL_PROPS = ['mime_type', 'tags', 'timestamp', 'title']
"""Metadata properties used for determining the file name of an entry"""
+def synchronised(func):
+ @functools.wraps(func)
+ def wrapper(self, *args, **kwargs):
+ with self._lock:
+ return func(self, *args, **kwargs)
+ return wrapper
+
+
class _LRU(collections.MutableMapping):
"""Simple, but reasonably fast Least Recently Used (LRU) cache"""
@@ -85,6 +95,7 @@ class DataStore(object):
def __init__(self, root_query):
self._root_query = root_query
self.supports_versions = False
+ self._lock = threading.RLock()
self._data_store_version = 0
bus = dbus.SessionBus()
try:
@@ -115,6 +126,7 @@ class DataStore(object):
logging.info('0.84+ data store without version support found')
self._data_store_version = 84
+ @synchronised
def list_object_ids(self, query=None):
"""Retrieve the object_ids of all (matching) data store entries
@@ -142,6 +154,7 @@ class DataStore(object):
for entry in self._data_store.find(query, ['uid'],
byte_arrays=True, timeout=DBUS_TIMEOUT_MAX)[0]]
+ @synchronised
def list_metadata(self, query=None):
"""Retrieve object_id and selected metadata of matching entries
@@ -176,6 +189,7 @@ class DataStore(object):
for entry in self._data_store.find(query, properties,
timeout=DBUS_TIMEOUT_MAX, byte_arrays=True)[0]]
+ @synchronised
def list_versions(self, tree_id):
"""Retrieve all version_ids of the given data store entry"""
query = dict(self._root_query)
@@ -185,11 +199,13 @@ class DataStore(object):
for entry in self._data_store.find(query,
options, timeout=DBUS_TIMEOUT_MAX, byte_arrays=True)[0]]
+ @synchronised
def list_tree_ids(self, query=None):
"""Retrieve the tree_ids of all (matching) data store entries"""
query = self._merge_root_query(query)
return [unicode(entry[0]) for entry in self.list_object_ids(query)]
+ @synchronised
def list_property_values(self, name, query=None):
"""Return all unique values of the given property"""
assert isinstance(name, unicode)
@@ -210,6 +226,7 @@ class DataStore(object):
return dict.fromkeys([entry.get(name) for entry in entries]).keys()
+ @synchronised
def check_object_id(self, object_id):
"""Return True if the given object_id identifies a data store entry"""
try:
@@ -221,6 +238,7 @@ class DataStore(object):
return True
+ @synchronised
def check_tree_id(self, tree_id):
"""Return True if the given tree_id identifies a data store entry"""
assert isinstance(tree_id, unicode)
@@ -231,6 +249,7 @@ class DataStore(object):
byte_arrays=True)[0]
return bool(results)
+ @synchronised
def check_property_contains(self, name, word):
"""Return True if there is at least one entry containing word in the
given property
@@ -255,6 +274,7 @@ class DataStore(object):
return bool(results)
+ @synchronised
def get_properties(self, object_id, names=None):
"""Read given properties for data store entry identified by object_id
@@ -301,6 +321,7 @@ class DataStore(object):
return self._convert_metadata(metadata)
+ @synchronised
def list_properties(self, object_id):
"""List the names of all properties for this entry
@@ -308,6 +329,7 @@ class DataStore(object):
"""
return self.get_properties(object_id).keys()
+ @synchronised
def create_property(self, object_id, name, value):
"""Set the given property, raising an error if it already exists"""
assert isinstance(name, unicode)
@@ -318,6 +340,7 @@ class DataStore(object):
metadata[name] = value
self._change_metadata(object_id, metadata)
+ @synchronised
def replace_property(self, object_id, name, value):
"""Modify the given, already existing property"""
assert isinstance(name, unicode)
@@ -330,6 +353,7 @@ class DataStore(object):
metadata[name] = value
self._change_metadata(object_id, metadata)
+ @synchronised
def set_properties(self, object_id, properties):
"""Write the given (sub)set of properties
@@ -344,6 +368,7 @@ class DataStore(object):
metadata.update(properties)
self._change_metadata(object_id, metadata)
+ @synchronised
def remove_properties(self, object_id, names):
"""Remove the given (sub)set of properties
@@ -360,6 +385,7 @@ class DataStore(object):
self._change_metadata(object_id, metadata)
+ @synchronised
def remove_entry(self, object_id):
"""Remove a single (version of a) data store entry"""
# Make sure we don't allow deleting entries that don't match
@@ -375,6 +401,7 @@ class DataStore(object):
else:
self._data_store.delete(object_id, timeout=DBUS_TIMEOUT_MAX)
+ @synchronised
def create_new(self, properties):
"""Create a new data store entry
@@ -392,6 +419,7 @@ class DataStore(object):
else:
return self._data_store.create(properties, '', False)
+ @synchronised
def get_data(self, object_id):
"""Return path to data for data store entry identified by object_id."""
# Make sure we don't allow deleting entries that don't match
@@ -407,6 +435,7 @@ class DataStore(object):
else:
return self._data_store.get_filename(object_id, byte_arrays=True)
+ @synchronised
def get_size(self, object_id):
# FIXME: make use of filesize property if available
path = self.get_data(object_id)
@@ -417,6 +446,7 @@ class DataStore(object):
os.remove(path)
return size
+ @synchronised
def write_data(self, object_id, path):
"""Update data for data store entry identified by object_id.