Diffstat (limited to 'src/jarabe/journal/model.py')
-rw-r--r--  src/jarabe/journal/model.py  |  205
 1 file changed, 203 insertions(+), 2 deletions(-)
diff --git a/src/jarabe/journal/model.py b/src/jarabe/journal/model.py
index 83e216f..527f78d 100644
--- a/src/jarabe/journal/model.py
+++ b/src/jarabe/journal/model.py
@@ -20,6 +20,7 @@
import logging
import os
+import stat
import errno
import subprocess
from datetime import datetime
@@ -36,11 +37,14 @@ import gobject
import dbus
import gio
import gconf
from sugar import dispatch
from sugar import mime
from sugar import util
+from jarabe.journal.webdavmanager import get_remote_webdav_share_metadata
+
DS_DBUS_SERVICE = 'org.laptop.sugar.DataStore'
DS_DBUS_INTERFACE = 'org.laptop.sugar.DataStore'
DS_DBUS_PATH = '/org/laptop/sugar/DataStore'
@@ -425,6 +429,123 @@ class InplaceResultSet(BaseResultSet):
return
+class RemoteShareResultSet(object):
+ def __init__(self, ip_address, query):
+ self._ip_address = ip_address
+        self._file_list = []
+        self._position = 0
+
+ self.ready = dispatch.Signal()
+ self.progress = dispatch.Signal()
+
+        # The first time this is called, query is None.
+ if query is None:
+ return
+
+ query_text = query.get('query', '')
+ if query_text.startswith('"') and query_text.endswith('"'):
+            self._regex = re.compile('.*%s.*' % query_text.strip('"'))
+ elif query_text:
+ expression = ''
+ for word in query_text.split(' '):
+ expression += '(?=.*%s.*)' % word
+ self._regex = re.compile(expression, re.IGNORECASE)
+ else:
+ self._regex = None
+
+ if query.get('timestamp', ''):
+ self._date_start = int(query['timestamp']['start'])
+ self._date_end = int(query['timestamp']['end'])
+ else:
+ self._date_start = None
+ self._date_end = None
+
+ self._mime_types = query.get('mime_type', [])
+
+ self._sort = query.get('order_by', ['+timestamp'])[0]
+
+ def setup(self):
+ metadata_list_complete = get_remote_webdav_share_metadata(self._ip_address)
+ for metadata in metadata_list_complete:
+
+ add_to_list = False
+ if self._regex is not None:
+ for f in ['fulltext', 'title',
+ 'description', 'tags']:
+ if f in metadata and \
+ self._regex.match(metadata[f]):
+ add_to_list = True
+ break
+ else:
+ add_to_list = True
+ if not add_to_list:
+ continue
+
+ add_to_list = False
+ if self._date_start is not None:
+ if metadata['timestamp'] > self._date_start:
+ add_to_list = True
+ else:
+ add_to_list = True
+ if not add_to_list:
+ continue
+
+ add_to_list = False
+ if self._date_end is not None:
+ if metadata['timestamp'] < self._date_end:
+ add_to_list = True
+ else:
+ add_to_list = True
+ if not add_to_list:
+ continue
+
+ add_to_list = False
+ if self._mime_types:
+ mime_type = metadata['mime_type']
+ if mime_type in self._mime_types:
+ add_to_list = True
+ else:
+ add_to_list = True
+ if not add_to_list:
+ continue
+
+            # If control reaches here, the current metadata has passed
+            # all the filter tests.
+ file_info = (metadata['timestamp'],
+ metadata['creation_time'],
+ metadata['filesize'],
+ metadata)
+ self._file_list.append(file_info)
+
+ if self._sort[1:] == 'filesize':
+ keygetter = itemgetter(2)
+ elif self._sort[1:] == 'creation_time':
+ keygetter = itemgetter(1)
+ else:
+ # timestamp
+ keygetter = itemgetter(0)
+
+ self._file_list.sort(lambda a, b: cmp(b, a),
+ key=keygetter,
+ reverse=(self._sort[0] == '-'))
+
+ self.ready.send(self)
+
+ def get_length(self):
+ return len(self._file_list)
+
+ length = property(get_length)
+
+ def seek(self, position):
+ self._position = position
+
+ def read(self):
+ modified_timestamp, creation_timestamp, filesize, metadata = self._file_list[self._position]
+ return metadata
+
+ def stop(self):
+ self._stopped = True
+
+
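A minimal sketch of how this result set might be driven, mirroring the signal/seek/read flow of the other result-set classes in this file (the helper name, the IP address and the query below are purely illustrative):

    def _show_remote_entries(ip_address='127.0.0.1'):
        # Build a result set for a remote share and a free-text query,
        # ordered by timestamp as elsewhere in the Journal.
        query = {'query': 'photo', 'order_by': ['+timestamp']}
        result_set = RemoteShareResultSet(ip_address, query)

        def _ready_cb(sender, **kwargs):
            # Called once setup() has fetched, filtered and sorted the
            # remote metadata.
            for position in range(result_set.length):
                result_set.seek(position)
                metadata = result_set.read()
                logging.debug('remote entry: %r', metadata.get('title'))

        result_set.ready.connect(_ready_cb)
        result_set.setup()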
def _get_file_metadata(path, stat, fetch_preview=True):
"""Return the metadata from the corresponding file.
@@ -436,10 +557,16 @@ def _get_file_metadata(path, stat, fetch_preview=True):
dir_path = os.path.dirname(path)
metadata = _get_file_metadata_from_json(dir_path, filename, fetch_preview)
if metadata:
+        # For Documents/Shares/Mounted-Drives.
+        # Special case: for locally-mounted remote files, ensure that
+        # metadata['filesize'] is already present beforehand. This has
+        # to be done at the time the webdav properties are fetched for
+        # each resource.
if 'filesize' not in metadata:
metadata['filesize'] = stat.st_size
return metadata
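As the comment above notes, remote entries have no file on disk to stat, so 'filesize' has to be filled in when the WebDAV properties of each resource are fetched. A minimal sketch of that step, assuming the WebDAV layer hands back a plain dict of properties per resource (the _fill_filesize_from_dav and dav_props names are hypothetical; DAV:getcontentlength is the standard property carrying the size):

    def _fill_filesize_from_dav(metadata, dav_props):
        # dav_props is a hypothetical dict of WebDAV properties for one
        # resource, e.g. {'getcontentlength': '2048', ...}.
        if 'filesize' not in metadata and 'getcontentlength' in dav_props:
            metadata['filesize'] = int(dav_props['getcontentlength'])
        return metadata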
+ # For Journal.
return {'uid': path,
'title': os.path.basename(path),
'timestamp': stat.st_mtime,
@@ -529,11 +656,33 @@ def find(query_, page_size):
raise ValueError('Exactly one mount point must be specified')
if mount_points[0] == '/':
+        # For the Journal.
return DatastoreResultSet(query, page_size)
+ elif is_mount_point_for_locally_mounted_remote_share(mount_points[0]):
+        # For locally-mounted remote shares.  A regex match ensures that
+        # the mount point is an IP address.
+ return RemoteShareResultSet(mount_points[0], query)
else:
+        # For Documents/Shares/Mounted-Drives.
return InplaceResultSet(query, page_size, mount_points[0])
+
+
+def is_mount_point_for_locally_mounted_remote_share(mount_point):
+    # Mount points of locally-mounted remote shares are plain IPv4
+    # addresses (e.g. '10.0.0.5'); everything else is a local path.
+    pattern = r'^[1-9][0-9]{0,2}(\.[0-9]{1,3}){3}$'
+    return re.match(pattern, mount_point) is not None
+
+
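A short sketch of how the check above routes a find() query (the mount points are purely illustrative):

    # '10.0.0.5'     looks like an IPv4 address -> RemoteShareResultSet
    # '/'            is the Journal             -> DatastoreResultSet
    # '/media/stick' is a mounted drive         -> InplaceResultSet
    assert is_mount_point_for_locally_mounted_remote_share('10.0.0.5')
    assert not is_mount_point_for_locally_mounted_remote_share('/')
    assert not is_mount_point_for_locally_mounted_remote_share('/media/stick')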
def _get_mount_point(path):
dir_path = os.path.dirname(path)
while dir_path:
@@ -544,14 +693,45 @@ def _get_mount_point(path):
return None
+def is_locally_mounted_remote_share(path):
+    return path.startswith('/tmp/')
+
+
+def extract_ip_address_from_locally_mounted_remote_share_path(path):
+ """
+    The path is of the form::
+
+ /tmp/127.0.0.1/webdav/a.txt
+ """
+ return path.split('/')[2]
+
+
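A corresponding sketch of the two path helpers, using the layout described in the docstring above (/tmp/<ip-address>/webdav/<file>):

    path = '/tmp/127.0.0.1/webdav/a.txt'
    assert is_locally_mounted_remote_share(path)
    assert extract_ip_address_from_locally_mounted_remote_share_path(path) \
        == '127.0.0.1'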
def get(object_id):
"""Returns the metadata for an object
"""
- if os.path.exists(object_id):
- stat = os.stat(object_id)
+    if os.path.exists(object_id) or \
+       is_locally_mounted_remote_share(object_id):
+        # For Documents/Shares/Mounted-Drives/Locally-Mounted-Remote-Shares,
+        # where the ".Sugar-Metadata" folder exists.
+        #
+        # Note that for locally-mounted remote shares, the file itself is
+        # not physically present.
+ if os.path.exists(object_id):
+            # The file is physically present: derive its metadata by
+            # examining the file on disk.
+ stat = os.stat(object_id)
+ else:
+            # The file is remote: derive its metadata from the remotely
+            # fetched WebDAV properties.
+ stat = None
+
metadata = _get_file_metadata(object_id, stat)
metadata['mountpoint'] = _get_mount_point(object_id)
else:
+        # For the Journal, where no ".Sugar-Metadata" folder exists.
metadata = _get_datastore().get_properties(object_id, byte_arrays=True)
metadata['mountpoint'] = '/'
return metadata
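A rough sketch of the two branches of get() described above (both object ids are purely illustrative, and the datastore branch needs a running datastore service):

    # A locally-mounted remote share entry: the path sits under /tmp/<ip>/,
    # the file itself may not exist on disk, so stat stays None and the
    # size comes from the previously fetched WebDAV properties.
    remote_metadata = get('/tmp/127.0.0.1/webdav/a.txt')

    # A Journal entry: a datastore uid, resolved over D-Bus, with
    # mountpoint '/'.
    journal_metadata = get('hypothetical-datastore-uid')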
@@ -561,9 +741,16 @@ def get_file(object_id):
"""Returns the file for an object
"""
if os.path.exists(object_id):
+        # For Documents/Shares/Mounted-Drives, and for locally-mounted
+        # remote shares whose file is already present locally.
logging.debug('get_file asked for file with path %r', object_id)
return object_id
else:
+        # For the Journal.
logging.debug('get_file asked for entry with id %r', object_id)
file_path = _get_datastore().get_filename(object_id)
if file_path:
@@ -754,6 +941,20 @@ def _write_entry_on_external_device(metadata, file_path):
_rename_entry_on_external_device(file_path, destination_path,
metadata_dir_path)
+    # For the "Shares" folder, set the permissions of the newly copied
+    # file to 0777; otherwise it will not be accessible to the "httpd"
+    # service.
+ if metadata['mountpoint'] == '/var/www/web1/web':
+ fd = os.open(destination_path, os.O_RDONLY)
+ os.fchmod(fd, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+ os.close(fd)
+
+        metadata_file_path = os.path.join(metadata_dir_path,
+                                          file_name + '.metadata')
+ fd = os.open(metadata_file_path, os.O_RDONLY)
+ os.fchmod(fd, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+ os.close(fd)
+
+
object_id = destination_path
created.send(None, object_id=object_id)
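For reference, the mask used above is the full 0777 permission set; a small sketch of the equivalent call when no open file descriptor is needed (the file name below is hypothetical):

    import os
    import stat

    mode = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO   # 0777
    os.chmod('/var/www/web1/web/a.txt', mode)   # same effect as the fchmod above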