Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
path: root/journal2webdav
diff options
context:
space:
mode:
author    Sascha Silbe <sascha-pgp@silbe.org>    2012-04-03 10:25:11 (GMT)
committer Sascha Silbe <sascha-pgp@silbe.org>    2012-04-03 10:25:11 (GMT)
commit a1348e94a60317411a2a507505867a2b050153f2 (patch)
tree   d606cd5e318e05b2b5592a66e7b65d10cb5a4ef4 /journal2webdav
parent 2c3f4ddb36c4ca55367a0b791b5eb4e8d0017f6c (diff)
add setup.py
Add a standard interface for easy installation of journal2webdav.
Diffstat (limited to 'journal2webdav')
-rwxr-xr-x  journal2webdav  500
1 file changed, 500 insertions, 0 deletions
diff --git a/journal2webdav b/journal2webdav
new file mode 100755
index 0000000..8529664
--- /dev/null
+++ b/journal2webdav
@@ -0,0 +1,500 @@
+#!/usr/bin/env python
+#
+# Author: Sascha Silbe <sascha-pgp@silbe.org> (OpenPGP signed mails only)
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 3
+# as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from BaseHTTPServer import HTTPServer
+from SocketServer import ThreadingMixIn
+import logging
+import optparse
+import os
+import sys
+import time
+import urllib
+from urlparse import urljoin, urlparse
+
+import DAV
+import DAV.propfind
+from DAV.constants import COLLECTION, OBJECT
+from DAV.iface import dav_interface
+from DAV.errors import DAV_Error, DAV_NotFound, DAV_Requested_Range_Not_Satisfiable
+from DAVServer.fileauth import DAVAuthHandler
+
+import fsemulation
+
+
# D-Bus service, interface and object-path names of the two generations of
# the Sugar data store API.
DS_DBUS_SERVICE = 'org.laptop.sugar.DataStore'
DS_DBUS_INTERFACE1 = 'org.laptop.sugar.DataStore'
DS_DBUS_PATH1 = '/org/laptop/sugar/DataStore'
DS_DBUS_INTERFACE2 = 'org.laptop.sugar.DataStore2'
DS_DBUS_PATH2 = '/org/laptop/sugar/DataStore2'

# XML namespace under which Sugar data store properties are exposed as
# WebDAV properties (see JournalHandler.get_propnames / get_prop).
SUGAR_NS = 'http://people.sugarlabs.org/silbe/webdavns/sugar'
#SCHEMA_NS = 'http://www.w3.org/2001/XMLSchema'
# Characters that may not appear literally in XML 1.0 character data:
# all control characters below U+0020 except TAB (0x09), LF (0x0A) and
# CR (0x0D).  NOTE: unichr() is Python 2 only.
INVALID_XML_CHARS = [unichr(i) for i in range(0, 0x20)
                     if i not in [0x09, 0x0A, 0x0D]]
# Number of bytes read/served at a time when streaming entry content.
CHUNK_SIZE = 65536


# Module-level logger; initialised in main() after the log level has been
# chosen from the command line options.
log = None
+
+
# ThreadingMixIn is listed first so its request-processing override (one
# thread per request) takes precedence over HTTPServer's sequential one.
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""
+
+
class PROPFIND(DAV.propfind.PROPFIND):
    """PROPFIND implementation based on the one shipped with python-webdav.

    Overrides two methods of DAV.propfind.PROPFIND:

    mk_propname_response() -- copy of the upstream method with a fix for
        properties spread over multiple namespaces (a fresh namespace
        prefix is allocated per namespace).
    create_allprop() -- extended to recurse over child entities up to the
        requested Depth.
    """
    # pylint: disable=C0324,C0322
    def mk_propname_response(self,uri,propnames,doc):
        # copy of original, but with bug fix for multiple namespaces
        re=doc.createElement("D:response")

        # write href information
        uparts=urlparse(uri)
        fileloc=uparts[2]
        href=doc.createElement("D:href")
        # rebuild scheme://netloc + percent-encoded path for the href
        huri=doc.createTextNode(uparts[0]+'://'+'/'.join(uparts[1:2]) + urllib.quote(fileloc))
        href.appendChild(huri)
        re.appendChild(href)

        ps=doc.createElement("D:propstat")
        nsnum=0

        for ns,plist in propnames.items():
            # write prop element; each namespace gets its own nsN prefix
            pr=doc.createElement("D:prop")
            nsp="ns"+str(nsnum)
            pr.setAttribute("xmlns:"+nsp,ns)
            nsnum=nsnum+1

            # write propertynames
            for p in plist:
                pe=doc.createElement(nsp+":"+p)
                pr.appendChild(pe)

            ps.appendChild(pr)

        re.appendChild(ps)

        return re

    def create_allprop(self):
        """ return a list of all properties """
        # modified to recurse over children if applicable
        self.proplist={}
        # queue of paths relative to self._uri still to visit ('' = root)
        rel_path_q = ['']
        if self._depth == 'infinity':
            # NOTE: sys.maxint is Python 2 only
            max_depth = sys.maxint
        else:
            max_depth = int(self._depth)

        while rel_path_q:
            rel_path = rel_path_q.pop()
            if rel_path:
                uri = '%s/%s' % (self._uri, rel_path)
            else:
                uri = self._uri

            self.proplist.update(self._dataclass.get_propnames(uri))

            # the number of slashes in the relative path equals the depth
            # below the requested URI
            if rel_path.count('/') >= max_depth:
                continue

            if not self._dataclass.is_collection(uri):
                continue

            # enqueue children, stripping the absolute URI prefix
            rel_path_q += [child_uri[len(self._uri) + 1:]
                           for child_uri in self._dataclass.get_childs(uri)]

        self.namespaces=self.proplist.keys()
        return self.create_prop()
+
+
class JournalObjectResource(object):
    """File-like wrapper around the content of a single data store entry.

    Retrieves the content via fs_object.get_data() on construction and
    serves it via read() or iteration.  The temporary file handed out by
    the data store is unlinked immediately; the open file descriptor keeps
    the data accessible until this object is done with it.
    """

    def __init__(self, fs_object):
        path = fs_object.get_data()
        if path:
            # file() is a Python 2-only alias of open(); use the idiomatic
            # spelling.
            self._file = open(path)
            # Unlink right away so the temporary file can't leak; the open
            # fd keeps the content alive.
            os.remove(path)
            self._size = os.fstat(self._file.fileno()).st_size
        else:
            # entry without content
            self._file = None
            self._size = 0

    def __len__(self):
        """Return the content size in bytes."""
        return self._size

    def __iter__(self):
        """Yield the content in CHUNK_SIZE pieces, then close the file."""
        while self._size:
            data = self._file.read(CHUNK_SIZE)
            if not data:
                break

            yield data

        if self._file is not None:
            self._file.close()
            self._file = None

    def read(self, length=0):
        """Return up to length bytes (the whole content if length is 0).

        BUG FIX: previously raised AttributeError for empty entries
        (self._file is None); now returns the empty string like a regular
        exhausted file would.
        """
        if self._file is None:
            return ''
        return self._file.read(length or self._size)
+
+
class ObjectListHtmlResource(object):
    """File-like object serving an HTML listing of data store entries.

    The listing is generated eagerly on construction from
    data_store.find_entries(query) and encoded as UTF-8.
    """

    def __init__(self, data_store, query):
        self._html = self._generate_html(data_store, query)
        self._position = 0

    def __len__(self):
        """Return the size of the encoded listing in bytes."""
        return len(self._html)

    def __iter__(self):
        """Yield the listing in CHUNK_SIZE pieces, advancing the cursor."""
        while self._position < len(self._html):
            yield self._html[self._position:self._position + CHUNK_SIZE]
            self._position += CHUNK_SIZE

    def read(self, length=0):
        """Return up to length bytes from the current position.

        A length of 0 returns everything from the current position on.
        """
        old_position = self._position
        self._position += length or len(self._html)
        return self._html[old_position:self._position]

    def _generate_html(self, data_store, query):
        """Build the UTF-8 encoded HTML table of matching entries.

        Entries are (object_id, title) pairs; object_id is either a plain
        id or a (tree_id, version_id) tuple, determining the link format.
        """
        entries = data_store.find_entries(query)

        lines = ['<html>', '<head><title>Journal listing</title></head>',
                 '<body>','<table>', '<tr><th>Title</th></tr>']

        if entries and isinstance(entries[0][0], tuple):
            # BUG FIX: % binds tighter than +, so the original
            #   fmt % object_id + (title, )
            # formatted with only the 2-tuple (raising "not enough
            # arguments") and then tried to add a tuple to the result.
            # The argument tuple must be built before formatting.
            lines += [u'<tr><td><a href="/object/%s/%s">%s</a></td></tr>' %
                      (object_id + (title, )) for object_id, title in entries]
        else:
            lines += [u'<tr><td><a href="/object/%s">%s</a></td></tr>' %
                      (object_id, title) for object_id, title in entries]

        lines += ['</table>', '</html>']
        return u'\n'.join(lines).encode('utf-8')
+
+
class JournalHandler(dav_interface):
    """WebDAV interface implementation backed by the Sugar data store.

    Resolves WebDAV URIs to objects of the emulated file system
    (fsemulation.Directory for collections, fsemulation.DSObject for data
    store entries) and serves their properties and content.
    """

    def __init__(self, file_system, base_uri, verbose=False):
        # file_system: fsemulation object used to resolve paths (resolve())
        self._fs = file_system
        # required by dav_interface
        self.baseuri = base_uri
        self.verbose = verbose

    def exists(self, uri):
        """Return True if uri resolves to an object, False otherwise."""
        log.debug('exists %r', uri)
        try:
            self._lookup_uri(uri)
        except DAV_Error:
            return False
        else:
            return True

    def get_propnames(self, uri):
        """Return the names of the properties of uri, keyed by namespace."""
        log.debug('get_propnames %r', uri)
        # FIXME: we're apparently advertising but not supporting displayname, getcontentlanguage, getetag and source
        props_by_ns = {'DAV:': self.PROPS['DAV:']}
        fs_object = self._lookup_uri(uri)
        if isinstance(fs_object, fsemulation.DSObject):
            # data store entries additionally expose their metadata in the
            # Sugar namespace
            props_by_ns[SUGAR_NS] = fs_object.list_properties()

        log.debug('props_by_ns=%r', props_by_ns)
        return props_by_ns

    def get_prop(self, uri, ns, propname):
        """Return the value of the property propname (namespace ns) of uri.

        Values containing characters that are invalid in XML are returned
        base64 encoded, prefixed with 'base64:'.  Raises DAV_NotFound if
        the property (or entry) does not exist.
        """
        if ns != SUGAR_NS:
            # delegate non-Sugar namespaces (e.g. DAV:) to the base class
            return dav_interface.get_prop(self, uri, ns, propname)

        log.debug('get_prop %r %r %r', uri, ns, propname)
        fs_object = self._lookup_uri(uri)
        if isinstance(fs_object, fsemulation.DSObject):
            metadata = fs_object.get_properties([propname])
            if propname not in metadata:
                raise DAV_NotFound

            value = metadata[propname]
            # NOTE: unicode / str.encode('base64') below are Python 2 only
            if isinstance(value, unicode):
                if not [c for c in value if c in INVALID_XML_CHARS]:
                    return value
                else:
                    # contains control characters => return as binary string
                    # (base64 encoded)
                    value = value.encode('utf-8')

            # binary data (e.g. PNG previews)

            # FIXME: We can't add an XML element containing a text node since
            # the xml.dom.minidom implementation requires the Document object
            # for instantiating Nodes, but DAV.propfind.PROPFIND does not pass
            # the Document object down (and there's not even private API to
            # access it as it's a local variable). So the only thing we can
            # return is a plain string.
            #element = document.Element('base64Binary', SCHEMA_NS)
            #text_node = document.createTextNode(value.encode('base64'))
            #element.appendChild(text_node)
            return 'base64:' + value.encode('base64')

        raise DAV_NotFound

    def get_childs(self, uri):
        """Return the child objects of the given URI as absolute URIs."""
        scheme, netloc = urlparse(str(uri))[:2]
        path = unicode(urlparse(str(uri))[2], 'utf-8').strip('/')
        log.debug('get_childs %r', uri)
        fs_object = self._lookup_uri(uri)
        if not isinstance(fs_object, fsemulation.Directory):
            # PROPFIND.create_prop() recurses over all entities without
            # checking is_collection() first.
            return []

        if path:
            path += u'/'
        return ['%s://%s/%s%s' % (scheme, netloc, path.encode('utf-8'),
                                  child_name.encode('utf-8'))
                for child_name in fs_object.listdir()
                if not child_name in ['.', '..']]

    def get_data(self, uri, byte_range=None):
        """Return the content of uri as a file-like resource object.

        Raises DAV_NotFound for anything that is not a data store entry
        (directory listings are not implemented yet, see FIXME).
        """
        log.debug('get_data %r %r', uri, byte_range)
        fs_object = self._lookup_uri(uri)

        if not isinstance(fs_object, fsemulation.DSObject):
            # FIXME: directory listings
            #~ return self._get_data_object_list(self._root_query, scheme,
            #~ netloc, range)
            raise DAV_NotFound

        return JournalObjectResource(fs_object)

    def _get_dav_resourcetype(self, uri):
        """Return COLLECTION for directories, OBJECT for everything else."""
        log.debug('_get_dav_resourcetype %r', uri)
        fs_object = self._lookup_uri(uri)
        if isinstance(fs_object, fsemulation.Directory):
            return COLLECTION
        else:
            return OBJECT

    def _get_dav_getcontentlength(self, uri):
        """Return the size of uri in bytes, as a string."""
        log.debug('_get_dav_getcontentlength %r', uri)
        fs_object = self._lookup_uri(uri)
        if isinstance(fs_object, fsemulation.DSObject):
            return str(fs_object.get_size())
        else:
            # directories currently report a size of 0
            return '0'

        #~ if not path:
            #~ # FIXME: inefficient
            #~ return str(len(ObjectListHtmlResource(self._ds, self._root_query)))

    def _get_dav_getcontenttype(self, uri):
        """Return the MIME type of uri.

        Raises DAV_NotFound for anything that is neither a directory nor
        a data store entry.
        """
        log.debug('_get_dav_getcontenttype %r', uri)
        fs_object = self._lookup_uri(uri)
        if isinstance(fs_object, fsemulation.Directory):
            # directories are served as HTML listings
            return 'text/html; charset=utf-8'
        elif isinstance(fs_object, fsemulation.DSObject):
            metadata = fs_object.get_properties(['mime_type'])
            return str(metadata.get('mime_type', 'application/octet-stream'))

        raise DAV_NotFound

    def get_creationdate(self, uri):
        """Return the creation time of uri in seconds since the epoch.

        Falls back to the 'timestamp' property and finally to the current
        time if no parseable 'creation_time' property exists.
        """
        log.debug('get_creationdate %r', uri)
        fs_object = self._lookup_uri(uri)
        if not isinstance(fs_object, fsemulation.DSObject):
            return time.time()

        props = fs_object.get_properties(['creation_time', 'timestamp'])
        try:
            return float(props['creation_time'])
        except (KeyError, ValueError, TypeError):
            pass

        try:
            return float(props['timestamp'])
        except (KeyError, ValueError, TypeError):
            return time.time()

    def get_lastmodified(self, uri):
        """Return the modification time of uri in seconds since the epoch.

        Falls back to the current time if no parseable 'timestamp'
        property exists.
        """
        log.debug('get_lastmodified %r', uri)
        fs_object = self._lookup_uri(uri)
        if not isinstance(fs_object, fsemulation.DSObject):
            return time.time()

        props = fs_object.get_properties(['timestamp'])
        try:
            return float(props['timestamp'])
        except (KeyError, ValueError, TypeError):
            return time.time()

    def is_collection(self, uri):
        """Return True if uri is a collection (i.e. a directory)."""
        log.debug('is_collection %r', uri)
        fs_object = self._lookup_uri(uri)
        return isinstance(fs_object, fsemulation.Directory)

    def _lookup_uri(self, uri):
        """Resolve uri to an fsemulation object (see _lookup_path)."""
        # only the path component matters; scheme and host are our own
        path = unicode(urlparse(str(uri))[2], 'utf-8')
        return self._lookup_path(path)

    def _lookup_path(self, path):
        """Resolve path, mapping resolution failures to DAV_NotFound."""
        try:
            # WebDAV doesn't support symlinks :-/
            fs_object = self._fs.resolve(path, follow_links=True)
        except IOError:
            # FIXME: better error mapping
            raise DAV_NotFound

        return fs_object

    #~ def _get_data_object_list(self, query, scheme, netloc, byte_range):
        #~ resource = ObjectListHtmlResource(self._ds, query)
        #~ start, end = self._parse_range(byte_range, len(resource))
        #~ log.info('Serving range %s -> %s of object list' % (start, end))
        #~ return resource

    def _parse_range(self, byte_range, size):
        """Return (start, end) of byte_range clamped to size.

        byte_range may be None or contain empty/None elements; missing
        values default to the full range.  Raises
        DAV_Requested_Range_Not_Satisfiable if start lies beyond size.
        """
        if not byte_range or not byte_range[0]:
            start = 0
        else:
            start = int(byte_range[0])

        if not byte_range or not byte_range[1]:
            end = size
        else:
            end = min(int(byte_range[1]), size)

        if start > size:
            raise DAV_Requested_Range_Not_Satisfiable

        return start, end
+
+
def setupDummyConfig(**kw):
    """Build a minimal stand-in for the PyWebDAV configuration object.

    The returned object exposes a single ``DAV`` section whose attributes
    are the given keyword arguments; ``DAV.getboolean(name)`` interprets
    the stored value like a config file boolean (missing names are False).
    """
    class _DAVSection:
        def __init__(self, **options):
            self.__dict__.update(**options)

        def getboolean(self, name):
            # treat the usual config-file spellings (and Python's True)
            # as true; anything else, including absent options, is false
            truthy = ('1', "yes", "true", "on", "True")
            return str(getattr(self, name, 0)) in truthy

    class _Config:
        DAV = _DAVSection(**kw)

        def __init__(self):
            pass

    return _Config()
+
+
class RequestHandler(DAVAuthHandler):
    """HTTP request handler using this module's fixed PROPFIND class."""
    # pylint: disable=W0402,W0404,C0324,W0612
    def do_PROPFIND(self):
        """Handle a PROPFIND request.

        Copy of the upstream DAVAuthHandler implementation; the only
        change is that it instantiates this module's PROPFIND class
        instead of the upstream one.  NOTE: Python 2 only (has_key(),
        old-style except clause).
        """
        from string import atoi
        # exact copy of original, just to override the PROPFIND class

        dc = self.IFACE_CLASS

        # read the body containing the xml request
        # iff there is no body then this is an ALLPROP request
        body = None
        if self.headers.has_key('Content-Length'):
            l = self.headers['Content-Length']
            body = self.rfile.read(atoi(l))

        uri = urljoin(self.get_baseuri(dc), self.path)
        uri = urllib.unquote(uri)

        # default Depth is 'infinity' per the WebDAV spec
        pf = PROPFIND(uri, dc, self.headers.get('Depth', 'infinity'), body)

        try:
            DATA = '%s\n' % pf.createResponse()
        except DAV_Error, (ec,dd):
            return self.send_status(ec)

        # work around MSIE DAV bug for creation and modified date
        # taken from Resource.py @ Zope webdav
        if (self.headers.get('User-Agent') ==
            'Microsoft Data Access Internet Publishing Provider DAV 1.1'):
            DATA = DATA.replace('<ns0:getlastmodified xmlns:ns0="DAV:">',
                                '<ns0:getlastmodified xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.rfc1123">')
            DATA = DATA.replace('<ns0:creationdate xmlns:ns0="DAV:">',
                                '<ns0:creationdate xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.tz">')

        self.send_body_chunks_if_http11(DATA, '207','Multi-Status','Multiple responses')

    def log_message(self, format, *args):
        """Route BaseHTTPServer access log messages through our logger."""
        # pylint: disable=W0622
        log.info('%s - - [%s] %s', self.address_string(),
                 self.log_date_time_string(), format % args)
+
+
def main(my_name, args):
    """Parse command line options and run the WebDAV server.

    my_name -- name the program was invoked as (currently unused)
    args -- command line arguments, excluding the program name

    Returns the exit code to be passed to sys.exit().
    """
    global log

    parser = optparse.OptionParser()
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help='enable additional debugging output')
    parser.add_option('-H', '--host', default='localhost', metavar='HOST',
                      help='bind to HOST; use empty string to listen on all'
                      ' interfaces [default: %default]')
    parser.add_option('-p', '--port', default=8009, metavar='PORT', type='int',
                      help='listen on PORT [default: %default]')
    parser.add_option('-r', '--root-query', default="{'keep': '1'}",
                      metavar='QUERY', help='publish all data store entries'
                      ' matching the data store query QUERY'
                      ' [default: %default]')
    parser.add_option('-q', '--quiet', action='store_false', dest='verbose',
                      help='only output warnings and errors')
    parser.add_option('-v', '--verbose', action='store_true', default=True,
                      help='override a previous -q or --quiet option')
    # BUG FIX: parse the arguments passed in by the caller; previously the
    # args parameter was silently ignored and sys.argv re-read.
    options, args = parser.parse_args(args)
    if args:
        parser.error('extra arguments passed')

    # NOTE(security): eval() executes arbitrary Python code.  Acceptable
    # for a value from our own command line, but switch to
    # ast.literal_eval() if the query ever comes from an untrusted source.
    root_query = eval(options.root_query)

    if options.debug:
        logging.basicConfig(level=logging.DEBUG)
    elif options.verbose:
        logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.WARNING)
    log = logging.getLogger('journal2webdav')

    emulated_fs = fsemulation.FSEmulation(root_query)

    # configure the (class-level) handler attributes expected by PyWebDAV
    handler = RequestHandler
    base_url = 'http://%s:%d/' % (options.host, options.port)
    handler.IFACE_CLASS = JournalHandler(emulated_fs, base_url, options.debug)
    handler.DO_AUTH = False
    handler.IFACE_CLASS.mimecheck = True  # pylint: disable=W0201

    handler._config = setupDummyConfig(verbose=options.debug,
                                       port=options.port, host=options.host,
                                       noauth=True, chunked_http_response=True)

    runner = ThreadedHTTPServer((options.host, options.port), handler)

    log.info('Server running.')
    try:
        runner.serve_forever()
    except KeyboardInterrupt:
        log.info('Killed by user')

    return 0
+
+
+sys.exit(main(sys.argv[0], sys.argv[1:]))