Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAleksey Lim <alsroot@sugarlabs.org>2013-02-12 17:00:32 (GMT)
committer Aleksey Lim <alsroot@sugarlabs.org>2013-02-13 17:28:52 (GMT)
commit35cd60ed71d5a23e5d209bb54ef3d5842472925c (patch)
treea8eac89fc9bcba043a5ef500ae69235bee35215c
parent91d0dc78baf24b90d827f20f08a959bdaa1387ed (diff)
parent610a9c14543aa9c92d1646b1ae60f9270aee0b3b (diff)
Merge active_document
-rw-r--r--.gitmodules4
-rwxr-xr-xsugar-network18
-rwxr-xr-xsugar-network-client19
-rwxr-xr-xsugar-network-node11
-rw-r--r--sugar_network/__init__.py26
-rw-r--r--sugar_network/client/__init__.py56
-rw-r--r--sugar_network/client/journal.py13
-rw-r--r--sugar_network/client/mounts.py44
-rw-r--r--sugar_network/client/mountset.py55
-rw-r--r--sugar_network/db/__init__.py38
-rw-r--r--sugar_network/db/commands.py346
-rw-r--r--sugar_network/db/directory.py468
-rw-r--r--sugar_network/db/document.py100
-rw-r--r--sugar_network/db/env.py148
-rw-r--r--sugar_network/db/index.py443
-rw-r--r--sugar_network/db/metadata.py397
-rw-r--r--sugar_network/db/storage.py275
-rw-r--r--sugar_network/db/volume.py289
m---------sugar_network/lib/zeroinstall-injector (renamed from sugar_network/zeroinstall/zeroinstall-injector)0
-rw-r--r--sugar_network/node/__init__.py2
-rw-r--r--sugar_network/node/auth.py7
-rw-r--r--sugar_network/node/commands.py82
-rw-r--r--sugar_network/node/obs.py6
-rw-r--r--sugar_network/node/stats.py15
-rw-r--r--sugar_network/node/sync_master.py36
-rw-r--r--sugar_network/node/sync_node.py24
-rw-r--r--sugar_network/resources/artifact.py45
-rw-r--r--sugar_network/resources/comment.py25
-rw-r--r--sugar_network/resources/context.py81
-rw-r--r--sugar_network/resources/feedback.py18
-rw-r--r--sugar_network/resources/implementation.py33
-rw-r--r--sugar_network/resources/notification.py26
-rw-r--r--sugar_network/resources/report.py33
-rw-r--r--sugar_network/resources/review.py26
-rw-r--r--sugar_network/resources/solution.py15
-rw-r--r--sugar_network/resources/user.py42
-rw-r--r--sugar_network/resources/volume.py66
-rw-r--r--sugar_network/toolkit/__init__.py224
-rw-r--r--sugar_network/toolkit/application.py390
-rw-r--r--sugar_network/toolkit/coroutine.py246
-rw-r--r--sugar_network/toolkit/files_sync.py11
-rw-r--r--sugar_network/toolkit/http.py16
-rw-r--r--sugar_network/toolkit/inotify.py9
-rw-r--r--sugar_network/toolkit/lsb_release.py (renamed from sugar_network/zerosugar/lsb_release.py)14
-rw-r--r--sugar_network/toolkit/mountpoints.py4
-rw-r--r--sugar_network/toolkit/netlink.py7
-rw-r--r--sugar_network/toolkit/network.py38
-rw-r--r--sugar_network/toolkit/options.py358
-rw-r--r--sugar_network/toolkit/pipe.py3
-rw-r--r--sugar_network/toolkit/printf.py223
-rw-r--r--sugar_network/toolkit/router.py38
-rw-r--r--sugar_network/toolkit/rrd.py12
-rw-r--r--sugar_network/toolkit/sneakernet.py16
-rw-r--r--sugar_network/toolkit/sugar.py5
-rw-r--r--sugar_network/toolkit/util.py622
-rw-r--r--sugar_network/zeroinstall/__init__.py13
-rw-r--r--sugar_network/zerosugar/cache.py8
-rw-r--r--sugar_network/zerosugar/clones.py55
-rw-r--r--sugar_network/zerosugar/injector.py7
-rw-r--r--sugar_network/zerosugar/licenses.py2
-rw-r--r--sugar_network/zerosugar/packagekit.py3
-rw-r--r--sugar_network/zerosugar/spec.py6
-rw-r--r--sweets.recipe4
-rw-r--r--tests/__init__.py26
-rw-r--r--tests/__main__.py16
-rw-r--r--tests/integration/__main__.py9
-rwxr-xr-xtests/integration/cli.py9
-rwxr-xr-xtests/integration/sync.py8
-rw-r--r--tests/units/__main__.py36
-rw-r--r--tests/units/client/__init__.py9
-rw-r--r--tests/units/client/__main__.py14
-rwxr-xr-xtests/units/client/client.py (renamed from tests/units/client.py)10
-rwxr-xr-xtests/units/client/home_mount.py (renamed from tests/units/home_mount.py)22
-rwxr-xr-xtests/units/client/journal.py (renamed from tests/units/journal.py)10
-rwxr-xr-xtests/units/client/mountset.py (renamed from tests/units/mountset.py)23
-rwxr-xr-xtests/units/client/node_mount.py (renamed from tests/units/node_mount.py)14
-rwxr-xr-xtests/units/client/proxy_commands.py (renamed from tests/units/proxy_commands.py)20
-rwxr-xr-xtests/units/client/remote_mount.py (renamed from tests/units/remote_mount.py)34
-rw-r--r--tests/units/db/__init__.py9
-rw-r--r--tests/units/db/__main__.py15
-rwxr-xr-xtests/units/db/commands.py502
-rwxr-xr-xtests/units/db/document.py1008
-rwxr-xr-xtests/units/db/env.py39
-rwxr-xr-xtests/units/db/index.py743
-rwxr-xr-xtests/units/db/metadata.py125
-rwxr-xr-xtests/units/db/migrate.py289
-rwxr-xr-xtests/units/db/storage.py214
-rwxr-xr-xtests/units/db/volume.py969
-rw-r--r--tests/units/node/__init__.py9
-rw-r--r--tests/units/node/__main__.py13
-rwxr-xr-xtests/units/node/auth.py (renamed from tests/units/auth.py)20
-rwxr-xr-xtests/units/node/node.py (renamed from tests/units/node.py)32
-rwxr-xr-xtests/units/node/obs.py (renamed from tests/units/obs.py)0
-rwxr-xr-xtests/units/node/stats.py (renamed from tests/units/stats.py)0
-rwxr-xr-xtests/units/node/sync_master.py (renamed from tests/units/sync_master.py)81
-rwxr-xr-xtests/units/node/sync_node.py (renamed from tests/units/sync_node.py)11
-rw-r--r--tests/units/resources/__init__.py9
-rw-r--r--tests/units/resources/__main__.py13
-rwxr-xr-xtests/units/resources/comment.py (renamed from tests/units/comment.py)2
-rwxr-xr-xtests/units/resources/context.py (renamed from tests/units/context.py)12
-rwxr-xr-xtests/units/resources/implementation.py (renamed from tests/units/implementation.py)4
-rwxr-xr-xtests/units/resources/review.py (renamed from tests/units/review.py)2
-rwxr-xr-xtests/units/resources/solution.py (renamed from tests/units/solution.py)2
-rwxr-xr-xtests/units/resources/volume.py (renamed from tests/units/volume.py)57
-rw-r--r--tests/units/toolkit/__init__.py9
-rw-r--r--tests/units/toolkit/__main__.py13
-rwxr-xr-xtests/units/toolkit/files_sync.py (renamed from tests/units/files_sync.py)75
-rwxr-xr-xtests/units/toolkit/http.py (renamed from tests/units/http.py)8
-rwxr-xr-xtests/units/toolkit/mountpoints.py (renamed from tests/units/mountpoints.py)3
-rwxr-xr-xtests/units/toolkit/router.py (renamed from tests/units/router.py)82
-rwxr-xr-xtests/units/toolkit/sneakernet.py (renamed from tests/units/sneakernet.py)0
-rwxr-xr-xtests/units/toolkit/util.py342
-rw-r--r--tests/units/zerosugar/__init__.py9
-rw-r--r--tests/units/zerosugar/__main__.py10
-rwxr-xr-xtests/units/zerosugar/clones.py (renamed from tests/units/clones.py)6
-rwxr-xr-xtests/units/zerosugar/injector.py (renamed from tests/units/injector.py)26
-rwxr-xr-xtests/units/zerosugar/spec.py (renamed from tests/units/spec.py)0
117 files changed, 9568 insertions, 1121 deletions
diff --git a/.gitmodules b/.gitmodules
index 59272ad..e8dac90 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,3 @@
-[submodule "sugar_network/zeroinstall/zeroinstall-injector"]
-path = sugar_network/zeroinstall/zeroinstall-injector
+[submodule "sugar_network/lib/zeroinstall-injector"]
+path = sugar_network/lib/zeroinstall-injector
url = git://git.sugarlabs.org/0sugar/zeroinstall-injector.git
diff --git a/sugar-network b/sugar-network
index 58b73e9..6bba505 100755
--- a/sugar-network
+++ b/sugar-network
@@ -25,15 +25,13 @@ from os.path import join
from gevent import monkey
-import active_document as ad
-from sugar_network import client, sugar, toolkit, IPCClient
+from sugar_network import db, client
from sugar_network.toolkit.router import IPCRouter, Request
from sugar_network.resources.volume import Volume
from sugar_network.client.mountset import Mountset
from sugar_network.client.mounts import HomeMount, RemoteMount
-from active_toolkit.options import Option
-from active_toolkit.sockets import BUFFER_SIZE
-from active_toolkit import printf, application, coroutine, enforce
+from sugar_network.toolkit import sugar, util, sugar, Option, BUFFER_SIZE
+from sugar_network.toolkit import printf, application, coroutine, enforce
anonymous = Option(
@@ -69,7 +67,7 @@ class Application(application.Application):
def __init__(self, **kwargs):
application.Application.__init__(self, **kwargs)
application.rundir.value = join(client.local_root.value, 'run')
- toolkit.init_logging(application.debug.value)
+ util.init_logging(application.debug.value)
@application.command(
'send POST API request')
@@ -94,7 +92,7 @@ class Application(application.Application):
def _call(self, method, post):
request = Request(method=method)
request.allow_redirects = True
- response = ad.Response()
+ response = db.Response()
reply = []
if post:
@@ -161,8 +159,8 @@ class Application(application.Application):
sugar.color = lambda: '#000000,#000000'
sugar.privkey_path = lambda: '/fake/path'
else:
- toolkit.ensure_dsa_pubkey(sugar.profile_path('owner.key'))
- volume = Volume(client.db_path())
+ util.ensure_dsa_pubkey(sugar.profile_path('owner.key'))
+ volume = Volume(client.path('db'))
mountset = Mountset(volume)
mountset['~'] = HomeMount(volume)
if not mountpoint or mountpoint == '/':
@@ -174,7 +172,7 @@ class Application(application.Application):
coroutine.spawn(server.serve_forever)
coroutine.dispatch()
- result = IPCClient(sync=True).call(request, response)
+ result = client.IPCClient(sync=True).call(request, response)
finally:
if server is not None:
diff --git a/sugar-network-client b/sugar-network-client
index 941f511..55fcb95 100755
--- a/sugar-network-client
+++ b/sugar-network-client
@@ -25,9 +25,8 @@ from os.path import join, abspath, exists
from gevent import monkey
-import active_document as ad
import sugar_network_webui as webui
-from sugar_network import toolkit, client, node
+from sugar_network import db, toolkit, client, node
from sugar_network.toolkit import sugar, mountpoints
from sugar_network.toolkit.router import IPCRouter
from sugar_network.client.mounts import HomeMount, RemoteMount
@@ -35,8 +34,8 @@ from sugar_network.client.mountset import Mountset
from sugar_network.zerosugar import clones
from sugar_network.node import stats
from sugar_network.resources.volume import Volume
-from active_toolkit.options import Option
-from active_toolkit import util, printf, application, coroutine, enforce
+from sugar_network.toolkit import Option
+from sugar_network.toolkit import util, printf, application, coroutine, enforce
class Application(application.Daemon):
@@ -45,7 +44,7 @@ class Application(application.Daemon):
application.Daemon.__init__(self, **kwargs)
self.jobs = coroutine.Pool()
- toolkit.init_logging(application.debug.value)
+ util.init_logging(application.debug.value)
new_root = (client.local_root.value != client.local_root.default)
client.local_root.value = abspath(client.local_root.value)
@@ -73,7 +72,7 @@ class Application(application.Daemon):
printf.info('Index database in %r', client.local_root.value)
- volume = Volume(client.db_path())
+ volume = Volume(client.path('db'))
try:
volume.populate()
self._sync(volume)
@@ -108,8 +107,8 @@ class Application(application.Daemon):
self.cmd_start()
def run(self):
- toolkit.ensure_dsa_pubkey(sugar.profile_path('owner.key'))
- volume = Volume(client.db_path(), lazy_open=client.lazy_open.value)
+ util.ensure_dsa_pubkey(sugar.profile_path('owner.key'))
+ volume = Volume(client.path('db'), lazy_open=client.lazy_open.value)
mountset = Mountset(volume)
mountset['~'] = HomeMount(volume)
mountset['/'] = RemoteMount(volume)
@@ -171,7 +170,7 @@ class Application(application.Daemon):
def _sync(self, volume):
contexts = volume['context']
- docs, __ = contexts.find(limit=ad.MAX_LIMIT, clone=[1, 2])
+ docs, __ = contexts.find(limit=db.MAX_LIMIT, clone=[1, 2])
for context in docs:
if not clones.ensure_clones(context.guid):
contexts.update(context.guid, clone=0)
@@ -205,7 +204,7 @@ Option.seek('client', client)
Option.seek('client', [sugar.keyfile, toolkit.tmpdir])
Option.seek('node', [node.port, node.sync_dirs])
Option.seek('stats', stats)
-Option.seek('active-document', ad)
+Option.seek('db', db)
app = Application(
name='sugar-network-client',
diff --git a/sugar-network-node b/sugar-network-node
index 5dc7875..314e750 100755
--- a/sugar-network-node
+++ b/sugar-network-node
@@ -22,19 +22,16 @@ from os.path import exists
from gevent import monkey
-import active_document as ad
import sugar_network_webui as webui
-from active_toolkit import coroutine, application
-from active_toolkit.options import Option
-from sugar_network import node, client, toolkit
+from sugar_network import db, node, client, toolkit
from sugar_network.client.mounts import LocalMount
from sugar_network.client.mountset import Mountset
from sugar_network.client.mounts import LocalMount
from sugar_network.node import stats, obs
from sugar_network.node.commands import NodeCommands
from sugar_network.resources.volume import Volume
-from sugar_network.toolkit import sugar, sneakernet
from sugar_network.toolkit.router import Router
+from sugar_network.toolkit import sugar, coroutine, application, util, Option
class Application(application.Daemon):
@@ -42,7 +39,7 @@ class Application(application.Daemon):
jobs = coroutine.Pool()
def run(self):
- toolkit.init_logging(application.debug.value)
+ util.init_logging(application.debug.value)
if toolkit.tmpdir.value and not exists(toolkit.tmpdir.value):
os.makedirs(toolkit.tmpdir.value)
@@ -114,7 +111,7 @@ Option.seek('node', node)
Option.seek('node', [toolkit.tmpdir])
Option.seek('stats', stats)
Option.seek('obs', obs)
-Option.seek('active-document', ad)
+Option.seek('db', db)
app = Application(
name='sugar-network-node',
diff --git a/sugar_network/__init__.py b/sugar_network/__init__.py
index a6a5910..3e35c90 100644
--- a/sugar_network/__init__.py
+++ b/sugar_network/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -12,27 +12,3 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-from sugar_network.toolkit import sugar
-from sugar_network.client import api_url, server_mode
-from sugar_network_webui import webui_port
-
-
-def clones(*args, **kwargs):
- import sugar_network.zerosugar.clones
- return sugar_network.zerosugar.clones.walk(*args, **kwargs)
-
-
-def Client(url=None, sugar_auth=True, **session):
- from sugar_network.toolkit import http
- if url is None:
- url = api_url.value
- return http.Client(url, sugar_auth=sugar_auth, **session)
-
-
-def IPCClient(**session):
- from sugar_network.toolkit import http
- from sugar_network.client import ipc_port
- # Since `IPCClient` uses only localhost, ignore `http_proxy` envar
- session['config'] = {'trust_env': False}
- return http.Client('http://localhost:%s' % ipc_port.value, **session)
diff --git a/sugar_network/client/__init__.py b/sugar_network/client/__init__.py
index 69afab0..480b152 100644
--- a/sugar_network/client/__init__.py
+++ b/sugar_network/client/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -14,12 +14,9 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
-import errno
-import logging
-from os.path import join, exists, abspath, dirname, expanduser
+from os.path import join, expanduser
-from active_toolkit.options import Option
-from sugar_network.toolkit import sugar
+from sugar_network.toolkit import Option, sugar
api_url = Option(
@@ -111,41 +108,20 @@ def path(*args):
return str(result)
-def ensure_path(*args):
- """Calculate a path from the root.
-
- If resulting directory path doesn't exists, it will be created.
-
- :param args:
- path parts to add to the root path; if ends with empty string,
- the resulting path will be treated as a path to a directory
- :returns:
- absolute path
-
- """
- if not args:
- result = local_root.value
- elif args[0].startswith(os.sep):
- result = join(*args)
- else:
- result = join(local_root.value, *args)
- result = str(result)
-
- if result.endswith(os.sep):
- result_dir = result = result.rstrip(os.sep)
- else:
- result_dir = dirname(result)
+def clones(*args, **kwargs):
+ import sugar_network.zerosugar.clones
+ return sugar_network.zerosugar.clones.walk(*args, **kwargs)
- if not exists(result_dir):
- try:
- os.makedirs(result_dir)
- except OSError, error:
- # In case if another process already create directory
- if error.errno != errno.EEXIST:
- raise
- return abspath(result)
+def Client(url=None, sugar_auth=True, **session):
+ from sugar_network.toolkit import http
+ if url is None:
+ url = api_url.value
+ return http.Client(url, sugar_auth=sugar_auth, **session)
-def db_path():
- return join(local_root.value, 'db')
+def IPCClient(**session):
+ from sugar_network.toolkit import http
+ # Since `IPCClient` uses only localhost, ignore `http_proxy` envar
+ session['config'] = {'trust_env': False}
+ return http.Client('http://localhost:%s' % ipc_port.value, **session)
diff --git a/sugar_network/client/journal.py b/sugar_network/client/journal.py
index 1f9f30c..783b9ca 100644
--- a/sugar_network/client/journal.py
+++ b/sugar_network/client/journal.py
@@ -22,12 +22,9 @@ import hashlib
import logging
from tempfile import NamedTemporaryFile
-import active_document as ad
-from sugar_network import client
-from sugar_network.toolkit import sugar, router
+from sugar_network import db, client
+from sugar_network.toolkit import BUFFER_SIZE, sugar, router, enforce
from sugar_network.toolkit.router import Request
-from active_toolkit.sockets import BUFFER_SIZE
-from active_toolkit import enforce
_logger = logging.getLogger('client.journal')
@@ -161,7 +158,7 @@ class Commands(object):
if key in request:
request[key] = int(request[key])
if 'reply' in request:
- reply = ad.to_list(request.pop('reply'))
+ reply = db.to_list(request.pop('reply'))
else:
reply = ['uid', 'title', 'description', 'preview']
if 'preview' in reply:
@@ -204,10 +201,10 @@ class Commands(object):
prop = request.path[2]
if prop == 'preview':
- return ad.PropertyMeta(path=_prop_path(guid, prop),
+ return db.PropertyMetadata(path=_prop_path(guid, prop),
mime_type='image/png')
elif prop == 'data':
- return ad.PropertyMeta(path=_ds_path(guid, 'data'),
+ return db.PropertyMetadata(path=_ds_path(guid, 'data'),
mime_type=get(guid, 'mime_type') or 'application/octet')
else:
response.content_type = 'application/json'
diff --git a/sugar_network/client/mounts.py b/sugar_network/client/mounts.py
index c76fe5a..4118997 100644
--- a/sugar_network/client/mounts.py
+++ b/sugar_network/client/mounts.py
@@ -18,14 +18,12 @@ import logging
from os.path import isabs, exists, join, basename
from gettext import gettext as _
-import active_document as ad
from sugar_network.zerosugar import injector
-from sugar_network.toolkit import http
+from sugar_network.toolkit import http, coroutine, exception, enforce
from sugar_network.toolkit.router import Request
from sugar_network.resources.volume import VolumeCommands
from sugar_network.client import journal
-from sugar_network import client, Client
-from active_toolkit import util, coroutine, enforce
+from sugar_network import db, client
_LOCAL_PROPS = frozenset(['favorite', 'clone'])
@@ -82,10 +80,10 @@ class LocalMount(VolumeCommands, _Mount):
volume.connect(self._events_cb)
- @ad.property_command(method='PUT', cmd='upload_blob')
+ @db.property_command(method='PUT', cmd='upload_blob')
def upload_blob(self, document, guid, prop, path, pass_ownership=False):
directory = self.volume[document]
- directory.metadata[prop].assert_access(ad.ACCESS_WRITE)
+ directory.metadata[prop].assert_access(db.ACCESS_WRITE)
enforce(isabs(path), 'Path is not absolute')
try:
directory.set_blob(guid, prop, path)
@@ -109,10 +107,10 @@ class HomeMount(LocalMount):
def name(self):
return _('Home')
- @ad.directory_command(method='POST', cmd='create_with_guid',
- permissions=ad.ACCESS_AUTH, mime_type='application/json')
+ @db.directory_command(method='POST', cmd='create_with_guid',
+ permissions=db.ACCESS_AUTH, mime_type='application/json')
def create_with_guid(self, request):
- with self._post(request, ad.ACCESS_CREATE) as (directory, doc):
+ with self._post(request, db.ACCESS_CREATE) as (directory, doc):
enforce('guid' in doc.props, 'GUID should be specified')
self.before_create(request, doc.props)
return directory.create(doc.props)
@@ -134,15 +132,15 @@ class _ProxyCommands(object):
self._home_volume = home_mount
def proxy_call(self, request, response):
- raise ad.CommandNotFound()
+ raise db.CommandNotFound()
- @ad.directory_command(method='GET',
- arguments={'reply': ad.to_list}, mime_type='application/json')
+ @db.directory_command(method='GET',
+ arguments={'reply': db.to_list}, mime_type='application/json')
def find(self, request, response, reply):
return self._proxy_get(request, response)
- @ad.document_command(method='GET',
- arguments={'reply': ad.to_list}, mime_type='application/json')
+ @db.document_command(method='GET',
+ arguments={'reply': db.to_list}, mime_type='application/json')
def get(self, request, response):
return self._proxy_get(request, response)
@@ -193,7 +191,7 @@ class _ProxyCommands(object):
return result
-class RemoteMount(ad.CommandsProcessor, _Mount, _ProxyCommands):
+class RemoteMount(db.CommandsProcessor, _Mount, _ProxyCommands):
@property
def name(self):
@@ -204,7 +202,7 @@ class RemoteMount(ad.CommandsProcessor, _Mount, _ProxyCommands):
return '/'.join((self._url,) + path)
def __init__(self, home_volume, listen_events=True):
- ad.CommandsProcessor.__init__(self)
+ db.CommandsProcessor.__init__(self)
_Mount.__init__(self)
_ProxyCommands.__init__(self, home_volume)
@@ -234,13 +232,13 @@ class RemoteMount(ad.CommandsProcessor, _Mount, _ProxyCommands):
self.mounted.wait(client.connect_timeout.value)
try:
try:
- return ad.CommandsProcessor.call(self, request, response)
- except ad.CommandNotFound:
+ return db.CommandsProcessor.call(self, request, response)
+ except db.CommandNotFound:
return self.proxy_call(request, response)
except http.ConnectionError:
if a_try:
raise
- util.exception('Got connection error, try to reconnect')
+ exception('Got connection error, try to reconnect')
continue
def set_mounted(self, value):
@@ -250,7 +248,7 @@ class RemoteMount(ad.CommandsProcessor, _Mount, _ProxyCommands):
else:
self._connections.kill()
- @ad.property_command(method='PUT', cmd='upload_blob')
+ @db.property_command(method='PUT', cmd='upload_blob')
def upload_blob(self, document, guid, prop, path, pass_ownership=False):
enforce(isabs(path), 'Path is not absolute')
@@ -273,12 +271,12 @@ class RemoteMount(ad.CommandsProcessor, _Mount, _ProxyCommands):
for url in self._api_urls:
try:
_logger.debug('Connecting to %r node', url)
- self._client = Client(url)
+ self._client = client.Client(url)
info = self._client.get(cmd='info')
if self._listen_events:
subscription = self._client.subscribe()
except Exception:
- util.exception(_logger, 'Cannot connect to %r node', url)
+ exception(_logger, 'Cannot connect to %r node', url)
continue
impl_info = info['documents'].get('implementation')
@@ -302,7 +300,7 @@ class RemoteMount(ad.CommandsProcessor, _Mount, _ProxyCommands):
event['mountpoint'] = self.mountpoint
self.publish(event)
except Exception:
- util.exception(_logger, 'Failed to dispatch remote event')
+ exception(_logger, 'Failed to dispatch remote event')
finally:
_logger.info('Got disconnected from %r node', url)
_Mount.set_mounted(self, False)
diff --git a/sugar_network/client/mountset.py b/sugar_network/client/mountset.py
index 28b0d69..058fc70 100644
--- a/sugar_network/client/mountset.py
+++ b/sugar_network/client/mountset.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -18,17 +18,15 @@ import socket
import logging
from os.path import join, exists
-import active_document as ad
-
-from sugar_network import client, node
-from sugar_network.toolkit import netlink, network, mountpoints, router
+from sugar_network import db, client, node
+from sugar_network.toolkit import netlink, mountpoints, router
+from sugar_network.toolkit import coroutine, util, exception, enforce
from sugar_network.client import journal, zeroconf
from sugar_network.client.mounts import LocalMount, NodeMount
from sugar_network.node.commands import NodeCommands
from sugar_network.node.sync_node import SyncCommands
from sugar_network.zerosugar import clones, injector
from sugar_network.resources.volume import Volume, Commands
-from active_toolkit import util, coroutine, enforce
_DB_DIRNAME = '.sugar-network'
@@ -36,18 +34,17 @@ _DB_DIRNAME = '.sugar-network'
_logger = logging.getLogger('client.mountset')
-class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
+class Mountset(dict, db.CommandsProcessor, Commands, journal.Commands,
SyncCommands):
def __init__(self, home_volume):
self.opened = coroutine.Event()
self._subscriptions = {}
- self._lang = ad.default_lang()
self._jobs = coroutine.Pool()
self._servers = coroutine.Pool()
dict.__init__(self)
- ad.CommandsProcessor.__init__(self)
+ db.CommandsProcessor.__init__(self)
SyncCommands.__init__(self, client.path('sync'))
Commands.__init__(self)
if not client.no_dbus.value:
@@ -96,7 +93,7 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
return router.stream_reader(file(path, 'rb'))
- @ad.volume_command(method='GET', cmd='mounts',
+ @db.volume_command(method='GET', cmd='mounts',
mime_type='application/json')
def mounts(self):
result = []
@@ -109,7 +106,7 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
})
return result
- @ad.volume_command(method='GET', cmd='mounted',
+ @db.volume_command(method='GET', cmd='mounted',
mime_type='application/json')
def mounted(self, mountpoint):
mount = self.get(mountpoint)
@@ -119,7 +116,7 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
mount.set_mounted(True)
return mount.mounted.is_set()
- @ad.volume_command(method='POST', cmd='publish')
+ @db.volume_command(method='POST', cmd='publish')
def publish(self, event, request=None):
if request is not None:
event = request.content
@@ -132,9 +129,9 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
try:
callback(event)
except Exception:
- util.exception(_logger, 'Failed to dispatch %r', event)
+ exception(_logger, 'Failed to dispatch %r', event)
- @ad.document_command(method='GET', cmd='make')
+ @db.document_command(method='GET', cmd='make')
def make(self, mountpoint, document, guid):
enforce(document == 'context', 'Only contexts can be launched')
@@ -142,8 +139,8 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
event['event'] = 'make'
self.publish(event)
- @ad.document_command(method='GET', cmd='launch',
- arguments={'args': ad.to_list})
+ @db.document_command(method='GET', cmd='launch',
+ arguments={'args': db.to_list})
def launch(self, mountpoint, document, guid, args, activity_id=None,
object_id=None, uri=None, color=None, no_spawn=None):
enforce(document == 'context', 'Only contexts can be launched')
@@ -160,11 +157,11 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
else:
self._jobs.spawn(do_launch)
- @ad.document_command(method='PUT', cmd='clone',
+ @db.document_command(method='PUT', cmd='clone',
arguments={
- 'force': ad.to_int,
- 'nodeps': ad.to_int,
- 'requires': ad.to_list,
+ 'force': db.to_int,
+ 'nodeps': db.to_int,
+ 'requires': db.to_list,
})
def clone(self, request, mountpoint, document, guid, force):
mount = self[mountpoint]
@@ -181,13 +178,13 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
context=guid, stability='stable',
order_by='-version', limit=1,
reply=['guid'])['result']
- enforce(impls, ad.NotFound, 'No implementations')
+ enforce(impls, db.NotFound, 'No implementations')
impl_id = impls[0]['guid']
props = mount(method='GET', document='context', guid=guid,
reply=['title', 'description'])
props['preview'] = mount(method='GET', document='context',
guid=guid, prop='preview')
- data_response = ad.Response()
+ data_response = db.Response()
props['data'] = mount(data_response, method='GET',
document='implementation', guid=impl_id,
prop='data')
@@ -215,7 +212,7 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
else:
raise RuntimeError('Command is not supported for %r' % document)
- @ad.document_command(method='PUT', cmd='favorite')
+ @db.document_command(method='PUT', cmd='favorite')
def favorite(self, request, mountpoint, document, guid):
if document == 'context':
if request.content or self.volume['context'].exists(guid):
@@ -223,7 +220,7 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
else:
raise RuntimeError('Command is not supported for %r' % document)
- @ad.volume_command(method='GET', cmd='whoami',
+ @db.volume_command(method='GET', cmd='whoami',
mime_type='application/json')
def whoami(self, request):
result = self['/'].call(request)
@@ -235,13 +232,13 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
return mount.call(request, response)
def call(self, request, response=None):
- request.accept_language = [self._lang]
+ request.accept_language = [db.default_lang()]
request.mountpoint = request.get('mountpoint')
if not request.mountpoint:
request.mountpoint = request['mountpoint'] = '/'
try:
- return ad.CommandsProcessor.call(self, request, response)
- except ad.CommandNotFound:
+ return db.CommandsProcessor.call(self, request, response)
+ except db.CommandNotFound:
return self.super_call(request, response)
def connect(self, callback, condition=None, **kwargs):
@@ -288,7 +285,7 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
if message is None:
break
# Otherwise, `socket.gethostbyname()` will return stale resolve
- network.res_init()
+ util.res_init()
def _found_mount(self, path):
volume, server_mode = self._mount_volume(path)
@@ -388,7 +385,7 @@ class Mountset(dict, ad.CommandsProcessor, Commands, journal.Commands,
requires=request.get('requires'),
order_by='-version', limit=1,
reply=['guid', 'spec'])['result']
- enforce(impls, ad.NotFound, 'No implementations')
+ enforce(impls, db.NotFound, 'No implementations')
pipe = injector.clone_impl(mountpoint, guid, **impls[0])
else:
pipe = injector.clone(mountpoint, guid)
diff --git a/sugar_network/db/__init__.py b/sugar_network/db/__init__.py
new file mode 100644
index 0000000..80385c5
--- /dev/null
+++ b/sugar_network/db/__init__.py
@@ -0,0 +1,38 @@
+# Copyright (C) 2011-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from sugar_network.db.env import \
+ ACCESS_CREATE, ACCESS_WRITE, ACCESS_READ, ACCESS_DELETE, \
+ ACCESS_AUTHOR, ACCESS_AUTH, ACCESS_PUBLIC, ACCESS_LEVELS, \
+ ACCESS_SYSTEM, ACCESS_LOCAL, ACCESS_REMOTE, MAX_LIMIT, \
+ index_flush_timeout, index_flush_threshold, index_write_queue, \
+ BadRequest, NotFound, Forbidden, CommandNotFound, \
+ uuid, default_lang, gettext
+
+from sugar_network.db.metadata import \
+ indexed_property, stored_property, blob_property, \
+ StoredProperty, BlobProperty, IndexedProperty, \
+ PropertyMetadata
+
+from sugar_network.db.commands import \
+ volume_command, volume_command_pre, volume_command_post, \
+ directory_command, directory_command_pre, directory_command_post, \
+ document_command, document_command_pre, document_command_post, \
+ property_command, property_command_pre, property_command_post, \
+ to_int, to_list, Request, Response, CommandsProcessor
+
+from sugar_network.db.document import Document
+
+from sugar_network.db.volume import SingleVolume, VolumeCommands
diff --git a/sugar_network/db/commands.py b/sugar_network/db/commands.py
new file mode 100644
index 0000000..3b10771
--- /dev/null
+++ b/sugar_network/db/commands.py
@@ -0,0 +1,346 @@
+# Copyright (C) 2012 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+
+from sugar_network.db import env
+from sugar_network.db.metadata import PropertyMetadata
+from sugar_network.toolkit import enforce
+
+
+_logger = logging.getLogger('db.commands')
+
+
+def db_command(scope, **kwargs):
+
+ def decorate(func):
+ func.scope = scope
+ func.kwargs = kwargs
+ return func
+
+ return decorate
+
+
+volume_command = \
+ lambda ** kwargs: db_command('volume', **kwargs)
+volume_command_pre = \
+ lambda ** kwargs: db_command('volume', wrapper='pre', **kwargs)
+volume_command_post = \
+ lambda ** kwargs: db_command('volume', wrapper='post', **kwargs)
+
+directory_command = \
+ lambda ** kwargs: db_command('directory', **kwargs)
+directory_command_pre = \
+ lambda ** kwargs: db_command('directory', wrapper='pre', **kwargs)
+directory_command_post = \
+ lambda ** kwargs: db_command('directory', wrapper='post', **kwargs)
+
+document_command = \
+ lambda ** kwargs: db_command('document', **kwargs)
+document_command_pre = \
+ lambda ** kwargs: db_command('document', wrapper='pre', **kwargs)
+document_command_post = \
+ lambda ** kwargs: db_command('document', wrapper='post', **kwargs)
+
+property_command = \
+ lambda ** kwargs: db_command('property', **kwargs)
+property_command_pre = \
+ lambda ** kwargs: db_command('property', wrapper='pre', **kwargs)
+property_command_post = \
+ lambda ** kwargs: db_command('property', wrapper='post', **kwargs)
+
+
+def to_int(value):
+ if isinstance(value, basestring):
+ if not value:
+ return 0
+ enforce(value.isdigit(), 'Argument should be an integer value')
+ return int(value)
+ return value
+
+
+def to_list(value):
+ if isinstance(value, basestring):
+ if value:
+ return value.split(',')
+ else:
+ return []
+ return value
+
+
+class Request(dict):
+
+ content = None
+ content_stream = None
+ content_length = None
+ content_type = None
+ access_level = env.ACCESS_REMOTE
+ accept_language = None
+ commands = None
+ response = None
+
+ def __init__(self, **kwargs):
+ dict.__init__(self, kwargs)
+ self._pos = 0
+
+ def __getitem__(self, key):
+ enforce(key in self, 'Cannot find %r request argument', key)
+ return self.get(key)
+
+ def read(self, size=None):
+ if self.content_stream is None:
+ return ''
+ rest = max(0, self.content_length - self._pos)
+ size = rest if size is None else min(rest, size)
+ result = self.content_stream.read(size)
+ if not result:
+ return ''
+ self._pos += len(result)
+ return result
+
+ def clone(self):
+ request = type(self)()
+ request.access_level = self.access_level
+ request.accept_language = self.accept_language
+ request.commands = self.commands
+ return request
+
+ def call(self, method, content=None, content_stream=None,
+ content_length=None, **kwargs):
+ enforce(self.commands is not None)
+
+ request = self.clone()
+ request.update(kwargs)
+ request['method'] = method
+ request.content = content
+ request.content_stream = content_stream
+ request.content_length = content_length
+
+ return self.commands.call(request, Response())
+
+ def __repr__(self):
+ args = ['content_length=%r' % self.content_length,
+ 'access_level=%r' % self.access_level,
+ 'accept_language=%r' % self.accept_language,
+ ] + ['%s=%r' % i for i in self.items()]
+ return '<db.Request %s>' % ' '.join(args)
+
+
+class Response(dict):
+
+ content_length = None
+ content_type = None
+ #: UNIX seconds of last modification
+ last_modified = None
+
+ def __init__(self, *args, **props):
+ if args:
+ props = args[0]
+ dict.__init__(self, props)
+
+ def __repr__(self):
+ args = ['%s=%r' % i for i in self.items()]
+ return '<db.Response %s>' % ' '.join(args)
+
+
+class CommandsProcessor(object):
+
+ def __init__(self, volume=None, parent=None):
+ self._commands = {
+ 'volume': _Commands(),
+ 'directory': _Commands(),
+ 'document': _Commands(),
+ 'property': _Commands(),
+ }
+ self.volume = volume
+ self.parent = parent
+
+ for scope, kwargs in _scan_class(self.__class__, False):
+ cmd = _Command((self,), **kwargs)
+ self._commands[scope].add(cmd)
+
+ if volume is not None:
+ for directory in volume.values():
+ for scope, kwargs in _scan_class(directory.document_class):
+ cmd = _ObjectCommand(directory, **kwargs)
+ self._commands[scope].add(cmd)
+
+ def super_call(self, request, response):
+ raise env.CommandNotFound()
+
+ def call(self, request, response=None):
+ cmd = self.resolve(request)
+ enforce(cmd is not None, env.CommandNotFound, 'Unsupported command')
+
+ enforce(request.access_level & cmd.access_level, env.Forbidden,
+ 'Operation is permitted on requester\'s level')
+
+ if response is None:
+ response = Response()
+ request.commands = self
+ request.response = response
+
+ if not request.accept_language:
+ request.accept_language = [env.default_lang()]
+
+ for arg, cast in cmd.arguments.items():
+ if arg not in request:
+ continue
+ try:
+ request[arg] = cast(request[arg])
+ except Exception, error:
+ raise RuntimeError('Cannot typecast %r command argument: %s' %
+ (arg, error))
+
+ args = cmd.get_args(request)
+
+ for pre in cmd.pre:
+ pre(*args, request=request)
+
+ kwargs = {}
+ for arg in cmd.kwarg_names:
+ if arg == 'request':
+ kwargs[arg] = request
+ elif arg == 'response':
+ kwargs[arg] = response
+ elif arg not in kwargs:
+ kwargs[arg] = request.get(arg)
+
+ result = cmd.callback(*args, **kwargs)
+
+ for post in cmd.post:
+ result = post(*args, result=result, request=request,
+ response=response)
+
+ if not response.content_type:
+ if isinstance(result, PropertyMetadata):
+ response.content_type = result.get('mime_type')
+ if not response.content_type:
+ response.content_type = cmd.mime_type
+
+ return result
+
+ def resolve(self, request):
+ key = (request.get('method', 'GET'), request.get('cmd'), None)
+
+ if 'document' not in request:
+ return self._commands['volume'].get(key)
+
+ document_key = key[:2] + (request['document'],)
+
+ if 'guid' not in request:
+ commands = self._commands['directory']
+ return commands.get(key) or commands.get(document_key)
+
+ if 'prop' not in request:
+ commands = self._commands['document']
+ return commands.get(key) or commands.get(document_key)
+
+ commands = self._commands['property']
+ return commands.get(key) or commands.get(document_key)
+
+
+class _Command(object):
+
+ def __init__(self, args, callback, method='GET', document=None, cmd=None,
+ mime_type=None, permissions=0, access_level=env.ACCESS_LEVELS,
+ arguments=None, pre=None, post=None):
+ self.args = args
+ self.callback = callback
+ self.mime_type = mime_type
+ self.permissions = permissions
+ self.access_level = access_level
+ self.kwarg_names = _function_arg_names(callback)
+ self.key = (method, cmd, document)
+ self.arguments = arguments or {}
+ self.pre = pre
+ self.post = post
+
+ def get_args(self, request):
+ return self.args
+
+ def __repr__(self):
+ return '%s(method=%s, cmd=%s, document=%s)' % \
+ ((self.callback.__name__,) + self.key)
+
+
+class _ObjectCommand(_Command):
+
+ def __init__(self, directory, **kwargs):
+ _Command.__init__(self, (), document=directory.metadata.name, **kwargs)
+ self._directory = directory
+
+ def get_args(self, request):
+ document = self._directory.get(request['guid'])
+ document.request = request
+ return (document,)
+
+
+class _Commands(dict):
+
+ def add(self, cmd):
+ enforce(cmd.key not in self, 'Command %r already exists', cmd)
+ self[cmd.key] = cmd
+
+
+def _function_arg_names(func):
+ if hasattr(func, 'im_func'):
+ func = func.im_func
+ if not hasattr(func, 'func_code'):
+ return []
+ code = func.func_code
+ # `1:` is for skipping the first, `self` or `cls`, argument
+ return code.co_varnames[1:code.co_argcount]
+
+
+def _scan_class(root_cls, is_document_class=True):
+ processed = set()
+ commands = {}
+
+ cls = root_cls
+ while cls is not None:
+ for name in dir(cls):
+ if name in processed:
+ continue
+ attr = getattr(cls, name)
+ if not hasattr(attr, 'scope'):
+ continue
+ enforce(not is_document_class or
+ attr.scope in ('document', 'property'),
+ 'Wrong scale command')
+ key = (attr.scope,
+ attr.kwargs.get('method') or 'GET',
+ attr.kwargs.get('cmd'))
+ kwargs = commands.setdefault(key, {'pre': [], 'post': []})
+ callback = getattr(root_cls, attr.__name__)
+ if 'wrapper' not in attr.kwargs:
+ kwargs.update(attr.kwargs)
+ kwargs['callback'] = callback
+ else:
+ for key in ('arguments',):
+ if key in attr.kwargs and key not in kwargs:
+ kwargs[key] = attr.kwargs[key]
+ kwargs[attr.kwargs['wrapper']].append(callback)
+ processed.add(name)
+ cls = cls.__base__
+
+ for (scope, method, cmd), kwargs in commands.items():
+ if 'callback' not in kwargs:
+ kwargs['method'] = method
+ if cmd:
+ kwargs['cmd'] = cmd
+ kwargs['callback'] = lambda self, request, response: \
+ self.super_call(request, response)
+ yield scope, kwargs
diff --git a/sugar_network/db/directory.py b/sugar_network/db/directory.py
new file mode 100644
index 0000000..886ce58
--- /dev/null
+++ b/sugar_network/db/directory.py
@@ -0,0 +1,468 @@
+# Copyright (C) 2011-2012 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import re
+import shutil
+import logging
+from os.path import exists, join
+
+from sugar_network.db import env
+from sugar_network.db.storage import Storage
+from sugar_network.db.metadata import Metadata, GUID_PREFIX
+from sugar_network.db.metadata import IndexedProperty, StoredProperty
+from sugar_network.toolkit import util, exception, enforce
+
+
+# To invalidate an existing index on structure changes
+_LAYOUT_VERSION = 3
+
+_GUID_RE = re.compile('[a-zA-Z0-9_+-.]+$')
+
+_logger = logging.getLogger('db.directory')
+
+
+class Directory(object):
+
+ def __init__(self, root, document_class, index_class,
+ notification_cb=None, seqno=None):
+ """
+ :param index_class:
+        what class to use to access indexes; for regular cases
+        (using `Master` and `Node`), it will always be a ProxyIndex to
+        keep the writer in a separate process.
+
+ """
+ if not exists(root):
+ os.makedirs(root)
+
+ if document_class.metadata is None:
+ # Metadata cannot be recreated
+ document_class.metadata = Metadata(document_class)
+ document_class.metadata['guid'] = IndexedProperty('guid',
+ permissions=env.ACCESS_CREATE | env.ACCESS_READ, slot=0,
+ prefix=GUID_PREFIX)
+ self.metadata = document_class.metadata
+
+ self.document_class = document_class
+ self._root = root
+ self._notification_cb = notification_cb
+ self._seqno = _SessionSeqno() if seqno is None else seqno
+
+ index_path = join(root, 'index')
+ if self._is_layout_stale():
+ if exists(index_path):
+ _logger.warning('%r layout is stale, remove index',
+ self.metadata.name)
+ shutil.rmtree(index_path, ignore_errors=True)
+ self._save_layout()
+
+ self._storage = Storage(root, self.metadata)
+ self._index = index_class(index_path, self.metadata, self._post_commit)
+
+ _logger.debug('Initiated %r document', document_class)
+
+ @property
+ def mtime(self):
+ return self._index.mtime
+
+ @mtime.setter
+ def mtime(self, value):
+ self._index.mtime = value
+ self._notify({'event': 'populate', 'props': {'mtime': value}})
+
+ def close(self):
+ """Flush index write pending queue and close the index."""
+ self._index.close()
+ self._storage = None
+ self._index = None
+
+ def commit(self):
+        """Flush pending changes to disk."""
+ self._index.commit()
+
+ def create(self, props=None, **kwargs):
+ """Create new document.
+
+ If `guid` property is not specified, it will be auto set.
+
+ :param kwargs:
+ new document properties
+ :returns:
+ GUID of newly created document
+
+ """
+ if props is None:
+ props = kwargs
+
+ if 'guid' in props:
+ guid = props['guid']
+ enforce(_GUID_RE.match(guid) is not None, 'Malformed GUID')
+ enforce(not self.exists(guid), 'Document already exists')
+ else:
+ guid = props['guid'] = env.uuid()
+
+ for prop_name, prop in self.metadata.items():
+ if isinstance(prop, StoredProperty):
+ if prop_name in props:
+ continue
+ enforce(prop.default is not None,
+ 'Property %r should be passed for new %r document',
+ prop_name, self.metadata.name)
+ if prop.default is not None:
+ props[prop_name] = prop.default
+
+ _logger.debug('Create %s[%s]: %r', self.metadata.name, guid, props)
+ self._post(guid, props, True)
+ return guid
+
+ def update(self, guid, props=None, **kwargs):
+ """Update properties for an existing document.
+
+ :param guid:
+ document GUID to store
+ :param kwargs:
+ properties to store, not necessary all document's properties
+
+ """
+ if props is None:
+ props = kwargs
+ if not props:
+ return
+ _logger.debug('Update %s[%s]: %r', self.metadata.name, guid, props)
+ self._post(guid, props, False)
+
+ def delete(self, guid):
+ """Delete document.
+
+ :param guid:
+ document GUID to delete
+
+ """
+ _logger.debug('Delete %s[%s]', self.metadata.name, guid)
+ event = {'event': 'delete', 'guid': guid}
+ self._index.delete(guid, self._post_delete, event)
+
+ def exists(self, guid):
+ return self._storage.get(guid).consistent
+
+ def get(self, guid):
+ cached_props = self._index.get_cached(guid)
+ record = self._storage.get(guid)
+ enforce(cached_props or record.exists, env.NotFound,
+ 'Document %r does not exist in %r',
+ guid, self.metadata.name)
+ return self.document_class(guid, record, cached_props)
+
+ def find(self, *args, **kwargs):
+ """Search documents.
+
+ The result will be an array of dictionaries with found documents'
+ properties.
+
+ :param offset:
+ the resulting list should start with this offset;
+ 0 by default
+ :param limit:
+ the resulting list will be at least `limit` size;
+ the `--find-limit` will be used by default
+ :param query:
+            a string in Xapian search format, empty to avoid text search
+ :param reply:
+ an array of property names to use only in the resulting list;
+ only GUID property will be used by default
+ :param order_by:
+ property name to sort resulting list; might be prefixed with ``+``
+ (or without any prefixes) for ascending order, and ``-`` for
+ descending order
+ :param group_by:
+            property name to group resulting list by; no grouping by default
+ :param kwargs:
+ a dictionary with property values to restrict the search
+ :returns:
+ a tuple of (`documents`, `total_count`); where the `total_count` is
+ the total number of documents conforming the search parameters,
+ i.e., not only documents that are included to the resulting list
+
+ """
+ mset = self._index.find(_Query(*args, **kwargs))
+
+ def iterate():
+ for hit in mset:
+ guid = hit.document.get_value(0)
+ record = self._storage.get(guid)
+ yield self.document_class(guid, record)
+
+ return iterate(), mset.get_matches_estimated()
+
+ def set_blob(self, guid, prop, data=None, size=None, mime_type=None,
+ **kwargs):
+ """Receive BLOB property.
+
+        This function works in parallel to setting non-BLOB property values
+ and `post()` function.
+
+ :param prop:
+ BLOB property name
+ :param data:
+            stream to read BLOB content, path to a file to copy, or a web URL
+ :param size:
+ read only specified number of bytes; otherwise, read until the EOF
+
+ """
+ prop = self.metadata[prop]
+ record = self._storage.get(guid)
+ seqno = self._seqno.next()
+
+ _logger.debug('Received %r BLOB property from %s[%s]',
+ prop.name, self.metadata.name, guid)
+
+ if not mime_type:
+ mime_type = prop.mime_type
+ record.set_blob(prop.name, data, size, seqno=seqno,
+ mime_type=mime_type, **kwargs)
+
+ if record.consistent:
+ self._post(guid, {'seqno': seqno}, False)
+
+ def populate(self):
+ """Populate the index.
+
+    This function needs to be called right after `init()` to pick up possible
+    pending changes made during the previous session when the index was not
+    properly closed.
+
+ :returns:
+ function is a generator that will be iterated after picking up
+ every object to let the caller execute urgent tasks
+
+ """
+ found = False
+ migrate = (self._index.mtime == 0)
+
+ for guid in self._storage.walk(self._index.mtime):
+ if not found:
+ _logger.info('Start populating %r index', self.metadata.name)
+ found = True
+
+ if migrate:
+ self._storage.migrate(guid)
+
+ record = self._storage.get(guid)
+ try:
+ props = {}
+ for name, prop in self.metadata.items():
+ if not isinstance(prop, StoredProperty):
+ continue
+ meta = record.get(name)
+ if meta is not None:
+ props[name] = meta['value']
+ self._index.store(guid, props, None, None, None)
+ yield
+ except Exception:
+ exception('Cannot populate %r in %r, invalidate it',
+ guid, self.metadata.name)
+ record.invalidate()
+
+ self._index.checkpoint()
+ if found:
+ self._save_layout()
+ self.commit()
+ self._notify({'event': 'populate'})
+
+ def diff(self, in_seq, out_seq, **kwargs):
+ if 'group_by' in kwargs:
+ # Pickup only most recent change
+ kwargs['order_by'] = '-seqno'
+ else:
+ kwargs['order_by'] = 'seqno'
+ # TODO On big requests, xapian can raise an exception on edits
+ kwargs['limit'] = env.MAX_LIMIT
+ kwargs['no_cache'] = True
+
+ for start, end in in_seq:
+ query = 'seqno:%s..' % start
+ if end:
+ query += str(end)
+ documents, __ = self.find(query=query, **kwargs)
+
+ for doc in documents:
+ diff = {}
+ diff_seq = util.Sequence()
+ for name in self.metadata.keys():
+ if name == 'seqno':
+ continue
+ meta = doc.meta(name)
+ if meta is None:
+ continue
+ seqno = meta.get('seqno')
+ if seqno not in in_seq:
+ continue
+ prop = diff[name] = {'mtime': meta['mtime']}
+ for i in ('value', 'mime_type', 'digest', 'path', 'url'):
+ if i in meta:
+ prop[i] = meta[i]
+ diff_seq.include(seqno, seqno)
+ yield doc.guid, diff
+ out_seq.include(diff_seq)
+
+ def merge(self, guid, diff, increment_seqno=True):
+ """Apply changes for documents."""
+ record = self._storage.get(guid)
+ seqno = None
+ merged = False
+
+ for prop, meta in diff.items():
+ orig_meta = record.get(prop)
+ if orig_meta is not None and orig_meta['mtime'] >= meta['mtime']:
+ continue
+ if increment_seqno:
+ if not seqno:
+ seqno = self._seqno.next()
+ meta['seqno'] = seqno
+ else:
+ meta['seqno'] = (orig_meta or {}).get('seqno') or 0
+
+ _logger.error('2> %r %r %r', prop, meta, orig_meta)
+
+ record.set(prop, **meta)
+ merged = True
+
+ if merged and record.consistent:
+ props = {}
+ if seqno:
+ props['seqno'] = seqno
+ self._index.store(guid, props, False,
+ self._pre_store, self._post_store,
+ # No need in after-merge event, further commit event
+ # is enough to avoid events flow on nodes synchronization
+ None, False)
+
+ return seqno
+
+ def _pre_store(self, guid, changes, event, increment_seqno):
+ seqno = changes.get('seqno')
+ if increment_seqno and not seqno:
+ seqno = changes['seqno'] = self._seqno.next()
+
+ record = self._storage.get(guid)
+ existed = record.exists
+
+ for name, prop in self.metadata.items():
+ if not isinstance(prop, StoredProperty):
+ continue
+ value = changes.get(name)
+ if value is None:
+ if existed:
+ meta = record.get(name)
+ if meta is not None:
+ value = meta['value']
+ changes[name] = prop.default if value is None else value
+ else:
+ if prop.localized:
+ if not isinstance(value, dict):
+ value = {env.default_lang(): value}
+ if existed:
+ meta = record.get(name)
+ if meta is not None:
+ meta['value'].update(value)
+ value = meta['value']
+ changes[name] = value
+ record.set(name, value=value, seqno=seqno)
+
+ def _post_store(self, guid, changes, event, increment_seqno):
+ if event:
+ self._notify(event)
+
+ def _post_delete(self, guid, event):
+ self._storage.delete(guid)
+ self._notify(event)
+
+ def _post_commit(self):
+ self._seqno.commit()
+ self._notify({'event': 'commit'})
+
+ def _post(self, guid, props, new):
+ event = {'event': 'create' if new else 'update',
+ 'props': props.copy(),
+ 'guid': guid,
+ }
+ self._index.store(guid, props, new,
+ self._pre_store, self._post_store, event, True)
+
+ def _notify(self, event):
+ if self._notification_cb is not None:
+ event['document'] = self.metadata.name
+ self._notification_cb(event)
+
+ def _save_layout(self):
+ path = join(self._root, 'layout')
+ with util.new_file(path) as f:
+ f.write(str(_LAYOUT_VERSION))
+
+ def _is_layout_stale(self):
+ path = join(self._root, 'layout')
+ if not exists(path):
+ return True
+ with file(path) as f:
+ version = f.read()
+ return not version.isdigit() or int(version) != _LAYOUT_VERSION
+
+
+class _SessionSeqno(object):
+
+ def __init__(self):
+ self._value = 0
+
+ @property
+ def value(self):
+ return self._value
+
+ def next(self):
+ self._value += 1
+ return self._value
+
+ def commit(self):
+ pass
+
+
+class _Query(object):
+
+ def __init__(self, offset=None, limit=None, query='', reply=None,
+ order_by=None, no_cache=False, group_by=None, **kwargs):
+ self.query = query
+ self.no_cache = no_cache
+ self.group_by = group_by
+
+ if offset is None:
+ offset = 0
+ self.offset = offset
+
+ self.limit = limit or 16
+
+ if reply is None:
+ reply = ['guid']
+ self.reply = reply
+
+ if order_by is None:
+ order_by = 'ctime'
+ self.order_by = order_by
+
+ self.request = kwargs
+
+ def __repr__(self):
+ return 'offset=%s limit=%s request=%r query=%r order_by=%s ' \
+ 'group_by=%s' % (self.offset, self.limit, self.request,
+ self.query, self.order_by, self.group_by)
diff --git a/sugar_network/db/document.py b/sugar_network/db/document.py
new file mode 100644
index 0000000..81e3fa6
--- /dev/null
+++ b/sugar_network/db/document.py
@@ -0,0 +1,100 @@
+# Copyright (C) 2011-2012 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+
+from sugar_network.db import env
+from sugar_network.db.metadata import StoredProperty, PropertyMetadata
+from sugar_network.db.metadata import indexed_property
+
+
+_logger = logging.getLogger('db.document')
+
+
+class Document(object):
+
+ #: `Metadata` object that describes the document
+ metadata = None
+
+ def __init__(self, guid, record, cached_props=None, request=None):
+ self.props = cached_props or {}
+ self.guid = guid
+ self._record = record
+ self.request = request
+
+ @property
+ def volume(self):
+ return self.request.commands.volume
+
+ @property
+ def directory(self):
+ return self.volume[self.metadata.name]
+
+ @indexed_property(slot=1000, prefix='IC', typecast=int,
+ permissions=env.ACCESS_READ, default=0)
+ def ctime(self, value):
+ return value
+
+ @indexed_property(slot=1001, prefix='IM', typecast=int,
+ permissions=env.ACCESS_READ, default=0)
+ def mtime(self, value):
+ return value
+
+ @indexed_property(slot=1002, prefix='IS', typecast=int,
+ permissions=0, default=0)
+ def seqno(self, value):
+ return value
+
+ def get(self, prop, accept_language=None):
+ """Get document's property value.
+
+ :param prop:
+ property name to get value
+ :returns:
+ `prop` value
+
+ """
+ prop = self.metadata[prop]
+
+ value = self.props.get(prop.name)
+ if value is None and self._record is not None:
+ meta = self._record.get(prop.name)
+ if isinstance(prop, StoredProperty):
+ if meta is not None:
+ value = meta.get('value')
+ else:
+ value = meta or PropertyMetadata()
+ self.props[prop.name] = value
+
+ if value is not None and accept_language:
+ if isinstance(prop, StoredProperty) and prop.localized:
+ value = env.gettext(value, accept_language)
+
+ return value
+
+ def properties(self, props, accept_language=None):
+ result = {}
+ for i in props:
+ result[i] = self.get(i, accept_language)
+ return result
+
+ def meta(self, prop):
+ return self._record.get(prop)
+
+ def __getitem__(self, prop):
+ return self.get(prop)
+
+ def __setitem__(self, prop, value):
+ self.props[prop] = value
diff --git a/sugar_network/db/env.py b/sugar_network/db/env.py
new file mode 100644
index 0000000..2a18b34
--- /dev/null
+++ b/sugar_network/db/env.py
@@ -0,0 +1,148 @@
+# Copyright (C) 2011-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from sugar_network.toolkit import Option
+
+
+ACCESS_CREATE = 1
+ACCESS_WRITE = 2
+ACCESS_READ = 4
+ACCESS_DELETE = 8
+ACCESS_PUBLIC = ACCESS_CREATE | ACCESS_WRITE | ACCESS_READ | ACCESS_DELETE
+
+ACCESS_AUTH = 16
+ACCESS_AUTHOR = 32
+
+ACCESS_SYSTEM = 64
+ACCESS_LOCAL = 128
+ACCESS_REMOTE = 256
+ACCESS_LEVELS = ACCESS_SYSTEM | ACCESS_LOCAL | ACCESS_REMOTE
+
+ACCESS_NAMES = {
+ ACCESS_CREATE: 'Create',
+ ACCESS_WRITE: 'Write',
+ ACCESS_READ: 'Read',
+ ACCESS_DELETE: 'Delete',
+ }
+
+MAX_LIMIT = 2147483648
+
+
+index_flush_timeout = Option(
+ 'flush index index after specified seconds since the last change',
+ default=5, type_cast=int)
+
+index_flush_threshold = Option(
+ 'flush index every specified changes',
+ default=32, type_cast=int)
+
+index_write_queue = Option(
+ 'if active-document is being used for the scheme with one writer '
+ 'process and multiple reader processes, this option specifies '
+ 'the writer\'s queue size',
+ default=256, type_cast=int)
+
+
+def uuid():
+ """Generate GUID value.
+
+    Function will transform `uuid.uuid1()` result to leave only alnum symbols.
+ The reason is reusing the same resulting GUID in different cases, e.g.,
+ for Telepathy names where `-` symbols, from `uuid.uuid1()`, are not
+ permitted.
+
+ :returns:
+ GUID string value
+
+ """
+ from uuid import uuid1
+ return ''.join(str(uuid1()).split('-'))
+
+
+def default_lang():
+ """Default language to fallback for localized strings.
+
+ :returns:
+ string in format of HTTP's Accept-Language, e.g., `en-gb`.
+
+ """
+ global _default_lang
+
+ if _default_lang is None:
+ import locale
+ lang = locale.getdefaultlocale()[0]
+ if lang:
+ _default_lang = lang.replace('_', '-').lower()
+ else:
+ _default_lang = 'en'
+
+ return _default_lang
+
+
+def gettext(value, accept_language=None):
+ if not value:
+ return ''
+ if not isinstance(value, dict):
+ return value
+
+ if accept_language is None:
+ accept_language = [default_lang()]
+ elif isinstance(accept_language, basestring):
+ accept_language = [accept_language]
+ stripped_value = None
+
+ for lang in accept_language:
+ result = value.get(lang)
+ if result is not None:
+ return result
+
+ prime_lang = lang.split('-')[0]
+ if prime_lang != lang:
+ result = value.get(prime_lang)
+ if result is not None:
+ return result
+
+ if stripped_value is None:
+ stripped_value = {}
+ for k, v in value.items():
+ if '-' in k:
+ stripped_value[k.split('-', 1)[0]] = v
+ result = stripped_value.get(prime_lang)
+ if result is not None:
+ return result
+
+ return value[min(value.keys())]
+
+
+class BadRequest(Exception):
+ """Bad requested resource."""
+ pass
+
+
+class NotFound(Exception):
+ """Resource was not found."""
+ pass
+
+
+class Forbidden(Exception):
+ """Caller does not have permissions to get access."""
+ pass
+
+
+class CommandNotFound(Exception):
+ pass
+
+
+_default_lang = None
diff --git a/sugar_network/db/index.py b/sugar_network/db/index.py
new file mode 100644
index 0000000..5180e02
--- /dev/null
+++ b/sugar_network/db/index.py
@@ -0,0 +1,443 @@
+# Copyright (C) 2011-2012 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import re
+import time
+import shutil
+import logging
+from os.path import exists, join
+
+import xapian
+
+from sugar_network.db import env
+from sugar_network.db.metadata import IndexedProperty, GUID_PREFIX
+from sugar_network.toolkit import coroutine, exception, enforce
+
+# Additional Xapian term prefix for exact search terms
+_EXACT_PREFIX = 'X'
+
+# The regexp to extract exact search terms from a query string
+_EXACT_QUERY_RE = re.compile('([a-zA-Z0-9_]+):=(")?((?(2)[^"]+|\\S+))(?(2)")')
+
+# How many times to call Xapian database reopen() before fail
+_REOPEN_LIMIT = 10
+
+_logger = logging.getLogger('db.index')
+
+
class IndexReader(object):
    """Read-only access to an index."""

    def __init__(self, root, metadata, commit_cb=None):
        """
        :param root:
            path to the directory with the Xapian index
        :param metadata:
            `Metadata` of the document type being indexed
        :param commit_cb:
            optional callback called after every disk commit

        """
        self.metadata = metadata
        self._db = None
        self._props = {}
        self._path = root
        self._mtime_path = join(self._path, 'mtime')
        self._dirty = True
        self._commit_cb = commit_cb

        # Only indexed properties take part in queries
        for name, prop in self.metadata.items():
            if isinstance(prop, IndexedProperty):
                self._props[name] = prop

    @property
    def mtime(self):
        """UNIX seconds of the last `commit()` call."""
        if exists(self._mtime_path):
            return int(os.stat(self._mtime_path).st_mtime)
        else:
            return 0

    @mtime.setter
    def mtime(self, value):
        # Create the stamp file if needed and set its mtime to `value`
        with file(self._mtime_path, 'w'):
            pass
        os.utime(self._mtime_path, (value, value))

    def get_cached(self, guid):
        """Return cached document.

        Only in case if index support caching updates.

        :param guid:
            document GUID to get cache for
        :returns:
            dictionary with cached properties or `None`

        """
        pass

    def store(self, guid, properties, new, pre_cb=None, post_cb=None, *args):
        """Store new document in the index.

        :param guid:
            document's GUID to store
        :param properties:
            document's properties to store; for non new entities,
            not necessary all document's properties
        :param new:
            initial store for the document; `None` for merging from other nodes
        :param pre_cb:
            callback to execute before storing;
            will be called with passing `guid` and `properties`
        :param post_cb:
            callback to execute after storing;
            will be called with passing `guid` and `properties`

        """
        raise NotImplementedError()

    def delete(self, guid, post_cb=None, *args):
        """Delete a document from the index.

        :param guid:
            document's GUID to remove
        :param post_cb:
            callback to execute after deleting;
            will be called with passing `guid`

        """
        raise NotImplementedError()

    def find(self, query):
        """Search documents within the index.

        Function interface is the same as for `db.Document.find`.

        """
        start_timestamp = time.time()
        # This will assure that the results count is exact.
        check_at_least = query.offset + query.limit + 1

        enquire = self._enquire(query.request, query.query, query.order_by,
                query.group_by)
        mset = self._call_db(enquire.get_mset, query.offset, query.limit,
                check_at_least)

        _logger.debug('Found in %s: %s time=%s total=%s parsed=%s',
                self.metadata.name, query, time.time() - start_timestamp,
                mset.get_matches_estimated(), enquire.get_query())

        return mset

    def commit(self):
        """Flush index changes to the disk."""
        raise NotImplementedError()

    def _enquire(self, request, query, order_by, group_by):
        # Assemble a xapian.Enquire from the free-text query, exact
        # per-property filters, sorting and grouping requests
        enquire = xapian.Enquire(self._db)
        queries = []
        and_not_queries = []
        boolean_queries = []

        if query:
            # `prop:=value` pairs are moved from the query string
            # into the `request` dictionary
            query = self._extract_exact_search_terms(query, request)

        if query:
            parser = xapian.QueryParser()
            parser.set_database(self._db)
            for name, prop in self._props.items():
                if not prop.prefix:
                    continue
                if prop.boolean:
                    parser.add_boolean_prefix(name, prop.prefix)
                else:
                    parser.add_prefix(name, prop.prefix)
                parser.add_prefix('', prop.prefix)
                if prop.slot is not None and \
                        prop.typecast in [int, float, bool]:
                    # Support `prop:low..high` range queries
                    value_range = xapian.NumberValueRangeProcessor(
                            prop.slot, name + ':')
                    parser.add_valuerangeprocessor(value_range)
            parser.add_prefix('', '')
            query = parser.parse_query(query,
                    xapian.QueryParser.FLAG_PHRASE |
                    xapian.QueryParser.FLAG_BOOLEAN |
                    xapian.QueryParser.FLAG_LOVEHATE |
                    xapian.QueryParser.FLAG_PARTIAL |
                    xapian.QueryParser.FLAG_WILDCARD |
                    xapian.QueryParser.FLAG_PURE_NOT,
                    '')
            queries.append(query)

        for name, value in request.items():
            prop = self._props.get(name)
            if prop is None or not prop.prefix:
                continue

            sub_queries = []
            not_queries = []
            for needle in value if type(value) in (tuple, list) else [value]:
                if needle is None:
                    continue
                needle = prop.to_string(needle)[0]
                # `!value` excludes within this property's matches;
                # `-value` excludes from the whole result set
                if needle.startswith('!'):
                    term = _term(prop.prefix, needle[1:])
                    not_queries.append(xapian.Query(term))
                elif needle.startswith('-'):
                    term = _term(prop.prefix, needle[1:])
                    and_not_queries.append(xapian.Query(term))
                else:
                    term = _term(prop.prefix, needle)
                    sub_queries.append(xapian.Query(term))

            if not_queries:
                not_query = xapian.Query(xapian.Query.OP_AND_NOT,
                        [xapian.Query(''),
                            xapian.Query(xapian.Query.OP_OR, not_queries)])
                sub_queries.append(not_query)

            if sub_queries:
                if len(sub_queries) == 1:
                    query = sub_queries[0]
                else:
                    query = xapian.Query(xapian.Query.OP_OR, sub_queries)
                if prop.boolean:
                    boolean_queries.append(query)
                else:
                    queries.append(query)

        final = None
        if queries:
            final = xapian.Query(xapian.Query.OP_AND, queries)
        if boolean_queries:
            # Boolean properties filter the result without affecting weights
            query = xapian.Query(xapian.Query.OP_AND, boolean_queries)
            if final is None:
                final = query
            else:
                final = xapian.Query(xapian.Query.OP_FILTER, [final, query])
        if final is None:
            final = xapian.Query('')
        for i in and_not_queries:
            final = xapian.Query(xapian.Query.OP_AND_NOT, [final, i])
        enquire.set_query(final)

        if hasattr(xapian, 'MultiValueKeyMaker'):
            sorter = xapian.MultiValueKeyMaker()
            if order_by:
                # Leading `+`/`-` selects ascending/descending order
                if order_by.startswith('+'):
                    reverse = False
                    order_by = order_by[1:]
                elif order_by.startswith('-'):
                    reverse = True
                    order_by = order_by[1:]
                else:
                    reverse = False
                prop = self._props.get(order_by)
                enforce(prop is not None and prop.slot is not None,
                        'Cannot sort using %r property of %r',
                        order_by, self.metadata.name)
                sorter.add_value(prop.slot, reverse)
            # Sort by ascending GUID to make order predictable all time
            sorter.add_value(0, False)
            enquire.set_sort_by_key(sorter, reverse=False)
        else:
            _logger.warning('In order to support sorting, '
                    'Xapian should be at least 1.2.0')

        if group_by:
            prop = self._props.get(group_by)
            enforce(prop is not None and prop.slot is not None,
                    'Cannot group by %r property of %r',
                    group_by, self.metadata.name)
            enquire.set_collapse_key(prop.slot)

        return enquire

    def _call_db(self, op, *args):
        # Retry `op` up to _REOPEN_LIMIT times, reopening the Xapian
        # database after each failure (writers may have changed it)
        tries = 0
        while True:
            try:
                return op(*args)
            except xapian.DatabaseError, error:
                if tries >= _REOPEN_LIMIT:
                    _logger.warning('Cannot open %r index',
                            self.metadata.name)
                    raise
                _logger.debug('Fail to %r %r index, will reopen it %sth '
                        'time: %s', op, self.metadata.name, tries, error)
                time.sleep(tries * .1)
                self._db.reopen()
                tries += 1

    def _extract_exact_search_terms(self, query, props):
        # Pop every `prop:=value` (optionally quoted) pair out of the
        # query string and move it into the `props` dictionary
        while True:
            exact_term = _EXACT_QUERY_RE.search(query)
            if exact_term is None:
                break
            query = query[:exact_term.start()] + query[exact_term.end():]
            term, __, value = exact_term.groups()
            prop = self.metadata.get(term)
            if isinstance(prop, IndexedProperty) and prop.prefix:
                props[term] = value
        return query
+
+
class IndexWriter(IndexReader):
    """Write access to Xapian databases."""

    def __init__(self, root, metadata, commit_cb=None):
        IndexReader.__init__(self, root, metadata, commit_cb)

        # Number of not-yet-flushed updates; committed either by
        # threshold (`_check_for_commit`) or by timeout (`_commit_handler`)
        self._pending_updates = 0
        self._commit_cond = coroutine.Event()
        self._commit_job = coroutine.spawn(self._commit_handler)

        # Let `_commit_handler()` call `wait()` to not miss immediate commit
        coroutine.dispatch()

        self._do_open()

    def close(self):
        """Flush index write pending queue and close the index."""
        if self._db is None:
            return
        self._commit()
        self._commit_job.kill()
        self._commit_job = None
        self._db = None

    def find(self, query):
        # Reopen the database if it was closed in the meantime
        if self._db is None:
            self._do_open()
        return IndexReader.find(self, query)

    def store(self, guid, properties, new, pre_cb=None, post_cb=None, *args):
        """Create or replace the document in the index.

        See `IndexReader.store` for arguments description.

        """
        if self._db is None:
            self._do_open()

        if pre_cb is not None:
            pre_cb(guid, properties, *args)

        _logger.debug('Index %r object: %r', self.metadata.name, properties)

        document = xapian.Document()
        term_generator = xapian.TermGenerator()
        term_generator.set_document(document)

        for name, prop in self._props.items():
            # Slot 0 is reserved for the GUID itself
            value = guid if prop.slot == 0 else properties[name]

            if prop.slot is not None:
                if prop.typecast in [int, float, bool]:
                    # Serialize numbers so that they sort correctly
                    add_value = xapian.sortable_serialise(value)
                else:
                    if prop.localized:
                        value = env.gettext(value) or ''
                    add_value = prop.to_string(value)[0]
                document.add_value(prop.slot, add_value)

            if prop.prefix or prop.full_text:
                for value in prop.to_string(value):
                    if prop.prefix:
                        if prop.boolean:
                            document.add_boolean_term(
                                    _term(prop.prefix, value))
                        else:
                            document.add_term(_term(prop.prefix, value))
                    if prop.full_text:
                        term_generator.index_text(value, 1, prop.prefix or '')
                    term_generator.increase_termpos()

        self._db.replace_document(_term(GUID_PREFIX, guid), document)
        self._pending_updates += 1

        if post_cb is not None:
            post_cb(guid, properties, *args)

        self._check_for_commit()

    def delete(self, guid, post_cb=None, *args):
        """Remove the document from the index.

        See `IndexReader.delete` for arguments description.

        """
        if self._db is None:
            self._do_open()

        _logger.debug('Delete %r document from %r',
                guid, self.metadata.name)

        self._db.delete_document(_term(GUID_PREFIX, guid))
        self._pending_updates += 1

        if post_cb is not None:
            post_cb(guid, *args)

        self._check_for_commit()

    def commit(self):
        """Flush pending changes to the disk immediately."""
        if self._db is None:
            return
        self._commit()
        # Trigger condition to reset waiting for `index_flush_timeout` timeout
        self._commit_cond.set()

    def checkpoint(self):
        # Touch the mtime stamp and mark the index as consistent
        with file(self._mtime_path, 'w'):
            pass
        self._dirty = False

    def _do_open(self):
        try:
            self._db = xapian.WritableDatabase(self._path,
                    xapian.DB_CREATE_OR_OPEN)
        except xapian.DatabaseError:
            # A corrupted index is recreated from scratch; the documents
            # themselves live in the storage, not in the index
            exception('Cannot open Xapian index in %r, will rebuild it',
                    self.metadata.name)
            shutil.rmtree(self._path, ignore_errors=True)
            self._db = xapian.WritableDatabase(self._path,
                    xapian.DB_CREATE_OR_OPEN)

    def _commit(self):
        if self._pending_updates <= 0:
            return

        _logger.debug('Commiting %s changes of %r index to the disk',
                self._pending_updates, self.metadata.name)
        ts = time.time()

        # `commit()` superseded `flush()` in Xapian 1.1+
        if hasattr(self._db, 'commit'):
            self._db.commit()
        else:
            self._db.flush()
        if not self._dirty:
            self.checkpoint()
        self._pending_updates = 0

        _logger.debug('Commit %r changes took %s seconds',
                self.metadata.name, time.time() - ts)

        if self._commit_cb is not None:
            self._commit_cb()

    def _check_for_commit(self):
        if env.index_flush_threshold.value > 0 and \
                self._pending_updates >= env.index_flush_threshold.value:
            # Avoid processing heavy commits in the same coroutine
            self._commit_cond.set()

    def _commit_handler(self):
        # Background coroutine: commit on timeout or on explicit wakeup
        if env.index_flush_timeout.value > 0:
            timeout = env.index_flush_timeout.value
        else:
            timeout = None

        while True:
            self._commit_cond.wait(timeout)
            self._commit()
            self._commit_cond.clear()
+
+
def _term(prefix, value):
    # Build an exact-search term; only the first line of the value is
    # used and it is truncated to keep the term under Xapian's length
    # limit (243 bytes of payload plus the prefixes)
    return _EXACT_PREFIX + prefix + str(value).split('\n')[0][:243]
diff --git a/sugar_network/db/metadata.py b/sugar_network/db/metadata.py
new file mode 100644
index 0000000..301de76
--- /dev/null
+++ b/sugar_network/db/metadata.py
@@ -0,0 +1,397 @@
+# Copyright (C) 2011-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import types
+import cPickle as pickle
+from os.path import exists
+
+from sugar_network.db import env
+from sugar_network.toolkit import enforce
+
+
+#: Xapian term prefix for GUID value
+GUID_PREFIX = 'I'
+
+_LIST_TYPES = (list, tuple, frozenset)
+
+
def indexed_property(property_class=None, *args, **kwargs):
    """Decorator to declare a document property backed by a `_Property`.

    The decorated function becomes the property getter (it receives the
    raw stored value and may post-process it); an optional `.setter`
    decoration provides the setter. `Metadata.__init__` later finds
    these attributes via the `_is_db_property` marker and turns them
    into regular Python properties on the document class.

    :param property_class:
        `_Property` subclass to instantiate;
        defaults to `IndexedProperty`

    """

    def getter(func, self):
        value = self[func.__name__]
        return func(self, value)

    def decorate_setter(func, attr):
        attr.prop.setter = lambda self, value: \
                self.set(attr.name, func(self, value))
        attr.prop.on_set = func
        return attr

    def decorate_getter(func):
        enforce(func.__name__ != 'guid',
                "Active property should not have 'guid' name")
        attr = lambda self: getter(func, self)
        attr.setter = lambda func: decorate_setter(func, attr)
        # pylint: disable-msg=W0212
        attr._is_db_property = True
        attr.name = func.__name__
        attr.prop = (property_class or IndexedProperty)(
                attr.name, *args, **kwargs)
        attr.prop.on_get = func
        return attr

    return decorate_getter
+
+
# Shortcuts for declaring non-indexed and BLOB document properties
stored_property = lambda ** kwargs: indexed_property(StoredProperty, **kwargs)
blob_property = lambda ** kwargs: indexed_property(BlobProperty, **kwargs)
+
+
class Metadata(dict):
    """Structure to describe the document.

    Dictionary derived class that contains `_Property` objects.

    """

    def __init__(self, cls):
        """
        :param cls:
            class inherited from `db.Document`

        """
        self._name = cls.__name__.lower()

        # Track used slots/prefixes to catch duplicate declarations
        slots = {}
        prefixes = {}

        for attr in [getattr(cls, i) for i in dir(cls)]:
            # Only attributes created by `indexed_property` count
            if not hasattr(attr, '_is_db_property'):
                continue

            prop = attr.prop

            if hasattr(prop, 'slot'):
                enforce(prop.slot is None or prop.slot not in slots,
                        'Property %r has a slot already defined for %r in %r',
                        prop.name, slots.get(prop.slot), self.name)
                slots[prop.slot] = prop.name

            if hasattr(prop, 'prefix'):
                enforce(not prop.prefix or prop.prefix not in prefixes,
                        'Property %r has a prefix already defined for %r',
                        prop.name, prefixes.get(prop.prefix))
                prefixes[prop.prefix] = prop.name

            # Replace the marker attribute with a real Python property
            if prop.setter is not None:
                setattr(cls, attr.name, property(attr, prop.setter))
            else:
                setattr(cls, attr.name, property(attr))

            self[prop.name] = prop

    @property
    def name(self):
        """Document type name."""
        return self._name

    def __getitem__(self, prop_name):
        enforce(prop_name in self, 'There is no %r property in %r',
                prop_name, self.name)
        return dict.__getitem__(self, prop_name)
+
+
class PropertyMetadata(dict):
    """Dictionary with metadata of a stored property value.

    When loaded from a file, the pickled metadata is merged with the
    path to the associated BLOB file (if any) and the file's mtime.

    """

    # Suffix of files that keep BLOB content next to its metadata
    BLOB_SUFFIX = '.blob'

    def __init__(self, path_=None, **meta):
        if path_:
            with file(path_) as f:
                meta.update(pickle.load(f))
            if exists(path_ + PropertyMetadata.BLOB_SUFFIX):
                meta['path'] = path_ + PropertyMetadata.BLOB_SUFFIX
            meta['mtime'] = int(os.stat(path_).st_mtime)
        dict.__init__(self, meta)

    @classmethod
    def is_blob(cls, blob):
        # BLOB values may be `None`, a string (path or raw content),
        # a `PropertyMetadata`, or a file-like object
        return isinstance(blob, (type(None), basestring, cls)) or \
                hasattr(blob, 'read')
+
+
class _Property(object):
    """Basic class to collect information about document property."""

    def __init__(self, name, permissions=env.ACCESS_PUBLIC, typecast=None,
            reprcast=None, default=None):
        self.setter = None
        self.on_get = lambda self, x: x
        self.on_set = None
        self._name = name
        self._permissions = permissions
        self._typecast = typecast
        # Optional hook to convert a value before indexing, see to_string()
        self._reprcast = reprcast
        self._default = default

    @property
    def name(self):
        """Property name."""
        return self._name

    @property
    def permissions(self):
        """Specify access to the property.

        Value might be ORed composition of `db.ACCESS_*`
        constants.

        """
        return self._permissions

    @property
    def typecast(self):
        """Cast property value before storing in the system.

        Supported values are:
        * `None`, string values
        * `int`, integer values
        * `float`, float values
        * `bool`, boolean values represented by symbols `0` and `1`
        * sequence of strings, property value should conform to one of
          the values from the sequence

        """
        return self._typecast

    @property
    def composite(self):
        """Is property value a list of values."""
        is_composite, __ = _is_composite(self.typecast)
        return is_composite

    @property
    def default(self):
        """Default property value or None."""
        return self._default

    def decode(self, value):
        """Convert property value according to its `typecast`."""
        if self.typecast is None:
            return value
        return _decode(self.typecast, value)

    def to_string(self, value):
        """Convert value to list of strings ready to index."""
        result = []

        if self._reprcast is not None:
            value = self._reprcast(value)

        for value in (value if type(value) in _LIST_TYPES else [value]):
            if type(value) is bool:
                value = int(value)
            if type(value) is unicode:
                value = unicode(value).encode('utf8')
            else:
                value = str(value)
            result.append(value)

        return result

    def assert_access(self, mode):
        """Is access to the property permitted.

        If there are no permissions, function should raise
        `db.Forbidden` exception.

        :param mode:
            one of `db.ACCESS_*` constants
            to specify the access mode

        """
        enforce(mode & self.permissions, env.Forbidden,
                '%s access is disabled for %r property',
                env.ACCESS_NAMES[mode], self.name)
+
+
class StoredProperty(_Property):
    """Property that can be saved in persistent storage."""

    def __init__(self, name, localized=False, typecast=None, reprcast=None,
            **kwargs):
        self._localized = localized

        if localized:
            # Localized values are {lang: value} dictionaries; the
            # built-in casts below enforce/flatten that form
            enforce(typecast is None,
                    'typecast should be None for localized properties')
            enforce(reprcast is None,
                    'reprcast should be None for localized properties')
            typecast = _localized_typecast
            reprcast = _localized_reprcast

        _Property.__init__(self, name, typecast=typecast, reprcast=reprcast,
                **kwargs)

    @property
    def localized(self):
        """Property value will be stored per locale."""
        return self._localized
+
+
class IndexedProperty(StoredProperty):
    """Property that need to be indexed."""

    def __init__(self, name, slot=None, prefix=None, full_text=False,
            boolean=False, **kwargs):
        enforce(name == 'guid' or slot != 0,
                "For %r property, slot '0' is reserved for internal needs",
                name)
        enforce(name == 'guid' or prefix != GUID_PREFIX,
                'For %r property, prefix %r is reserved for internal needs',
                name, GUID_PREFIX)
        enforce(slot is not None or prefix or full_text,
                'For %r property, either slot, prefix or full_text '
                'need to be set',
                name)
        enforce(slot is None or _is_sloted_prop(kwargs.get('typecast')),
                'Slot can be set only for properties for str, int, float, '
                'bool types, or, for list of these types')

        StoredProperty.__init__(self, name, **kwargs)
        self._slot = slot
        self._prefix = prefix
        self._full_text = full_text
        self._boolean = boolean

    @property
    def slot(self):
        """Xapian document's slot number to add property value to."""
        return self._slot

    @property
    def prefix(self):
        """Xapian search term prefix, if `None`, property is not a term."""
        return self._prefix

    @property
    def full_text(self):
        """Property takes part in full-text search."""
        return self._full_text

    @property
    def boolean(self):
        """Xapian will use boolean search for this property."""
        return self._boolean
+
+
class BlobProperty(_Property):
    """Binary large objects that need to be fetched alone.

    To get access to these properties, use `Document.send()` and
    `Document.receive()` functions.

    """

    def __init__(self, name, permissions=env.ACCESS_PUBLIC,
            mime_type='application/octet-stream', composite=False):
        _Property.__init__(self, name, permissions=permissions)
        self._mime_type = mime_type
        self._composite = composite

    @property
    def mime_type(self):
        """MIME type for BLOB content.

        By default, MIME type is application/octet-stream.

        """
        return self._mime_type

    @property
    def composite(self):
        """Property is a list of BLOBs."""
        return self._composite
+
+
def _is_composite(typecast):
    # Classify a `typecast` value, returning an (is_composite, is_enum)
    # pair: a sequence of plain values is an enum specification, a
    # sequence of types (or nested sequences) declares a list property
    if type(typecast) in _LIST_TYPES:
        if typecast:
            first = iter(typecast).next()
            if type(first) is not type and \
                    type(first) not in _LIST_TYPES:
                return False, True
        return True, False
    return False, False
+
+
def _decode(typecast, value):
    # Cast `value` according to a `typecast` specification; raises
    # `ValueError` for values that cannot be converted
    enforce(value is not None, ValueError, 'Property value cannot be None')

    is_composite, is_enum = _is_composite(typecast)

    if is_composite:
        enforce(len(typecast) <= 1, ValueError,
                'List values should contain values of the same type')
        if type(value) not in _LIST_TYPES:
            # Scalars are promoted to one-element tuples
            value = (value,)
        # An empty list specification defaults to a list of strings
        typecast, = typecast or [str]
        value = tuple([_decode(typecast, i) for i in value])
    elif is_enum:
        enforce(value in typecast, ValueError,
                "Value %r is not in '%s' list",
                value, ', '.join([str(i) for i in typecast]))
    elif isinstance(typecast, types.FunctionType):
        # Custom cast function
        value = typecast(value)
    elif typecast is str:
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        else:
            value = str(value)
    elif typecast is int:
        value = int(value)
    elif typecast is float:
        value = float(value)
    elif typecast is bool:
        value = bool(value)
    elif typecast is dict:
        value = dict(value)
    else:
        raise ValueError('Unknown typecast')
    return value
+
+
+def _is_sloted_prop(typecast):
+ if typecast in [None, int, float, bool, str]:
+ return True
+ if type(typecast) in _LIST_TYPES:
+ if typecast and [i for i in typecast
+ if type(i) in [None, int, float, bool, str]]:
+ return True
+
+
+def _localized_typecast(value):
+ if isinstance(value, dict):
+ return value
+ else:
+ return {env.default_lang(): value}
+
+
+def _localized_reprcast(value):
+ if isinstance(value, dict):
+ return value.values()
+ else:
+ return [value]
diff --git a/sugar_network/db/storage.py b/sugar_network/db/storage.py
new file mode 100644
index 0000000..c38fcd7
--- /dev/null
+++ b/sugar_network/db/storage.py
@@ -0,0 +1,275 @@
+# Copyright (C) 2012 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import sys
+import time
+import json
+import shutil
+import hashlib
+import cPickle as pickle
+from base64 import b64decode
+from os.path import exists, join, isdir, basename, relpath, lexists, isabs
+
+from sugar_network.db import env
+from sugar_network.db.metadata import PropertyMetadata, BlobProperty
+from sugar_network.toolkit import BUFFER_SIZE, util, exception
+
+
class Storage(object):
    """Get access to documents' data storage."""

    def __init__(self, root, metadata):
        # Documents live in `<root>/<guid[:2]>/<guid>/` directories
        self._root = root
        self.metadata = metadata

    def get(self, guid):
        """Get access to particular document's properties.

        :param guid:
            document GUID to get access to
        :returns:
            `Record` object

        """
        return Record(self._path(guid))

    def delete(self, guid):
        """Remove document properties from the storage.

        :param guid:
            document to remove

        """
        path = self._path(guid)
        if not exists(path):
            return
        try:
            shutil.rmtree(path)
        except Exception, error:
            exception()
            raise RuntimeError('Cannot delete %r document from %r: %s' %
                    (guid, self.metadata.name, error))

    def walk(self, mtime):
        """Generator function to enumerate all existing documents.

        :param mtime:
            return entities that were modified after `mtime`
        :returns:
            generator returning GUIDs of all documents that were
            modified after `mtime`

        """
        if not exists(self._root):
            return

        for guids_dirname in os.listdir(self._root):
            guids_dir = join(self._root, guids_dirname)
            # Prefix directories get touched on every contained store,
            # so untouched ones can be skipped wholesale
            if not isdir(guids_dir) or \
                    mtime and os.stat(guids_dir).st_mtime < mtime:
                continue
            for guid in os.listdir(guids_dir):
                path = join(guids_dir, guid, 'guid')
                if exists(path) and os.stat(path).st_mtime > mtime:
                    yield guid

    def migrate(self, guid):
        # Upgrade on-disk layout of one document to the current
        # pickle-based format (older layouts used JSON and `.seqno`
        # stamp files)
        root = self._path(guid)
        record = self.get(guid)

        path = join(root, '.seqno')
        if exists(path):
            # The old global seqno was kept as the stamp file's mtime
            seqno = int(os.stat(path).st_mtime)
            with file(join(root, 'seqno'), 'w') as f:
                pickle.dump({'seqno': seqno, 'value': seqno}, f)
            os.unlink(path)

        for name, prop in self.metadata.items():
            path = join(root, name)
            if exists(path + '.seqno'):
                self._migrate_to_1(path, prop)
                continue
            if exists(path):
                with file(path) as f:
                    meta = f.read()
                if meta:
                    if meta[0] == '{':
                        # JSON metadata, convert to pickle
                        with file(path, 'w') as f:
                            pickle.dump(json.loads(meta), f)
                    continue
            if not isinstance(prop, BlobProperty) and prop.default is not None:
                # Backfill missing properties with their defaults
                record.set(name, seqno=0, value=prop.default)

    def _migrate_to_1(self, path, prop):
        # Convert one pre-1 property file (value + `.seqno`/`.sha1`
        # sidecars) into a pickled `PropertyMetadata` file
        meta = {'seqno': int(os.stat(path + '.seqno').st_mtime)}

        mtime = None
        if lexists(path):
            if exists(path):
                mtime = os.stat(path).st_mtime
            else:
                # Dangling symlink
                os.unlink(path)

        if isinstance(prop, BlobProperty):
            if mtime is not None:
                if exists(path + '.sha1'):
                    with file(path + '.sha1') as f:
                        meta['digest'] = f.read().strip()
                    os.unlink(path + '.sha1')
                else:
                    # TODO calculate new digest
                    meta['digest'] = ''
                shutil.move(path, path + PropertyMetadata.BLOB_SUFFIX)
                meta['mime_type'] = prop.mime_type
            else:
                if exists(path + '.sha1'):
                    os.unlink(path + '.sha1')
                # Nothing to keep for an absent BLOB
                meta = None
        else:
            if mtime is not None:
                with file(path) as f:
                    value = json.load(f)
                if prop.localized and type(value) is not dict:
                    value = {env.default_lang(): value}
            else:
                value = prop.default
            meta['value'] = value

        if meta is not None:
            with file(path, 'w') as f:
                pickle.dump(meta, f)
            if mtime is not None:
                # Preserve the original modification time
                os.utime(path, (mtime, mtime))

        os.unlink(path + '.seqno')

    def _path(self, guid, *args):
        # Two-letter GUID prefix keeps per-directory entry counts low
        return join(self._root, guid[:2], guid, *args)
+
+
class Record(object):
    """Interface to document data."""

    def __init__(self, root):
        # `root` is the per-document directory with one metadata file
        # (plus optional `.blob` sidecar) per property
        self._root = root

    @property
    def guid(self):
        return basename(self._root)

    @property
    def exists(self):
        return exists(self._root)

    @property
    def consistent(self):
        # The `guid` property file is written last, so its presence
        # marks a fully stored document
        return exists(join(self._root, 'guid'))

    def invalidate(self):
        # Drop the consistency marker, see `consistent`
        guid_path = join(self._root, 'guid')
        if exists(guid_path):
            os.unlink(guid_path)

    def get(self, prop):
        """Return `PropertyMetadata` for `prop`, or `None` if unset."""
        path = join(self._root, prop)
        if exists(path):
            return PropertyMetadata(path)

    def set(self, prop, mtime=None, path=None, content=None, **meta):
        """Store property metadata and, optionally, its BLOB content.

        :param mtime:
            modification time to stamp the metadata file with
        :param path:
            filesystem path to copy BLOB content from
        :param content:
            base64 encoded BLOB content to write

        """
        if not exists(self._root):
            os.makedirs(self._root)
        meta_path = join(self._root, prop)

        blob_path = join(self._root, prop + PropertyMetadata.BLOB_SUFFIX)
        if content is not None:
            with util.new_file(blob_path) as f:
                f.write(b64decode(content))
        elif path and exists(path):
            util.cptree(path, blob_path)

        with util.new_file(meta_path) as f:
            pickle.dump(meta, f)
        if mtime:
            os.utime(meta_path, (mtime, mtime))

        if prop == 'guid':
            if not mtime:
                mtime = time.time()
            # Touch directory to let it possible to crawl it on startup
            # when index was not previously closed properly
            os.utime(join(self._root, '..'), (mtime, mtime))

    def set_blob(self, prop, data=None, size=None, **kwargs):
        """Store BLOB content for `prop` and update its metadata.

        :param data:
            `None` to drop the BLOB, a `PropertyMetadata` to merge,
            a file-like object or an absolute file path to copy from,
            or raw content to write as-is
        :param size:
            maximum number of bytes to read from a stream `data`

        """
        if not exists(self._root):
            os.makedirs(self._root)
        path = join(self._root, prop + PropertyMetadata.BLOB_SUFFIX)
        meta = PropertyMetadata(**kwargs)

        if data is None:
            if exists(path):
                os.unlink(path)
        elif isinstance(data, PropertyMetadata):
            data.update(meta)
            meta = data
        else:
            # SHA-1 digest of the stored content goes into the metadata
            digest = hashlib.sha1()
            if hasattr(data, 'read'):
                if size is None:
                    size = sys.maxint
                self._set_blob_by_stream(digest, data, size, path)
            elif isabs(data) and exists(data):
                self._set_blob_by_path(digest, data, path)
            else:
                with util.new_file(path) as f:
                    f.write(data)
                digest.update(data)
            meta['digest'] = digest.hexdigest()

        self.set(prop, **meta)

    def _set_blob_by_stream(self, digest, stream, size, path):
        # Copy at most `size` bytes from `stream`, hashing on the fly
        with util.new_file(path) as f:
            while size > 0:
                chunk = stream.read(min(size, BUFFER_SIZE))
                if not chunk:
                    break
                f.write(chunk)
                size -= len(chunk)
                if digest is not None:
                    digest.update(chunk)

    def _set_blob_by_path(self, digest, src_path, dst_path):
        util.cptree(src_path, dst_path)

        def hash_file(path):
            with file(path) as f:
                while True:
                    chunk = f.read(BUFFER_SIZE)
                    if not chunk:
                        break
                    if digest is not None:
                        digest.update(chunk)

        if isdir(dst_path):
            # Directory BLOB: fold relative file names into the digest
            # as well so renames change it
            for root, __, files in os.walk(dst_path):
                for filename in files:
                    path = join(root, filename)
                    if digest is not None:
                        digest.update(relpath(path, dst_path))
                    hash_file(path)
        else:
            hash_file(dst_path)
diff --git a/sugar_network/db/volume.py b/sugar_network/db/volume.py
new file mode 100644
index 0000000..6e088e7
--- /dev/null
+++ b/sugar_network/db/volume.py
@@ -0,0 +1,289 @@
+# Copyright (C) 2011-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import time
+import logging
+from contextlib import contextmanager
+from os.path import exists, join, abspath
+
+from sugar_network.db import env
+from sugar_network.db.directory import Directory
+from sugar_network.db.index import IndexWriter
+from sugar_network.db.commands import CommandsProcessor, directory_command
+from sugar_network.db.commands import document_command, property_command
+from sugar_network.db.commands import to_int, to_list
+from sugar_network.db.metadata import BlobProperty, StoredProperty
+from sugar_network.db.metadata import PropertyMetadata
+from sugar_network.toolkit import coroutine, util, exception, enforce
+
+
+_logger = logging.getLogger('db.volume')
+
+
+class _Volume(dict):
+
+ def __init__(self, root, documents, index_class, lazy_open):
+ self._root = abspath(root)
+ _logger.info('Opening %r volume', self._root)
+
+ if not exists(root):
+ os.makedirs(root)
+ self._index_class = index_class
+ self._subscriptions = {}
+ self._to_open = {}
+ self.seqno = util.Seqno(join(self._root, 'seqno'))
+
+ for document in documents:
+ if isinstance(document, basestring):
+ name = document.split('.')[-1]
+ else:
+ name = document.__name__.lower()
+ if lazy_open:
+ self._to_open[name] = document
+ else:
+ self[name] = self._open(name, document)
+
+ @property
+ def root(self):
+ return self._root
+
+ def close(self):
+ """Close all opened document directories."""
+ _logger.info('Closing documents in %r', self._root)
+
+ while self:
+ __, cls = self.popitem()
+ cls.close()
+
+ def connect(self, callback, condition=None):
+ self._subscriptions[callback] = condition or {}
+
+ def disconnect(self, callback):
+ if callback in self._subscriptions:
+ del self._subscriptions[callback]
+
+ def populate(self):
+ for cls in self.values():
+ for __ in cls.populate():
+ coroutine.dispatch()
+
+ def notify(self, event):
+ for callback, condition in self._subscriptions.items():
+ for key, value in condition.items():
+ if event.get(key) not in ('*', value):
+ break
+ else:
+ try:
+ callback(event)
+ except Exception:
+ exception(_logger, 'Failed to dispatch %r', event)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ def __getitem__(self, name):
+ directory = self.get(name)
+ if directory is None:
+ enforce(name in self._to_open, env.BadRequest,
+ 'Unknown %r document', name)
+ directory = self[name] = self._open(name, self._to_open.pop(name))
+ return directory
+
+ def _open(self, name, document):
+ if isinstance(document, basestring):
+ mod = __import__(document, fromlist=[name])
+ cls = getattr(mod, name.capitalize())
+ else:
+ cls = document
+ directory = Directory(join(self._root, name), cls, self._index_class,
+ self.notify, self.seqno)
+ return directory
+
+
+class SingleVolume(_Volume):
+
+ def __init__(self, root, document_classes, lazy_open=False):
+ enforce(env.index_write_queue.value > 0,
+ 'The db.index_write_queue.value should be > 0')
+ _Volume.__init__(self, root, document_classes, IndexWriter, lazy_open)
+
+
+class VolumeCommands(CommandsProcessor):
+
+ def __init__(self, volume):
+ CommandsProcessor.__init__(self, volume)
+ self.volume = volume
+
+ @directory_command(method='POST',
+ permissions=env.ACCESS_AUTH, mime_type='application/json')
+ def create(self, request):
+ with self._post(request, env.ACCESS_CREATE) as (directory, doc):
+ enforce('guid' not in doc.props, env.Forbidden,
+ "Property 'guid' cannot be set manually")
+ self.before_create(request, doc.props)
+ for prop in directory.metadata.values():
+ if prop.on_set is not None and \
+ not prop.permissions & env.ACCESS_CREATE:
+ doc[prop.name] = prop.on_set(doc, prop.default)
+ doc.guid = directory.create(doc.props)
+ return doc.guid
+
+ @directory_command(method='GET',
+ arguments={'offset': to_int, 'limit': to_int, 'reply': to_list},
+ mime_type='application/json')
+ def find(self, document, reply, request):
+ if not reply:
+ request['reply'] = ['guid']
+ self._preget(request)
+ documents, total = self.volume[document].find(**request)
+ result = [self._get_props(i, request) for i in documents]
+ return {'total': total, 'result': result}
+
+ @document_command(method='GET', cmd='exists',
+ mime_type='application/json')
+ def exists(self, document, guid):
+ directory = self.volume[document]
+ return directory.exists(guid)
+
+ @document_command(method='PUT',
+ permissions=env.ACCESS_AUTH | env.ACCESS_AUTHOR)
+ def update(self, request):
+ with self._post(request, env.ACCESS_WRITE) as (directory, doc):
+ modified = bool(doc.props)
+ self.before_update(request, doc.props)
+ if modified:
+ directory.update(doc.guid, doc.props)
+
+ @property_command(method='PUT',
+ permissions=env.ACCESS_AUTH | env.ACCESS_AUTHOR)
+ def update_prop(self, request, prop, url=None):
+ if url:
+ value = PropertyMetadata(url=url)
+ elif request.content is None:
+ value = request.content_stream
+ else:
+ value = request.content
+ request.content = {prop: value}
+ self.update(request)
+
+ @document_command(method='DELETE',
+ permissions=env.ACCESS_AUTH | env.ACCESS_AUTHOR)
+ def delete(self, document, guid):
+ directory = self.volume[document]
+ directory.delete(guid)
+
+ @document_command(method='GET', arguments={'reply': to_list},
+ mime_type='application/json')
+ def get(self, document, guid, request):
+ self._preget(request)
+ doc = self.volume[document].get(guid)
+ return self._get_props(doc, request)
+
+ @property_command(method='GET', mime_type='application/json')
+ def get_prop(self, document, guid, prop, request, response, part=None):
+ directory = self.volume[document]
+ prop = directory.metadata[prop]
+ doc = directory.get(guid)
+ doc.request = request
+
+ prop.assert_access(env.ACCESS_READ)
+
+ if isinstance(prop, StoredProperty):
+ value = doc.get(prop.name, request.accept_language)
+ value = prop.on_get(doc, value)
+ if value is None:
+ value = prop.default
+ return value
+ else:
+ meta = prop.on_get(doc, doc.meta(prop.name))
+ enforce(meta is not None and ('path' in meta or 'url' in meta),
+ env.NotFound, 'BLOB does not exist')
+ return meta
+
+ def before_create(self, request, props):
+ ts = int(time.time())
+ props['ctime'] = ts
+ props['mtime'] = ts
+
+ def before_update(self, request, props):
+ props['mtime'] = int(time.time())
+
+ @contextmanager
+ def _post(self, request, access):
+ enforce(isinstance(request.content, dict), 'Invalid value')
+
+ directory = self.volume[request['document']]
+ if 'guid' in request:
+ doc = directory.get(request['guid'])
+ else:
+ doc = directory.document_class(None, {})
+ doc.request = request
+ blobs = []
+
+ for name, value in request.content.items():
+ prop = directory.metadata[name]
+ if isinstance(prop, BlobProperty) and access == env.ACCESS_WRITE:
+ if doc.meta(name) is None:
+ prop.assert_access(env.ACCESS_CREATE)
+ else:
+ prop.assert_access(env.ACCESS_WRITE)
+ else:
+ prop.assert_access(access)
+ if prop.on_set is not None:
+ value = prop.on_set(doc, value)
+ if isinstance(prop, BlobProperty):
+ enforce(PropertyMetadata.is_blob(value), 'Invalid BLOB value')
+ blobs.append((name, value))
+ else:
+ if prop.localized and isinstance(value, basestring):
+ value = {request.accept_language[0]: value}
+ try:
+ doc.props[name] = prop.decode(value)
+ except Exception, error:
+ error = 'Value %r for %r property is invalid: %s' % \
+ (value, prop.name, error)
+ exception(error)
+ raise RuntimeError(error)
+
+ yield directory, doc
+
+ for name, value in blobs:
+ directory.set_blob(doc.guid, name, value,
+ mime_type=request.content_type)
+
+ def _preget(self, request):
+ metadata = self.volume[request['document']].metadata
+ reply = request.setdefault('reply', [])
+ if reply:
+ for prop in reply:
+ metadata[prop].assert_access(env.ACCESS_READ)
+ else:
+ reply.append('guid')
+
+ def _get_props(self, doc, request):
+ result = {}
+ metadata = doc.metadata
+ doc.request = request
+ for name in request['reply']:
+ prop = metadata[name]
+ value = prop.on_get(doc, doc.get(name, request.accept_language))
+ if value is None:
+ value = prop.default
+ result[name] = value
+ return result
diff --git a/sugar_network/zeroinstall/zeroinstall-injector b/sugar_network/lib/zeroinstall-injector
-Subproject a4b4644c4da3b6e39ea36474af7e62d20155c22
+Subproject a4b4644c4da3b6e39ea36474af7e62d20155c22
diff --git a/sugar_network/node/__init__.py b/sugar_network/node/__init__.py
index 26ad337..6a5ece5 100644
--- a/sugar_network/node/__init__.py
+++ b/sugar_network/node/__init__.py
@@ -13,7 +13,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from active_toolkit.options import Option
+from sugar_network.toolkit import Option
host = Option(
diff --git a/sugar_network/node/auth.py b/sugar_network/node/auth.py
index 9261c0f..b172b91 100644
--- a/sugar_network/node/auth.py
+++ b/sugar_network/node/auth.py
@@ -17,9 +17,8 @@ import os
from ConfigParser import ConfigParser
from os.path import join, exists
-import active_document as ad
-from sugar_network import node
-from active_toolkit import enforce
+from sugar_network import db, node
+from sugar_network.toolkit import enforce
_config_mtime = 0
@@ -27,7 +26,7 @@ _config = None
def validate(request, role):
- enforce(_validate(request, role), ad.Forbidden,
+ enforce(_validate(request, role), db.Forbidden,
'No enough permissions to proceed the operation')
diff --git a/sugar_network/node/commands.py b/sugar_network/node/commands.py
index cdfb70c..ee97a97 100644
--- a/sugar_network/node/commands.py
+++ b/sugar_network/node/commands.py
@@ -18,12 +18,10 @@ import logging
import hashlib
from os.path import exists, join
-import active_document as ad
-from sugar_network import node, toolkit
+from sugar_network import db, node
from sugar_network.node import auth, obs
from sugar_network.resources.volume import Commands, VolumeCommands
-from sugar_network.toolkit import router
-from active_toolkit import util, enforce
+from sugar_network.toolkit import router, util, exception, enforce
_DEFAULT_MASTER_GUID = 'api-testing.network.sugarlabs.org'
@@ -51,7 +49,7 @@ class NodeCommands(VolumeCommands, Commands):
self._guid = f.read().strip()
self._is_master = True
else:
- self._guid = ad.uuid()
+ self._guid = db.uuid()
with file(node_path, 'w') as f:
f.write(self._guid)
@@ -71,7 +69,7 @@ class NodeCommands(VolumeCommands, Commands):
return os.listdir(path)
elif len(request.path) == 4:
path = join(obs.obs_presolve_path.value, *request.path[1:])
- return ad.PropertyMeta(path=path, filename=request.path[-1],
+ return db.PropertyMetadata(path=path, filename=request.path[-1],
mime_type='application/json')
else:
raise RuntimeError('Incorrect path')
@@ -80,15 +78,15 @@ class NodeCommands(VolumeCommands, Commands):
def try_packages(self, request, response):
enforce(len(request.path) == 4, 'Incorrect path')
path = join(obs.obs_presolve_path.value, *request.path[1:])
- enforce(exists(path), ad.NotFound, 'No such package')
+ enforce(exists(path), db.NotFound, 'No such package')
- @ad.volume_command(method='GET', cmd='stat',
+ @db.volume_command(method='GET', cmd='stat',
mime_type='application/json')
def stat(self):
# TODO Remove, it is deprecated
return self.info()
- @ad.volume_command(method='GET', cmd='info',
+ @db.volume_command(method='GET', cmd='info',
mime_type='application/json')
def info(self):
documents = {}
@@ -99,12 +97,12 @@ class NodeCommands(VolumeCommands, Commands):
'documents': documents,
}
- @ad.volume_command(method='GET', cmd='stats',
+ @db.volume_command(method='GET', cmd='stats',
mime_type='application/json', arguments={
- 'start': ad.to_int,
- 'end': ad.to_int,
- 'resolution': ad.to_int,
- 'source': ad.to_list,
+ 'start': db.to_int,
+ 'end': db.to_int,
+ 'resolution': db.to_int,
+ 'source': db.to_list,
})
def stats(self, start, end, resolution, source):
if not source:
@@ -127,28 +125,28 @@ class NodeCommands(VolumeCommands, Commands):
dbs.setdefault(db_name, []).append(ds_name)
result = {}
- for db in self._stats.rrd:
- if db.name not in dbs:
+ for rdb in self._stats.rrd:
+ if rdb.name not in dbs:
continue
- stats = result[db.name] = []
- for ts, ds_values in db.get(start, end, resolution):
+ stats = result[rdb.name] = []
+ for ts, ds_values in rdb.get(start, end, resolution):
values = {}
- for name in dbs[db.name]:
+ for name in dbs[rdb.name]:
values[name] = ds_values.get(name)
stats.append((ts, values))
return result
- @ad.document_command(method='DELETE',
- permissions=ad.ACCESS_AUTH | ad.ACCESS_AUTHOR)
+ @db.document_command(method='DELETE',
+ permissions=db.ACCESS_AUTH | db.ACCESS_AUTHOR)
def delete(self, document, guid):
# Servers data should not be deleted immediately
# to let master-node synchronization possible
directory = self.volume[document]
directory.update(guid, {'layer': ['deleted']})
- @ad.document_command(method='PUT', cmd='attach',
- permissions=ad.ACCESS_AUTH)
+ @db.document_command(method='PUT', cmd='attach',
+ permissions=db.ACCESS_AUTH)
def attach(self, document, guid, request):
auth.validate(request, 'root')
directory = self.volume[document]
@@ -157,8 +155,8 @@ class NodeCommands(VolumeCommands, Commands):
layer = list(set(doc['layer']) | set(request.content))
directory.update(guid, {'layer': layer})
- @ad.document_command(method='PUT', cmd='detach',
- permissions=ad.ACCESS_AUTH)
+ @db.document_command(method='PUT', cmd='detach',
+ permissions=db.ACCESS_AUTH)
def detach(self, document, guid, request):
auth.validate(request, 'root')
directory = self.volume[document]
@@ -167,14 +165,14 @@ class NodeCommands(VolumeCommands, Commands):
layer = list(set(doc['layer']) - set(request.content))
directory.update(guid, {'layer': layer})
- @ad.document_command(method='PUT', cmd='merge',
- permissions=ad.ACCESS_AUTH)
+ @db.document_command(method='PUT', cmd='merge',
+ permissions=db.ACCESS_AUTH)
def merge(self, document, guid, request):
auth.validate(request, 'root')
directory = self.volume[document]
directory.merge(guid, request.content)
- @ad.volume_command(method='GET', cmd='whoami',
+ @db.volume_command(method='GET', cmd='whoami',
mime_type='application/json')
def whoami(self, request):
roles = []
@@ -182,19 +180,19 @@ class NodeCommands(VolumeCommands, Commands):
roles.append('root')
return {'roles': roles, 'guid': request.principal, 'route': 'direct'}
- @ad.document_command(method='GET', cmd='clone',
- arguments={'requires': ad.to_list})
+ @db.document_command(method='GET', cmd='clone',
+ arguments={'requires': db.to_list})
def clone(self, document, guid, version, requires, stability='stable'):
enforce(document == 'context', 'No way to clone')
request = router.Request(method='GET', document='implementation',
context=guid, version=version, stability=stability,
requires=requires, order_by='-version', limit=1,
reply=['guid'])
- impls = self.call(request, ad.Response())['result']
- enforce(impls, ad.NotFound, 'No implementations found')
+ impls = self.call(request, db.Response())['result']
+ enforce(impls, db.NotFound, 'No implementations found')
request = router.Request(method='GET', document='implementation',
guid=impls[0]['guid'], prop='data')
- return self.call(request, ad.Response())
+ return self.call(request, db.Response())
def call(self, request, response=None):
try:
@@ -213,18 +211,18 @@ class NodeCommands(VolumeCommands, Commands):
if cmd is None:
return
- if cmd.permissions & ad.ACCESS_AUTH:
+ if cmd.permissions & db.ACCESS_AUTH:
enforce(auth.try_validate(request, 'user'), router.Unauthorized,
'User is not authenticated')
- if cmd.permissions & ad.ACCESS_AUTHOR and 'guid' in request:
+ if cmd.permissions & db.ACCESS_AUTHOR and 'guid' in request:
if request['document'] == 'user':
allowed = (request.principal == request['guid'])
else:
doc = self.volume[request['document']].get(request['guid'])
allowed = (request.principal in doc['author'])
enforce(allowed or auth.try_validate(request, 'root'),
- ad.Forbidden, 'Operation is permitted only for authors')
+ db.Forbidden, 'Operation is permitted only for authors')
return cmd
@@ -243,7 +241,7 @@ class NodeCommands(VolumeCommands, Commands):
VolumeCommands.before_create(self, request, props)
- @ad.directory_command_pre(method='GET')
+ @db.directory_command_pre(method='GET')
def _NodeCommands_find_pre(self, request):
if 'limit' not in request:
request['limit'] = node.find_limit.value
@@ -258,11 +256,11 @@ class NodeCommands(VolumeCommands, Commands):
layer.remove('deleted')
request['layer'] = layer
- @ad.document_command_post(method='GET')
+ @db.document_command_post(method='GET')
def _NodeCommands_get_post(self, request, response, result):
directory = self.volume[request['document']]
doc = directory.get(request['guid'])
- enforce('deleted' not in doc['layer'], ad.NotFound,
+ enforce('deleted' not in doc['layer'], db.NotFound,
'Document deleted')
return result
@@ -270,7 +268,7 @@ class NodeCommands(VolumeCommands, Commands):
def _load_pubkey(pubkey):
pubkey = pubkey.strip()
try:
- with toolkit.NamedTemporaryFile() as key_file:
+ with util.NamedTemporaryFile() as key_file:
key_file.file.write(pubkey)
key_file.file.flush()
# SSH key needs to be converted to PKCS8 to ket M2Crypto read it
@@ -278,13 +276,13 @@ def _load_pubkey(pubkey):
['ssh-keygen', '-f', key_file.name, '-e', '-m', 'PKCS8'])
except Exception:
message = 'Cannot read DSS public key gotten for registeration'
- util.exception(message)
+ exception(message)
if node.trust_users.value:
logging.warning('Failed to read registration pubkey, '
'but we trust users')
# Keep SSH key for further converting to PKCS8
pubkey_pkcs8 = pubkey
else:
- raise ad.Forbidden(message)
+ raise db.Forbidden(message)
return str(hashlib.sha1(pubkey.split()[1]).hexdigest()), pubkey_pkcs8
diff --git a/sugar_network/node/obs.py b/sugar_network/node/obs.py
index 92f5064..de36daa 100644
--- a/sugar_network/node/obs.py
+++ b/sugar_network/node/obs.py
@@ -19,9 +19,7 @@ import logging
from xml.etree import cElementTree as ElementTree
from os.path import join, exists
-from sugar_network.toolkit import http
-from active_toolkit.options import Option
-from active_toolkit import util, enforce
+from sugar_network.toolkit import Option, http, util, exception, enforce
obs_url = Option(
@@ -78,7 +76,7 @@ def presolve(names):
'exclude': 'sugar',
})
except Exception:
- util.exception('Failed to resolve %s:%s:%s for presolving',
+ exception('Failed to resolve %s:%s:%s for presolving',
repo['name'], arch, package)
continue
deps_graph = []
diff --git a/sugar_network/node/stats.py b/sugar_network/node/stats.py
index d3eb073..a77b4b7 100644
--- a/sugar_network/node/stats.py
+++ b/sugar_network/node/stats.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -19,10 +19,8 @@ from os.path import join, exists, isdir
from pylru import lrucache
-import active_document as ad
-from active_toolkit.options import Option
from sugar_network.toolkit.rrd import Rrd
-from sugar_network.toolkit import PersistentSequence
+from sugar_network.toolkit import Option, util
stats_root = Option(
@@ -77,11 +75,11 @@ def pull(in_seq, packet):
for db in rrd:
seq = in_seq[user].get(db.name)
if seq is None:
- seq = in_seq[user][db.name] = PersistentSequence(
+ seq = in_seq[user][db.name] = util.PersistentSequence(
join(rrd.root, db.name + '.push'), [1, None])
elif seq is not dict:
- seq = in_seq[user][db.name] = ad.Sequence(seq)
- out_seq = ad.Sequence()
+ seq = in_seq[user][db.name] = util.Sequence(seq)
+ out_seq = util.Sequence()
def dump():
for start, end in seq:
@@ -100,7 +98,8 @@ def pull(in_seq, packet):
def commit(sequences):
for user, dbs in sequences.items():
for db, merged in dbs.items():
- seq = PersistentSequence(_rrd_path(user, db + '.push'), [1, None])
+ seq = util.PersistentSequence(
+ _rrd_path(user, db + '.push'), [1, None])
seq.exclude(merged)
seq.commit()
diff --git a/sugar_network/node/sync_master.py b/sugar_network/node/sync_master.py
index 0779729..dee6041 100644
--- a/sugar_network/node/sync_master.py
+++ b/sugar_network/node/sync_master.py
@@ -23,13 +23,13 @@ from os.path import exists, join
from pylru import lrucache
-import active_document as ad
-from sugar_network import node, toolkit
+from sugar_network import db, node
from sugar_network.toolkit.sneakernet import InPacket, OutBufferPacket, \
OutPacket, DiskFull
from sugar_network.toolkit.files_sync import Seeders
+from sugar_network.toolkit.util import Sequence
from sugar_network.node import stats
-from active_toolkit import coroutine, util, enforce
+from sugar_network.toolkit import tmpdir, coroutine, exception, enforce
_PULL_QUEUE_SIZE = 256
@@ -48,7 +48,7 @@ class SyncCommands(object):
self._pull_queue = lrucache(_PULL_QUEUE_SIZE,
lambda key, pull: pull.unlink())
- @ad.volume_command(method='POST', cmd='push')
+ @db.volume_command(method='POST', cmd='push')
def push(self, request, response):
with InPacket(stream=request) as in_packet:
enforce('src' in in_packet.header and
@@ -61,8 +61,8 @@ class SyncCommands(object):
out_packet = OutBufferPacket(src=self._guid,
dst=in_packet.header['src'],
filename='ack.' + in_packet.header.get('filename'))
- pushed = ad.Sequence()
- merged = ad.Sequence()
+ pushed = Sequence()
+ merged = Sequence()
cookie = _Cookie()
stats_pushed = {}
@@ -79,14 +79,14 @@ class SyncCommands(object):
elif cmd == 'files_pull':
cookie[record['directory']].include(record['sequence'])
elif cmd == 'stats_push':
- db = record['db']
+ db_name = record['db']
user = record['user']
rrd = stats.get_rrd(user)
- rrd[db].put(record['values'], record['timestamp'])
+ rrd[db_name].put(record['values'], record['timestamp'])
user_seq = stats_pushed.setdefault(user, {})
- db_seq = user_seq.setdefault(db, ad.Sequence())
+ db_seq = user_seq.setdefault(db_name, Sequence())
db_seq.include(record['sequence'])
enforce(not merged or pushed,
@@ -107,9 +107,9 @@ class SyncCommands(object):
if not out_packet.empty:
return out_packet.pop()
- @ad.volume_command(method='GET', cmd='pull',
+ @db.volume_command(method='GET', cmd='pull',
mime_type='application/octet-stream',
- arguments={'accept_length': ad.to_int})
+ arguments={'accept_length': db.to_int})
def pull(self, request, response, accept_length=None, **pulls):
cookie = _Cookie(request)
for key, seq in pulls.items():
@@ -171,7 +171,7 @@ class _Pull(object):
self.exception = None
self.seconds_remained = 0
self.content_type = None
- self._path = join(toolkit.tmpdir.value, pull_key + '.pull')
+ self._path = join(tmpdir.value, pull_key + '.pull')
self._job = None
if exists(self._path):
@@ -181,7 +181,7 @@ class _Pull(object):
self.cookie = _Cookie()
self.cookie.update(packet.header['cookie'])
except Exception:
- util.exception('Cannot open cached packet for %r, recreate',
+ exception('Cannot open cached packet for %r, recreate',
self._path)
os.unlink(self._path)
@@ -219,9 +219,9 @@ class _Pull(object):
cb(self.cookie, packet)
except DiskFull:
pass
- except Exception, exception:
- util.exception('Error while making %r pull', self.cookie)
- self.exception = exception
+ except Exception, error:
+ exception('Error while making %r pull', self.cookie)
+ self.exception = error
self.unlink()
else:
self.cookie.clear()
@@ -237,7 +237,7 @@ class _Cookie(dict):
if request is not None:
value = self._get_cookie(request, 'sugar_network_sync')
for key, seq in (value or {}).items():
- self[key] = ad.Sequence(seq)
+ self[key] = Sequence(seq)
self.delay = 0
@@ -263,7 +263,7 @@ class _Cookie(dict):
def __getitem__(self, key):
seq = self.get(key)
if seq is None:
- seq = self[key] = ad.Sequence()
+ seq = self[key] = Sequence()
return seq
def _get_cookie(self, request, name):
diff --git a/sugar_network/node/sync_node.py b/sugar_network/node/sync_node.py
index 9bd5a51..3dcb102 100644
--- a/sugar_network/node/sync_node.py
+++ b/sugar_network/node/sync_node.py
@@ -20,13 +20,11 @@ import logging
from os.path import join, dirname, exists, basename
from gettext import gettext as _
-import active_document as ad
-from sugar_network import node, client
+from sugar_network import db, node, client
from sugar_network.toolkit import mountpoints, sneakernet, files_sync
-from sugar_network.toolkit import MutableStack, PersistentSequence
+from sugar_network.toolkit import coroutine, util, exception, enforce
from sugar_network.toolkit.sneakernet import OutFilePacket, DiskFull
from sugar_network.node import stats
-from active_toolkit import coroutine, util, enforce
_SYNC_DIRNAME = '.sugar-network-sync'
@@ -38,13 +36,13 @@ class SyncCommands(object):
def __init__(self, sequences_path):
self._sync = coroutine.Pool()
- self._sync_mounts = MutableStack()
+ self._sync_mounts = util.MutableStack()
self._file_syncs = \
files_sync.Leechers(node.sync_dirs.value, sequences_path)
self._sync_session = None
- self._push_seq = PersistentSequence(
+ self._push_seq = util.PersistentSequence(
join(sequences_path, 'push'), [1, None])
- self._pull_seq = PersistentSequence(
+ self._pull_seq = util.PersistentSequence(
join(sequences_path, 'pull'), [1, None])
self._sync_script = join(dirname(sys.argv[0]), 'sugar-network-sync')
self._mount = None
@@ -64,7 +62,7 @@ class SyncCommands(object):
if self._sync_mounts:
self.start_sync()
- @ad.volume_command(method='POST', cmd='start_sync')
+ @db.volume_command(method='POST', cmd='start_sync')
def start_sync(self, rewind=False, path=None):
enforce(self._mount is not None, 'No server to sync')
@@ -78,7 +76,7 @@ class SyncCommands(object):
self._sync_mounts.rewind()
self._sync.spawn(self.sync_session, path)
- @ad.volume_command(method='POST', cmd='break_sync')
+ @db.volume_command(method='POST', cmd='break_sync')
def break_sync(self):
self._sync.kill()
@@ -86,18 +84,18 @@ class SyncCommands(object):
stats_sequence=None, session=None):
enforce(self._mount is not None, 'No server to sync')
- to_push_seq = ad.Sequence(empty_value=[1, None])
+ to_push_seq = util.Sequence(empty_value=[1, None])
if diff_sequence is None:
to_push_seq.include(self._push_seq)
else:
- to_push_seq = ad.Sequence(diff_sequence)
+ to_push_seq = util.Sequence(diff_sequence)
if stats_sequence is None:
stats_sequence = {}
if session is None:
session_is_new = True
- session = ad.uuid()
+ session = db.uuid()
else:
session_is_new = False
@@ -165,7 +163,7 @@ class SyncCommands(object):
break
break
except Exception, error:
- util.exception(_logger, 'Failed to complete synchronization')
+ exception(_logger, 'Failed to complete synchronization')
self._mount.publish({'event': 'sync_error', 'error': str(error)})
self._sync_session = None
diff --git a/sugar_network/resources/artifact.py b/sugar_network/resources/artifact.py
index a84814e..232355c 100644
--- a/sugar_network/resources/artifact.py
+++ b/sugar_network/resources/artifact.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -15,71 +15,70 @@
from os.path import join
-import active_document as ad
-from sugar_network import resources, static
+from sugar_network import db, resources, static
from sugar_network.resources.volume import Resource
class Artifact(Resource):
- @ad.active_property(prefix='C',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='C',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def context(self, value):
return value
- @ad.active_property(prefix='T', typecast=[resources.ARTIFACT_TYPES])
+ @db.indexed_property(prefix='T', typecast=[resources.ARTIFACT_TYPES])
def type(self, value):
return value
- @ad.active_property(slot=1, prefix='S', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(slot=1, prefix='S', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def title(self, value):
return value
- @ad.active_property(prefix='D', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='D', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def description(self, value):
return value
- @ad.active_property(slot=2, typecast=int, default=0,
- permissions=ad.ACCESS_READ | ad.ACCESS_SYSTEM)
+ @db.indexed_property(slot=2, typecast=int, default=0,
+ permissions=db.ACCESS_READ | db.ACCESS_SYSTEM)
def downloads(self, value):
return value
- @ad.active_property(slot=3, typecast=resources.RATINGS, default=0,
- permissions=ad.ACCESS_READ | ad.ACCESS_SYSTEM)
+ @db.indexed_property(slot=3, typecast=resources.RATINGS, default=0,
+ permissions=db.ACCESS_READ | db.ACCESS_SYSTEM)
def rating(self, value):
return value
- @ad.active_property(ad.StoredProperty, typecast=[], default=[0, 0],
- permissions=ad.ACCESS_READ | ad.ACCESS_SYSTEM)
+ @db.stored_property(typecast=[], default=[0, 0],
+ permissions=db.ACCESS_READ | db.ACCESS_SYSTEM)
def reviews(self, value):
if value is None:
return 0
else:
return value[0]
- @ad.active_property(ad.BlobProperty, mime_type='image/png')
+ @db.blob_property(mime_type='image/png')
def preview(self, value):
if value:
return value
- return ad.PropertyMeta(
+ return db.PropertyMetadata(
url='/static/images/missing.png',
path=join(static.PATH, 'images', 'missing.png'),
mime_type='image/png')
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def data(self, value):
if value:
value['name'] = self['title']
return value
- @ad.active_property(prefix='K', typecast=bool, default=False,
- permissions=ad.ACCESS_READ | ad.ACCESS_LOCAL)
+ @db.indexed_property(prefix='K', typecast=bool, default=False,
+ permissions=db.ACCESS_READ | db.ACCESS_LOCAL)
def favorite(self, value):
return value
- @ad.active_property(prefix='L', typecast=[0, 1, 2], default=0,
- permissions=ad.ACCESS_READ | ad.ACCESS_LOCAL)
+ @db.indexed_property(prefix='L', typecast=[0, 1, 2], default=0,
+ permissions=db.ACCESS_READ | db.ACCESS_LOCAL)
def clone(self, value):
return value
diff --git a/sugar_network/resources/comment.py b/sugar_network/resources/comment.py
index 7f94c37..a820979 100644
--- a/sugar_network/resources/comment.py
+++ b/sugar_network/resources/comment.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,20 +13,19 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import active_document as ad
-
+from sugar_network import db
from sugar_network.resources.volume import Resource
class Comment(Resource):
- @ad.active_property(prefix='C',
- permissions=ad.ACCESS_READ)
+ @db.indexed_property(prefix='C',
+ permissions=db.ACCESS_READ)
def context(self, value):
return value
- @ad.active_property(prefix='R', default='',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='R', default='',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def review(self, value):
return value
@@ -37,8 +36,8 @@ class Comment(Resource):
self['context'] = review['context']
return value
- @ad.active_property(prefix='F', default='',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='F', default='',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def feedback(self, value):
return value
@@ -49,8 +48,8 @@ class Comment(Resource):
self['context'] = feedback['context']
return value
- @ad.active_property(prefix='S', default='',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='S', default='',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def solution(self, value):
return value
@@ -61,7 +60,7 @@ class Comment(Resource):
self['context'] = solution['context']
return value
- @ad.active_property(prefix='M', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='M', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def message(self, value):
return value
diff --git a/sugar_network/resources/context.py b/sugar_network/resources/context.py
index aa2a0eb..a2e6e96 100644
--- a/sugar_network/resources/context.py
+++ b/sugar_network/resources/context.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -16,115 +16,114 @@
import time
from os.path import join
-import active_document as ad
-from sugar_network import resources, static
+from sugar_network import db, resources, static
from sugar_network.resources.volume import Resource
from sugar_network.zerosugar import clones
from sugar_network.zerosugar.spec import Spec
from sugar_network.node import obs
-from active_toolkit import coroutine, util
+from sugar_network.toolkit import coroutine, exception
class Context(Resource):
- @ad.active_property(prefix='T', full_text=True,
+ @db.indexed_property(prefix='T', full_text=True,
typecast=[resources.CONTEXT_TYPES])
def type(self, value):
return value
- @ad.active_property(prefix='M',
+ @db.indexed_property(prefix='M',
full_text=True, default=[], typecast=[])
def implement(self, value):
return value
- @ad.active_property(slot=1, prefix='S', full_text=True, localized=True)
+ @db.indexed_property(slot=1, prefix='S', full_text=True, localized=True)
def title(self, value):
return value
- @ad.active_property(prefix='R', full_text=True, localized=True)
+ @db.indexed_property(prefix='R', full_text=True, localized=True)
def summary(self, value):
return value
- @ad.active_property(prefix='D', full_text=True, localized=True)
+ @db.indexed_property(prefix='D', full_text=True, localized=True)
def description(self, value):
return value
- @ad.active_property(prefix='H', default='', full_text=True)
+ @db.indexed_property(prefix='H', default='', full_text=True)
def homepage(self, value):
return value
- @ad.active_property(prefix='Y', default=[], typecast=[], full_text=True)
+ @db.indexed_property(prefix='Y', default=[], typecast=[], full_text=True)
def mime_types(self, value):
return value
- @ad.active_property(ad.BlobProperty, mime_type='image/png')
+ @db.blob_property(mime_type='image/png')
def icon(self, value):
if value:
return value
if 'package' in self['type']:
- return ad.PropertyMeta(
+ return db.PropertyMetadata(
url='/static/images/package.png',
path=join(static.PATH, 'images', 'package.png'),
mime_type='image/png')
else:
- return ad.PropertyMeta(
+ return db.PropertyMetadata(
url='/static/images/missing.png',
path=join(static.PATH, 'images', 'missing.png'),
mime_type='image/png')
- @ad.active_property(ad.BlobProperty, mime_type='image/svg+xml')
+ @db.blob_property(mime_type='image/svg+xml')
def artifact_icon(self, value):
if value:
return value
- return ad.PropertyMeta(
+ return db.PropertyMetadata(
url='/static/images/missing.svg',
path=join(static.PATH, 'images', 'missing.svg'),
mime_type='image/svg+xml')
- @ad.active_property(ad.BlobProperty, mime_type='image/png')
+ @db.blob_property(mime_type='image/png')
def preview(self, value):
if value:
return value
- return ad.PropertyMeta(
+ return db.PropertyMetadata(
url='/static/images/missing.png',
path=join(static.PATH, 'images', 'missing.png'),
mime_type='image/png')
- @ad.active_property(slot=2, typecast=int, default=0,
- permissions=ad.ACCESS_READ | ad.ACCESS_SYSTEM)
+ @db.indexed_property(slot=2, typecast=int, default=0,
+ permissions=db.ACCESS_READ | db.ACCESS_SYSTEM)
def downloads(self, value):
return value
- @ad.active_property(slot=3, typecast=resources.RATINGS, default=0,
- permissions=ad.ACCESS_READ | ad.ACCESS_SYSTEM)
+ @db.indexed_property(slot=3, typecast=resources.RATINGS, default=0,
+ permissions=db.ACCESS_READ | db.ACCESS_SYSTEM)
def rating(self, value):
return value
- @ad.active_property(ad.StoredProperty, typecast=[], default=[0, 0],
- permissions=ad.ACCESS_READ | ad.ACCESS_SYSTEM)
+ @db.stored_property(typecast=[], default=[0, 0],
+ permissions=db.ACCESS_READ | db.ACCESS_SYSTEM)
def reviews(self, value):
if value is None:
return 0
else:
return value[0]
- @ad.active_property(prefix='K', typecast=bool, default=False,
- permissions=ad.ACCESS_READ | ad.ACCESS_LOCAL)
+ @db.indexed_property(prefix='K', typecast=bool, default=False,
+ permissions=db.ACCESS_READ | db.ACCESS_LOCAL)
def favorite(self, value):
return value
- @ad.active_property(prefix='L', typecast=[0, 1, 2], default=0,
- permissions=ad.ACCESS_READ | ad.ACCESS_LOCAL)
+ @db.indexed_property(prefix='L', typecast=[0, 1, 2], default=0,
+ permissions=db.ACCESS_READ | db.ACCESS_LOCAL)
def clone(self, value):
return value
- @ad.active_property(ad.StoredProperty, typecast=[int], default=(-1, -1),
- permissions=ad.ACCESS_PUBLIC | ad.ACCESS_LOCAL)
+ @db.stored_property(typecast=[int], default=(-1, -1),
+ permissions=db.ACCESS_PUBLIC | db.ACCESS_LOCAL)
def position(self, value):
return value
- @ad.active_property(ad.StoredProperty, typecast=[], default=[],
- permissions=ad.ACCESS_PUBLIC | ad.ACCESS_LOCAL)
+ @db.stored_property(typecast=[], default=[],
+ permissions=db.ACCESS_PUBLIC | db.ACCESS_LOCAL)
def dependencies(self, value):
"""Software dependencies.
@@ -140,8 +139,8 @@ class Context(Resource):
self.volume['implementation'].mtime = int(time.time())
return value
- @ad.active_property(ad.StoredProperty, typecast=dict, default={},
- permissions=ad.ACCESS_PUBLIC | ad.ACCESS_LOCAL)
+ @db.stored_property(typecast=dict, default={},
+ permissions=db.ACCESS_PUBLIC | db.ACCESS_LOCAL)
def aliases(self, value):
return value
@@ -150,13 +149,13 @@ class Context(Resource):
coroutine.spawn(self._process_aliases, value)
return value
- @ad.active_property(ad.StoredProperty, typecast=dict, default={},
- permissions=ad.ACCESS_PUBLIC | ad.ACCESS_LOCAL | ad.ACCESS_SYSTEM)
+ @db.stored_property(typecast=dict, default={},
+ permissions=db.ACCESS_PUBLIC | db.ACCESS_LOCAL | db.ACCESS_SYSTEM)
def packages(self, value):
return value
- @ad.active_property(ad.StoredProperty, typecast=[], default=[],
- permissions=ad.ACCESS_READ | ad.ACCESS_LOCAL | ad.ACCESS_SYSTEM)
+ @db.stored_property(typecast=[], default=[],
+ permissions=db.ACCESS_READ | db.ACCESS_LOCAL | db.ACCESS_SYSTEM)
def versions(self, value):
result = []
@@ -165,7 +164,7 @@ class Context(Resource):
try:
spec = Spec(root=path)
except Exception:
- util.exception('Failed to read %r spec file', path)
+ exception('Failed to read %r spec file', path)
continue
result.append({
'guid': spec.root,
@@ -181,7 +180,7 @@ class Context(Resource):
})
else:
impls, __ = self.volume['implementation'].find(
- limit=ad.MAX_LIMIT, context=self.guid,
+ limit=db.MAX_LIMIT, context=self.guid,
layer=self.request.get('layer'))
for impl in impls:
for arch, spec in impl['spec'].items():
@@ -211,7 +210,7 @@ class Context(Resource):
else:
alias['status'] = 'no packages to resolve'
except Exception, error:
- util.exception('Failed to resolve %r', alias)
+ exception('Failed to resolve %r', alias)
alias = {'status': str(error)}
packages[repo['name']] = alias
diff --git a/sugar_network/resources/feedback.py b/sugar_network/resources/feedback.py
index eb25203..97ba348 100644
--- a/sugar_network/resources/feedback.py
+++ b/sugar_network/resources/feedback.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,31 +13,29 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import active_document as ad
-
-from sugar_network import resources
+from sugar_network import db, resources
from sugar_network.resources.volume import Resource
class Feedback(Resource):
- @ad.active_property(prefix='C',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='C',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def context(self, value):
return value
- @ad.active_property(prefix='T', typecast=[resources.FEEDBACK_TYPES])
+ @db.indexed_property(prefix='T', typecast=[resources.FEEDBACK_TYPES])
def type(self, value):
return value
- @ad.active_property(prefix='S', full_text=True, localized=True)
+ @db.indexed_property(prefix='S', full_text=True, localized=True)
def title(self, value):
return value
- @ad.active_property(prefix='N', full_text=True, localized=True)
+ @db.indexed_property(prefix='N', full_text=True, localized=True)
def content(self, value):
return value
- @ad.active_property(prefix='A', default='')
+ @db.indexed_property(prefix='A', default='')
def solution(self, value):
return value
diff --git a/sugar_network/resources/implementation.py b/sugar_network/resources/implementation.py
index b381504..14549f1 100644
--- a/sugar_network/resources/implementation.py
+++ b/sugar_network/resources/implementation.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -17,9 +17,8 @@
import xapian
-import active_document as ad
+from sugar_network import db, resources
from sugar_network.zerosugar.licenses import GOOD_LICENSES
-from sugar_network import resources
from sugar_network.zerosugar.spec import parse_version
from sugar_network.resources.volume import Resource
@@ -43,42 +42,42 @@ def _encode_version(version):
class Implementation(Resource):
- @ad.active_property(prefix='C',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='C',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def context(self, value):
return value
- @ad.active_property(prefix='L', full_text=True, typecast=[GOOD_LICENSES],
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='L', full_text=True, typecast=[GOOD_LICENSES],
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def license(self, value):
return value
- @ad.active_property(slot=1, prefix='V', reprcast=_encode_version,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(slot=1, prefix='V', reprcast=_encode_version,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def version(self, value):
return value
- @ad.active_property(prefix='S',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ,
+ @db.indexed_property(prefix='S',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ,
typecast=resources.STABILITIES)
def stability(self, value):
return value
- @ad.active_property(prefix='R', typecast=[], default=[],
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='R', typecast=[], default=[],
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def requires(self, value):
return value
- @ad.active_property(prefix='N', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='N', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def notes(self, value):
return value
- @ad.active_property(ad.StoredProperty, typecast=dict, default={})
+ @db.stored_property(typecast=dict, default={})
def spec(self, value):
return value
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def data(self, value):
if value:
context = self.volume['context'].get(self['context'])
diff --git a/sugar_network/resources/notification.py b/sugar_network/resources/notification.py
index 4b78d75..73878df 100644
--- a/sugar_network/resources/notification.py
+++ b/sugar_network/resources/notification.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,37 +13,35 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import active_document as ad
-
-from sugar_network import resources
+from sugar_network import db, resources
from sugar_network.resources.volume import Resource
class Notification(Resource):
- @ad.active_property(prefix='T',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ,
+ @db.indexed_property(prefix='T',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ,
typecast=resources.NOTIFICATION_TYPES)
def type(self, value):
return value
- @ad.active_property(prefix='K',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ,
+ @db.indexed_property(prefix='K',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ,
default='', typecast=resources.NOTIFICATION_OBJECT_TYPES)
def resource(self, value):
return value
- @ad.active_property(prefix='O',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ, default='')
+ @db.indexed_property(prefix='O',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ, default='')
def object(self, value):
return value
- @ad.active_property(prefix='D',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ, default='')
+ @db.indexed_property(prefix='D',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ, default='')
def to(self, value):
return value
- @ad.active_property(prefix='M', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='M', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def message(self, value):
return value
diff --git a/sugar_network/resources/report.py b/sugar_network/resources/report.py
index cce7a27..272e7f8 100644
--- a/sugar_network/resources/report.py
+++ b/sugar_network/resources/report.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,20 +13,19 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import active_document as ad
-
+from sugar_network import db
from sugar_network.resources.volume import Resource
class Report(Resource):
- @ad.active_property(prefix='C',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='C',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def context(self, value):
return value
- @ad.active_property(prefix='V',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ, default='')
+ @db.indexed_property(prefix='V',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ, default='')
def implementation(self, value):
return value
@@ -37,31 +36,31 @@ class Report(Resource):
self['version'] = version['version']
return value
- @ad.active_property(prefix='D', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='D', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def description(self, value):
return value
- @ad.active_property(ad.StoredProperty, default='',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.stored_property(default='',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def version(self, value):
return value
- @ad.active_property(ad.StoredProperty, typecast=dict, default={},
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.stored_property(typecast=dict, default={},
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def environ(self, value):
return value
- @ad.active_property(prefix='T',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='T',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def error(self, value):
return value
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def data(self, value):
return value
- @ad.document_command(method='GET', cmd='log',
+ @db.document_command(method='GET', cmd='log',
mime_type='text/html')
def log(self, guid):
# In further implementations, `data` might be a tarball
diff --git a/sugar_network/resources/review.py b/sugar_network/resources/review.py
index fdcf878..e8274cc 100644
--- a/sugar_network/resources/review.py
+++ b/sugar_network/resources/review.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,21 +13,19 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import active_document as ad
-
-from sugar_network import resources
+from sugar_network import db, resources
from sugar_network.resources.volume import Resource
class Review(Resource):
- @ad.active_property(prefix='C',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='C',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def context(self, value):
return value
- @ad.active_property(prefix='A', default='',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='A', default='',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def artifact(self, value):
return value
@@ -38,17 +36,17 @@ class Review(Resource):
self['context'] = artifact['context']
return value
- @ad.active_property(prefix='S', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='S', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def title(self, value):
return value
- @ad.active_property(prefix='N', full_text=True, localized=True,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='N', full_text=True, localized=True,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def content(self, value):
return value
- @ad.active_property(slot=1, typecast=resources.RATINGS,
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(slot=1, typecast=resources.RATINGS,
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def rating(self, value):
return value
diff --git a/sugar_network/resources/solution.py b/sugar_network/resources/solution.py
index b43a74b..6592b25 100644
--- a/sugar_network/resources/solution.py
+++ b/sugar_network/resources/solution.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,20 +13,19 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import active_document as ad
-
+from sugar_network import db
from sugar_network.resources.volume import Resource
class Solution(Resource):
- @ad.active_property(prefix='C',
- permissions=ad.ACCESS_READ)
+ @db.indexed_property(prefix='C',
+ permissions=db.ACCESS_READ)
def context(self, value):
return value
- @ad.active_property(prefix='P',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_READ)
+ @db.indexed_property(prefix='P',
+ permissions=db.ACCESS_CREATE | db.ACCESS_READ)
def feedback(self, value):
return value
@@ -37,6 +36,6 @@ class Solution(Resource):
self['context'] = feedback['context']
return value
- @ad.active_property(prefix='N', full_text=True, localized=True)
+ @db.indexed_property(prefix='N', full_text=True, localized=True)
def content(self, value):
return value
diff --git a/sugar_network/resources/user.py b/sugar_network/resources/user.py
index 0a211e7..b995cda 100644
--- a/sugar_network/resources/user.py
+++ b/sugar_network/resources/user.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,60 +13,60 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import active_document as ad
+from sugar_network import db
from sugar_network.node import stats
-from active_toolkit import enforce
+from sugar_network.toolkit import enforce
-class User(ad.Document):
+class User(db.Document):
- @ad.active_property(prefix='L', typecast=[], default=['public'])
+ @db.indexed_property(prefix='L', typecast=[], default=['public'])
def layer(self, value):
return value
- @ad.active_property(slot=1, prefix='N', full_text=True)
+ @db.indexed_property(slot=1, prefix='N', full_text=True)
def name(self, value):
return value
- @ad.active_property(ad.StoredProperty)
+ @db.stored_property()
def color(self, value):
return value
- @ad.active_property(prefix='S', default='',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_WRITE)
+ @db.indexed_property(prefix='S', default='',
+ permissions=db.ACCESS_CREATE | db.ACCESS_WRITE)
def machine_sn(self, value):
return value
- @ad.active_property(prefix='U', default='',
- permissions=ad.ACCESS_CREATE | ad.ACCESS_WRITE)
+ @db.indexed_property(prefix='U', default='',
+ permissions=db.ACCESS_CREATE | db.ACCESS_WRITE)
def machine_uuid(self, value):
return value
- @ad.active_property(ad.StoredProperty, permissions=ad.ACCESS_CREATE)
+ @db.stored_property(permissions=db.ACCESS_CREATE)
def pubkey(self, value):
return value
- @ad.active_property(prefix='T', full_text=True, default=[], typecast=[])
+ @db.indexed_property(prefix='T', full_text=True, default=[], typecast=[])
def tags(self, value):
return value
- @ad.active_property(prefix='P', full_text=True, default='')
+ @db.indexed_property(prefix='P', full_text=True, default='')
def location(self, value):
return value
- @ad.active_property(slot=2, prefix='B', default=0, typecast=int)
+ @db.indexed_property(slot=2, prefix='B', default=0, typecast=int)
def birthday(self, value):
return value
- @ad.document_command(method='GET', cmd='stats-info',
+ @db.document_command(method='GET', cmd='stats-info',
mime_type='application/json')
def _stats_info(self, request):
- enforce(request.principal == self['guid'], ad.Forbidden,
+ enforce(request.principal == self['guid'], db.Forbidden,
'Operation is permitted only for authors')
status = {}
- for db in stats.get_rrd(self.guid):
- status[db.name] = db.last + stats.stats_user_step.value
+ for rdb in stats.get_rrd(self.guid):
+ status[rdb.name] = rdb.last + stats.stats_user_step.value
# TODO Process client configuration in more general manner
return {'enable': True,
@@ -75,9 +75,9 @@ class User(ad.Document):
'status': status,
}
- @ad.document_command(method='POST', cmd='stats-upload')
+ @db.document_command(method='POST', cmd='stats-upload')
def _stats_upload(self, request):
- enforce(request.principal == self['guid'], ad.Forbidden,
+ enforce(request.principal == self['guid'], db.Forbidden,
'Operation is permitted only for authors')
name = request.content['name']
diff --git a/sugar_network/resources/volume.py b/sugar_network/resources/volume.py
index b960de4..85b44b8 100644
--- a/sugar_network/resources/volume.py
+++ b/sugar_network/resources/volume.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -17,11 +17,9 @@ import json
import logging
from os.path import join
-import active_document as ad
-from sugar_network import client, node, toolkit, static
-from sugar_network.toolkit import http, router
-from active_toolkit.sockets import BUFFER_SIZE
-from active_toolkit import coroutine, enforce
+from sugar_network import db, client, node, static
+from sugar_network.toolkit import http, router, coroutine, util, enforce
+from sugar_network.toolkit import BUFFER_SIZE
AUTHOR_INSYSTEM = 1
@@ -40,10 +38,10 @@ def _reprcast_authors(value):
yield guid
-class Resource(ad.Document):
+class Resource(db.Document):
- @ad.active_property(prefix='RA', typecast=dict, full_text=True, default={},
- reprcast=_reprcast_authors, permissions=ad.ACCESS_READ)
+ @db.indexed_property(prefix='RA', typecast=dict, full_text=True,
+ default={}, reprcast=_reprcast_authors, permissions=db.ACCESS_READ)
def author(self, value):
result = []
for guid, props in sorted(value.items(),
@@ -67,15 +65,15 @@ class Resource(ad.Document):
return {}
return self._useradd(self.request.principal, AUTHOR_ORIGINAL)
- @ad.document_command(method='PUT', cmd='useradd',
- arguments={'role': ad.to_int},
- permissions=ad.ACCESS_AUTH | ad.ACCESS_AUTHOR)
+ @db.document_command(method='PUT', cmd='useradd',
+ arguments={'role': db.to_int},
+ permissions=db.ACCESS_AUTH | db.ACCESS_AUTHOR)
def useradd(self, user, role):
enforce(user, "Argument 'user' is not specified")
self.directory.update(self.guid, author=self._useradd(user, role))
- @ad.document_command(method='PUT', cmd='userdel',
- permissions=ad.ACCESS_AUTH | ad.ACCESS_AUTHOR)
+ @db.document_command(method='PUT', cmd='userdel',
+ permissions=db.ACCESS_AUTH | db.ACCESS_AUTHOR)
def userdel(self, user):
enforce(user, "Argument 'user' is not specified")
enforce(user != self.request.principal, 'Cannot remove yourself')
@@ -84,11 +82,11 @@ class Resource(ad.Document):
del author[user]
self.directory.update(self.guid, author=author)
- @ad.active_property(prefix='RL', typecast=[], default=['public'])
+ @db.indexed_property(prefix='RL', typecast=[], default=['public'])
def layer(self, value):
return value
- @ad.active_property(prefix='RT', full_text=True, default=[], typecast=[])
+ @db.indexed_property(prefix='RT', full_text=True, default=[], typecast=[])
def tags(self, value):
return value
@@ -119,7 +117,7 @@ class Resource(ad.Document):
return author
-class Volume(ad.SingleVolume):
+class Volume(db.SingleVolume):
RESOURCES = (
'sugar_network.resources.artifact',
@@ -139,14 +137,14 @@ class Volume(ad.SingleVolume):
document_classes = Volume.RESOURCES
self._downloader = None
self._populators = coroutine.Pool()
- ad.SingleVolume.__init__(self, root, document_classes, lazy_open)
+ db.SingleVolume.__init__(self, root, document_classes, lazy_open)
def close(self):
if self._downloader is not None:
self._downloader.close()
self._downloader = None
self._populators.kill()
- ad.SingleVolume.close(self)
+ db.SingleVolume.close(self)
def notify(self, event):
if event['event'] == 'update' and 'props' in event and \
@@ -154,10 +152,10 @@ class Volume(ad.SingleVolume):
event['event'] = 'delete'
del event['props']
- ad.SingleVolume.notify(self, event)
+ db.SingleVolume.notify(self, event)
def diff(self, in_seq, packet):
- out_seq = ad.Sequence()
+ out_seq = util.Sequence()
try:
for document, directory in self.items():
coroutine.dispatch()
@@ -196,7 +194,7 @@ class Volume(ad.SingleVolume):
return commit
def _open(self, name, document):
- directory = ad.SingleVolume._open(self, name, document)
+ directory = db.SingleVolume._open(self, name, document)
self._populators.spawn(self._populate, directory)
return directory
@@ -214,7 +212,7 @@ class Volume(ad.SingleVolume):
content_length = response.headers.get('Content-Length')
content_length = int(content_length) if content_length else 0
- ostream = toolkit.NamedTemporaryFile()
+ ostream = util.NamedTemporaryFile()
try:
chunk_size = min(content_length, BUFFER_SIZE)
# pylint: disable-msg=E1103
@@ -244,15 +242,15 @@ class Commands(object):
@router.route('GET', '/favicon.ico')
def favicon(self, request, response):
- return ad.PropertyMeta(
+ return db.PropertyMetadata(
path=join(static.PATH, 'favicon.ico'),
mime_type='image/x-icon')
- @ad.volume_command(method='GET', mime_type='text/html')
+ @db.volume_command(method='GET', mime_type='text/html')
def hello(self):
return _HELLO_HTML
- @ad.volume_command(method='GET', cmd='subscribe',
+ @db.volume_command(method='GET', cmd='subscribe',
mime_type='application/json')
def subscribe(self, request=None, response=None, only_commits=False):
"""Subscribe to Server-Sent Events.
@@ -295,9 +293,9 @@ class Commands(object):
coroutine.dispatch()
-class VolumeCommands(ad.VolumeCommands):
+class VolumeCommands(db.VolumeCommands):
- @ad.document_command(method='GET', cmd='deplist',
+ @db.document_command(method='GET', cmd='deplist',
mime_type='application/json')
def deplist(self, document, guid, repo):
"""List of native packages context is dependening on.
@@ -326,21 +324,21 @@ class VolumeCommands(ad.VolumeCommands):
return result
- @ad.directory_command_post(method='GET')
+ @db.directory_command_post(method='GET')
def _VolumeCommands_find_post(self, request, response, result):
self._mixin_blobs(request, result['result'])
return result
- @ad.document_command_pre(method='GET', arguments={'reply': ad.to_list})
+ @db.document_command_pre(method='GET', arguments={'reply': db.to_list})
def _VolumeCommands_get_pre(self, request):
if 'reply' not in request:
reply = request['reply'] = []
for prop in self.volume[request['document']].metadata.values():
- if prop.permissions & ad.ACCESS_READ and \
- not (prop.permissions & ad.ACCESS_LOCAL):
+ if prop.permissions & db.ACCESS_READ and \
+ not (prop.permissions & db.ACCESS_LOCAL):
reply.append(prop.name)
- @ad.document_command_post(method='GET')
+ @db.document_command_post(method='GET')
def _VolumeCommands_get_post(self, request, response, result):
self._mixin_blobs(request, [result])
return result
@@ -349,7 +347,7 @@ class VolumeCommands(ad.VolumeCommands):
blobs = []
metadata = self.volume[request['document']].metadata
for prop in request['reply']:
- if isinstance(metadata[prop], ad.BlobProperty):
+ if isinstance(metadata[prop], db.BlobProperty):
blobs.append(prop)
if not blobs:
return
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index c962505..ffb0b8f 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,17 +13,14 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import os
-import json
+import sys
import logging
-import hashlib
-import tempfile
-import collections
-from os.path import isfile, lexists, exists, dirname
+from os.path import join
-import active_document as ad
-from active_toolkit.options import Option
-from active_toolkit import util
+from sugar_network.toolkit.options import Option
+
+
+BUFFER_SIZE = 1024 * 10
tmpdir = Option(
@@ -32,172 +29,77 @@ tmpdir = Option(
'synchronizing Sugar Network content',
name='tmpdir')
-_logger = logging.getLogger('toolkit')
-
-def spawn(cmd_filename, *args):
- _logger.trace('Spawn %s%r', cmd_filename, args)
-
- if os.fork():
- return
+def enforce(condition, error=None, *args):
+ """Make an assertion in runtime.
- os.execvp(cmd_filename, (cmd_filename,) + args)
+ In comparison with `assert`, it will always be present in the code.
+ Just a bit of syntax sugar.
+ :param condition:
+ the condition to assert; if truthy then return,
+ otherwise raise a RuntimeError exception
+ :param error:
+ error message to pass to RuntimeError object
+ or Exception class to raise
+ :param args:
+ optional '%' arguments for the `error`
-def symlink(src, dst):
- if not isfile(src):
- _logger.debug('Cannot link %r to %r, source file is absent', src, dst)
+ """
+ if condition:
return
- _logger.trace('Link %r to %r', src, dst)
-
- if lexists(dst):
- os.unlink(dst)
- elif not exists(dirname(dst)):
- os.makedirs(dirname(dst))
- os.symlink(src, dst)
-
-
-def ensure_dsa_pubkey(path):
- if not exists(path):
- _logger.info('Create DSA server key')
- util.assert_call([
- '/usr/bin/ssh-keygen', '-q', '-t', 'dsa', '-f', path,
- '-C', '', '-N', ''])
-
- with file(path + '.pub') as f:
- for line in f:
- line = line.strip()
- if line.startswith('ssh-'):
- key = line.split()[1]
- return str(hashlib.sha1(key).hexdigest())
-
- raise RuntimeError('No valid DSA public key in %r' % path)
-
-
-def svg_to_png(src_path, dst_path, width, height):
- import rsvg
- import cairo
-
- svg = rsvg.Handle(src_path)
-
- surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
- context = cairo.Context(surface)
- scale = min(
- float(width) / svg.props.width,
- float(height) / svg.props.height)
- context.scale(scale, scale)
- svg.render_cairo(context)
-
- surface.write_to_png(dst_path)
-
-
-def NamedTemporaryFile(*args, **kwargs):
- if tmpdir.value:
- kwargs['dir'] = tmpdir.value
- return tempfile.NamedTemporaryFile(*args, **kwargs)
-
-
-def init_logging(debug_level):
- # pylint: disable-msg=W0212
-
- logging.addLevelName(9, 'TRACE')
- logging.addLevelName(8, 'HEARTBEAT')
-
- logging.Logger.trace = lambda self, message, *args, **kwargs: None
- logging.Logger.heartbeat = lambda self, message, *args, **kwargs: None
-
- if debug_level < 3:
- _disable_logger([
- 'requests.packages.urllib3.connectionpool',
- 'requests.packages.urllib3.poolmanager',
- 'requests.packages.urllib3.response',
- 'requests.packages.urllib3',
- 'inotify',
- 'netlink',
- 'sugar_stats',
- ])
- elif debug_level < 4:
- logging.Logger.trace = lambda self, message, *args, **kwargs: \
- self._log(9, message, args, **kwargs)
- _disable_logger(['sugar_stats'])
+ if isinstance(error, type):
+ exception_class = error
+ if args:
+ error = args[0]
+ args = args[1:]
+ else:
+ error = None
else:
- logging.Logger.heartbeat = lambda self, message, *args, **kwargs: \
- self._log(8, message, args, **kwargs)
-
-
-class PersistentSequence(ad.Sequence):
-
- def __init__(self, path, empty_value=None):
- ad.Sequence.__init__(self, empty_value=empty_value)
- self._path = path
-
- if exists(self._path):
- with file(self._path) as f:
- self[:] = json.load(f)
-
- def commit(self):
- dir_path = dirname(self._path)
- if dir_path and not exists(dir_path):
- os.makedirs(dir_path)
- with util.new_file(self._path) as f:
- json.dump(self, f)
- f.flush()
- os.fsync(f.fileno())
+ exception_class = RuntimeError
+ if args:
+ error = error % args
+ elif not error:
+ # pylint: disable-msg=W0212
+ frame = sys._getframe(1)
+ error = 'Runtime assertion failed at %s:%s' % \
+ (frame.f_globals['__file__'], frame.f_lineno - 1)
-class MutableStack(object):
- """Stack that keeps its iterators correct after changing content."""
+ raise exception_class(error)
- def __init__(self):
- self._queue = collections.deque()
- def add(self, value):
- self.remove(value)
- self._queue.appendleft([False, value])
+def exception(*args):
+ """Log about exception on low log level.
- def remove(self, value):
- for i, (__, existing) in enumerate(self._queue):
- if existing == value:
- del self._queue[i]
- break
+ That might be useful for non-critical exceptions. Input arguments are the
+ same as for `logging.exception` function.
- def rewind(self):
- for i in self._queue:
- i[0] = False
-
- def __len__(self):
- return len(self._queue)
-
- def __iter__(self):
- return _MutableStackIterator(self._queue)
-
- def __repr__(self):
- return str([i[1] for i in self._queue])
-
-
-class _MutableStackIterator(object):
-
- def __init__(self, queue):
- self._queue = queue
-
- def next(self):
- for i in self._queue:
- processed, value = i
- if not processed:
- i[0] = True
- return value
- raise StopIteration()
+ :param args:
+ optional arguments to pass to logging function;
+ the first argument might be a `logging.Logger` to use instead of
+ using direct `logging` calls
+ """
+ if args and isinstance(args[0], logging.Logger):
+ logger = args[0]
+ args = args[1:]
+ else:
+ logger = logging
-def _disable_logger(loggers):
- for log_name in loggers:
- logger = logging.getLogger(log_name)
- logger.propagate = False
- logger.addHandler(_NullHandler())
+ klass, error, tb = sys.exc_info()
+ import traceback
+ tb = [i.rstrip() for i in traceback.format_exception(klass, error, tb)]
-class _NullHandler(logging.Handler):
+ error_message = str(error) or '%s exception' % type(error).__name__
+ if args:
+ if len(args) == 1:
+ message = args[0]
+ else:
+ message = args[0] % args[1:]
+ error_message = '%s: %s' % (message, error_message)
- def emit(self, record):
- pass
+ logger.error(error_message)
+ logger.debug('\n'.join(tb))
diff --git a/sugar_network/toolkit/application.py b/sugar_network/toolkit/application.py
new file mode 100644
index 0000000..7b1489a
--- /dev/null
+++ b/sugar_network/toolkit/application.py
@@ -0,0 +1,390 @@
+# Copyright (C) 2012-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Main process startup routines."""
+
+import os
+import sys
+import time
+import signal
+import logging
+import textwrap
+from optparse import OptionParser
+from os.path import join, abspath, exists, basename
+from gettext import gettext as _
+
+from sugar_network.toolkit import Option, printf, enforce
+
+
+debug = Option(
+ _('debug logging level; multiple argument'),
+ default=0, type_cast=int, short_option='-D', action='count',
+ name='debug')
+
+foreground = Option(
+ _('do not send the application into the background'),
+ default=False, type_cast=Option.bool_cast, short_option='-F',
+ action='store_true', name='foreground')
+
+replace = Option(
+ 'if application is already launched, replace it by new instance',
+ default=False, type_cast=Option.bool_cast,
+ action='store_true', name='replace')
+
+no_hints = Option(
+ _('suppress suggesting hints'),
+ default=False, short_option='-H', action='store_true',
+ name='no-hints')
+
+logdir = Option(
+ 'path to the directory to place log files',
+ name='logdir', default='/var/log')
+
+rundir = Option(
+ 'path to the directory to place pid files',
+ name='rundir')
+
+
+_LOGFILE_FORMAT = '%(asctime)s %(levelname)s %(name)s: %(message)s'
+
+
+def command(description='', name=None, args=None, hidden=False, **options):
+
+ def decorator(func):
+ # pylint: disable-msg=W0212
+ func._is_command = True
+ func.name = name
+ func.description = description
+ func.args = args
+ func.options = options
+ func.hidden = hidden
+ return func
+
+ return decorator
+
+
+class Application(object):
+
+ def __init__(self, name, description=None, version=None, epilog=None,
+ where=None, **parse_args):
+ self._rundir = None
+ self.args = None
+ self.name = name
+
+ self._commands = {}
+ for name in dir(self.__class__):
+ attr = getattr(self.__class__, name)
+ if hasattr(attr, '_is_command') and \
+ (attr.name != 'config' or 'config_files' in parse_args):
+ self._commands[attr.name or name] = getattr(self, name)
+
+ parser = OptionParser(usage='%prog [OPTIONS]', description=description,
+ add_help_option=False)
+
+ if version:
+ parser.add_option('-V', '--version',
+ help=_('show version number and exit'),
+ action='version')
+ parser.print_version = lambda: sys.stdout.write('%s\n' % version)
+
+ parser.add_option('-h', '--help',
+ help=_('show this help message and exit'),
+ action='store_true')
+
+ options, self.args = Option.parse_args(parser, **parse_args)
+
+ def print_desc(term, desc):
+ text = []
+ for num, line in enumerate(desc.split('\n')):
+ if num == 0:
+ for i in line:
+ if i.isalpha() and not i.isupper():
+ break
+ else:
+ term += ' ' + line
+ continue
+ text.extend(textwrap.wrap(line, 54))
+ if len(term) < 24:
+ sys.stdout.write(' %-22s' % term)
+ else:
+ text.insert(0, '')
+ sys.stdout.write(' %s' % term)
+ print ('\n' + ' ' * 24).join(text)
+
+ def print_commands():
+ if not self._commands:
+ return
+ print ''
+ print _('Commands') + ':'
+ for name, attr in sorted(self._commands.items(),
+ lambda x, y: cmp(x[0], y[0])):
+ if attr.hidden:
+ continue
+ if attr.args:
+ name += ' ' + attr.args
+ print_desc(name, attr.description)
+
+ if not self.args and not options.help:
+ prog = basename(sys.argv[0])
+ print _('Usage') + ': %s [OPTIONS] [COMMAND]' % prog
+ print ' %s -h|--help' % prog
+ print
+ print description
+ print_commands()
+ if epilog:
+ print ''
+ print epilog
+ exit(0)
+
+ if options.help:
+ parser.print_help()
+ print_commands()
+ if where:
+ print ''
+ print _('Where') + ':'
+ for term in sorted(where):
+ print_desc(term, where[term])
+ if epilog:
+ print ''
+ print epilog
+ exit(0)
+
+ if not debug.value:
+ logging_level = logging.WARNING
+ elif debug.value == 1:
+ logging_level = logging.INFO
+ elif debug.value == 2:
+ logging_level = logging.DEBUG
+ elif debug.value > 2:
+ logging_level = 0
+ logging_format = _LOGFILE_FORMAT
+
+ root_logger = logging.getLogger('')
+ for i in root_logger.handlers:
+ root_logger.removeHandler(i)
+
+ logging.basicConfig(level=logging_level, format=logging_format)
+
+ def epilog(self):
+ pass
+
+ def start(self):
+ self._rundir = abspath(rundir.value or '/var/run/' + self.name)
+
+ cmd_name = self.args.pop(0)
+ try:
+ cmd = self._commands.get(cmd_name)
+ enforce(cmd is not None, 'Unknown command "%s"' % cmd_name)
+
+ if Option.config_files:
+ logging.info('Load configuration from %s file(s)',
+ ', '.join(Option.config_files))
+
+ if cmd.options.get('keep_stdout') and not foreground.value:
+ self._keep_stdout()
+
+ exit(cmd() or 0)
+ except Exception:
+ printf.exception('%s %s', _('Aborted'), self.name)
+ exit(1)
+ finally:
+ self.epilog()
+ if not no_hints.value:
+ printf.flush_hints()
+
+ def check_for_instance(self):
+ pid = None
+ pidfile = join(self._rundir, '%s.pid' % self.name)
+ if exists(pidfile):
+ try:
+ pid = int(file(pidfile).read().strip())
+ os.getpgid(pid)
+ if basename(sys.argv[0]) not in _get_cmdline(pid):
+ # In case the pidfile was not removed after reboot
+ # and another process launched with pidfile's pid
+ pid = None
+ except (ValueError, OSError):
+ pid = None
+ return pid
+
+ def ensure_pidfile_path(self):
+ if not exists(self._rundir):
+ os.makedirs(self._rundir)
+ enforce(os.access(self._rundir, os.W_OK),
+ 'No write access to %r to store pidfile', self._rundir)
+
+ def new_instance(self):
+ self.ensure_pidfile_path()
+ pidfile_path = join(self._rundir, '%s.pid' % self.name)
+ with file(pidfile_path, 'w') as f:
+ f.write(str(os.getpid()))
+ return pidfile_path
+
+ @command('output current configuration', name='config')
+ def _cmd_config(self):
+ if self.args:
+ opt = self.args.pop(0)
+ enforce(opt in Option.items, 'Unknown option "%s"', opt)
+ exit(0 if bool(Option.items[opt].value) else 1)
+ else:
+ print '\n'.join(Option.export())
+
+ def _keep_stdout(self):
+ log_dir = abspath(logdir.value)
+ if not exists(log_dir):
+ os.makedirs(log_dir)
+ enforce(os.access(log_dir, os.W_OK), 'No write access to %s', log_dir)
+
+ # printf should still output to original streams
+ printf.stdout = os.fdopen(os.dup(sys.stdout.fileno()), 'w')
+ printf.stderr = os.fdopen(os.dup(sys.stderr.fileno()), 'w')
+
+ log_path = join(log_dir, '%s.log' % self.name)
+ logfile = file(log_path, 'a+')
+ os.dup2(logfile.fileno(), sys.stdout.fileno())
+ os.dup2(logfile.fileno(), sys.stderr.fileno())
+ logfile.close()
+
+
+class Daemon(Application):
+
+ _accept_pipe = None
+
+ def run(self):
+ raise NotImplementedError()
+
+ def shutdown(self):
+ pass
+
+ @command('start in daemon mode', name='start', keep_stdout=True)
+ def cmd_start(self):
+ while True:
+ pid = self.check_for_instance()
+ if not pid:
+ break
+ if not replace.value:
+ printf.info('%s is already run with pid %s', self.name, pid)
+ return 1
+ try:
+ printf.info('Kill previous %r instance', pid)
+ os.kill(pid, signal.SIGTERM)
+ except Exception:
+ pass
+ time.sleep(.5)
+
+ if foreground.value:
+ self._launch()
+ else:
+ self.ensure_pidfile_path()
+ self._daemonize()
+
+ return 0
+
+ @command('stop daemon', name='stop')
+ def cmd_stop(self):
+ pid = self.check_for_instance()
+ if pid:
+ os.kill(pid, signal.SIGTERM)
+ return 0
+ else:
+ printf.info('%s is not run', self.name)
+ return 1
+
+ @command('check for launched daemon', name='status')
+ def cmd_status(self):
+ pid = self.check_for_instance()
+ if pid:
+ printf.info('%s started', self.name)
+ return 0
+ else:
+ printf.info('%s stopped', self.name)
+ return 1
+
+ @command('reopen log files in daemon mode', name='reload')
+ def cmd_reload(self):
+ pid = self.check_for_instance()
+ if not pid:
+ printf.info('%s is not run', self.name)
+ return 1
+ os.kill(pid, signal.SIGHUP)
+ logging.info('Reload %s process', self.name)
+
+ def accept(self):
+ if self._accept_pipe is not None:
+ os.close(self._accept_pipe)
+ self._accept_pipe = None
+
+ def _launch(self):
+ logging.info('Start %s', self.name)
+
+ def sigterm_cb(signum, frame):
+ logging.info('Got signal %s to stop %s', signum, self.name)
+ self.shutdown()
+
+ def sighup_cb(signum, frame):
+ logging.info('Reload %s on SIGHUP signal', self.name)
+ self._keep_stdout()
+
+ signal.signal(signal.SIGINT, sigterm_cb)
+ signal.signal(signal.SIGTERM, sigterm_cb)
+ signal.signal(signal.SIGHUP, sighup_cb)
+
+ pid_path = self.new_instance()
+ try:
+ self.run()
+ finally:
+ self.epilog()
+ os.unlink(pid_path)
+
+ def _daemonize(self):
+ accept_pipe = os.pipe()
+ if os.fork() > 0:
+ os.close(accept_pipe[1])
+ os.read(accept_pipe[0], 1)
+ # Exit parent of the first child
+ return
+
+ os.close(accept_pipe[0])
+ self._accept_pipe = accept_pipe[1]
+
+ # Decouple from parent environment
+ os.chdir(os.sep)
+ os.setsid()
+
+ if os.fork() > 0:
+ # Exit from second parent
+ # pylint: disable-msg=W0212
+ os._exit(0)
+
+ # Redirect standard file descriptors
+ if not sys.stdin.closed:
+ stdin = file('/dev/null')
+ os.dup2(stdin.fileno(), sys.stdin.fileno())
+
+ try:
+ self._launch()
+ except Exception:
+ logging.exception('Aborted %s', self.name)
+ status = 1
+ else:
+ logging.info('Stopped %s', self.name)
+ status = 0
+
+ exit(status)
+
+
+def _get_cmdline(pid):
+ with file('/proc/%s/cmdline' % pid) as f:
+ return f.read()
diff --git a/sugar_network/toolkit/coroutine.py b/sugar_network/toolkit/coroutine.py
new file mode 100644
index 0000000..d251593
--- /dev/null
+++ b/sugar_network/toolkit/coroutine.py
@@ -0,0 +1,246 @@
+# Copyright (C) 2012-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Wrap coroutine related procedures."""
+
+# pylint: disable-msg=W0621
+
+import logging
+
+import gevent
+import gevent.pool
+import gevent.hub
+
+
+#: Process one events loop round.
+dispatch = gevent.sleep
+
+#: Put the current coroutine to sleep for at least `seconds`.
+sleep = gevent.sleep
+
+#: Wait for the spawned events to finish.
+joinall = gevent.joinall
+
+# TODO In #3753 case, resetting glibc cache doesn't help
+# if c-ares is being used for DNS resolving.
+gevent.hub.Hub.resolver_class = ['gevent.socket.BlockingResolver']
+
+_group = gevent.pool.Group()
+_logger = logging.getLogger('coroutine')
+_wsgi_logger = logging.getLogger('wsgi')
+
+
+def spawn(callback, *args):
+ return _group.spawn(callback, *args)
+
+
+def shutdown():
+ _group.kill()
+ return _group.join()
+
+
+def socket(*args, **kwargs):
+ import gevent.socket
+ return gevent.socket.socket(*args, **kwargs)
+
+
+def gethostbyname(host):
+ import gevent.socket
+ return gevent.socket.gethostbyname(host)
+
+
+def select(rlist, wlist, xlist, timeout=None):
+ import gevent.select
+ return gevent.select.select(rlist, wlist, xlist, timeout)
+
+
+def signal(*args, **kwargs):
+ return gevent.signal(*args, **kwargs)
+
+
+def Server(*args, **kwargs):
+ import gevent.server
+ kwargs['spawn'] = spawn
+ return gevent.server.StreamServer(*args, **kwargs)
+
+
+def WSGIServer(*args, **kwargs):
+ import gevent.wsgi
+
+ class WSGIHandler(gevent.wsgi.WSGIHandler):
+
+ def log_error(self, msg, *args):
+ _wsgi_logger.error(msg, *args)
+
+ def log_request(self):
+ _wsgi_logger.debug('%s', self.format_request())
+
+ kwargs['spawn'] = spawn
+ if 'handler_class' not in kwargs:
+ if logging.getLogger().level >= logging.DEBUG:
+ WSGIHandler.log_request = lambda * args: None
+ kwargs['handler_class'] = WSGIHandler
+ return gevent.wsgi.WSGIServer(*args, **kwargs)
+
+
+def Event():
+ import gevent.event
+ return gevent.event.Event()
+
+
+def AsyncResult():
+ import gevent.event
+ return gevent.event.AsyncResult()
+
+
+def Queue(*args, **kwargs):
+ import gevent.queue
+ return gevent.queue.Queue(*args, **kwargs)
+
+
+def Lock(*args, **kwargs):
+ import gevent.coros
+ return gevent.coros.Semaphore(*args, **kwargs)
+
+
+def RLock(*args, **kwargs):
+ import gevent.coros
+ return gevent.coros.RLock(*args, **kwargs)
+
+
+class AsyncEvent(object):
+
+ def __init__(self):
+ self._async = gevent.get_hub().loop.async()
+
+ def wait(self):
+ gevent.get_hub().wait(self._async)
+
+ def send(self):
+ self._async.send()
+
+
+class Empty(Exception):
+ pass
+
+
+class AsyncQueue(object):
+
+ def __init__(self):
+ self._queue = self._new_queue()
+ self._async = gevent.get_hub().loop.async()
+ self._aborted = False
+
+ def put(self, *args, **kwargs):
+ self._put(args, kwargs)
+ self._async.send()
+
+ def get(self):
+ self._aborted = False
+ while True:
+ try:
+ return self._get()
+ except Empty:
+ gevent.get_hub().wait(self._async)
+ if self._aborted:
+ self._aborted = False
+ raise
+
+ def abort(self):
+ self._aborted = True
+ self._async.send()
+
+ def __iter__(self):
+ while True:
+ try:
+ yield self.get()
+ except Empty:
+ break
+
+ def __getattr__(self, name):
+ return getattr(self._queue, name)
+
+ def _new_queue(self):
+ from Queue import Queue
+ return Queue()
+
+ def _put(self, args, kwargs):
+ self._queue.put(*args, **kwargs)
+
+ def _get(self):
+ from Queue import Empty as empty
+ try:
+ return self._queue.get_nowait()
+ except empty:
+ raise Empty()
+
+
+class Pool(gevent.pool.Pool):
+
+ def spawn(self, *args, **kwargs):
+ job = gevent.pool.Pool.spawn(self, *args, **kwargs)
+ _group.add(job)
+ return job
+
+ # pylint: disable-msg=W0221
+ def kill(self, *args, **kwargs):
+ from gevent.queue import Empty
+ try:
+ gevent.pool.Pool.kill(self, *args, **kwargs)
+ except Empty:
+ # Avoid useless exception on empty pool
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.kill()
+
+
+def _print_exception(context, klass, value, tb):
+ self = gevent.hub.get_hub()
+ if issubclass(klass, self.NOT_ERROR + self.SYSTEM_ERROR):
+ return
+
+ import traceback
+ tb_repr = '\n'.join([i.rstrip()
+ for i in traceback.format_exception(klass, value, tb)][:-1])
+ del tb
+
+ context_repr = None
+ if context is None:
+ context = 'Undefined'
+ elif not isinstance(context, basestring):
+ if isinstance(context, dict) and 'PATH_INFO' in context:
+ context_repr = '%s%s' % \
+ (context['PATH_INFO'], context.get('QUERY_STRING') or '')
+ try:
+ context = self.format_context(context)
+ except Exception:
+ context = repr(context)
+ error = 'Failed from %r context: %s' % \
+ (context_repr or context[:40] + '..', value)
+
+ logging_level = logging.getLogger().level
+ if logging_level > logging.DEBUG:
+ _logger.error(error)
+ elif logging_level == logging.DEBUG:
+ _logger.error('\n'.join([error, tb_repr]))
+ else:
+ _logger.error('\n'.join([error, context, tb_repr]))
+
+
+gevent.hub.get_hub().print_exception = _print_exception
diff --git a/sugar_network/toolkit/files_sync.py b/sugar_network/toolkit/files_sync.py
index dfa62a0..022a2f7 100644
--- a/sugar_network/toolkit/files_sync.py
+++ b/sugar_network/toolkit/files_sync.py
@@ -19,11 +19,8 @@ import logging
from bisect import bisect_left
from os.path import join, exists, relpath, lexists, basename, dirname
-import active_document as ad
-from sugar_network.toolkit import PersistentSequence
from sugar_network.toolkit.sneakernet import DiskFull
-from active_toolkit.sockets import BUFFER_SIZE
-from active_toolkit import util, coroutine
+from sugar_network.toolkit import BUFFER_SIZE, util, coroutine
_logger = logging.getLogger('files_sync')
@@ -52,8 +49,8 @@ class Seeder(object):
# Thus, avoid changing `self._index` by different coroutines.
with self._mutex:
self._sync()
- orig_seq = ad.Sequence(in_seq)
- out_seq = ad.Sequence()
+ orig_seq = util.Sequence(in_seq)
+ out_seq = util.Sequence()
try:
self._pull(in_seq, packet, out_seq, False)
@@ -187,7 +184,7 @@ class Leecher(object):
def __init__(self, files_path, sequence_path):
self._files_path = files_path.rstrip(os.sep)
- self.sequence = PersistentSequence(sequence_path, [1, None])
+ self.sequence = util.PersistentSequence(sequence_path, [1, None])
if not exists(self._files_path):
os.makedirs(self._files_path)
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index 92cf1a3..1ca2002 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -24,11 +24,9 @@ import requests
from requests.sessions import Session
from M2Crypto import DSA
-import active_document as ad
-from sugar_network.toolkit import sugar
+from sugar_network.toolkit import sugar, coroutine, exception, enforce
from sugar_network.toolkit.router import Redirect
-from sugar_network import client
-from active_toolkit import coroutine, util, enforce
+from sugar_network import db, client
ConnectionError = requests.ConnectionError
@@ -51,7 +49,7 @@ class Client(object):
elif client.certfile.value:
verify = client.certfile.value
- headers = {'Accept-Language': ad.default_lang()}
+ headers = {'Accept-Language': db.default_lang()}
if self._sugar_auth:
privkey_path = sugar.privkey_path()
if not exists(privkey_path):
@@ -240,7 +238,7 @@ class _Subscription(object):
except Exception:
if a_try == 0:
raise
- util.exception('Failed to read from %r subscription, '
+ exception('Failed to read from %r subscription, '
'will resubscribe', self._client.api_url)
self._response = None
@@ -248,7 +246,7 @@ class _Subscription(object):
try:
return json.loads(line.split(' ', 1)[1])
except Exception:
- util.exception('Failed to parse %r event from %r subscription',
+ exception('Failed to parse %r event from %r subscription',
line, self._client.api_url)
def _handshake(self):
@@ -265,7 +263,7 @@ class _Subscription(object):
except Exception:
if a_try == 0:
raise
- util.exception(_logger,
+ exception(_logger,
'Cannot subscribe to %r, retry in %s second(s)',
self._client.api_url, _RECONNECTION_TIMEOUT)
coroutine.sleep(_RECONNECTION_TIMEOUT)
diff --git a/sugar_network/toolkit/inotify.py b/sugar_network/toolkit/inotify.py
index 90a26af..26ad619 100644
--- a/sugar_network/toolkit/inotify.py
+++ b/sugar_network/toolkit/inotify.py
@@ -16,13 +16,8 @@
# Code is based on pyinotify sources
# http://pypi.python.org/pypi/pyinotify
-"""Linux inotify integration.
+"""Linux inotify integration."""
-$Repo: git://git.sugarlabs.org/alsroot/codelets.git$
-$File: src/inotify.py$
-$Date: 2012-07-12$
-
-"""
import os
import errno
import struct
@@ -180,7 +175,7 @@ class Inotify(object):
struct.unpack('iIII', buf[pos:pos + _EVENT_HEADER_SIZE])
pos += _EVENT_HEADER_SIZE
- filename_end = buf.find('\0', pos, pos + name_len)
+ filename_end = buf.find('\x00', pos, pos + name_len)
if filename_end == -1:
filename = ''
else:
diff --git a/sugar_network/zerosugar/lsb_release.py b/sugar_network/toolkit/lsb_release.py
index 47072d8..0e8d110 100644
--- a/sugar_network/zerosugar/lsb_release.py
+++ b/sugar_network/toolkit/lsb_release.py
@@ -13,13 +13,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""Get LSB (Linux Standard Base) Distribution information.
-
-$Repo: git://git.sugarlabs.org/alsroot/codelets.git$
-$File: src/lsb_release.py$
-$Date: 2012-08-13$
-
-"""
+"""Get LSB (Linux Standard Base) Distribution information."""
import re
import math
@@ -143,15 +137,15 @@ def _find_lsb_release():
elif exists('/etc/redhat-release'):
line = file('/etc/redhat-release').read().strip()
- match = re.search('Fedora.*?\W([0-9.]+)', line)
+ match = re.search('Fedora.*?\\W([0-9.]+)', line)
if match is not None:
return 'Fedora', match.group(1)
- match = re.search('CentOS.*?\W([0-9.]+)', line)
+ match = re.search('CentOS.*?\\W([0-9.]+)', line)
if match is not None:
return 'CentOS', match.group(1)
- match = re.search('\W([0-9.]+)', line)
+ match = re.search('\\W([0-9.]+)', line)
if match is not None:
return 'RHEL', match.group(1)
diff --git a/sugar_network/toolkit/mountpoints.py b/sugar_network/toolkit/mountpoints.py
index 100a4e3..8c2c90c 100644
--- a/sugar_network/toolkit/mountpoints.py
+++ b/sugar_network/toolkit/mountpoints.py
@@ -19,7 +19,7 @@ from os.path import join, exists
from sugar_network.toolkit.inotify import Inotify, \
IN_DELETE_SELF, IN_CREATE, IN_DELETE, IN_MOVED_TO, IN_MOVED_FROM
-from active_toolkit import coroutine, util
+from sugar_network.toolkit import coroutine, exception
_COMPLETE_MOUNT_TIMEOUT = 3
@@ -95,4 +95,4 @@ def _call(path, filename, cb):
try:
cb(path)
except Exception:
- util.exception(_logger, 'Cannot call %r for %r mount', cb, path)
+ exception(_logger, 'Cannot call %r for %r mount', cb, path)
diff --git a/sugar_network/toolkit/netlink.py b/sugar_network/toolkit/netlink.py
index 6fe214a..53b0f2c 100644
--- a/sugar_network/toolkit/netlink.py
+++ b/sugar_network/toolkit/netlink.py
@@ -13,13 +13,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""Linux Netlink integration.
+"""Linux Netlink integration."""
-$Repo: git://git.sugarlabs.org/alsroot/codelets.git$
-$File: src/netlink.py$
-$Date: 2012-08-05$
-
-"""
import os
import socket
import struct
diff --git a/sugar_network/toolkit/network.py b/sugar_network/toolkit/network.py
deleted file mode 100644
index a0ba499..0000000
--- a/sugar_network/toolkit/network.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (C) 2012 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import ctypes
-import logging
-from ctypes.util import find_library
-
-
-_logger = logging.getLogger('network')
-
-
-def res_init():
- """Reset resolving cache.
-
- Calling this function will enforce libc to avoid using stale resolving
- cache after getting [re]connected. For example, if application process
- was launched when there were no any DNS servers available, after getting
- connected, call `res_init()` to reuse newly appeared DNS servers.
-
- """
- try:
- lib_name = find_library('c')
- libc = ctypes.CDLL(lib_name)
- getattr(libc, '__res_init')(None)
- except Exception:
- _logger.exception('Failed to call res_init()')
diff --git a/sugar_network/toolkit/options.py b/sugar_network/toolkit/options.py
new file mode 100644
index 0000000..8860c30
--- /dev/null
+++ b/sugar_network/toolkit/options.py
@@ -0,0 +1,358 @@
+# Copyright (C) 2011-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Command-line options parsing utilities."""
+
+import sys
+from os.path import exists, expanduser
+
+
+class Option(object):
+ """Configuration option.
+
+ `Option` object will be used as command-line argument and
+ configuration file option. All these objects will be automatically
+ collected from `sugar_server.env` module and from `etc` module from
+ all services.
+
+ """
+ #: Collected by `Option.seek()` options in original order.
+ unsorted_items = []
+ #: Collected by `Option.seek()` options by name.
+ items = {}
+ #: Collected by `Option.seek()` options by section.
+ sections = {}
+ #: Configure files used to form current configuration
+ config_files = []
+
+ _config = None
+ _config_files_to_save = []
+
+ def __init__(self, description=None, default=None, short_option=None,
+ type_cast=None, type_repr=None, action=None, name=None):
+ """
+ :param description:
+ description string
+ :param default:
+ default value for the option
+ :param short_option:
+ value in form of `-<char>` to use as a short option for command-line
+ parser
+ :param type_cast:
+ function that will be used to type cast to option type
+ while setting option value
+ :param type_repr:
+ function that will be used to type cast from option type
+ while converting option value to string
+ :param action:
+ value for `action` argument of `OptionParser.add_option()`
+ :param name:
+ specify option name instead of reusing variable name
+
+ """
+ if default is not None and type_cast is not None:
+ default = type_cast(default)
+ self.default = default
+ self._value = default
+ self.description = description
+ self.type_cast = type_cast
+ self.type_repr = type_repr
+ self.short_option = short_option or ''
+ self.action = action
+ self.section = None
+ self.name = name
+ self.attr_name = None
+
+ @property
+ def long_option(self):
+ """Long command-line argument name."""
+ return '--%s' % self.name
+
+ # pylint: disable-msg=E0202
+ @property
+ def value(self):
+ """Get option raw value."""
+ return self._value
+
+ # pylint: disable-msg=E1101, E0102, E0202
+ @value.setter
+ def value(self, x):
+ """Set option value.
+
+ The `Option.type_cast` function will be used for type casting specified
+ value to option.
+
+ """
+ if x is None:
+ self._value = None
+ elif self.type_cast is not None:
+ self._value = self.type_cast(x)
+ else:
+ self._value = str(x) or None
+
+ @staticmethod
+ def seek(section, mod=None):
+ """Collect `Option` objects.
+
+ Function will populate `Option.unsorted_items`, `Option.items` and
+ `Option.sections` values. Call this function before any usage
+ of `Option` objects.
+
+ :param section:
+ arbitrary name to group options per section
+ :param mod:
+ module object to search for `Option` objects;
+ if omitted, use caller's module
+
+ """
+ if mod is None:
+ mod_name = _get_frame(1).f_globals['__name__']
+ mod = sys.modules[mod_name]
+
+ if type(mod) in (list, tuple):
+ options = dict([(i.name.replace('-', '_'), i) for i in mod])
+ else:
+ options = dict([(i, getattr(mod, i)) for i in dir(mod)])
+
+ for name in sorted(options):
+ attr = options[name]
+ # Options might be from different `options` modules
+ if not (type(attr).__name__ == 'Option' and
+ type(attr).__module__.split('.')[-1] == 'options'):
+ continue
+
+ attr.attr_name = name
+ attr.name = name.replace('_', '-')
+ attr.module = mod
+ attr.section = section
+
+ Option.unsorted_items.append(attr)
+ Option.items[attr.name] = attr
+ if section not in Option.sections:
+ Option.sections[section] = {}
+ Option.sections[section][attr.name] = attr
+
+ @staticmethod
+ def load(config_files):
+ """Load option settings from configure files.
+
+ If application accepts command-line arguments,
+ use `Option.parse_args()` function instead.
+
+ :param config_files:
+ list of paths to files that will be used to read default
+ option values; this value will initiate `Option.config` variable
+
+ """
+ Option._merge(None, config_files)
+
+ @staticmethod
+ def parse_args(parser, config_files=None, stop_args=None, notice=None):
+ """Load configure files and combine them with command-line arguments.
+
+ :param parser:
+ `OptionParser` object to parse for command-line arguments
+ :param config_files:
+ list of paths to files that will be used to read default
+ option values; this value will initiate `Option.config` variable
+ :param stop_args:
+ optional list of arguments that should stop further command-line
+ arguments parsing
+ :param notice:
+ optional notice to use only in command-line related cases
+ :returns:
+ (`options`, `args`) tuple with data parsed from
+ command-line arguments
+
+ """
+ Option._bind(parser, config_files, notice)
+
+ if stop_args:
+ parser.disable_interspersed_args()
+ options, args = parser.parse_args()
+ if stop_args and args and args[0] not in stop_args:
+ parser.enable_interspersed_args()
+ options, args = parser.parse_args(args, options)
+
+ Option._merge(options, None)
+
+ # Update default values according to current values
+ # to expose them while processing --help
+ for prop in [Option._config] + Option.items.values():
+ if prop is None:
+ continue
+ parser.set_default(prop.name.replace('-', '_'), prop)
+
+ return options, args
+
+ @staticmethod
+ def bind(parser, config_files=None, notice=None):
+ # DEPRECATED
+ Option._bind(parser, config_files, notice)
+
+ @staticmethod
+ def merge(options, config_files=None):
+ # DEPRECATED
+ Option._merge(options, config_files)
+
+ @staticmethod
+ def export():
+ """Current configuration in human readable form.
+
+ :returns:
+ list of lines
+
+ """
+ import textwrap
+
+ lines = []
+ sections = set()
+
+ for prop in Option.unsorted_items:
+ if prop.section not in sections:
+ if sections:
+ lines.append('')
+ lines.append('[%s]' % prop.section)
+ sections.add(prop.section)
+ lines.append('\n'.join(
+ ['# %s' % i for i in textwrap.wrap(prop.description, 78)]))
+ value = '\n\t'.join(str(prop).split('\n'))
+ lines.append('%s = %s' % (prop.name, value))
+
+ return lines
+
+ @staticmethod
+ def save(path=None):
+ if not path:
+ if not Option._config_files_to_save:
+ raise RuntimeError('No configure files to save')
+ path = Option._config_files_to_save[-1]
+ with file(path, 'w') as f:
+ f.write('\n'.join(Option.export()))
+
+ @staticmethod
+ def bool_cast(x):
+ if not x or str(x).strip().lower() in ['', 'false', 'none']:
+ return False
+ else:
+ return bool(x)
+
+ @staticmethod
+ def list_cast(x):
+ if isinstance(x, basestring):
+ return [i for i in x.strip().split() if i]
+ else:
+ return x
+
+ @staticmethod
+ def list_repr(x):
+ return ' '.join(x)
+
+ @staticmethod
+ def paths_cast(x):
+ if isinstance(x, basestring):
+ return [i for i in x.strip().split(':') if i]
+ else:
+ return x
+
+ @staticmethod
+ def paths_repr(x):
+ return ':'.join(x)
+
+ def __str__(self):
+ if self.value is None:
+ return ''
+ else:
+ if self.type_repr is None:
+ return str(self.value)
+ else:
+ return self.type_repr(self.value)
+
+ def __unicode__(self):
+ return self.__str__()
+
+ @staticmethod
+ def _bind(parser, config_files, notice):
+ import re
+
+ if config_files:
+ Option._config = Option()
+ Option._config.name = 'config'
+ Option._config.attr_name = 'config'
+ Option._config.description = \
+ 'colon separated list of paths to alternative ' \
+ 'configuration file(s)'
+ Option._config.short_option = '-c'
+ Option._config.type_cast = \
+ lambda x: [i for i in re.split('[\\s:;,]+', x) if i]
+ Option._config.type_repr = \
+ lambda x: ':'.join(x)
+ Option._config.value = ':'.join(config_files)
+
+ for prop in [Option._config] + Option.items.values():
+ if prop is None:
+ continue
+ desc = prop.description
+ if prop.value is not None:
+ desc += ' [%default]'
+ if notice:
+ desc += '; ' + notice
+ if parser is not None:
+ parser.add_option(prop.short_option, prop.long_option,
+ action=prop.action, help=desc)
+
+ @staticmethod
+ def _merge(options, config_files):
+ from ConfigParser import ConfigParser
+
+ if not config_files and Option._config is not None:
+ config_files = Option._config.value
+
+ configs = [ConfigParser()]
+ for config in config_files or []:
+ if isinstance(config, ConfigParser):
+ configs.append(config)
+ else:
+ config = expanduser(config)
+ if exists(config):
+ Option.config_files.append(config)
+ configs[0].read(config)
+ Option._config_files_to_save.append(config)
+
+ for prop in Option.items.values():
+ if hasattr(options, prop.attr_name) and \
+ getattr(options, prop.attr_name) is not None:
+ prop.value = getattr(options, prop.attr_name)
+ else:
+ for config in configs:
+ if config.has_option(prop.section, prop.name):
+ prop.value = config.get(prop.section, prop.name)
+
+
+def _get_frame(frame_no):
+ """Return Python call stack frame.
+
+ The reason to have this wrapper is that this stack information is a private
+ data and might depend on Python implementation.
+
+ :param frame_no:
+ number of stack frame starting from caller's stack position
+ :returns:
+ frame object
+
+ """
+ # +1 since the calling `get_frame` adds one more frame
+ # pylint: disable-msg=W0212
+ return sys._getframe(frame_no + 1)
diff --git a/sugar_network/toolkit/pipe.py b/sugar_network/toolkit/pipe.py
index 4c4c6f4..5b14715 100644
--- a/sugar_network/toolkit/pipe.py
+++ b/sugar_network/toolkit/pipe.py
@@ -22,8 +22,7 @@ import threading
import cPickle as pickle
from os.path import exists
-from sugar_network import sugar
-from active_toolkit import coroutine, util
+from sugar_network.toolkit import coroutine, util, sugar
_logger = logging.getLogger('pipe')
diff --git a/sugar_network/toolkit/printf.py b/sugar_network/toolkit/printf.py
new file mode 100644
index 0000000..ca7fb59
--- /dev/null
+++ b/sugar_network/toolkit/printf.py
@@ -0,0 +1,223 @@
+# Copyright (C) 2011-2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Console output routines."""
+
+import sys
+import logging
+
+
+#: Disable/enable non-status output.
+VERBOSE = True
+#: Disable/enable any output.
+QUIET = False
+
+RESET = '\x1b[0m'
+BOLD = '\x1b[1m'
+BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = \
+ ['\x1b[1;%dm' % (30 + i_) for i_ in range(8)]
+
+stdout = sys.stdout
+stderr = sys.stderr
+
+_hints = []
+_last_line_len = 0
+_last_progress = []
+_screen_width = None
+
+
+def dump(message, *args):
+ """Print verbatim text.
+
+ :param message:
+ text to print
+ :param :*args:
+ `%` arguments to expand `message` value
+
+ """
+ _dump(False, stdout, '', [message, args], '\n')
+
+
+def info(message, *args):
+ """Print information text.
+
+ :param message:
+ text to print
+ :param :*args:
+ `%` arguments to expand `message` value
+
+ """
+ _dump(True, stderr, None, [message, args], '\n')
+ _dump_progress()
+
+
+def exception(message=None, *args):
+ """Print exception text.
+
+ Call this function in `try..except` block after getting exceptions.
+
+ :param message:
+ text to print
+ :param :*args:
+ `%` arguments to expand `message` value
+
+ """
+ import traceback
+
+ klass, error, tb = sys.exc_info()
+
+ tb_list = []
+ for line in traceback.format_exception(klass, error, tb):
+ tb_list.extend([i.rstrip() for i in line.strip().split('\n')])
+
+ if type(error).__name__ == 'dbus.exceptions.DBusException':
+ dbus_tb = str(error).split('\n')
+ if len(dbus_tb) == 1:
+ error = dbus_tb[0]
+ else:
+ # Strip the last empty line
+ del dbus_tb[-1]
+ error = '%s:%s' % \
+ (dbus_tb[0].split(':')[0], dbus_tb[-1].split(':', 1)[-1])
+
+ if message and args:
+ message = message % args
+
+ error = str(error) or 'Something weird happened'
+ if message:
+ message += ': %s' % error
+ else:
+ message = str(error)
+ _dump(True, stderr, None, message, '\n')
+
+ if logging.getLogger().level > logging.INFO:
+ hint('Use -D argument for debug info, '
+ '-DD for full debuging output and tracebacks')
+ elif logging.getLogger().level > logging.DEBUG:
+ hint('Use -DD argument for full debuging output and tracebacks')
+ else:
+ for i in tb_list:
+ _dump(True, stderr, ' ', i, '\n')
+
+ _dump_progress()
+
+
+def scan_yn(message, *args):
+ """Request for Y/N input.
+
+ :param message:
+ prefix text to print
+ :param :*args:
+ `%` arguments to expand `message` value
+ :returns:
+ `True` if user's input was `Y`
+
+ """
+ _dump(True, stderr, None, [message, args], ' [Y/N] ')
+ answer = raw_input()
+ _dump_progress()
+ return answer and answer in 'Yy'
+
+
+def progress(message, *args):
+ """Print status line text.
+
+ Status line will be shown as the last line all time and will be cleared
+ on program exit.
+
+ :param message:
+ prefix text to print
+ :param :*args:
+ `%` arguments to expand `message` value
+
+ """
+ _last_progress[:] = [message, args]
+ _dump_progress()
+
+
+def clear_progress():
+ """Clear status line on program exit."""
+ if _last_line_len:
+ stderr.write(chr(13) + ' ' * _last_line_len + chr(13))
+
+
+def hint(message, *args):
+ """Add new hint.
+
+ All hint will be queued to print them at once in `flush_hints()` function
+ on program exit.
+
+ :param message:
+ prefix text to print
+ :param :*args:
+ `%` arguments to expand `message` value
+
+ """
+ if args:
+ message = message % args
+ _hints.append(message)
+
+
+def flush_hints():
+ """Print all queued hints."""
+ clear_progress()
+ if _hints:
+ dump('')
+ while _hints:
+ _dump(True, stderr, '-- Hint: ', _hints.pop(0), '\n')
+
+
+def _dump(is_status, stream, prefix, *args):
+ if not VERBOSE or QUIET:
+ return
+
+ global _last_line_len
+ global _screen_width
+
+ if _screen_width is None:
+ try:
+ import curses
+ curses.setupterm()
+ _screen_width = curses.tigetnum('cols') or 80
+ except Exception, error:
+ logging.info('Cannot get screen width: %s', error)
+ _screen_width = 80
+
+ if prefix is None:
+ prefix = '-- '
+
+ clear_progress()
+ _last_line_len = 0
+
+ for i in [prefix] + list(args):
+ if isinstance(i, list):
+ if i:
+ message, message_args = i
+ if message_args:
+ message = message % message_args
+ else:
+ message = i
+
+ stream.write(message)
+
+ if is_status:
+ _last_line_len += len(message)
+
+ _last_line_len = min(_last_line_len, _screen_width)
+
+
+def _dump_progress():
+ _dump(True, stderr, ' ', _last_progress, chr(13))
+ stderr.flush()
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index 5e212a8..1c67a4f 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -25,11 +25,9 @@ from urlparse import parse_qsl, urlsplit
from bisect import bisect_left
from os.path import join, isfile, split, splitext
-import active_document as ad
-from sugar_network import static
-from sugar_network.toolkit import sugar
-from active_toolkit.sockets import BUFFER_SIZE
-from active_toolkit import coroutine, util, enforce
+from sugar_network import db, static
+from sugar_network.toolkit import BUFFER_SIZE
+from sugar_network.toolkit import sugar, coroutine, exception, enforce
_logger = logging.getLogger('router')
@@ -95,7 +93,7 @@ def stream_reader(stream):
stream.close()
-class Request(ad.Request):
+class Request(db.Request):
principal = None
mountpoint = None
@@ -130,7 +128,7 @@ class Router(object):
_logger.debug('Logging %r user', user)
request = Request(method='GET', cmd='exists',
document='user', guid=user)
- enforce(self.commands.call(request, ad.Response()), Unauthorized,
+ enforce(self.commands.call(request, db.Response()), Unauthorized,
'Principal user does not exist')
self._authenticated.add(user)
@@ -138,7 +136,7 @@ class Router(object):
def call(self, request, response):
if 'HTTP_ORIGIN' in request.environ:
- enforce(self._assert_origin(request.environ), ad.Forbidden,
+ enforce(self._assert_origin(request.environ), db.Forbidden,
'Cross-site is not allowed for %r origin',
request.environ['HTTP_ORIGIN'])
response['Access-Control-Allow-Origin'] = \
@@ -159,8 +157,8 @@ class Router(object):
request.principal = self.authenticate(request)
if request.path[:1] == ['static']:
path = join(static.PATH, *request.path[1:])
- result = ad.PropertyMeta(path=path, mime_type=_get_mime_type(path),
- filename=split(path)[-1])
+ result = db.PropertyMetadata(path=path,
+ mime_type=_get_mime_type(path), filename=split(path)[-1])
else:
rout = self._routes.get((
request['method'],
@@ -170,7 +168,7 @@ class Router(object):
else:
result = self.commands.call(request, response)
- if isinstance(result, ad.PropertyMeta):
+ if isinstance(result, db.PropertyMetadata):
if 'url' in result:
raise Redirect(result['url'])
@@ -225,11 +223,11 @@ class Router(object):
response.update(error.headers)
response.content_type = None
except Exception, error:
- util.exception('Error while processing %r request', request.url)
+ exception('Error while processing %r request', request.url)
- if isinstance(error, ad.NotFound):
+ if isinstance(error, db.NotFound):
response.status = '404 Not Found'
- elif isinstance(error, ad.Forbidden):
+ elif isinstance(error, db.Forbidden):
response.status = '403 Forbidden'
elif isinstance(error, HTTPStatus):
response.status = error.status
@@ -311,7 +309,7 @@ class IPCRouter(Router):
return sugar.uid()
def call(self, request, response):
- request.access_level = ad.ACCESS_LOCAL
+ request.access_level = db.ACCESS_LOCAL
return Router.call(self, request, response)
@@ -327,7 +325,7 @@ class _Request(Request):
if not environ:
return
- self.access_level = ad.ACCESS_REMOTE
+ self.access_level = db.ACCESS_REMOTE
self.environ = environ
self.url = '/' + environ['PATH_INFO'].strip('/')
self.path = [i for i in self.url[1:].split('/') if i]
@@ -397,7 +395,7 @@ class _Request(Request):
return request
-class _Response(ad.Response):
+class _Response(db.Response):
# pylint: disable-msg=E0202
status = '200 OK'
@@ -442,7 +440,7 @@ class _Response(ad.Response):
def __repr__(self):
args = ['status=%r' % self.status,
] + ['%s=%r' % i for i in self.items()]
- return '<active_document.Response %s>' % ' '.join(args)
+ return '<db.Response %s>' % ' '.join(args)
def _parse_accept_language(accept_language):
@@ -484,7 +482,7 @@ def _filename(names, mime_type):
parts = []
for name in names:
if isinstance(name, dict):
- name = ad.gettext(name)
+ name = db.gettext(name)
parts.append(''.join([i.capitalize() for i in str(name).split()]))
result = '-'.join(parts)
if mime_type:
diff --git a/sugar_network/toolkit/rrd.py b/sugar_network/toolkit/rrd.py
index ad351fd..1fdc795 100644
--- a/sugar_network/toolkit/rrd.py
+++ b/sugar_network/toolkit/rrd.py
@@ -13,13 +13,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""Convenient access to RRD databases.
-
-$Repo: git://git.sugarlabs.org/alsroot/codelets.git$
-$File: src/rrd.py$
-$Date: 2012-11-21$
-
-"""
+"""Convenient access to RRD databases."""
import re
import os
@@ -30,8 +24,8 @@ from datetime import datetime
from os.path import exists, join
-_DB_FILENAME_RE = re.compile('(.*?)(-[0-9]+){0,1}\.rrd$')
-_INFO_RE = re.compile('([^[]+)\[([^]]+)\]\.(.*)$')
+_DB_FILENAME_RE = re.compile('(.*?)(-[0-9]+){0,1}\\.rrd$')
+_INFO_RE = re.compile('([^[]+)\\[([^]]+)\\]\\.(.*)$')
_FETCH_PAGE = 256
diff --git a/sugar_network/toolkit/sneakernet.py b/sugar_network/toolkit/sneakernet.py
index e262755..3cd6601 100644
--- a/sugar_network/toolkit/sneakernet.py
+++ b/sugar_network/toolkit/sneakernet.py
@@ -23,10 +23,8 @@ from cStringIO import StringIO
from contextlib import contextmanager
from os.path import join, exists
-import active_document as ad
-from sugar_network import toolkit
-from active_toolkit.sockets import BUFFER_SIZE
-from active_toolkit import util, enforce
+from sugar_network import db
+from sugar_network.toolkit import BUFFER_SIZE, util, exception, enforce
_RESERVED_SIZE = 1024 * 1024
@@ -64,7 +62,7 @@ class InPacket(object):
self._file = stream = file(path, 'rb')
elif not hasattr(stream, 'seek'):
# tarfile/gzip/zip might require seeking
- self._file = toolkit.NamedTemporaryFile()
+ self._file = util.NamedTemporaryFile()
if hasattr(stream, 'read'):
while True:
@@ -94,7 +92,7 @@ class InPacket(object):
except Exception, error:
self.close()
- util.exception()
+ exception()
raise RuntimeError('Malformed %r packet: %s' % (self, error))
_logger.trace('Reading %r input packet', self)
@@ -200,7 +198,7 @@ class OutPacket(object):
if seqno is not None:
self._basename += '-%s' % seqno
else:
- self._basename = ad.uuid()
+ self._basename = db.uuid()
self._basename += _PACKET_SUFFIX
kwargs['filename'] = self._basename
@@ -323,7 +321,7 @@ class OutPacket(object):
self._flush(0, True)
limit = self._enforce_limit()
- with toolkit.NamedTemporaryFile() as arcfile:
+ with util.NamedTemporaryFile() as arcfile:
while True:
limit -= len(chunk)
if limit <= 0:
@@ -416,7 +414,7 @@ class OutFilePacket(OutPacket):
def __init__(self, root=None, **kwargs):
stream = None
if root is None:
- stream = toolkit.NamedTemporaryFile()
+ stream = util.NamedTemporaryFile()
OutPacket.__init__(self, root=root, stream=stream, **kwargs)
diff --git a/sugar_network/toolkit/sugar.py b/sugar_network/toolkit/sugar.py
index b036492..03df476 100644
--- a/sugar_network/toolkit/sugar.py
+++ b/sugar_network/toolkit/sugar.py
@@ -20,8 +20,7 @@ import random
import hashlib
from os.path import join, exists, dirname
-from active_toolkit.options import Option
-from active_toolkit import enforce
+from sugar_network.toolkit import Option, enforce
_XO_SERIAL_PATH = '/ofw/mfg-data/SN'
@@ -133,4 +132,4 @@ def uuid_new():
def _read_XO_value(path):
- return file(path).read().rstrip('\0\n')
+ return file(path).read().rstrip('\x00\n')
diff --git a/sugar_network/toolkit/util.py b/sugar_network/toolkit/util.py
new file mode 100644
index 0000000..115cf02
--- /dev/null
+++ b/sugar_network/toolkit/util.py
@@ -0,0 +1,622 @@
+# Copyright (C) 2011-2012 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Swiss knife module."""
+
+import os
+import json
+import logging
+import hashlib
+import tempfile
+import collections
+from os.path import exists, join, islink, isdir, dirname, basename, abspath
+from os.path import lexists, isfile
+
+from sugar_network.toolkit import tmpdir, enforce
+
+
+_logger = logging.getLogger('toolkit.util')
+
+
+def init_logging(debug_level):
+ # pylint: disable-msg=W0212
+
+ logging.addLevelName(9, 'TRACE')
+ logging.addLevelName(8, 'HEARTBEAT')
+
+ logging.Logger.trace = lambda self, message, *args, **kwargs: None
+ logging.Logger.heartbeat = lambda self, message, *args, **kwargs: None
+
+ if debug_level < 3:
+ _disable_logger([
+ 'requests.packages.urllib3.connectionpool',
+ 'requests.packages.urllib3.poolmanager',
+ 'requests.packages.urllib3.response',
+ 'requests.packages.urllib3',
+ 'inotify',
+ 'netlink',
+ 'sugar_stats',
+ ])
+ elif debug_level < 4:
+ logging.Logger.trace = lambda self, message, *args, **kwargs: \
+ self._log(9, message, args, **kwargs)
+ _disable_logger(['sugar_stats'])
+ else:
+ logging.Logger.heartbeat = lambda self, message, *args, **kwargs: \
+ self._log(8, message, args, **kwargs)
+
+
+def res_init():
+ """Reset resolving cache.
+
+ Calling this function will enforce libc to avoid using stale resolving
+ cache after getting [re]connected. For example, if application process
+ was launched when there were no any DNS servers available, after getting
+ connected, call `res_init()` to reuse newly appeared DNS servers.
+
+ """
+ import ctypes
+ from ctypes.util import find_library
+ try:
+ lib_name = find_library('c')
+ libc = ctypes.CDLL(lib_name)
+ getattr(libc, '__res_init')(None)
+ except Exception:
+ _logger.exception('Failed to call res_init()')
+
+
+def spawn(cmd_filename, *args):
+ _logger.trace('Spawn %s%r', cmd_filename, args)
+
+ if os.fork():
+ return
+
+ os.execvp(cmd_filename, (cmd_filename,) + args)
+
+
+def symlink(src, dst):
+ if not isfile(src):
+ _logger.debug('Cannot link %r to %r, source file is absent', src, dst)
+ return
+
+ _logger.trace('Link %r to %r', src, dst)
+
+ if lexists(dst):
+ os.unlink(dst)
+ elif not exists(dirname(dst)):
+ os.makedirs(dirname(dst))
+ os.symlink(src, dst)
+
+
+def ensure_dsa_pubkey(path):
+ if not exists(path):
+ _logger.info('Create DSA server key')
+ assert_call([
+ '/usr/bin/ssh-keygen', '-q', '-t', 'dsa', '-f', path,
+ '-C', '', '-N', ''])
+
+ with file(path + '.pub') as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith('ssh-'):
+ key = line.split()[1]
+ return str(hashlib.sha1(key).hexdigest())
+
+ raise RuntimeError('No valid DSA public key in %r' % path)
+
+
+def svg_to_png(src_path, dst_path, width, height):
+ import rsvg
+ import cairo
+
+ svg = rsvg.Handle(src_path)
+
+ surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
+ context = cairo.Context(surface)
+ scale = min(
+ float(width) / svg.props.width,
+ float(height) / svg.props.height)
+ context.scale(scale, scale)
+ svg.render_cairo(context)
+
+ surface.write_to_png(dst_path)
+
+
+def assert_call(cmd, stdin=None, **kwargs):
+ """Variant of `call` method with raising exception of errors.
+
+ :param cmd:
+ command to execute, might be string or argv list
+ :param stdin:
+ text that will be used as an input for executed process
+
+ """
+ return call(cmd, stdin=stdin, asserts=True, **kwargs)
+
+
+def call(cmd, stdin=None, asserts=False, raw=False, error_cb=None, **kwargs):
+ """Convenient wrapper around subprocess call.
+
+ Note, this function is intended for processes that output finite
+ and not big amount of text.
+
+ :param cmd:
+ command to execute, might be string or argv list
+ :param stdin:
+ text that will be used as an input for executed process
+ :param asserts:
+ whether to raise `RuntimeError` on fail execution status
+ :param error_cb:
+ call callback(stderr) on getting error exit status from the process
+ :returns:
+ `None` on errors, otherwise `str` value of stdout
+
+ """
+ import subprocess
+
+ stdout, stderr = None, None
+ returncode = 1
+ try:
+ logging.debug('Exec %r', cmd)
+ process = subprocess.Popen(cmd, stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE, stdin=subprocess.PIPE, **kwargs)
+ if stdin is not None:
+ process.stdin.write(stdin)
+ process.stdin.close()
+ # Avoid using Popen.communicate()
+ # http://bugs.python.org/issue4216#msg77582
+ process.wait()
+ stdout = _nb_read(process.stdout)
+ stderr = _nb_read(process.stderr)
+ if not raw:
+ stdout = stdout.strip()
+ stderr = stderr.strip()
+ returncode = process.returncode
+ enforce(returncode == 0, 'Exit status is an error')
+ logging.debug('Successfully executed stdout=%r stderr=%r',
+ stdout.split('\n'), stderr.split('\n'))
+ return stdout
+ except Exception, error:
+ logging.debug('Failed to execute error="%s" stdout=%r stderr=%r',
+ error, str(stdout).split('\n'), str(stderr).split('\n'))
+ if asserts:
+ if type(cmd) not in (str, unicode):
+ cmd = ' '.join(cmd)
+ raise RuntimeError('Failed to execute "%s" command: %s' %
+ (cmd, error))
+ elif error_cb is not None:
+ error_cb(returncode, stdout, stderr)
+
+
+def cptree(src, dst):
+ """Efficient version of copying directories.
+
+ Function will try to make hard links for copying files at first and
+ will fall back to regular copying otherwise.
+
+ :param src:
+ path to the source directory
+ :param dst:
+ path to the new directory
+
+ """
+ import shutil
+
+ if abspath(src) == abspath(dst):
+ return
+
+ do_copy = []
+ src = abspath(src)
+ dst = abspath(dst)
+
+ def link(src, dst):
+ if not exists(dirname(dst)):
+ os.makedirs(dirname(dst))
+
+ if islink(src):
+ link_to = os.readlink(src)
+ os.symlink(link_to, dst)
+ elif isdir(src):
+ cptree(src, dst)
+ elif do_copy:
+ # The first hard link was not set, do regular copying for the rest
+ shutil.copy(src, dst)
+ else:
+ if exists(dst) and os.stat(src).st_ino == os.stat(dst).st_ino:
+ return
+ if os.access(src, os.W_OK):
+ try:
+ os.link(src, dst)
+ except OSError:
+ do_copy.append(True)
+ shutil.copy(src, dst)
+ shutil.copystat(src, dst)
+ else:
+ # Avoid copystat from not current users
+ shutil.copy(src, dst)
+
+ if isdir(src):
+ for root, __, files in os.walk(src):
+ dst_root = join(dst, root[len(src):].lstrip(os.sep))
+ if not exists(dst_root):
+ os.makedirs(dst_root)
+ for i in files:
+ link(join(root, i), join(dst_root, i))
+ else:
+ link(src, dst)
+
+
+def new_file(path, mode=0644):
+ """Atomic new file creation.
+
+ Method will create temporary file in the same directory as the specified
+ one. When file object associated with this temporary file will be closed,
+ temporary file will be renamed to the final destination.
+
+ :param path:
+ path to save final file to
+ :param mode:
+ mode for new file
+ :returns:
+ file object
+
+ """
+ result = _NewFile(dir=dirname(path), prefix=basename(path))
+ result.dst_path = path
+ os.fchmod(result.fileno(), mode)
+ return result
+
+
+def unique_filename(root, filename):
+ path = join(root, filename)
+ if exists(path):
+ name, suffix = os.path.splitext(filename)
+ for dup_num in xrange(1, 255):
+ path = join(root, name + '_' + str(dup_num) + suffix)
+ if not exists(path):
+ break
+ else:
+ raise RuntimeError('Cannot find unique filename for %r' %
+ join(root, filename))
+ return path
+
+
+def NamedTemporaryFile(*args, **kwargs):
+ if tmpdir.value:
+ kwargs['dir'] = tmpdir.value
+ return tempfile.NamedTemporaryFile(*args, **kwargs)
+
+
+class Seqno(object):
+    """Sequence number counter with persistent storing in a file."""
+
+    def __init__(self, path):
+        """
+        :param path:
+            path to file to [re]store seqno value
+
+        """
+        self._path = path
+        self._value = 0
+
+        if exists(path):
+            with file(path) as f:
+                self._value = int(f.read().strip())
+
+        # Value as last read from / written to disk; commit() compares
+        # against it to detect whether anything changed.
+        self._orig_value = self._value
+
+    @property
+    def value(self):
+        """Current seqno value."""
+        return self._value
+
+    def next(self):
+        """Increment seqno.
+
+        :returns:
+            new seqno value
+
+        """
+        self._value += 1
+        return self._value
+
+    def commit(self):
+        """Store current seqno value in a file.
+
+        :returns:
+            `True` if commit happened
+
+        """
+        if self._value == self._orig_value:
+            return False
+        # new_file() writes to a temporary file and renames it on close,
+        # so the stored seqno is replaced atomically.
+        with new_file(self._path) as f:
+            f.write(str(self._value))
+            f.flush()
+            os.fsync(f.fileno())
+        self._orig_value = self._value
+        return True
+
+
+class Sequence(list):
+    """List of sorted and non-overlapping ranges.
+
+    List items are ranges, [`start`, `stop`]. If `start` or `stop`
+    is `None`, it means the beginning or ending of the entire scale.
+
+    """
+
+    def __init__(self, value=None, empty_value=None):
+        """
+        :param value:
+            default value to initialize range
+        :param empty_value:
+            if not `None`, the initial value for empty range
+
+        """
+        if empty_value is None:
+            self._empty_value = []
+        else:
+            self._empty_value = [empty_value]
+
+        if value:
+            self.extend(value)
+        else:
+            self.clear()
+
+    def __contains__(self, value):
+        # A value is contained when it falls into any stored range;
+        # an `end` of `None` means the range is open-ended.
+        for start, end in self:
+            if value >= start and (end is None or value <= end):
+                return True
+        else:
+            return False
+
+    @property
+    def first(self):
+        # Start of the lowest range, or 0 when the sequence is empty
+        if self:
+            return self[0][0]
+        else:
+            return 0
+
+    @property
+    def last(self):
+        # End of the highest range; implicitly `None` when empty
+        if self:
+            return self[-1][-1]
+
+    @property
+    def empty(self):
+        """Is the sequence in the initial state."""
+        return self == self._empty_value
+
+    def clear(self):
+        """Reset range to the initial value."""
+        self[:] = self._empty_value
+
+    def include(self, start, end=None):
+        """Include specified range.
+
+        :param start:
+            either including range start or a list of
+            (`start`, `end`) pairs
+        :param end:
+            including range end
+
+        """
+        if issubclass(type(start), collections.Iterable):
+            for range_start, range_end in start:
+                self._include(range_start, range_end)
+        elif start is not None:
+            self._include(start, end)
+
+    def exclude(self, start, end=None):
+        """Exclude specified range.
+
+        :param start:
+            either excluding range start or a list of
+            (`start`, `end`) pairs
+        :param end:
+            excluding range end
+
+        """
+        if issubclass(type(start), collections.Iterable):
+            for range_start, range_end in start:
+                self._exclude(range_start, range_end)
+        else:
+            enforce(end is not None)
+            self._exclude(start, end)
+
+    def _include(self, range_start, range_end):
+        # `None` start means the very beginning of the scale
+        if range_start is None:
+            range_start = 1
+
+        range_start_new = None
+        range_start_i = 0
+
+        # Find the first stored range that overlaps or is adjacent
+        # (distance of 1) to the new range; remember the merged start.
+        for range_start_i, (start, end) in enumerate(self):
+            if range_end is not None and start - 1 > range_end:
+                break
+            if (range_end is None or start - 1 <= range_end) and \
+                    (end is None or end + 1 >= range_start):
+                range_start_new = min(start, range_start)
+                break
+        else:
+            range_start_i += 1
+
+        if range_start_new is None:
+            # No overlap: insert the new range keeping the list sorted
+            self.insert(range_start_i, [range_start, range_end])
+            return
+
+        # Extend over every following range that the new range swallows;
+        # `None` (open-ended) absorbs everything after it.
+        range_end_new = range_end
+        range_end_i = range_start_i
+        for i, (start, end) in enumerate(self[range_start_i:]):
+            if range_end is not None and start - 1 > range_end:
+                break
+            if range_end is None or end is None:
+                range_end_new = None
+            else:
+                range_end_new = max(end, range_end)
+            range_end_i = range_start_i + i
+
+        # Collapse all swallowed ranges into a single merged one
+        del self[range_start_i:range_end_i]
+        self[range_start_i] = [range_start_new, range_end_new]
+
+    def _exclude(self, range_start, range_end):
+        if range_start is None:
+            range_start = 1
+        enforce(range_end is not None)
+        enforce(range_start <= range_end and range_start > 0,
+                'Start value %r is less than 0 or not less than %r',
+                range_start, range_end)
+
+        for i, interval in enumerate(self):
+            start, end = interval
+            if end is not None and end < range_start:
+                # Current `interval` is below than new one
+                continue
+
+            if end is None or end > range_end:
+                # Current `interval` will exist after changing
+                self[i] = [range_end + 1, end]
+                if start < range_start:
+                    # Excluded span is strictly inside: split in two
+                    self.insert(i, [start, range_start - 1])
+            else:
+                if start < range_start:
+                    # Trim the upper part of the current interval
+                    self[i] = [start, range_start - 1]
+                else:
+                    # Interval fully covered by the excluded span
+                    del self[i]
+
+                if end is not None:
+                    # Continue excluding the remainder above `end`
+                    range_start = end + 1
+                    if range_start < range_end:
+                        self.exclude(range_start, range_end)
+                    break
+
+
+class PersistentSequence(Sequence):
+    """`Sequence` that [re]stores itself in a JSON file."""
+
+    def __init__(self, path, empty_value=None):
+        """
+        :param path:
+            path to the JSON file to [re]store sequence value
+        :param empty_value:
+            see `Sequence.__init__`
+
+        """
+        Sequence.__init__(self, empty_value=empty_value)
+        self._path = path
+
+        if exists(self._path):
+            with file(self._path) as f:
+                self[:] = json.load(f)
+
+    def commit(self):
+        """Store the current sequence value atomically."""
+        dir_path = dirname(self._path)
+        if dir_path and not exists(dir_path):
+            os.makedirs(dir_path)
+        # new_file() renames a fully synced temporary file into place
+        with new_file(self._path) as f:
+            json.dump(self, f)
+            f.flush()
+            os.fsync(f.fileno())
+
+
+class MutableStack(object):
+    """Stack that keeps its iterators correct after changing content."""
+
+    def __init__(self):
+        # Each entry is a mutable [processed_flag, value] pair so that
+        # live iterators observe flag changes.
+        self._queue = collections.deque()
+
+    def add(self, value):
+        """Push `value` on top; re-adding moves an existing value to the top."""
+        self.remove(value)
+        self._queue.appendleft([False, value])
+
+    def remove(self, value):
+        """Remove the first entry equal to `value`, if any."""
+        for i, (__, existing) in enumerate(self._queue):
+            if existing == value:
+                del self._queue[i]
+                break
+
+    def rewind(self):
+        """Mark all entries unprocessed so iteration starts over."""
+        for i in self._queue:
+            i[0] = False
+
+    def __len__(self):
+        return len(self._queue)
+
+    def __iter__(self):
+        return _MutableStackIterator(self._queue)
+
+    def __repr__(self):
+        return str([i[1] for i in self._queue])
+
+
+class _MutableStackIterator(object):
+    """Iterator over `MutableStack` that survives content changes.
+
+    Instead of keeping a position, it rescans the shared queue and
+    yields the first entry not yet flagged as processed.
+
+    """
+
+    def __init__(self, queue):
+        self._queue = queue
+
+    def next(self):
+        for i in self._queue:
+            processed, value = i
+            if not processed:
+                # Flag in place so other iterators skip this entry too
+                i[0] = True
+                return value
+        raise StopIteration()
+
+
+class _NullHandler(logging.Handler):
+    """Logging handler that silently drops all records.
+
+    NOTE(review): `logging.NullHandler` exists since Python 2.7;
+    presumably kept local for older runtimes — confirm before replacing.
+
+    """
+
+    def emit(self, record):
+        pass
+
+
+class _NewFile(object):
+    """File object that is renamed to `dst_path` when closed.
+
+    Backs `new_file()`: data is written to a named temporary file and
+    atomically moved to the final destination on `close()`.
+
+    """
+
+    # Final destination path; set by new_file() after construction
+    dst_path = None
+
+    def __init__(self, **kwargs):
+        # delete=False: the file must survive close() so it can be renamed
+        self._file = tempfile.NamedTemporaryFile(delete=False, **kwargs)
+
+    @property
+    def name(self):
+        return self._file.name
+
+    def close(self):
+        self._file.close()
+        # Guard against double close / external removal of the temp file
+        if exists(self.name):
+            os.rename(self.name, self.dst_path)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+
+    def __getattr__(self, name):
+        # Proxy everything else (write, flush, fileno, ...) to the
+        # underlying file object
+        return getattr(self._file.file, name)
+
+
+def _nb_read(stream):
+    """Read whatever is currently available from `stream` without blocking.
+
+    Temporarily switches the underlying fd to non-blocking mode;
+    returns `''` when `stream` is `None` or nothing can be read.
+
+    """
+    import fcntl
+
+    if stream is None:
+        return ''
+    fd = stream.fileno()
+    orig_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
+    try:
+        fcntl.fcntl(fd, fcntl.F_SETFL, orig_flags | os.O_NONBLOCK)
+        return stream.read()
+    except Exception:
+        # Best-effort read: EAGAIN and friends mean "no data right now"
+        return ''
+    finally:
+        # Always restore the original fd flags
+        fcntl.fcntl(fd, fcntl.F_SETFL, orig_flags)
+
+
+def _disable_logger(loggers):
+    """Silence the named loggers.
+
+    Stops propagation to ancestor loggers and attaches a no-op handler
+    so the "no handlers could be found" warning is not printed.
+
+    """
+    for log_name in loggers:
+        logger = logging.getLogger(log_name)
+        logger.propagate = False
+        logger.addHandler(_NullHandler())
diff --git a/sugar_network/zeroinstall/__init__.py b/sugar_network/zeroinstall/__init__.py
index 938e2a3..1ceb6ba 100644
--- a/sugar_network/zeroinstall/__init__.py
+++ b/sugar_network/zeroinstall/__init__.py
@@ -17,13 +17,13 @@ import sys
import logging
from os.path import isabs, join, abspath, dirname
-from sugar_network import IPCClient
-from sugar_network.toolkit import pipe
-from sugar_network.zerosugar import packagekit, lsb_release
+from sugar_network.client import IPCClient
+from sugar_network.toolkit import lsb_release, pipe, exception
+from sugar_network.zerosugar import packagekit
from sugar_network.zerosugar.spec import parse_version
-from active_toolkit import util
-sys.path.insert(0, join(abspath(dirname(__file__)), 'zeroinstall-injector'))
+sys.path.insert(0,
+ join(abspath(dirname(__file__)), '..', 'lib', 'zeroinstall-injector'))
from zeroinstall.injector import reader, model, distro
from zeroinstall.injector.config import Config
@@ -186,8 +186,7 @@ def _load_feed(context):
pipe.trace('Found %s in %s mountpoint', context, mountpoint)
break
except Exception:
- util.exception(_logger,
- 'Failed to fetch %r feed from %r mountpoint',
+ exception(_logger, 'Failed to fetch %r feed from %r mountpoint',
context, mountpoint)
if feed_content is None:
diff --git a/sugar_network/zerosugar/cache.py b/sugar_network/zerosugar/cache.py
index 6b6045d..9a03b8e 100644
--- a/sugar_network/zerosugar/cache.py
+++ b/sugar_network/zerosugar/cache.py
@@ -18,11 +18,9 @@ import shutil
import logging
from os.path import exists, join
-from sugar_network import toolkit, Client
-from sugar_network.client import local_root
+from sugar_network.client import Client, local_root
from sugar_network.zerosugar.bundle import Bundle
-from sugar_network.toolkit import pipe
-from active_toolkit.sockets import BUFFER_SIZE
+from sugar_network.toolkit import BUFFER_SIZE, pipe, util
_logger = logging.getLogger('zerosugar.cache')
@@ -41,7 +39,7 @@ def get(guid):
allow_redirects=True)
content_length = int(response.headers.get('Content-Length', '0'))
- with toolkit.NamedTemporaryFile() as tmp_file:
+ with util.NamedTemporaryFile() as tmp_file:
chunk_size = min(content_length, BUFFER_SIZE)
# pylint: disable-msg=E1103
for chunk in response.iter_content(chunk_size=chunk_size):
diff --git a/sugar_network/zerosugar/clones.py b/sugar_network/zerosugar/clones.py
index e552a09..d4351f1 100644
--- a/sugar_network/zerosugar/clones.py
+++ b/sugar_network/zerosugar/clones.py
@@ -14,6 +14,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
+import errno
import shutil
import hashlib
import logging
@@ -24,9 +25,8 @@ from sugar_network.zerosugar.spec import Spec
from sugar_network.toolkit.inotify import Inotify, \
IN_DELETE_SELF, IN_CREATE, IN_DELETE, IN_CLOSE_WRITE, \
IN_MOVED_TO, IN_MOVED_FROM
-from active_document import DEFAULT_LANG
-from sugar_network import toolkit, client
-from active_toolkit import coroutine, util
+from sugar_network import client
+from sugar_network.toolkit import coroutine, util, exception
_logger = logging.getLogger('zerosugar.clones')
@@ -117,7 +117,7 @@ class _Inotify(Inotify):
try:
cb(filename, event)
except Exception:
- util.exception('Cannot dispatch 0x%X event for %r',
+ exception('Cannot dispatch 0x%X event for %r',
event, filename)
coroutine.dispatch()
@@ -131,19 +131,16 @@ class _Inotify(Inotify):
try:
spec = Spec(root=clone_path)
except Exception:
- util.exception(_logger, 'Cannot read %r spec', clone_path)
+ exception(_logger, 'Cannot read %r spec', clone_path)
return
context = spec['Activity', 'bundle_id']
- context_path = _ensure_context_path(context, hashed_path)
- if lexists(context_path):
- os.unlink(context_path)
+ context_path = _context_path(context, hashed_path)
+ _ensure_path(context_path)
os.symlink(clone_path, context_path)
- if lexists(checkin_path):
- os.unlink(checkin_path)
- client.ensure_path(checkin_path)
+ _ensure_path(checkin_path)
os.symlink(relpath(context_path, dirname(checkin_path)), checkin_path)
if self._contexts.exists(context):
@@ -153,16 +150,15 @@ class _Inotify(Inotify):
mtime = os.stat(spec.root).st_mtime
self._contexts.create(guid=context, type='activity',
- title={DEFAULT_LANG: spec['name']},
- summary={DEFAULT_LANG: spec['summary']},
- description={DEFAULT_LANG: spec['description']},
- clone=2, ctime=mtime, mtime=mtime)
+ title=spec['name'], summary=spec['summary'],
+ description=spec['description'], clone=2,
+ ctime=mtime, mtime=mtime)
icon_path = join(spec.root, spec['icon'])
if exists(icon_path):
self._contexts.set_blob(context, 'artifact_icon', icon_path)
- with toolkit.NamedTemporaryFile() as f:
- toolkit.svg_to_png(icon_path, f.name, 32, 32)
+ with util.NamedTemporaryFile() as f:
+ util.svg_to_png(icon_path, f.name, 32, 32)
self._contexts.set_blob(context, 'icon', f.name)
self._checkin_activity(spec)
@@ -177,8 +173,8 @@ class _Inotify(Inotify):
_logger.debug('Update MIME database to process found %r', src_path)
- toolkit.symlink(src_path, dst_path)
- toolkit.spawn('update-mime-database', self._mime_dir)
+ util.symlink(src_path, dst_path)
+ util.spawn('update-mime-database', self._mime_dir)
def lost(self, clone_path):
__, checkin_path = _checkin_path(clone_path)
@@ -212,7 +208,7 @@ class _Inotify(Inotify):
_logger.debug('Update MIME database to process lost %r', impl_path)
os.unlink(dst_path)
- toolkit.spawn('update-mime-database', self._mime_dir)
+ util.spawn('update-mime-database', self._mime_dir)
def _checkin_activity(self, spec):
icon_path = join(spec.root, spec['icon'])
@@ -222,7 +218,7 @@ class _Inotify(Inotify):
if not exists(self._icons_dir):
os.makedirs(self._icons_dir)
for mime_type in spec['mime_types']:
- toolkit.symlink(icon_path,
+ util.symlink(icon_path,
join(self._icons_dir,
mime_type.replace('/', '-') + '.svg'))
@@ -381,5 +377,18 @@ def _context_path(context, hashed_path):
return client.path('clones', 'context', context, hashed_path)
-def _ensure_context_path(context, hashed_path):
- return client.ensure_path('clones', 'context', context, hashed_path)
+def _ensure_path(path):
+ if lexists(path):
+ os.unlink(path)
+ return
+
+ dir_path = dirname(path)
+ if exists(dir_path):
+ return
+
+ try:
+ os.makedirs(dir_path)
+ except OSError, error:
+        # In case another process has already created the directory
+ if error.errno != errno.EEXIST:
+ raise
diff --git a/sugar_network/zerosugar/injector.py b/sugar_network/zerosugar/injector.py
index 880bfe5..8d2e48c 100644
--- a/sugar_network/zerosugar/injector.py
+++ b/sugar_network/zerosugar/injector.py
@@ -19,11 +19,10 @@ import logging
import cPickle as pickle
from os.path import join, exists, basename, dirname
-from sugar_network import client, sugar
+from sugar_network import client
from sugar_network.client import journal
-from sugar_network.zerosugar import cache, lsb_release
-from sugar_network.toolkit import pipe
-from active_toolkit import util
+from sugar_network.zerosugar import cache
+from sugar_network.toolkit import pipe, lsb_release, util, sugar
_PMS_PATHS = {
diff --git a/sugar_network/zerosugar/licenses.py b/sugar_network/zerosugar/licenses.py
index 78d6791..5cfc39d 100644
--- a/sugar_network/zerosugar/licenses.py
+++ b/sugar_network/zerosugar/licenses.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2012, Aleksey Lim
+# Copyright (C) 2011-2012 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
diff --git a/sugar_network/zerosugar/packagekit.py b/sugar_network/zerosugar/packagekit.py
index 1d347dc..7308277 100644
--- a/sugar_network/zerosugar/packagekit.py
+++ b/sugar_network/zerosugar/packagekit.py
@@ -25,8 +25,7 @@ import dbus
import gobject
from dbus.mainloop.glib import threads_init, DBusGMainLoop
-from sugar_network.toolkit import pipe
-from active_toolkit import enforce
+from sugar_network.toolkit import pipe, enforce
_PK_CONFILE = '/etc/PackageKit/PackageKit.conf'
diff --git a/sugar_network/zerosugar/spec.py b/sugar_network/zerosugar/spec.py
index 66ecfe1..37d9e39 100644
--- a/sugar_network/zerosugar/spec.py
+++ b/sugar_network/zerosugar/spec.py
@@ -20,7 +20,7 @@ from os.path import join, exists, dirname
from ConfigParser import ConfigParser
from sugar_network.zerosugar.licenses import GOOD_LICENSES
-from active_toolkit import util, enforce
+from sugar_network.toolkit import exception, enforce
_LIST_SEPARATOR = ';'
@@ -53,7 +53,7 @@ _VERSION_MOD_TO_VALUE = {
}
_VERSION_VALUE_TO_MOD = {}
-_RESTRICTION_RE = re.compile('(>=|<|=)\s*([0-9.]+)')
+_RESTRICTION_RE = re.compile('(>=|<|=)\\s*([0-9.]+)')
_logger = logging.getLogger('sweets-recipe')
@@ -295,7 +295,7 @@ def parse_version(version_string):
parts[x] = _VERSION_MOD_TO_VALUE[parts[x]]
return parts
except ValueError as error:
- util.exception()
+ exception()
raise RuntimeError('Invalid version format in "%s": %s' %
(version_string, error))
except KeyError as error:
diff --git a/sweets.recipe b/sweets.recipe
index 80000da..d74aa8c 100644
--- a/sweets.recipe
+++ b/sweets.recipe
@@ -12,8 +12,8 @@ version = 0.7
stability = developer
requires = m2crypto; requests; rrdtool-python; openssh-client; pylru
- active-document; sugar-network-webui; sugar-network-hub
-replaces = sugar-network-server; sweets-recipe
+ gevent >= 1; sugar-network-webui; sugar-network-hub
+replaces = sugar-network-server; sweets-recipe; active-document
[Build]
install = install -m 0755 -d %(DESTDIR)s/%(PYTHONSITEDIR)s &&
diff --git a/tests/__init__.py b/tests/__init__.py
index fdd3b93..7a326b6 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -16,14 +16,13 @@ import requests
from M2Crypto import DSA
from gevent import monkey
-import active_document as ad
-from active_toolkit import coroutine
-from sugar_network.toolkit import sugar, http, sneakernet, mountpoints
+from sugar_network.toolkit import coroutine, sugar, http, sneakernet, mountpoints, util
from sugar_network.toolkit.router import Router, IPCRouter
from sugar_network.client import journal
from sugar_network.client.mounts import HomeMount, RemoteMount
from sugar_network.client.mountset import Mountset
-from sugar_network import client, node, toolkit, zeroinstall
+from sugar_network import db, client, node, toolkit, zeroinstall
+from sugar_network.db import env
from sugar_network.zerosugar import injector
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
@@ -34,7 +33,9 @@ from sugar_network.resources.volume import Volume
root = abspath(dirname(__file__))
-tmproot = join(root, '.tmp')
+# Assume that /tmp is tmpfs
+#tmproot = join(root, '.tmp')
+tmproot = '/tmp/sugar_network.tests'
tmpdir = None
monkey.patch_socket(dns=False)
@@ -54,6 +55,7 @@ class Test(unittest.TestCase):
self._overriden = []
os.environ['LANG'] = 'en_US'
+ env._default_lang = 'en-us'
global tmpdir
tmpdir = join(tmproot, '.'.join(self.id().split('.')[1:]))
@@ -76,13 +78,13 @@ class Test(unittest.TestCase):
shutil.copy(join(root, 'data', 'owner.key'), profile_dir)
shutil.copy(join(root, 'data', 'owner.key.pub'), profile_dir)
- ad.index_flush_timeout.value = 0
- ad.index_flush_threshold.value = 1
+ db.index_flush_timeout.value = 0
+ db.index_flush_threshold.value = 1
node.find_limit.value = 1024
node.data_root.value = tmpdir
node.sync_dirs.value = []
node.static_url.value = None
- ad.index_write_queue.value = 10
+ db.index_write_queue.value = 10
client.local_root.value = tmpdir
client.activity_dirs.value = [tmpdir + '/Activities']
client.api_url.value = 'http://localhost:8888'
@@ -128,7 +130,7 @@ class Test(unittest.TestCase):
for handler in logging.getLogger().handlers:
logging.getLogger().removeHandler(handler)
logging.basicConfig(level=logging.DEBUG, filename=self.logfile)
- toolkit.init_logging(10)
+ util.init_logging(10)
self.server = None
self.mounts = None
@@ -279,10 +281,10 @@ class Test(unittest.TestCase):
logging.getLogger().removeHandler(handler)
logging.basicConfig(level=logging.DEBUG)
- ad.index_flush_timeout.value = 0
- ad.index_flush_threshold.value = 1
+ db.index_flush_timeout.value = 0
+ db.index_flush_threshold.value = 1
node.find_limit.value = 1024
- ad.index_write_queue.value = 10
+ db.index_write_queue.value = 10
volume = Volume('remote', classes or [User, Context, Implementation])
cp = NodeCommands(volume)
diff --git a/tests/__main__.py b/tests/__main__.py
new file mode 100644
index 0000000..9b24e37
--- /dev/null
+++ b/tests/__main__.py
@@ -0,0 +1,16 @@
+# sugar-lint: disable
+
+import sys
+from os.path import dirname, join, abspath
+
+src_root = abspath(join(dirname(__file__), '..'))
+sys.path.insert(0, src_root)
+
+import tests
+
+from units.__main__ import *
+from integration.__main__ import *
+#from regression.__main__ import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/integration/__main__.py b/tests/integration/__main__.py
new file mode 100644
index 0000000..82a9c3e
--- /dev/null
+++ b/tests/integration/__main__.py
@@ -0,0 +1,9 @@
+# sugar-lint: disable
+
+from __init__ import tests
+
+from cli import *
+#from sync import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/integration/cli.py b/tests/integration/cli.py
index 7c63510..91a811f 100755
--- a/tests/integration/cli.py
+++ b/tests/integration/cli.py
@@ -11,11 +11,10 @@ from os.path import exists
import requests
-from __init__ import tests
+from __init__ import tests, src_root
-import active_document as ad
-from sugar_network import IPCClient
-from active_toolkit import coroutine, util
+from sugar_network.client import IPCClient
+from sugar_network.toolkit import coroutine, util
class CliTest(tests.Test):
@@ -24,7 +23,7 @@ class CliTest(tests.Test):
tests.Test.setUp(self)
os.makedirs('mnt')
- util.cptree('../../data/node', 'node')
+ util.cptree(src_root + '/tests/data/node', 'node')
self.client_pid = None
self.node_pid = self.popen(['sugar-network-node', '-F', 'start',
diff --git a/tests/integration/sync.py b/tests/integration/sync.py
index dee74f2..fd22128 100755
--- a/tests/integration/sync.py
+++ b/tests/integration/sync.py
@@ -13,13 +13,9 @@ import rrdtool
from __init__ import tests
-import active_document as ad
-from sugar_network import Client
-from sugar_network.client import local_root
-
-from sugar_network import sugar
+from sugar_network.client import local_root, Client
from sugar_network.toolkit.sneakernet import InPacket, OutPacket
-from active_toolkit import util, coroutine
+from sugar_network.toolkit import sugar, util, coroutine
class SyncTest(tests.Test):
diff --git a/tests/units/__main__.py b/tests/units/__main__.py
index 7d8a3db..105c117 100644
--- a/tests/units/__main__.py
+++ b/tests/units/__main__.py
@@ -2,32 +2,12 @@
from __init__ import tests
-from spec import *
-from http import *
-from volume import *
-from client import *
-from node import *
-from sneakernet import *
-from router import *
-#from files_sync import *
-#from sync_node import *
-#from sync_master import *
-from mountpoints import *
-from clones import *
-from proxy_commands import *
-from home_mount import *
-from remote_mount import *
-#from node_mount import *
-from injector import *
-from mountset import *
-from auth import *
-from context import *
-from comment import *
-from solution import *
-from implementation import *
-from review import *
-from obs import *
-from journal import *
-from stats import *
+from toolkit.__main__ import *
+from db.__main__ import *
+from node.__main__ import *
+from resources.__main__ import *
+from zerosugar.__main__ import *
+from client.__main__ import *
-tests.main()
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/client/__init__.py b/tests/units/client/__init__.py
new file mode 100644
index 0000000..345c327
--- /dev/null
+++ b/tests/units/client/__init__.py
@@ -0,0 +1,9 @@
+# sugar-lint: disable
+
+import sys
+from os.path import dirname, join, abspath
+
+src_root = abspath(join(dirname(__file__), '..', '..', '..'))
+sys.path.insert(0, src_root)
+
+import tests
diff --git a/tests/units/client/__main__.py b/tests/units/client/__main__.py
new file mode 100644
index 0000000..a3909ac
--- /dev/null
+++ b/tests/units/client/__main__.py
@@ -0,0 +1,14 @@
+# sugar-lint: disable
+
+from __init__ import tests
+
+from client import *
+from home_mount import *
+from journal import *
+from mountset import *
+#from node_mount import *
+from proxy_commands import *
+from remote_mount import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/client.py b/tests/units/client/client.py
index 79bae8c..344c9d4 100755
--- a/tests/units/client.py
+++ b/tests/units/client/client.py
@@ -5,7 +5,7 @@ from os.path import exists
from __init__ import tests
-import active_document as ad
+from sugar_network import db
from sugar_network.client.mounts import LocalMount
from sugar_network.toolkit.router import Request
from sugar_network.resources.volume import Volume
@@ -24,15 +24,15 @@ class LocalTest(tests.Test):
'summary': 'summary',
'description': 'description',
}
- guid = cp.call(request, ad.Response())
+ guid = cp.call(request, db.Response())
guid_path = 'db/context/%s/%s' % (guid[:2], guid)
assert exists(guid_path)
request = Request(method='DELETE', document='context', guid=guid)
- cp.call(request, ad.Response())
+ cp.call(request, db.Response())
- self.assertRaises(ad.NotFound, lambda: cp.volume['context'].get(guid).exists)
+ self.assertRaises(db.NotFound, lambda: cp.volume['context'].get(guid).exists)
assert not exists(guid_path)
def test_SetUser(self):
@@ -46,7 +46,7 @@ class LocalTest(tests.Test):
'summary': 'summary',
'description': 'description',
}
- guid = cp.call(request, ad.Response())
+ guid = cp.call(request, db.Response())
self.assertEqual(
{'uid': {'role': 2, 'order': 0}},
diff --git a/tests/units/home_mount.py b/tests/units/client/home_mount.py
index 6317507..1014756 100755
--- a/tests/units/home_mount.py
+++ b/tests/units/client/home_mount.py
@@ -9,19 +9,19 @@ from os.path import exists, abspath
from __init__ import tests
-from active_toolkit import sockets, coroutine
+from sugar_network.toolkit import coroutine
from sugar_network.resources.user import User
from sugar_network.resources.artifact import Artifact
from sugar_network import client as local
from sugar_network.zerosugar import clones
-from sugar_network import IPCClient
+from sugar_network.client import IPCClient
class HomeMountTest(tests.Test):
def test_create(self):
self.start_server()
- local = IPCClient(mountpoint='~')
+ local = IPCClient(params={'mountpoint': '~'})
guid = local.post(['context'], {
'type': 'activity',
@@ -40,7 +40,7 @@ class HomeMountTest(tests.Test):
def test_update(self):
self.start_server()
- local = IPCClient(mountpoint='~')
+ local = IPCClient(params={'mountpoint': '~'})
guid = local.post(['context'], {
'type': 'activity',
@@ -58,7 +58,7 @@ class HomeMountTest(tests.Test):
def test_find(self):
self.start_server()
- local = IPCClient(mountpoint='~')
+ local = IPCClient(params={'mountpoint': '~'})
guid_1 = local.post(['context'], {
'type': 'activity',
@@ -91,7 +91,7 @@ class HomeMountTest(tests.Test):
def test_upload_blob(self):
self.start_server()
- local = IPCClient(mountpoint='~')
+ local = IPCClient(params={'mountpoint': '~'})
guid = local.post(['context'], {
'type': 'activity',
@@ -111,7 +111,7 @@ class HomeMountTest(tests.Test):
def test_GetBLOBs(self):
self.start_server()
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
guid = client.post(['context'], {
'type': 'activity',
@@ -139,7 +139,7 @@ class HomeMountTest(tests.Test):
def test_GetAbsentBLOBs(self):
self.start_server([User, Artifact])
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
guid = client.post(['artifact'], {
'context': 'context',
@@ -160,7 +160,7 @@ class HomeMountTest(tests.Test):
def test_Subscription(self):
self.start_server()
- local = IPCClient(mountpoint='~')
+ local = IPCClient(params={'mountpoint': '~'})
events = []
def read_events():
@@ -195,7 +195,7 @@ class HomeMountTest(tests.Test):
def test_Subscription_NotifyOnlineMount(self):
self.start_server()
- local = IPCClient(mountpoint='~')
+ local = IPCClient(params={'mountpoint': '~'})
events = []
guid = local.post(['context'], {
@@ -249,7 +249,7 @@ class HomeMountTest(tests.Test):
]))
self.start_server()
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
monitor = coroutine.spawn(clones.monitor,
self.mounts.volume['context'], ['Activities'])
diff --git a/tests/units/journal.py b/tests/units/client/journal.py
index 357a1f3..2ddde29 100755
--- a/tests/units/journal.py
+++ b/tests/units/client/journal.py
@@ -10,7 +10,7 @@ import gobject
from __init__ import tests
-import active_document as ad
+from sugar_network import db
from sugar_network.client import journal, ipc_port
from sugar_network.toolkit.router import Request
@@ -105,7 +105,7 @@ class JournalTest(tests.Test):
request = Request()
request.path = ['journal']
- response = ad.Response()
+ response = db.Response()
self.assertEqual([
{'guid': 'guid1', 'title': 'title1', 'description': 'description1', 'preview': url + 'guid1/preview'},
{'guid': 'guid2', 'title': 'title2', 'description': 'description2', 'preview': url + 'guid2/preview'},
@@ -145,7 +145,7 @@ class JournalTest(tests.Test):
request = Request()
request.path = ['journal', 'guid1']
- response = ad.Response()
+ response = db.Response()
self.assertEqual(
{'guid': 'guid1', 'title': 'title1', 'description': 'description1', 'preview': url + 'guid1/preview'},
ds.journal(request, response))
@@ -157,13 +157,13 @@ class JournalTest(tests.Test):
request = Request()
request.path = ['journal', 'guid1', 'title']
- response = ad.Response()
+ response = db.Response()
self.assertEqual('title1', ds.journal(request, response))
self.assertEqual('application/json', response.content_type)
request = Request()
request.path = ['journal', 'guid1', 'preview']
- response = ad.Response()
+ response = db.Response()
self.assertEqual({
'mime_type': 'image/png',
'path': '.sugar/default/datastore/gu/guid1/metadata/preview',
diff --git a/tests/units/mountset.py b/tests/units/client/mountset.py
index 748442b..1ac1ae8 100755
--- a/tests/units/mountset.py
+++ b/tests/units/client/mountset.py
@@ -2,6 +2,7 @@
# sugar-lint: disable
import os
+import time
import socket
import shutil
import zipfile
@@ -11,21 +12,18 @@ import requests
from __init__ import tests
-import active_document as ad
-from active_toolkit import coroutine, sockets
from sugar_network.client.mountset import Mountset
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
from sugar_network.resources.artifact import Artifact
from sugar_network.resources.implementation import Implementation
-from sugar_network.toolkit import http, mountpoints
-from sugar_network import client as local, sugar, node
+from sugar_network.toolkit import coroutine, http, mountpoints
+from sugar_network import client as local, node
from sugar_network.resources.volume import Volume
from sugar_network.client.mounts import HomeMount, RemoteMount
from sugar_network.toolkit.router import IPCRouter
-from sugar_network import IPCClient, Client
from sugar_network.zerosugar import injector, clones
-from sugar_network.client import journal
+from sugar_network.client import IPCClient, Client, journal
class MountsetTest(tests.Test):
@@ -77,12 +75,13 @@ class MountsetTest(tests.Test):
mounts[tests.tmpdir + '/1'].mounted.wait()
mounts[tests.tmpdir + '/2'].mounted.wait()
- self.assertEqual([
- {'event': 'handshake'},
- {'mountpoint': tests.tmpdir + '/1', 'event': 'mount', 'private': True, 'name': '1'},
- {'mountpoint': tests.tmpdir + '/2', 'event': 'mount', 'private': True, 'name': '2'},
- ],
- self.events)
+ self.assertEqual(
+ sorted([
+ {'event': 'handshake'},
+ {'mountpoint': tests.tmpdir + '/1', 'event': 'mount', 'private': True, 'name': '1'},
+ {'mountpoint': tests.tmpdir + '/2', 'event': 'mount', 'private': True, 'name': '2'},
+ ]),
+ sorted(self.events))
self.assertEqual(
sorted([
diff --git a/tests/units/node_mount.py b/tests/units/client/node_mount.py
index e918187..e7bdcda 100755
--- a/tests/units/node_mount.py
+++ b/tests/units/client/node_mount.py
@@ -10,11 +10,9 @@ from os.path import exists, abspath, join
from __init__ import tests
-import active_document as ad
-from active_toolkit import coroutine, sockets
from sugar_network.client.mounts import HomeMount
from sugar_network.client.mountset import Mountset
-from sugar_network.toolkit import mountpoints
+from sugar_network.toolkit import coroutine, mountpoints
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
from sugar_network import client as local, sugar
@@ -57,7 +55,7 @@ class NodeMountTest(tests.Test):
def test_Events(self):
mounts = self.start_server()
mounts[tests.tmpdir + '/mnt'].mounted.wait()
- remote = IPCClient(mountpoint=tests.tmpdir + '/mnt')
+ remote = IPCClient(params={'mountpoint': tests.tmpdir + '/mnt'})
events = []
got_event = coroutine.Event()
@@ -110,7 +108,7 @@ class NodeMountTest(tests.Test):
def test_upload_blob(self):
mounts = self.start_server()
mounts[tests.tmpdir + '/mnt'].mounted.wait()
- client = IPCClient(mountpoint=tests.tmpdir + '/mnt')
+ client = IPCClient(params={'mountpoint': tests.tmpdir + '/mnt'})
guid = client.post(['context'], {
'type': 'activity',
@@ -131,7 +129,7 @@ class NodeMountTest(tests.Test):
def test_GetBLOBs(self):
mounts = self.start_server()
mounts[tests.tmpdir + '/mnt'].mounted.wait()
- client = IPCClient(mountpoint=tests.tmpdir + '/mnt')
+ client = IPCClient(params={'mountpoint': tests.tmpdir + '/mnt'})
guid = client.post(['context'], {
'type': 'activity',
@@ -160,7 +158,7 @@ class NodeMountTest(tests.Test):
def test_GetAbsentBLOBs(self):
mounts = self.start_server()
mounts[tests.tmpdir + '/mnt'].mounted.wait()
- client = IPCClient(mountpoint=tests.tmpdir + '/mnt')
+ client = IPCClient(params={'mountpoint': tests.tmpdir + '/mnt'})
guid = client.post(['artifact'], {
'context': 'context',
@@ -187,7 +185,7 @@ class NodeMountTest(tests.Test):
mounts = self.start_server()
mounts[tests.tmpdir + '/mnt'].mounted.wait()
- remote = IPCClient(mountpoint=tests.tmpdir + '/mnt')
+ remote = IPCClient(params={'mountpoint': tests.tmpdir + '/mnt'})
guid = remote.post(['implementation'], {
'context': 'context',
diff --git a/tests/units/proxy_commands.py b/tests/units/client/proxy_commands.py
index 58515fb..fa3fbf9 100755
--- a/tests/units/proxy_commands.py
+++ b/tests/units/client/proxy_commands.py
@@ -3,7 +3,7 @@
from __init__ import tests
-import active_document as ad
+from sugar_network import db
from sugar_network.toolkit.router import Request
from sugar_network.resources.volume import Volume, VolumeCommands
from sugar_network.resources.context import Context
@@ -16,12 +16,12 @@ class ProxyCommandsTest(tests.Test):
def test_FindsAddGuidToReply(self):
- class Commands(ad.CommandsProcessor, _ProxyCommands):
+ class Commands(db.CommandsProcessor, _ProxyCommands):
result = []
def __init__(self, volume):
- ad.CommandsProcessor.__init__(self)
+ db.CommandsProcessor.__init__(self)
_ProxyCommands.__init__(self, volume)
def proxy_call(self, request, response):
@@ -64,12 +64,12 @@ class ProxyCommandsTest(tests.Test):
def test_FindAbsents(self):
- class Commands(ad.CommandsProcessor, _ProxyCommands):
+ class Commands(db.CommandsProcessor, _ProxyCommands):
result = []
def __init__(self, volume):
- ad.CommandsProcessor.__init__(self)
+ db.CommandsProcessor.__init__(self)
_ProxyCommands.__init__(self, volume)
def proxy_call(self, request, response):
@@ -106,12 +106,12 @@ class ProxyCommandsTest(tests.Test):
def test_Activities(self):
- class Commands(ad.CommandsProcessor, _ProxyCommands):
+ class Commands(db.CommandsProcessor, _ProxyCommands):
result = []
def __init__(self, volume):
- ad.CommandsProcessor.__init__(self)
+ db.CommandsProcessor.__init__(self)
_ProxyCommands.__init__(self, volume)
def proxy_call(self, request, response):
@@ -151,12 +151,12 @@ class ProxyCommandsTest(tests.Test):
def test_Content(self):
- class Commands(ad.CommandsProcessor, _ProxyCommands):
+ class Commands(db.CommandsProcessor, _ProxyCommands):
result = []
def __init__(self, volume):
- ad.CommandsProcessor.__init__(self)
+ db.CommandsProcessor.__init__(self)
_ProxyCommands.__init__(self, volume)
def proxy_call(self, request, response):
@@ -201,7 +201,7 @@ class ProxyCommandsTest(tests.Test):
def test_Artifacts(self):
- class Commands(ad.CommandsProcessor, _ProxyCommands):
+ class Commands(db.CommandsProcessor, _ProxyCommands):
result = []
diff --git a/tests/units/remote_mount.py b/tests/units/client/remote_mount.py
index 712f937..1e99940 100755
--- a/tests/units/remote_mount.py
+++ b/tests/units/client/remote_mount.py
@@ -13,27 +13,25 @@ import requests
from __init__ import tests
-import active_document as ad
-from active_toolkit import sockets, coroutine
-from sugar_network import client as local
+from sugar_network import db, client as local
from sugar_network.toolkit.router import IPCRouter, Redirect
from sugar_network.client.mounts import RemoteMount
from sugar_network.client.mountset import Mountset
-from sugar_network.toolkit import sugar, http
+from sugar_network.toolkit import sugar, http, coroutine
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
from sugar_network.resources.implementation import Implementation
from sugar_network.resources.artifact import Artifact
from sugar_network.resources.volume import Volume, Resource
from sugar_network.zerosugar import injector
-from sugar_network import IPCClient
+from sugar_network.client import IPCClient
class RemoteMountTest(tests.Test):
def test_Subscription(self):
self.start_ipc_and_restful_server()
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
events = []
def read_events():
@@ -77,7 +75,7 @@ class RemoteMountTest(tests.Test):
('localhost', local.ipc_port.value), IPCRouter(self.mounts))
coroutine.spawn(self.server.serve_forever)
coroutine.dispatch()
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
events = []
def read_events():
@@ -126,7 +124,7 @@ class RemoteMountTest(tests.Test):
def test_upload_blob(self):
self.start_ipc_and_restful_server()
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
guid = remote.post(['context'], {
'type': 'activity',
@@ -146,7 +144,7 @@ class RemoteMountTest(tests.Test):
def test_GetBLOBs(self):
self.start_ipc_and_restful_server()
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
guid = remote.post(['context'], {
'type': 'activity',
@@ -176,7 +174,7 @@ class RemoteMountTest(tests.Test):
def test_GetAbsentBLOBs(self):
self.start_ipc_and_restful_server([User, Context, Artifact, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
guid = remote.post(['artifact'], {
'context': 'context',
@@ -197,7 +195,7 @@ class RemoteMountTest(tests.Test):
def test_Feed(self):
self.start_ipc_and_restful_server([User, Context, Implementation, Artifact])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -253,7 +251,7 @@ class RemoteMountTest(tests.Test):
def test_RestrictLayers(self):
self.start_ipc_and_restful_server([User, Context, Implementation, Artifact])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -360,7 +358,7 @@ class RemoteMountTest(tests.Test):
def test_InvalidateSolutions(self):
self.start_ipc_and_restful_server([User, Context, Implementation, Artifact])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
self.assertNotEqual(None, injector._mtime)
mtime = injector._mtime
@@ -406,7 +404,7 @@ class RemoteMountTest(tests.Test):
def test_ContentDisposition(self):
self.start_ipc_and_restful_server([User, Context, Implementation, Artifact])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
artifact = remote.post(['artifact'], {
'type': 'instance',
@@ -426,12 +424,12 @@ class RemoteMountTest(tests.Test):
class Document(Resource):
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def blob(self, value):
raise Redirect(URL)
self.start_ipc_and_restful_server([User, Document])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
guid = remote.post(['document'], {})
response = requests.request('GET', local.api_url.value + '/document/' + guid + '/blob', allow_redirects=False)
@@ -443,10 +441,10 @@ class RemoteMountTest(tests.Test):
pid = self.fork(self.restful_server)
self.start_server()
- client = IPCClient(mountpoint='/')
+ client = IPCClient()
self.assertRaises(RuntimeError, client.post, ['context'], {'type': 'activity', 'title': 'title', 'summary': 'summary', 'description': 'description'})
- client = IPCClient(mountpoint='/', sync=True)
+ client = IPCClient(sync=True)
guid = client.post(['context'], {'type': 'activity', 'title': 'title', 'summary': 'summary', 'description': 'description'})
self.assertEqual(guid, client.get(['context', guid, 'guid']))
diff --git a/tests/units/db/__init__.py b/tests/units/db/__init__.py
new file mode 100644
index 0000000..345c327
--- /dev/null
+++ b/tests/units/db/__init__.py
@@ -0,0 +1,9 @@
+# sugar-lint: disable
+
+import sys
+from os.path import dirname, join, abspath
+
+src_root = abspath(join(dirname(__file__), '..', '..', '..'))
+sys.path.insert(0, src_root)
+
+import tests
diff --git a/tests/units/db/__main__.py b/tests/units/db/__main__.py
new file mode 100644
index 0000000..704561e
--- /dev/null
+++ b/tests/units/db/__main__.py
@@ -0,0 +1,15 @@
+# sugar-lint: disable
+
+from __init__ import tests
+
+from commands import *
+from document import *
+from env import *
+from index import *
+from metadata import *
+from migrate import *
+from storage import *
+from volume import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/commands.py b/tests/units/db/commands.py
new file mode 100755
index 0000000..02ad455
--- /dev/null
+++ b/tests/units/db/commands.py
@@ -0,0 +1,502 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+from cStringIO import StringIO
+
+from __init__ import tests
+
+from sugar_network import db
+from sugar_network.db import env, volume, SingleVolume, Document, \
+ property_command, document_command, directory_command, volume_command, \
+ Request, BlobProperty, Response, CommandsProcessor, \
+ CommandNotFound, NotFound, to_int, to_list
+
+
+class CommandsTest(tests.Test):
+
+ def test_VolumeCommands(self):
+ calls = []
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @volume_command(method='PROBE')
+ def command_1(self, **kwargs):
+ calls.append(('command_1', kwargs))
+
+ @volume_command(method='PROBE', cmd='command_2')
+ def command_2(self, **kwargs):
+ calls.append(('command_2', kwargs))
+
+ cp = TestCommandsProcessor()
+
+ self.call(cp, 'PROBE')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_1')
+ self.call(cp, 'PROBE', cmd='command_2')
+
+ self.assertEqual([
+ ('command_1', {}),
+ ('command_2', {}),
+ ],
+ calls)
+
+ def test_DirectoryCommands(self):
+ calls = []
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @directory_command(method='PROBE')
+ def command_1(self, **kwargs):
+ calls.append(('command_1', kwargs))
+
+ @directory_command(method='PROBE', cmd='command_2')
+ def command_2(self, **kwargs):
+ calls.append(('command_2', kwargs))
+
+ cp = TestCommandsProcessor()
+
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE')
+ self.call(cp, 'PROBE', document='testdocument')
+ self.call(cp, 'PROBE', document='fakedocument')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_1', document='testdocument')
+ self.call(cp, 'PROBE', cmd='command_2', document='testdocument')
+ self.call(cp, 'PROBE', cmd='command_2', document='fakedocument')
+
+ self.assertEqual([
+ ('command_1', {}),
+ ('command_1', {}),
+ ('command_2', {}),
+ ('command_2', {}),
+ ],
+ calls)
+
+ def test_DocumentCommands(self):
+ calls = []
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @document_command(method='PROBE')
+ def command_1(self, **kwargs):
+ calls.append(('command_1', kwargs))
+
+ @document_command(method='PROBE', cmd='command_2')
+ def command_2(self, **kwargs):
+ calls.append(('command_2', kwargs))
+
+ class TestDocument(Document):
+ pass
+
+ volume = SingleVolume(tests.tmpdir, [TestDocument])
+ cp = TestCommandsProcessor(volume)
+
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='testdocument')
+ self.call(cp, 'PROBE', document='testdocument', guid='guid')
+ self.call(cp, 'PROBE', document='fakedocument', guid='guid')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_1', document='testdocument', guid='guid')
+ self.call(cp, 'PROBE', cmd='command_2', document='testdocument', guid='guid')
+ self.call(cp, 'PROBE', cmd='command_2', document='fakedocument', guid='guid')
+
+ self.assertEqual([
+ ('command_1', {}),
+ ('command_1', {}),
+ ('command_2', {}),
+ ('command_2', {}),
+ ],
+ calls)
+
+ def test_PropertyCommands(self):
+ calls = []
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @property_command(method='PROBE')
+ def command_1(self, **kwargs):
+ calls.append(('command_1', kwargs))
+
+ @property_command(method='PROBE', cmd='command_2')
+ def command_2(self, **kwargs):
+ calls.append(('command_2', kwargs))
+
+ class TestDocument(Document):
+ pass
+
+ volume = SingleVolume(tests.tmpdir, [TestDocument])
+ cp = TestCommandsProcessor(volume)
+
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='testdocument')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='testdocument', guid='guid')
+ self.call(cp, 'PROBE', document='testdocument', guid='guid', prop='prop')
+ self.call(cp, 'PROBE', document='fakedocument', guid='guid', prop='prop')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_1', document='testdocument', guid='guid', prop='prop')
+ self.call(cp, 'PROBE', cmd='command_2', document='testdocument', guid='guid', prop='prop')
+ self.call(cp, 'PROBE', cmd='command_2', document='fakedocument', guid='guid', prop='prop')
+
+ self.assertEqual([
+ ('command_1', {}),
+ ('command_1', {}),
+ ('command_2', {}),
+ ('command_2', {}),
+ ],
+ calls)
+
+ def test_ClassDodcumentCommands(self):
+ calls = []
+
+ class TestDocument(Document):
+
+ @document_command(method='PROBE')
+ def command_1(cls, **kwargs):
+ calls.append(('command_1', kwargs))
+
+ @document_command(method='PROBE', cmd='command_2')
+ def command_2(cls, **kwargs):
+ calls.append(('command_2', kwargs))
+
+ volume = SingleVolume(tests.tmpdir, [TestDocument])
+ cp = CommandsProcessor(volume)
+
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='testdocument')
+ self.assertRaises(NotFound, self.call, cp, 'PROBE', document='testdocument', guid='guid')
+ volume['testdocument'].create(guid='guid')
+ self.call(cp, 'PROBE', document='testdocument', guid='guid')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='fakedocument', guid='guid')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_1', document='testdocument', guid='guid')
+ self.call(cp, 'PROBE', cmd='command_2', document='testdocument', guid='guid')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_2', document='fakedocument', guid='guid')
+
+ self.assertEqual([
+ ('command_1', {}),
+ ('command_2', {}),
+ ],
+ calls)
+
+ def test_ClassPropertyCommands(self):
+ calls = []
+
+ class TestDocument(Document):
+
+ @property_command(method='PROBE')
+ def command_1(cls, **kwargs):
+ calls.append(('command_1', kwargs))
+
+ @property_command(method='PROBE', cmd='command_2')
+ def command_2(cls, **kwargs):
+ calls.append(('command_2', kwargs))
+
+ volume = SingleVolume(tests.tmpdir, [TestDocument])
+ cp = CommandsProcessor(volume)
+
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='testdocument')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='testdocument', prop='prop')
+ self.assertRaises(NotFound, self.call, cp, 'PROBE', document='testdocument', guid='guid', prop='prop')
+ volume['testdocument'].create(guid='guid')
+ self.call(cp, 'PROBE', document='testdocument', guid='guid', prop='prop')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', document='fakedocument', guid='guid', prop='prop')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_1', document='testdocument', guid='guid', prop='prop')
+ self.call(cp, 'PROBE', cmd='command_2', document='testdocument', guid='guid', prop='prop')
+ self.assertRaises(CommandNotFound, self.call, cp, 'PROBE', cmd='command_2', document='fakedocument', guid='guid', prop='prop')
+
+ self.assertEqual([
+ ('command_1', {}),
+ ('command_2', {}),
+ ],
+ calls)
+
+ def test_AccessLevel(self):
+ calls = []
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @volume_command(method='PROBE', cmd='all')
+ def all(self):
+ pass
+
+ @volume_command(method='PROBE', cmd='system', access_level=env.ACCESS_SYSTEM)
+ def system(self):
+ pass
+
+ @volume_command(method='PROBE', cmd='local', access_level=env.ACCESS_LOCAL)
+ def local(self):
+ pass
+
+ @volume_command(method='PROBE', cmd='remote', access_level=env.ACCESS_REMOTE)
+ def remote(self):
+ pass
+
+ cp = TestCommandsProcessor()
+
+ self.call(cp, 'PROBE', cmd='all', access_level=env.ACCESS_REMOTE)
+ self.call(cp, 'PROBE', cmd='all', access_level=env.ACCESS_LOCAL)
+ self.call(cp, 'PROBE', cmd='all', access_level=env.ACCESS_SYSTEM)
+
+ self.call(cp, 'PROBE', cmd='remote', access_level=env.ACCESS_REMOTE)
+ self.assertRaises(env.Forbidden, self.call, cp, 'PROBE', cmd='remote', access_level=env.ACCESS_LOCAL)
+ self.assertRaises(env.Forbidden, self.call, cp, 'PROBE', cmd='remote', access_level=env.ACCESS_SYSTEM)
+
+ self.assertRaises(env.Forbidden, self.call, cp, 'PROBE', cmd='local', access_level=env.ACCESS_REMOTE)
+ self.call(cp, 'PROBE', cmd='local', access_level=env.ACCESS_LOCAL)
+ self.assertRaises(env.Forbidden, self.call, cp, 'PROBE', cmd='local', access_level=env.ACCESS_SYSTEM)
+
+ self.assertRaises(env.Forbidden, self.call, cp, 'PROBE', cmd='system', access_level=env.ACCESS_REMOTE)
+ self.assertRaises(env.Forbidden, self.call, cp, 'PROBE', cmd='system', access_level=env.ACCESS_LOCAL)
+ self.call(cp, 'PROBE', cmd='system', access_level=env.ACCESS_SYSTEM)
+
+ def test_ParentClasses(self):
+ calls = []
+
+ class Parent(object):
+
+ @volume_command(method='PROBE')
+ def probe(self):
+ return 'probe'
+
+ class TestCommandsProcessor(CommandsProcessor, Parent):
+ pass
+
+ cp = TestCommandsProcessor()
+ self.assertEqual('probe', self.call(cp, 'PROBE'))
+
+ def test_OverrideInChildClass(self):
+ calls = []
+
+ class Parent(CommandsProcessor):
+
+ @volume_command(method='PROBE')
+ def probe(self):
+ return 'probe-1'
+
+ @volume_command(method='COMMON')
+ def common(self):
+ return 'common'
+
+ class Child(Parent):
+
+ @volume_command(method='PROBE')
+ def probe(self):
+ return 'probe-2'
+
+ @volume_command(method='PARTICULAR')
+ def particular(self):
+ return 'particular'
+
+ cp = Child()
+ self.assertEqual('probe-2', self.call(cp, 'PROBE'))
+ self.assertEqual('common', self.call(cp, 'COMMON'))
+ self.assertEqual('particular', self.call(cp, 'PARTICULAR'))
+
+ def test_RequestRead(self):
+
+ class Stream(object):
+
+ def __init__(self, value):
+ self.pos = 0
+ self.value = value
+
+ def read(self, size):
+ assert self.pos + size <= len(self.value)
+ result = self.value[self.pos:self.pos + size]
+ self.pos += size
+ return result
+
+ request = Request()
+ request.content_stream = Stream('123')
+ request.content_length = len(request.content_stream.value)
+ self.assertEqual('123', request.read())
+ self.assertEqual('', request.read())
+ self.assertEqual('', request.read(10))
+
+ request = Request()
+ request.content_stream = Stream('123')
+ request.content_length = len(request.content_stream.value)
+ self.assertEqual('123', request.read(10))
+
+ request = Request()
+ request.content_stream = Stream('123')
+ request.content_length = len(request.content_stream.value)
+ self.assertEqual('1', request.read(1))
+ self.assertEqual('2', request.read(1))
+ self.assertEqual('3', request.read())
+
+ def test_Arguments(self):
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @volume_command(method='PROBE', arguments={'arg_int': to_int, 'arg_list': to_list})
+ def probe(self, arg_int=None, arg_list=None):
+ return arg_int, arg_list
+
+ cp = TestCommandsProcessor()
+
+ self.assertEqual((None, None), self.call(cp, 'PROBE'))
+ self.assertEqual((-1, [-2, None]), self.call(cp, 'PROBE', arg_int=-1, arg_list=[-2, None]))
+ self.assertEqual((4, [' foo', ' bar ', ' ']), self.call(cp, 'PROBE', arg_int='4', arg_list=' foo, bar , '))
+ self.assertEqual((None, ['foo']), self.call(cp, 'PROBE', arg_list='foo'))
+ self.assertEqual((None, []), self.call(cp, 'PROBE', arg_list=''))
+ self.assertEqual((None, [' ']), self.call(cp, 'PROBE', arg_list=' '))
+ self.assertEqual((0, None), self.call(cp, 'PROBE', arg_int=''))
+ self.assertRaises(RuntimeError, self.call, cp, 'PROBE', arg_int=' ')
+ self.assertRaises(RuntimeError, self.call, cp, 'PROBE', arg_int='foo')
+
+ def test_PassKwargs(self):
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @volume_command(method='PROBE')
+ def probe(self, arg, request, response, **kwargs):
+ return arg, dict(request), dict(response), kwargs
+
+ cp = TestCommandsProcessor()
+
+ self.assertEqual(
+ (None, {'method': 'PROBE'}, {}, {}),
+ self.call(cp, 'PROBE'))
+ self.assertEqual(
+ (1, {'method': 'PROBE', 'arg': 1}, {}, {}),
+ self.call(cp, 'PROBE', arg=1))
+ self.assertEqual(
+ (None, {'method': 'PROBE', 'foo': 'bar'}, {}, {}),
+ self.call(cp, 'PROBE', foo='bar'))
+ self.assertEqual(
+ (-2, {'method': 'PROBE', 'foo': 'bar', 'arg': -2}, {}, {}),
+ self.call(cp, 'PROBE', foo='bar', arg=-2))
+
+ def test_PrePost(self):
+
+ class ParentCommandsProcessor(CommandsProcessor):
+
+ @db.volume_command_pre(method='PROBE')
+ def command_pre1(self, request):
+ request['probe'].append('pre1')
+
+ @db.volume_command_pre(method='PROBE')
+ def command_pre2(self, request):
+ request['probe'].append('pre2')
+
+ @db.volume_command_post(method='PROBE')
+ def command_post1(self, request, response, result):
+ request['probe'].append('post1')
+ response['probe'].append('post1')
+ return result + 1
+
+ @db.volume_command_post(method='PROBE')
+ def command_post2(self, request, response, result):
+ request['probe'].append('post2')
+ response['probe'].append('post2')
+ return result + 1
+
+ class TestCommandsProcessor(ParentCommandsProcessor):
+
+ @db.volume_command_pre(method='PROBE')
+ def command_pre3(self, request):
+ request['probe'].append('pre3')
+
+ @db.volume_command_pre(method='PROBE')
+ def command_pre4(self, request):
+ request['probe'].append('pre4')
+
+ @db.volume_command(method='PROBE')
+ def command(self, request):
+ request['probe'].append('cmd')
+ response['probe'].append('cmd')
+ return 1
+
+ @db.volume_command_post(method='PROBE')
+ def command_post3(self, request, response, result):
+ request['probe'].append('post3')
+ response['probe'].append('post3')
+ return result + 1
+
+ @db.volume_command_post(method='PROBE')
+ def command_post4(self, request, response, result):
+ request['probe'].append('post4')
+ response['probe'].append('post4')
+ return result + 1
+
+ cp = TestCommandsProcessor()
+
+ request = db.Request(method='PROBE', probe=[])
+ response = db.Response(probe=[])
+ self.assertEqual(5, cp.call(request, response))
+ self.assertEqual(['pre1', 'pre2', 'pre3', 'pre4', 'cmd', 'post1', 'post2', 'post3', 'post4'], request['probe'])
+ self.assertEqual(['cmd', 'post1', 'post2', 'post3', 'post4'], response['probe'])
+
+ def test_PrePostCallbackLess(self):
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @db.volume_command_pre(method='PROBE')
+ def command_pre(self, request):
+ request['probe'].append('pre')
+
+ def super_call(self, request, response):
+ request['probe'].append('cmd')
+ response['probe'].append('cmd')
+ return 1
+
+ @db.volume_command_post(method='PROBE')
+ def command_post(self, request, response, result):
+ request['probe'].append('post')
+ response['probe'].append('post')
+ return result + 1
+
+ cp = TestCommandsProcessor()
+
+ request = db.Request(method='PROBE', probe=[])
+ response = db.Response(probe=[])
+ self.assertEqual(2, cp.call(request, response))
+ self.assertEqual(['pre', 'cmd', 'post'], request['probe'])
+ self.assertEqual(['cmd', 'post'], response['probe'])
+
+ def test_SubCall(self):
+
+ class TestCommandsProcessor(CommandsProcessor):
+
+ @db.volume_command(method='PROBE')
+ def command1(self, request):
+ return request.call('PROBE', cmd='command2')
+
+ @db.volume_command(method='PROBE')
+ def command2(self, request):
+ return {'access_level': request.access_level, 'accept_language': request.accept_language}
+
+ cp = TestCommandsProcessor()
+
+ request = db.Request(method='PROBE')
+ request.access_level = -1
+ request.accept_language = 'foo'
+ self.assertEqual({
+ 'access_level': -1,
+ 'accept_language': 'foo',
+ },
+ cp.call(request, db.Response()))
+
+ def call(self, cp, method, document=None, guid=None, prop=None,
+ access_level=env.ACCESS_REMOTE, **kwargs):
+
+ class TestRequest(Request):
+
+ content_stream = None
+ content_length = 0
+
+ request = TestRequest(**kwargs)
+ request['method'] = method
+ request.access_level = access_level
+ if document:
+ request['document'] = document
+ if guid:
+ request['guid'] = guid
+ if prop:
+ request['prop'] = prop
+ if 'content_stream' in request:
+ request.content_stream = request.pop('content_stream')
+ request.content_length = len(request.content_stream.getvalue())
+
+ self.response = Response()
+ return cp.call(request, self.response)
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/document.py b/tests/units/db/document.py
new file mode 100755
index 0000000..483e647
--- /dev/null
+++ b/tests/units/db/document.py
@@ -0,0 +1,1008 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# sugar-lint: disable
+
+import os
+import sys
+import stat
+import time
+import urllib2
+import hashlib
+import cPickle as pickle
+from base64 import b64encode
+from cStringIO import StringIO
+from os.path import join, exists
+
+import gobject
+
+from __init__ import tests
+
+from sugar_network import db
+from sugar_network.db import document, storage, env, index
+from sugar_network.db import directory as directory_
+from sugar_network.db.directory import Directory
+from sugar_network.db.index import IndexWriter
+from sugar_network.toolkit.util import Sequence
+
+
+class DocumentTest(tests.Test):
+
+ def test_ActiveProperty_Slotted(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def slotted(self, value):
+ return value
+
+ @db.stored_property()
+ def not_slotted(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ self.assertEqual(1, directory.metadata['slotted'].slot)
+
+ directory.create({'slotted': 'slotted', 'not_slotted': 'not_slotted'})
+
+ docs, total = directory.find(0, 100, order_by='slotted')
+ self.assertEqual(1, total)
+ self.assertEqual(
+ [('slotted', 'not_slotted')],
+ [(i.slotted, i.not_slotted) for i in docs])
+
+ self.assertRaises(RuntimeError, directory.find, 0, 100, order_by='not_slotted')
+
+ def test_ActiveProperty_SlottedIUnique(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop_1(self, value):
+ return value
+
+ @db.indexed_property(slot=1)
+ def prop_2(self, value):
+ return value
+
+ self.assertRaises(RuntimeError, Directory, tests.tmpdir, Document, IndexWriter)
+
+ def test_ActiveProperty_Terms(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='T')
+ def term(self, value):
+ return value
+
+ @db.stored_property()
+ def not_term(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ self.assertEqual('T', directory.metadata['term'].prefix)
+
+ guid = directory.create({'term': 'term', 'not_term': 'not_term'})
+
+ docs, total = directory.find(0, 100, term='term')
+ self.assertEqual(1, total)
+ self.assertEqual(
+ [('term', 'not_term')],
+ [(i.term, i.not_term) for i in docs])
+
+ self.assertEqual(0, directory.find(0, 100, query='not_term:not_term')[-1])
+ self.assertEqual(1, directory.find(0, 100, query='not_term:=not_term')[-1])
+
+ def test_ActiveProperty_TermsUnique(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='P')
+ def prop_1(self, value):
+ return value
+
+ @db.indexed_property(prefix='P')
+ def prop_2(self, value):
+ return value
+
+ self.assertRaises(RuntimeError, Directory, tests.tmpdir, Document, IndexWriter)
+
+ def test_ActiveProperty_FullTextSearch(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(full_text=False, slot=1)
+ def no(self, value):
+ return value
+
+ @db.indexed_property(full_text=True, slot=2)
+ def yes(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ self.assertEqual(False, directory.metadata['no'].full_text)
+ self.assertEqual(True, directory.metadata['yes'].full_text)
+
+ guid = directory.create({'no': 'foo', 'yes': 'bar'})
+
+ self.assertEqual(0, directory.find(0, 100, query='foo')[-1])
+ self.assertEqual(1, directory.find(0, 100, query='bar')[-1])
+
+ def test_StoredProperty_Defaults(self):
+
+ class Document(document.Document):
+
+ @db.stored_property(default='default')
+ def w_default(self, value):
+ return value
+
+ @db.stored_property()
+ def wo_default(self, value):
+ return value
+
+ @db.indexed_property(slot=1, default='not_stored_default')
+ def not_stored_default(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ self.assertEqual('default', directory.metadata['w_default'].default)
+ self.assertEqual(None, directory.metadata['wo_default'].default)
+ self.assertEqual('not_stored_default', directory.metadata['not_stored_default'].default)
+
+ guid = directory.create({'wo_default': 'wo_default'})
+
+ docs, total = directory.find(0, 100)
+ self.assertEqual(1, total)
+ self.assertEqual(
+ [('default', 'wo_default', 'not_stored_default')],
+ [(i.w_default, i.wo_default, i.not_stored_default) for i in docs])
+
+ self.assertRaises(RuntimeError, directory.create, {})
+
+ def test_properties_Blob(self):
+
+ class Document(document.Document):
+
+ @db.blob_property(mime_type='application/json')
+ def blob(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ guid = directory.create({})
+ blob_path = join(tests.tmpdir, guid[:2], guid, 'blob')
+
+ self.assertEqual(db.PropertyMetadata(), directory.get(guid).blob)
+
+ data = 'payload'
+ directory.set_blob(guid, 'blob', StringIO(data))
+ self.assertEqual({
+ 'seqno': 2,
+ 'mtime': int(os.stat(blob_path).st_mtime),
+ 'digest': hashlib.sha1(data).hexdigest(),
+ 'path': join(tests.tmpdir, guid[:2], guid, 'blob.blob'),
+ 'mime_type': 'application/json',
+ },
+ directory.get(guid).meta('blob'))
+ self.assertEqual(data, file(blob_path + '.blob').read())
+
+ def test_create_FailOnExisted(self):
+
+ class Document(document.Document):
+ pass
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ guid = directory.create(guid='guid')
+ assert guid == 'guid'
+ self.assertRaises(RuntimeError, directory.create, guid='guid')
+
+ def test_update(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop_1(self, value):
+ return value
+
+ @db.stored_property()
+ def prop_2(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ guid = directory.create({'prop_1': '1', 'prop_2': '2'})
+ self.assertEqual(
+ [('1', '2')],
+ [(i.prop_1, i.prop_2) for i in directory.find(0, 1024)[0]])
+
+ directory.update(guid, {'prop_1': '3', 'prop_2': '4'})
+ self.assertEqual(
+ [('3', '4')],
+ [(i.prop_1, i.prop_2) for i in directory.find(0, 1024)[0]])
+
+ def test_delete(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='P')
+ def prop(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ guid_1 = directory.create({'prop': '1'})
+ guid_2 = directory.create({'prop': '2'})
+ guid_3 = directory.create({'prop': '3'})
+
+ self.assertEqual(
+ ['1', '2', '3'],
+ [i.prop for i in directory.find(0, 1024)[0]])
+
+ directory.delete(guid_2)
+ self.assertEqual(
+ ['1', '3'],
+ [i.prop for i in directory.find(0, 1024)[0]])
+
+ directory.delete(guid_3)
+ self.assertEqual(
+ ['1'],
+ [i.prop for i in directory.find(0, 1024)[0]])
+
+ directory.delete(guid_1)
+ self.assertEqual(
+ [],
+ [i.prop for i in directory.find(0, 1024)[0]])
+
+ def test_populate(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ self.touch(
+ ('1/1/guid', '{"value": "1"}'),
+ ('1/1/ctime', '{"value": 1}'),
+ ('1/1/mtime', '{"value": 1}'),
+ ('1/1/prop', '{"value": "prop-1"}'),
+ ('1/1/seqno', '{"value": 0}'),
+
+ ('2/2/guid', '{"value": "2"}'),
+ ('2/2/ctime', '{"value": 2}'),
+ ('2/2/mtime', '{"value": 2}'),
+ ('2/2/prop', '{"value": "prop-2"}'),
+ ('2/2/seqno', '{"value": 0}'),
+ )
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ self.assertEqual(0, directory._index.mtime)
+ for i in directory.populate():
+ pass
+ self.assertNotEqual(0, directory._index.mtime)
+
+ doc = directory.get('1')
+ self.assertEqual(1, doc['ctime'])
+ self.assertEqual(1, doc['mtime'])
+ self.assertEqual('prop-1', doc['prop'])
+
+ doc = directory.get('2')
+ self.assertEqual(2, doc['ctime'])
+ self.assertEqual(2, doc['mtime'])
+ self.assertEqual('prop-2', doc['prop'])
+
+ self.assertEqual(
+ [
+ (1, 1, 'prop-1'),
+ (2, 2, 'prop-2'),
+ ],
+ [(i.ctime, i.mtime, i.prop) for i in directory.find(0, 10)[0]])
+
+ def test_populate_IgnoreBadDocuments(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ self.touch(
+ ('1/1/guid', '{"value": "1"}'),
+ ('1/1/ctime', '{"value": 1}'),
+ ('1/1/mtime', '{"value": 1}'),
+ ('1/1/prop', '{"value": "prop-1"}'),
+ ('1/1/seqno', '{"value": 0}'),
+
+ ('2/2/guid', '{"value": "2"}'),
+ ('2/2/ctime', ''),
+ ('2/2/mtime', '{"value": 2}'),
+ ('2/2/prop', '{"value": "prop-2"}'),
+ ('2/2/seqno', '{"value": 0}'),
+
+ ('3/3/guid', ''),
+ ('3/3/ctime', ''),
+ ('3/3/mtime', ''),
+ ('3/3/prop', ''),
+ ('3/3/seqno', ''),
+ )
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ populated = 0
+ for i in directory.populate():
+ populated += 1
+ self.assertEqual(2, populated)
+ self.assertEqual(
+ sorted(['1', '2']),
+ sorted([i.guid for i in directory.find(0, 10)[0]]))
+ assert exists('1/1/guid')
+ assert exists('2/2/guid')
+ assert not exists('3/3/guid')
+
+ def test_create_with_guid(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ guid = directory.create(guid='guid', prop='foo')
+ self.assertEqual(
+ [('guid', 'foo')],
+ [(i.guid, i.prop) for i in directory.find(0, 1024)[0]])
+
+ directory.update(guid, {'prop': 'probe'})
+ self.assertEqual(
+ [('guid', 'probe')],
+ [(i.guid, i.prop) for i in directory.find(0, 1024)[0]])
+
+ def test_seqno(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ guid_1 = directory.create({})
+ seqno = directory.get(guid_1).get('seqno')
+ self.assertEqual(1, seqno)
+ self.assertEqual(
+ pickle.load(file('%s/%s/guid' % (guid_1[:2], guid_1)))['seqno'],
+ seqno)
+ self.assertEqual(
+ pickle.load(file('%s/%s/prop' % (guid_1[:2], guid_1)))['seqno'],
+ seqno)
+
+ guid_2 = directory.create({})
+ seqno = directory.get(guid_2).get('seqno')
+ self.assertEqual(2, seqno)
+ self.assertEqual(
+ pickle.load(file('%s/%s/guid' % (guid_2[:2], guid_2)))['seqno'],
+ seqno)
+ self.assertEqual(
+ pickle.load(file('%s/%s/prop' % (guid_2[:2], guid_2)))['seqno'],
+ seqno)
+
+ directory.set_blob(guid_1, 'blob', StringIO('blob'))
+ seqno = directory.get(guid_1).get('seqno')
+ self.assertEqual(3, seqno)
+ self.assertEqual(
+ pickle.load(file('%s/%s/guid' % (guid_1[:2], guid_1)))['seqno'],
+ 1)
+ self.assertEqual(
+ pickle.load(file('%s/%s/prop' % (guid_1[:2], guid_1)))['seqno'],
+ 1)
+ self.assertEqual(
+ pickle.load(file('%s/%s/blob' % (guid_1[:2], guid_1)))['seqno'],
+ seqno)
+
+ directory.update(guid_1, {'prop': 'new'})
+ seqno = directory.get(guid_1).get('seqno')
+ self.assertEqual(4, seqno)
+ self.assertEqual(
+ pickle.load(file('%s/%s/guid' % (guid_1[:2], guid_1)))['seqno'],
+ 1)
+ self.assertEqual(
+ pickle.load(file('%s/%s/prop' % (guid_1[:2], guid_1)))['seqno'],
+ seqno)
+ self.assertEqual(
+ pickle.load(file('%s/%s/blob' % (guid_1[:2], guid_1)))['seqno'],
+ 3)
+
+ def test_diff(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ directory.create(guid='1', prop='1', ctime=1, mtime=1)
+ directory.set_blob('1', 'blob', StringIO('1'))
+ for i in os.listdir('1/1'):
+ os.utime('1/1/%s' % i, (1, 1))
+
+ directory.create(guid='2', prop='2', ctime=2, mtime=2)
+ directory.set_blob('2', 'blob', StringIO('2'))
+ for i in os.listdir('2/2'):
+ os.utime('2/2/%s' % i, (2, 2))
+
+ directory.create(guid='3', prop='3', ctime=3, mtime=3)
+ for i in os.listdir('3/3'):
+ os.utime('3/3/%s' % i, (3, 3))
+
+ out_seq = Sequence()
+ self.assertEqual([
+ ('1', {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'prop': {'value': '1', 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ 'blob': {'mtime': 1, 'digest': hashlib.sha1('1').hexdigest(), 'mime_type': 'application/octet-stream', 'path': tests.tmpdir + '/1/1/blob.blob'},
+ }),
+ ('2', {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '2', 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'blob': {'mtime': 2, 'digest': hashlib.sha1('2').hexdigest(), 'mime_type': 'application/octet-stream', 'path': tests.tmpdir + '/2/2/blob.blob'},
+ }),
+ ('3', {
+ 'guid': {'value': '3', 'mtime': 3},
+ 'ctime': {'value': 3, 'mtime': 3},
+ 'prop': {'value': '3', 'mtime': 3},
+ 'mtime': {'value': 3, 'mtime': 3},
+ }),
+ ],
+ [i for i in directory.diff(Sequence([[0, None]]), out_seq)])
+ self.assertEqual([[1, 5]], out_seq)
+
+ out_seq = Sequence()
+ self.assertEqual([
+ ('2', {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '2', 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'blob': {'mtime': 2, 'digest': hashlib.sha1('2').hexdigest(), 'mime_type': 'application/octet-stream', 'path': tests.tmpdir + '/2/2/blob.blob'},
+ }),
+ ],
+ [i for i in directory.diff(Sequence([[3, 4]]), out_seq)])
+ self.assertEqual([[3, 4]], out_seq)
+
+ out_seq = Sequence()
+ self.assertEqual([
+ ],
+ [i for i in directory.diff(Sequence([[3, 3]]), out_seq)])
+ self.assertEqual([], out_seq)
+
+ out_seq = Sequence()
+ self.assertEqual([
+ ],
+ [i for i in directory.diff(Sequence([[6, 100]]), out_seq)])
+ self.assertEqual([], out_seq)
+ directory.update(guid='2', prop='22')
+ self.assertEqual([
+ ('2', {
+ 'prop': {'value': '22', 'mtime': int(os.stat('2/2/prop').st_mtime)},
+ }),
+ ],
+ [i for i in directory.diff(Sequence([[6, 100]]), out_seq)])
+ self.assertEqual([[6, 6]], out_seq)
+
+ def test_diff_Partial(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory.create(guid='1', prop='1', ctime=1, mtime=1)
+ for i in os.listdir('1/1'):
+ os.utime('1/1/%s' % i, (1, 1))
+ directory.create(guid='2', prop='2', ctime=2, mtime=2)
+ for i in os.listdir('2/2'):
+ os.utime('2/2/%s' % i, (2, 2))
+
+ out_seq = Sequence()
+ for guid, diff in directory.diff(Sequence([[0, None]]), out_seq):
+ self.assertEqual('1', guid)
+ break
+ self.assertEqual([], out_seq)
+
+ out_seq = Sequence()
+ for guid, diff in directory.diff(Sequence([[0, None]]), out_seq):
+ if guid == '2':
+ break
+ self.assertEqual([[1, 1]], out_seq)
+
+ out_seq = Sequence()
+ for guid, diff in directory.diff(Sequence([[0, None]]), out_seq):
+ pass
+ self.assertEqual([[1, 2]], out_seq)
+
+ def test_diff_WithBlobsSetByUrl(self):
+
+ class Document(document.Document):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ directory.create(guid='1', ctime=1, mtime=1)
+ directory.set_blob('1', 'blob', url='http://sugarlabs.org')
+ for i in os.listdir('1/1'):
+ os.utime('1/1/%s' % i, (1, 1))
+
+ out_seq = Sequence()
+ self.assertEqual([
+ ('1', {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ 'blob': {'mtime': 1, 'mime_type': 'application/octet-stream', 'url': 'http://sugarlabs.org'},
+ }),
+ ],
+ [i for i in directory.diff(Sequence([[0, None]]), out_seq)])
+ self.assertEqual([[1, 2]], out_seq)
+
+ def test_diff_Filter(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='P')
+ def prop(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ directory.create(guid='1', ctime=1, mtime=1, prop='1')
+ directory.create(guid='2', ctime=2, mtime=2, prop='2')
+ for i in os.listdir('2/2'):
+ os.utime('2/2/%s' % i, (2, 2))
+
+ out_seq = Sequence()
+ self.assertEqual([
+ ('2', {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '2', 'mtime': 2},
+ }),
+ ],
+ [i for i in directory.diff(Sequence([[0, None]]), out_seq, prop='2')])
+ self.assertEqual([[2, 2]], out_seq)
+
+ def test_diff_GroupBy(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1, prefix='P')
+ def prop(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ directory.create(guid='1', ctime=1, mtime=1, prop='0')
+ for i in os.listdir('1/1'):
+ os.utime('1/1/%s' % i, (1, 1))
+ directory.create(guid='2', ctime=2, mtime=2, prop='0')
+ for i in os.listdir('2/2'):
+ os.utime('2/2/%s' % i, (2, 2))
+
+ out_seq = Sequence()
+ self.assertEqual([
+ ('2', {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '0', 'mtime': 2},
+ }),
+ ],
+ [i for i in directory.diff(Sequence([[0, None]]), out_seq, group_by='prop')])
+ self.assertEqual([[2, 2]], out_seq)
+
+ def test_merge_New(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory1 = Directory('document1', Document, IndexWriter)
+
+ directory1.create(guid='1', prop='1', ctime=1, mtime=1)
+ directory1.set_blob('1', 'blob', StringIO('1'))
+ for i in os.listdir('document1/1/1'):
+ os.utime('document1/1/1/%s' % i, (1, 1))
+
+ directory1.create(guid='2', prop='2', ctime=2, mtime=2)
+ directory1.set_blob('2', 'blob', StringIO('2'))
+ for i in os.listdir('document1/2/2'):
+ os.utime('document1/2/2/%s' % i, (2, 2))
+
+ directory1.create(guid='3', prop='3', ctime=3, mtime=3)
+ for i in os.listdir('document1/3/3'):
+ os.utime('document1/3/3/%s' % i, (3, 3))
+
+ directory2 = Directory('document2', Document, IndexWriter)
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory2.merge(guid, diff)
+
+ self.assertEqual(
+ sorted([
+ (1, '1', 1, '1'),
+ (2, '2', 2, '2'),
+ (3, '3', 3, '3'),
+ ]),
+ sorted([(i['ctime'], i['prop'], i['mtime'], i['guid']) for i in directory2.find(0, 1024)[0]]))
+
+ doc = directory2.get('1')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['mtime'])
+ self.assertEqual(1, doc.meta('ctime')['mtime'])
+ self.assertEqual(1, doc.meta('prop')['mtime'])
+ self.assertEqual(1, doc.meta('mtime')['mtime'])
+ self.assertEqual(1, doc.meta('blob')['mtime'])
+
+ doc = directory2.get('2')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('blob')['mtime'])
+
+ doc = directory2.get('3')
+ self.assertEqual(3, doc.get('seqno'))
+ self.assertEqual(3, doc.meta('guid')['mtime'])
+ self.assertEqual(3, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('prop')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(None, doc.meta('blob'))
+
+ def test_merge_Update(self):
+
+ class Document(document.Document):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory1 = Directory('document1', Document, IndexWriter)
+ directory2 = Directory('document2', Document, IndexWriter)
+
+ directory1.create(guid='guid', ctime=1, mtime=1)
+ directory1.set_blob('guid', 'blob', StringIO('1'))
+ for i in os.listdir('document1/gu/guid'):
+ os.utime('document1/gu/guid/%s' % i, (1, 1))
+
+ directory2.create(guid='guid', ctime=2, mtime=2)
+ directory2.set_blob('guid', 'blob', StringIO('2'))
+ for i in os.listdir('document2/gu/guid'):
+ os.utime('document2/gu/guid/%s' % i, (2, 2))
+
+ self.assertEqual(
+ [(2, 2, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find(0, 1024)[0]])
+ doc = directory2.get('guid')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('blob')['mtime'])
+ self.assertEqual('2', file('document2/gu/guid/blob.blob').read())
+
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory2.merge(guid, diff)
+
+ self.assertEqual(
+ [(2, 2, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find(0, 1024)[0]])
+ doc = directory2.get('guid')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('blob')['mtime'])
+ self.assertEqual('2', file('document2/gu/guid/blob.blob').read())
+
+ os.utime('document1/gu/guid/mtime', (3, 3))
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory2.merge(guid, diff)
+
+ self.assertEqual(
+ [(2, 1, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find(0, 1024)[0]])
+ doc = directory2.get('guid')
+ self.assertEqual(3, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('blob')['mtime'])
+ self.assertEqual('2', file('document2/gu/guid/blob.blob').read())
+
+ os.utime('document1/gu/guid/blob', (4, 4))
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory2.merge(guid, diff)
+
+ self.assertEqual(
+ [(2, 1, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find(0, 1024)[0]])
+ doc = directory2.get('guid')
+ self.assertEqual(4, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(4, doc.meta('blob')['mtime'])
+ self.assertEqual('1', file('document2/gu/guid/blob.blob').read())
+
+ def test_merge_SeqnoLessMode(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ directory1 = Directory('document1', Document, IndexWriter)
+ directory1.create(guid='1', prop='1', ctime=1, mtime=1)
+
+ directory2 = Directory('document2', Document, IndexWriter)
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory2.merge(guid, diff, increment_seqno=False)
+ self.assertEqual(
+ [(1, 1, '1', '1')],
+ [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory2.find(0, 1024)[0]])
+ doc = directory2.get('1')
+ self.assertEqual(None, doc.get('seqno'))
+ self.assertEqual(0, doc.meta('guid')['seqno'])
+ self.assertEqual(0, doc.meta('prop')['seqno'])
+
+ directory3 = Directory('document3', Document, IndexWriter)
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory3.merge(guid, diff=diff)
+ self.assertEqual(
+ [(1, 1, '1', '1')],
+ [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory3.find(0, 1024)[0]])
+ doc = directory3.get('1')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['seqno'])
+ self.assertEqual(1, doc.meta('prop')['seqno'])
+
+ time.sleep(1)
+ directory1.update(guid='1', prop='2', ctime=2, mtime=2)
+
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory3.merge(guid, diff, increment_seqno=False)
+ self.assertEqual(
+ [(2, 2, '1', '2')],
+ [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory3.find(0, 1024)[0]])
+ doc = directory3.get('1')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['seqno'])
+ self.assertEqual(1, doc.meta('prop')['seqno'])
+
+ time.sleep(1)
+ directory1.update(guid='1', prop='3', ctime=3, mtime=3)
+
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory3.merge(guid, diff)
+ self.assertEqual(
+ [(3, 3, '1', '3')],
+ [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory3.find(0, 1024)[0]])
+ doc = directory3.get('1')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['seqno'])
+ self.assertEqual(2, doc.meta('prop')['seqno'])
+
+ def test_merge_AvoidCalculatedBlobs(self):
+
+ class Document(document.Document):
+
+ @db.blob_property()
+ def blob(self, value):
+ return {'url': 'http://foo/bar', 'mime_type': 'image/png'}
+
+ directory1 = Directory('document1', Document, IndexWriter)
+ directory1.create(guid='guid', ctime=1, mtime=1)
+ for i in os.listdir('document1/gu/guid'):
+ os.utime('document1/gu/guid/%s' % i, (1, 1))
+
+ directory2 = Directory('document2', Document, IndexWriter)
+ for guid, diff in directory1.diff(Sequence([[0, None]]), Sequence()):
+ directory2.merge(guid, diff)
+
+ doc = directory2.get('guid')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['mtime'])
+ assert not exists('document2/gu/guid/blob')
+
+ def test_merge_Blobs(self):
+
+ class Document(document.Document):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory = Directory('document', Document, IndexWriter)
+ self.touch(('file', 'blob-1'))
+ directory.merge('1', {
+ 'guid': {'mtime': 1, 'value': '1'},
+ 'ctime': {'mtime': 2, 'value': 2},
+ 'mtime': {'mtime': 3, 'value': 3},
+ 'blob': {'mtime': 4, 'path': 'file'},
+ })
+
+ self.assertEqual(
+ [(2, 3, '1')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in directory.find(0, 1024)[0]])
+
+ doc = directory.get('1')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(4, doc.meta('blob')['mtime'])
+ self.assertEqual('blob-1', file('document/1/1/blob.blob').read())
+
+ directory.merge('1', {
+ 'blob': {'mtime': 5, 'content': b64encode('blob-2')},
+ })
+
+ self.assertEqual(5, doc.meta('blob')['mtime'])
+ self.assertEqual('blob-2', file('document/1/1/blob.blob').read())
+
+ def test_MalformedGUIDs(self):
+
+ class Document(document.Document):
+ pass
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ self.assertRaises(RuntimeError, directory.create, {'guid': 'foo/bar'})
+ self.assertRaises(RuntimeError, directory.create, {'guid': 'foo bar'})
+ self.assertRaises(RuntimeError, directory.create, {'guid': 'foo#bar'})
+ assert directory.create({'guid': 'foo-bar.1-2'})
+
+ def __test_Integers(self):
+ db = Index({
+ 'prop': ActiveProperty('prop', 1, 'A', typecast=int, full_text=True),
+ })
+
+ db.store('1', {'prop': 9}, True)
+ db.store('2', {'prop': 89}, True)
+ db.store('3', {'prop': 777}, True)
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': 9},
+ {'guid': '2', 'prop': 89},
+ {'guid': '3', 'prop': 777},
+ ],
+ db._find(order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': 9},
+ {'guid': '2', 'prop': 89},
+ ],
+ db._find(query='prop:0..100', order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': 9},
+ ],
+ db._find(query='prop:9', order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '2', 'prop': 89},
+ ],
+ db._find(query='prop:=89', order_by='prop')[0])
+
+ def __test_Floats(self):
+ db = Index({
+ 'prop': ActiveProperty('prop', 1, 'A', typecast=float, full_text=True),
+ })
+
+ db.store('1', {'prop': 9.1}, True)
+ db.store('2', {'prop': 89.2}, True)
+ db.store('3', {'prop': 777.3}, True)
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': 9.1},
+ {'guid': '2', 'prop': 89.2},
+ {'guid': '3', 'prop': 777.3},
+ ],
+ db._find(order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': 9.1},
+ {'guid': '2', 'prop': 89.2},
+ ],
+ db._find(query='prop:0..100', order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': 9.1},
+ ],
+ db._find(query='prop:9.1', order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '2', 'prop': 89.2},
+ ],
+ db._find(query='prop:=89.2', order_by='prop')[0])
+
+ def __test_Booleans(self):
+ db = Index({
+ 'prop': ActiveProperty('prop', 1, 'A', typecast=bool, full_text=True),
+ })
+
+ db.store('1', {'prop': True}, True)
+ db.store('2', {'prop': False}, True)
+
+ self.assertEqual(
+ [
+ {'guid': '2', 'prop': False},
+ {'guid': '1', 'prop': True},
+ ],
+ db._find(order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '2', 'prop': False},
+ {'guid': '1', 'prop': True},
+ ],
+ db._find(query='prop:0..100', order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': True},
+ ],
+ db._find(query='prop:1..1', order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '2', 'prop': False},
+ ],
+ db._find(query='prop:0', order_by='prop')[0])
+
+ self.assertEqual(
+ [
+ {'guid': '1', 'prop': True},
+ ],
+ db._find(query='prop:=1', order_by='prop')[0])
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/env.py b/tests/units/db/env.py
new file mode 100755
index 0000000..2898efb
--- /dev/null
+++ b/tests/units/db/env.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import copy
+from os.path import exists
+
+from __init__ import tests
+
+from sugar_network.db import env
+
+
+class EnvTest(tests.Test):
+
+ def test_gettext(self):
+ # Fallback to default lang
+ env._default_lang = 'default'
+ self.assertEqual('foo', env.gettext({'lang': 'foo', 'default': 'bar'}, 'lang'))
+ self.assertEqual('bar', env.gettext({'lang': 'foo', 'default': 'bar'}, 'fake'))
+
+ # Exact accept_language
+ self.assertEqual('', env.gettext(None, 'lang'))
+ self.assertEqual('foo', env.gettext('foo', 'lang'))
+ self.assertEqual('foo', env.gettext({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, 'lang'))
+ self.assertEqual('foo', env.gettext({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, ['lang', 'fake']))
+ self.assertEqual('bar', env.gettext({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, ['fake', 'lang']))
+
+ # Last resort
+ self.assertEqual('foo', env.gettext({'1': 'foo', '2': 'bar'}, 'fake'))
+
+ # Primed accept_language
+ self.assertEqual('foo', env.gettext({'1': 'foo', '2': 'bar', 'default': 'default'}, '1-a'))
+
+ # Primed i18n value
+ self.assertEqual('bar', env.gettext({'1-a': 'foo', '1': 'bar', 'default': 'default'}, '1-b'))
+ self.assertEqual('foo', env.gettext({'1-a': 'foo', '2': 'bar', 'default': 'default'}, '1-b'))
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/index.py b/tests/units/db/index.py
new file mode 100755
index 0000000..2654cd1
--- /dev/null
+++ b/tests/units/db/index.py
@@ -0,0 +1,743 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# sugar-lint: disable
+
+import os
+import uuid
+import time
+import shutil
+import locale
+from os.path import exists
+
+from __init__ import tests
+
+from sugar_network.db import index, env
+from sugar_network.db.metadata import Metadata, IndexedProperty, GUID_PREFIX
+from sugar_network.db.directory import _Query
+from sugar_network.toolkit import coroutine
+
+
+class IndexTest(tests.Test):
+
+ def test_Term_AvoidCollisionsWithGuid(self):
+ self.assertRaises(RuntimeError, IndexedProperty, 'key', 0, 'I')
+ self.assertRaises(RuntimeError, IndexedProperty, 'key', 0, 'K')
+ self.assertRaises(RuntimeError, IndexedProperty, 'key', 1, 'I')
+ IndexedProperty('key', 1, 'K')
+ IndexedProperty('guid', 0, 'I')
+
+ def test_Create(self):
+ db = Index({'key': IndexedProperty('key', 1, 'K')})
+
+ db.store('1', {'key': 'value_1'}, True)
+ self.assertEqual(
+ ([{'guid': '1', 'key': 'value_1'}], 1),
+ db._find(reply=['key']))
+
+ db.store('2', {'key': 'value_2'}, True)
+ self.assertEqual(
+ ([{'guid': '1', 'key': 'value_1'},
+ {'guid': '2', 'key': 'value_2'}], 2),
+ db._find(reply=['key']))
+
+ def test_update(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A'),
+ 'var_2': IndexedProperty('var_2', 2, 'B'),
+ })
+
+ db.store('1', {'var_1': 'value_1', 'var_2': 'value_2'}, True)
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': 'value_1', 'var_2': 'value_2'}], 1),
+ db._find(reply=['var_1', 'var_2']))
+
+ db.store('1', {'var_1': 'value_3', 'var_2': 'value_4'}, False)
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': 'value_3', 'var_2': 'value_4'}], 1),
+ db._find(reply=['var_1', 'var_2']))
+
+ def test_delete(self):
+ db = Index({'key': IndexedProperty('key', 1, 'K')})
+
+ db.store('1', {'key': 'value'}, True)
+ self.assertEqual(
+ ([{'guid': '1', 'key': 'value'}], 1),
+ db._find(reply=['key']))
+
+ db.delete('1')
+ self.assertEqual(
+ ([], 0),
+ db._find(reply=['key']))
+
+ def test_IndexByReprcast(self):
+ db = Index({'key': IndexedProperty('key', 1, 'K', reprcast=lambda x: "foo" + x)})
+
+ db.store('1', {'key': 'bar'}, True)
+
+ self.assertEqual(
+ [{'guid': '1', 'key': 'foobar'}],
+ db._find(reply=['key'])[0])
+ self.assertEqual(
+ [{'guid': '1', 'key': 'foobar'}],
+ db._find(key='bar', reply=['key'])[0])
+ self.assertEqual(
+ [],
+ db._find(key='foobar', reply=['key'])[0])
+ self.assertEqual(
+ [],
+ db._find(key='fake', reply=['key'])[0])
+
+ def test_find(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A', full_text=True),
+ 'var_2': IndexedProperty('var_2', 2, 'B', full_text=True),
+ 'var_3': IndexedProperty('var_3', 3, 'C', full_text=True),
+ })
+
+ db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}, True)
+ db.store('2', {'var_1': '2', 'var_2': 'у', 'var_3': 'ю'}, True)
+ db.store('3', {'var_1': '3', 'var_2': 'б', 'var_3': 'ю'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'},
+ {'guid': '2', 'var_1': '2'}], 2),
+ db._find(query='у', reply=['var_1']))
+
+ self.assertEqual(
+ ([{'guid': '2', 'var_1': '2'}], 1),
+ db._find(query='у AND ю', reply=['var_1']))
+
+ self.assertEqual(
+ ([{'guid': '2', 'var_1': '2'},
+ {'guid': '3', 'var_1': '3'}], 2),
+ db._find(query='var_3:ю', reply=['var_1']))
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'},
+ {'guid': '2', 'var_1': '2'},
+ {'guid': '3', 'var_1': '3'}], 3),
+ db._find(query='var_3:ю OR var_2:у', reply=['var_1'], order_by='guid'))
+
+ def test_find_NoneFilters(self):
+ db = Index({
+ 'prop': IndexedProperty('prop', 1, 'P', full_text=True),
+ })
+
+ db.store('guid', {'prop': 'value'}, True)
+
+ self.assertEqual(
+ [{'guid': 'guid', 'prop': 'value'}],
+ db._find(reply=['prop'])[0])
+ self.assertEqual(
+ [{'guid': 'guid', 'prop': 'value'}],
+ db._find(prop=None, reply=['prop'])[0])
+ self.assertEqual(
+ [{'guid': 'guid', 'prop': 'value'}],
+ db._find(guid=None, reply=['prop'])[0])
+
+ def test_find_WithTypeCast(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A', typecast=bool),
+ })
+
+ db.store('1', {'var_1': True}, True)
+ db.store('2', {'var_1': False}, True)
+
+ self.assertEqual(
+ [{'guid': '1'}],
+ db._find(var_1=True, reply=['guid'])[0])
+ self.assertEqual(
+ [{'guid': '2'}],
+ db._find(var_1=False, reply=['guid'])[0])
+
+ def test_find_WithProps(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A', full_text=True),
+ 'var_2': IndexedProperty('var_2', 2, 'B', full_text=True),
+ 'var_3': IndexedProperty('var_3', 3, 'C', full_text=True),
+ })
+
+ db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}, True)
+ db.store('2', {'var_1': '2', 'var_2': 'у', 'var_3': 'ю'}, True)
+ db.store('3', {'var_1': '3', 'var_2': 'б', 'var_3': 'ю'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'},
+ {'guid': '2', 'var_1': '2'}], 2),
+ db._find(var_2='у', reply=['var_1']))
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'}], 1),
+ db._find(var_2='у', var_3='г', reply=['var_1']))
+
+ self.assertEqual(
+ ([], 0),
+ db._find(query='var_1:0', var_2='у', var_3='г', reply=['var_1']))
+
+ self.assertEqual(
+ ([{'guid': '3', 'var_1': '3'}], 1),
+ db._find(query='var_3:ю', var_2='б', reply=['var_1']))
+
+ def test_find_WithAllBooleanProps(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A', boolean=True, full_text=True),
+ 'var_2': IndexedProperty('var_2', 2, 'B', boolean=True, full_text=True),
+ 'var_3': IndexedProperty('var_3', 3, 'C', boolean=True, full_text=True),
+ })
+
+ db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}, True)
+ db.store('2', {'var_1': '2', 'var_2': 'у', 'var_3': 'ю'}, True)
+ db.store('3', {'var_1': '3', 'var_2': 'б', 'var_3': 'ю'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'}], 1),
+ db._find(var_1='1', var_2='у', var_3='г', reply=['var_1']))
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'}], 1),
+ db._find(query='г', var_1='1', var_2='у', var_3='г', reply=['var_1']))
+
+ self.assertEqual(
+ ([], 0),
+ db._find(query='б', var_1='1', var_2='у', var_3='г', reply=['var_1']))
+
+ def test_find_WithBooleanProps(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A', boolean=True, full_text=True),
+ 'var_2': IndexedProperty('var_2', 2, 'B', boolean=False, full_text=True),
+ 'var_3': IndexedProperty('var_3', 3, 'C', boolean=True, full_text=True),
+ })
+
+ db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}, True)
+ db.store('2', {'var_1': '2', 'var_2': 'у', 'var_3': 'ю'}, True)
+ db.store('3', {'var_1': '3', 'var_2': 'б', 'var_3': 'ю'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'}], 1),
+ db._find(var_1='1', var_2='у', var_3='г', reply=['var_1']))
+
+ self.assertEqual(
+ ([{'guid': '1', 'var_1': '1'}], 1),
+ db._find(query='г', var_1='1', var_2='у', var_3='г', reply=['var_1']))
+
+ self.assertEqual(
+ ([], 0),
+ db._find(query='б', var_1='1', var_2='у', var_3='г', reply=['var_1']))
+
+ def test_find_ExactQuery(self):
+ db = Index({'key': IndexedProperty('key', 1, 'K', full_text=True)})
+
+ db.store('1', {'key': 'фу'}, True)
+ db.store('2', {'key': 'фу бар'}, True)
+ db.store('3', {'key': 'фу бар тест'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1', 'key': u'фу'}, {'guid': '2', 'key': u'фу бар'}, {'guid': '3', 'key': u'фу бар тест'}], 3),
+ db._find(query='key:фу', reply=['key']))
+ self.assertEqual(
+ ([{'guid': '2', 'key': u'фу бар'}, {'guid': '3', 'key': u'фу бар тест'}], 2),
+ db._find(query='key:"фу бар"', reply=['key']))
+
+ self.assertEqual(
+ ([{'guid': '1', 'key': u'фу'}], 1),
+ db._find(query='key:=фу', reply=['key']))
+ self.assertEqual(
+ ([{'guid': '2', 'key': u'фу бар'}], 1),
+ db._find(query='key:="фу бар"', reply=['key']))
+ self.assertEqual(
+ ([{'guid': '3', 'key': u'фу бар тест'}], 1),
+ db._find(query='key:="фу бар тест"', reply=['key']))
+
+ def test_find_ExactQueryTerms(self):
+ term = 'azAZ09_'
+
+ db = Index({term: IndexedProperty(term, 1, 'T', full_text=True)})
+
+ db.store('1', {term: 'test'}, True)
+ db.store('2', {term: 'test fail'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1'}], 1),
+ db._find(query='%s:=test' % term, reply=['guid']))
+
+ def test_find_ReturnPortions(self):
+ db = Index({'key': IndexedProperty('key', 1, 'K')})
+
+ db.store('1', {'key': '1'}, True)
+ db.store('2', {'key': '2'}, True)
+ db.store('3', {'key': '3'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1', 'key': '1'}], 3),
+ db._find(offset=0, limit=1, reply=['key']))
+ self.assertEqual(
+ ([{'guid': '2', 'key': '2'}], 3),
+ db._find(offset=1, limit=1, reply=['key']))
+ self.assertEqual(
+ ([{'guid': '3', 'key': '3'}], 3),
+ db._find(offset=2, limit=1, reply=['key']))
+ self.assertEqual(
+ ([], 3),
+ db._find(offset=3, limit=1, reply=['key']))
+
+ def test_find_OrderBy(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A'),
+ 'var_2': IndexedProperty('var_2', 2, 'B'),
+ })
+
+ db.store('1', {'var_1': '1', 'var_2': '3'}, True)
+ db.store('2', {'var_1': '2', 'var_2': '2'}, True)
+ db.store('3', {'var_1': '3', 'var_2': '1'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1'}, {'guid': '2'}, {'guid': '3'}], 3),
+ db._find(order_by='var_1'))
+ self.assertEqual(
+ ([{'guid': '1'}, {'guid': '2'}, {'guid': '3'}], 3),
+ db._find(order_by='+var_1'))
+ self.assertEqual(
+ ([{'guid': '3'}, {'guid': '2'}, {'guid': '1'}], 3),
+ db._find(order_by='-var_1'))
+
+ self.assertEqual(
+ ([{'guid': '3'}, {'guid': '2'}, {'guid': '1'}], 3),
+ db._find(order_by='var_2'))
+ self.assertEqual(
+ ([{'guid': '3'}, {'guid': '2'}, {'guid': '1'}], 3),
+ db._find(order_by='+var_2'))
+ self.assertEqual(
+ ([{'guid': '1'}, {'guid': '2'}, {'guid': '3'}], 3),
+ db._find(order_by='-var_2'))
+
+ def test_find_GroupBy(self):
+ db = Index({
+ 'var_1': IndexedProperty('var_1', 1, 'A'),
+ 'var_2': IndexedProperty('var_2', 2, 'B'),
+ 'var_3': IndexedProperty('var_3', 3, 'C'),
+ 'var_4': IndexedProperty('var_4', 4, 'D'),
+ })
+
+ db.store('1', {'var_1': '1', 'var_2': '1', 'var_3': '3', 'var_4': 0}, True)
+ db.store('2', {'var_1': '2', 'var_2': '1', 'var_3': '4', 'var_4': 0}, True)
+ db.store('3', {'var_1': '3', 'var_2': '2', 'var_3': '4', 'var_4': 0}, True)
+
+ self.assertEqual(
+ [{'guid': '1', 'var_1': '1'}, {'guid': '3', 'var_1': '3'}],
+ db._find(reply=['var_1'], group_by='var_2')[0])
+ self.assertEqual(
+ [{'guid': '1', 'var_1': '1'}, {'guid': '2', 'var_1': '2'}],
+ db._find(reply=['var_1'], group_by='var_3')[0])
+ self.assertEqual(
+ [{'guid': '1'}],
+ db._find(reply=['guid'], group_by='var_4', order_by='var_1')[0])
+ self.assertEqual(
+ [{'guid': '3'}],
+ db._find(reply=['guid'], group_by='var_4', order_by='-var_1')[0])
+
+ def test_MultipleValues(self):
+ db = Index({
+ 'prop': IndexedProperty('prop', prefix='B', typecast=[1, 2], full_text=True),
+ })
+ db.store('1', {'prop': [1, 2]}, True)
+ db.store('2', {'prop': [2, 3]}, True)
+ self.assertEqual(
+ [{'guid': '1'}],
+ db._find(prop=1, reply=['guid'])[0])
+ self.assertEqual(
+ [{'guid': '1'}, {'guid': '2'}],
+ db._find(prop=2, reply=['guid'])[0])
+ self.assertEqual(
+ [{'guid': '1'}, {'guid': '2'}],
+ db._find(query='2', reply=['guid'])[0])
+ self.assertEqual(
+ [{'guid': '2'}],
+ db._find(query='3', reply=['guid'])[0])
+ db.close()
+
+ db = Index({
+ 'prop': IndexedProperty('prop', prefix='B', typecast=[], full_text=True),
+ })
+ db.store('1', {'prop': ['a', 'b']}, True)
+ db.store('2', {'prop': ['b', 'c']}, True)
+ self.assertEqual(
+ [{'guid': '1'}],
+ db._find(prop='a', reply=['guid'])[0])
+ self.assertEqual(
+ [{'guid': '1'}, {'guid': '2'}],
+ db._find(prop='b', reply=['guid'])[0])
+ self.assertEqual(
+ [{'guid': '1'}, {'guid': '2'}],
+ db._find(query='b', reply=['guid'])[0])
+ self.assertEqual(
+ [{'guid': '2'}],
+ db._find(query='c', reply=['guid'])[0])
+ db.close()
+
+ def test_Callbacks(self):
+ db = Index({})
+
+ pre_stored = []
+ post_stored = []
+ deleted = []
+
+ db.store('1', {}, True,
+ lambda *args: pre_stored.append(args),
+ lambda *args: post_stored.append(args))
+ self.assertEqual(1, len(pre_stored))
+ self.assertEqual(1, len(post_stored))
+
+ db.store('1', {}, False,
+ lambda *args: pre_stored.append(args),
+ lambda *args: post_stored.append(args))
+ self.assertEqual(2, len(pre_stored))
+ self.assertEqual(2, len(post_stored))
+
+ db.delete('1', lambda *args: deleted.append(args))
+ self.assertEqual(1, len(deleted))
+
+ def test_mtime(self):
+ # No index at start; checkpoint didn't happen
+ db = Index({})
+ self.assertEqual(0, db.mtime)
+ db.store('1', {}, True)
+ db.commit()
+ db.close()
+
+ # Index exists at start; checkpoint didn't happen
+ db = Index({})
+ self.assertEqual(0, db.mtime)
+ db.store('2', {}, True)
+ db.commit()
+ self.assertEqual(0, db.mtime)
+ db.close()
+
+ # Index exists at start; mtime exists at start; checkpoint didn't happen
+ self.touch('index/mtime')
+ os.utime('index/mtime', (1, 1))
+ db = Index({})
+ self.assertEqual(1, db.mtime)
+ db.store('3', {}, True)
+ db.commit()
+ self.assertEqual(1, db.mtime)
+ db.close()
+
+ # Index exists at start; checkpoint happened
+ db = Index({})
+ db.checkpoint()
+ self.assertNotEqual(1, db.mtime)
+ os.utime('index/mtime', (1, 1))
+ self.assertEqual(1, db.mtime)
+ db.store('4', {}, True)
+ db.commit()
+ self.assertNotEqual(1, db.mtime)
+ db.close()
+
+ def test_find_OrderByGUIDAllTime(self):
+ db = Index({'prop': IndexedProperty('prop', 1, 'P')})
+
+ db.store('3', {'prop': '1'}, True)
+ db.store('2', {'prop': '1'}, True)
+ db.store('1', {'prop': '3'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1', 'prop': '3'}, {'guid': '2', 'prop': '1'}, {'guid': '3', 'prop': '1'}], 3),
+ db._find(reply=['prop']))
+
+ self.assertEqual(
+ ([{'guid': '2', 'prop': '1'}, {'guid': '3', 'prop': '1'}, {'guid': '1', 'prop': '3'}], 3),
+ db._find(reply=['prop'], order_by='prop'))
+
+ self.assertEqual(
+ ([{'guid': '1', 'prop': '3'}, {'guid': '2', 'prop': '1'}, {'guid': '3', 'prop': '1'}], 3),
+ db._find(reply=['prop'], order_by='-prop'))
+
+ def test_find_Region(self):
+ term = 'azAZ09_'
+
+ db = Index({term: IndexedProperty(term, 1, 'T', full_text=True)})
+
+ db.store('1', {term: 'test'}, True)
+ db.store('2', {term: 'test fail'}, True)
+
+ self.assertEqual(
+ ([{'guid': '1'}], 1),
+ db._find(query='%s:=test' % term, reply=['guid']))
+
+ def test_find_WithListProps(self):
+ db = Index({'prop': IndexedProperty('prop', None, 'A', full_text=True, typecast=[])})
+
+ db.store('1', {'prop': ('a', )}, True)
+ db.store('2', {'prop': ('a', 'aa')}, True)
+ db.store('3', {'prop': ('aa', 'aaa')}, True)
+
+ self.assertEqual(
+ ([{'guid': '1'}, {'guid': '2'}], 2),
+ db._find(prop='a', reply=['prop']))
+
+ self.assertEqual(
+ ([{'guid': '2'}, {'guid': '3'}], 2),
+ db._find(prop='aa'))
+
+ self.assertEqual(
+ ([{'guid': '3'}], 1),
+ db._find(prop='aaa'))
+
+ def test_FlushThreshold(self):
+ commits = []
+
+ db = Index({}, lambda: commits.append(True))
+ coroutine.dispatch()
+ env.index_flush_threshold.value = 1
+ db.store('1', {}, True)
+ coroutine.dispatch()
+ db.store('2', {}, True)
+ coroutine.dispatch()
+ db.store('3', {}, True)
+ coroutine.dispatch()
+ self.assertEqual(3, len(commits))
+ db.close()
+
+ del commits[:]
+ db = Index({}, lambda: commits.append(True))
+ coroutine.dispatch()
+ env.index_flush_threshold.value = 2
+ db.store('4', {}, True)
+ coroutine.dispatch()
+ db.store('5', {}, True)
+ coroutine.dispatch()
+ db.store('6', {}, True)
+ coroutine.dispatch()
+ db.store('7', {}, True)
+ coroutine.dispatch()
+ db.store('8', {}, True)
+ coroutine.dispatch()
+ self.assertEqual(2, len(commits))
+ db.close()
+
+ def test_FlushTimeout(self):
+ env.index_flush_threshold.value = 0
+ env.index_flush_timeout.value = 1
+
+ commits = []
+
+ db = Index({}, lambda: commits.append(True))
+ coroutine.dispatch()
+
+ db.store('1', {}, True)
+ coroutine.dispatch()
+ self.assertEqual(0, len(commits))
+ db.store('2', {}, True)
+ coroutine.dispatch()
+ self.assertEqual(0, len(commits))
+
+ coroutine.sleep(1.5)
+ self.assertEqual(1, len(commits))
+
+ db.store('1', {}, True)
+ coroutine.dispatch()
+ self.assertEqual(1, len(commits))
+ db.store('2', {}, True)
+ coroutine.dispatch()
+ self.assertEqual(1, len(commits))
+
+ coroutine.sleep(1.5)
+ self.assertEqual(2, len(commits))
+
+ coroutine.sleep(1.5)
+ self.assertEqual(2, len(commits))
+
+ def test_DoNotMissImmediateCommitEvent(self):
+ env.index_flush_threshold.value = 1
+ commits = []
+ db = Index({}, lambda: commits.append(True))
+
+ db.store('1', {}, True)
+ coroutine.dispatch()
+ self.assertEqual(1, len(commits))
+
+ def test_SortLocalizedProps(self):
+ env._default_lang = 'default_lang'
+ current_lang = locale.getdefaultlocale()[0].replace('_', '-')
+
+ db = Index({'prop': IndexedProperty('prop', 1, 'A', localized=True)})
+
+ db.store('0', {'prop': {'foo': '5'}}, True)
+ db.store('1', {'prop': {current_lang: '4', 'default_lang': '1', 'foo': '3'}}, True)
+ db.store('2', {'prop': {'default_lang': '2', 'foo': '2'}}, True)
+ db.store('3', {'prop': {current_lang: '3', 'foo': '6'}}, True)
+
+ self.assertEqual([
+ {'guid': '1'},
+ {'guid': '2'},
+ {'guid': '3'},
+ {'guid': '0'},
+ ],
+ db._find(order_by='prop')[0])
+
+ self.assertEqual([
+ {'guid': '0'},
+ {'guid': '3'},
+ {'guid': '2'},
+ {'guid': '1'},
+ ],
+ db._find(order_by='-prop')[0])
+
+ def test_find_MultipleFilter(self):
+ db = Index({'prop': IndexedProperty('prop', 1, 'A')})
+
+ db.store('1', {'prop': 'a'}, True)
+ db.store('2', {'prop': 'b'}, True)
+ db.store('3', {'prop': 'c'}, True)
+
+ self.assertEqual(
+ sorted([
+ {'guid': '1'},
+ {'guid': '2'},
+ {'guid': '3'},
+ ]),
+ db._find(prop=[], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '1'},
+ ]),
+ db._find(prop='a', reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '1'},
+ {'guid': '2'},
+ ]),
+ db._find(prop=['a', 'b'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '1'},
+ {'guid': '2'},
+ {'guid': '3'},
+ ]),
+ db._find(prop=['a', 'b', 'c'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '2'},
+ ]),
+ db._find(prop=['b', 'foo', 'bar'], reply=['guid'])[0])
+
+ def test_find_NotFilter(self):
+ db = Index({'prop': IndexedProperty('prop', 1, 'A')})
+
+ db.store('1', {'prop': 'a'}, True)
+ db.store('2', {'prop': 'b'}, True)
+ db.store('3', {'prop': 'c'}, True)
+
+ self.assertEqual(
+ sorted([
+ {'guid': '2'},
+ {'guid': '3'},
+ ]),
+ db._find(prop='!a', reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '3'},
+ ]),
+ db._find(prop=['!a', '!b'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ ]),
+ db._find(prop=['!a', '!b', '!c'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '1'},
+ {'guid': '3'},
+ ]),
+ db._find(prop=['!b', 'c'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '1'},
+ {'guid': '3'},
+ ]),
+ db._find(prop=['a', '!b', 'c'], reply=['guid'])[0])
+
+ def test_find_AndNotFilter(self):
+ db = Index({'prop': IndexedProperty('prop', 1, 'A')})
+
+ db.store('1', {'prop': 'a'}, True)
+ db.store('2', {'prop': 'b'}, True)
+ db.store('3', {'prop': 'c'}, True)
+
+ self.assertEqual(
+ sorted([
+ {'guid': '2'},
+ {'guid': '3'},
+ ]),
+ db._find(prop='-a', reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '3'},
+ ]),
+ db._find(prop=['-a', '-b'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ ]),
+ db._find(prop=['-a', '-b', '-c'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '3'},
+ ]),
+ db._find(prop=['-b', 'c'], reply=['guid'])[0])
+
+ self.assertEqual(
+ sorted([
+ {'guid': '1'},
+ {'guid': '3'},
+ ]),
+ db._find(prop=['a', '-b', 'c'], reply=['guid'])[0])
+
+
+class Index(index.IndexWriter):
+
+ def __init__(self, props, *args):
+
+ class Document(object):
+ pass
+
+ metadata = Metadata(Index)
+ metadata.update(props)
+ metadata['guid'] = IndexedProperty('guid',
+ permissions=env.ACCESS_CREATE | env.ACCESS_READ, slot=0,
+ prefix=GUID_PREFIX)
+
+ index.IndexWriter.__init__(self, tests.tmpdir + '/index', metadata, *args)
+
+ def _find(self, *args, **kwargs):
+ if 'reply' not in kwargs:
+ kwargs['reply'] = {}
+ if 'order_by' not in kwargs:
+ kwargs['order_by'] = 'guid'
+
+ mset = self.find(_Query(*args, **kwargs))
+ result = []
+
+ for hit in mset:
+ props = {}
+ for name in kwargs['reply']:
+ prop = self.metadata[name]
+ if prop.slot is not None:
+ props[name] = hit.document.get_value(prop.slot).decode('utf8')
+ props['guid'] = hit.document.get_value(0).decode('utf8')
+ result.append(props)
+
+ return result, mset.get_matches_estimated()
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/metadata.py b/tests/units/db/metadata.py
new file mode 100755
index 0000000..5cbe11c
--- /dev/null
+++ b/tests/units/db/metadata.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+from __init__ import tests
+
+from sugar_network.db.metadata import _Property
+
+
+class MetadataTest(tests.Test):
+
+ def test_Property_decode(self):
+ prop = _Property('prop', typecast=int)
+ self.assertEqual(1, prop.decode(1))
+ self.assertEqual(1, prop.decode(1.1))
+ self.assertEqual(1, prop.decode('1'))
+ self.assertRaises(ValueError, prop.decode, '1.0')
+ self.assertRaises(ValueError, prop.decode, '')
+ self.assertRaises(ValueError, prop.decode, None)
+
+ prop = _Property('prop', typecast=float)
+ self.assertEqual(1.0, prop.decode(1))
+ self.assertEqual(1.1, prop.decode(1.1))
+ self.assertEqual(1.0, prop.decode('1'))
+ self.assertEqual(1.1, prop.decode('1.1'))
+ self.assertRaises(ValueError, prop.decode, '')
+ self.assertRaises(ValueError, prop.decode, None)
+
+ prop = _Property('prop', typecast=bool)
+ self.assertEqual(False, prop.decode(0))
+ self.assertEqual(True, prop.decode(1))
+ self.assertEqual(True, prop.decode(1.1))
+ self.assertEqual(True, prop.decode('1'))
+ self.assertEqual(True, prop.decode('A'))
+ self.assertEqual(False, prop.decode(''))
+ self.assertRaises(ValueError, prop.decode, None)
+
+ prop = _Property('prop', typecast=[int])
+ self.assertEqual((1,), prop.decode(1))
+ self.assertRaises(ValueError, prop.decode, None)
+ self.assertRaises(ValueError, prop.decode, '')
+ self.assertEqual((), prop.decode([]))
+ self.assertEqual((123,), prop.decode('123'))
+ self.assertRaises(ValueError, prop.decode, 'a')
+ self.assertEqual((123, 4, 5), prop.decode(['123', 4, 5.6]))
+
+ prop = _Property('prop', typecast=[1, 2])
+ self.assertRaises(ValueError, prop.decode, 0)
+ self.assertRaises(ValueError, prop.decode, None)
+ self.assertRaises(ValueError, prop.decode, '')
+ self.assertRaises(ValueError, prop.decode, 'A')
+ self.assertEqual(1, prop.decode(1))
+ self.assertEqual(2, prop.decode(2))
+
+ prop = _Property('prop', typecast=[[True, False, 'probe']])
+ self.assertRaises(ValueError, prop.decode, None)
+ self.assertEqual((0, ), prop.decode(0))
+ self.assertRaises(ValueError, prop.decode, 'A')
+ self.assertEqual((True, ), prop.decode(True))
+ self.assertEqual((False, ), prop.decode(False))
+ self.assertRaises(ValueError, prop.decode, [3])
+ self.assertRaises(ValueError, prop.decode, ['A'])
+ self.assertRaises(ValueError, prop.decode, '')
+ self.assertEqual((), prop.decode([]))
+ self.assertEqual((True,), prop.decode([True]))
+ self.assertEqual((False,), prop.decode([False]))
+ self.assertEqual((True, False, True), prop.decode([True, False, True]))
+ self.assertEqual((True, False, 'probe'), prop.decode([True, False, 'probe']))
+ self.assertRaises(ValueError, prop.decode, [True, None])
+
+ prop = _Property('prop', typecast=[str])
+ self.assertEqual(('',), prop.decode(''))
+ self.assertEqual(('',), prop.decode(['']))
+ self.assertEqual((), prop.decode([]))
+
+ prop = _Property('prop', typecast=[])
+ self.assertRaises(ValueError, prop.decode, None)
+ self.assertEqual(('',), prop.decode(''))
+ self.assertEqual(('',), prop.decode(['']))
+ self.assertEqual((), prop.decode([]))
+ self.assertEqual(('0',), prop.decode(0))
+ self.assertEqual(('',), prop.decode(''))
+ self.assertEqual(('foo',), prop.decode('foo'))
+
+ prop = _Property('prop', typecast=[['A', 'B', 'C']])
+ self.assertRaises(ValueError, prop.decode, '')
+ self.assertRaises(ValueError, prop.decode, [''])
+ self.assertEqual((), prop.decode([]))
+ self.assertEqual(('A', 'B', 'C'), prop.decode(['A', 'B', 'C']))
+ self.assertRaises(ValueError, prop.decode, ['a'])
+ self.assertRaises(ValueError, prop.decode, ['A', 'x'])
+
+ prop = _Property('prop', typecast=[frozenset(['A', 'B', 'C'])])
+ self.assertEqual(('A', 'B', 'C'), prop.decode(['A', 'B', 'C']))
+
+ prop = _Property('prop', typecast=lambda x: x + 1)
+ self.assertEqual(1, prop.decode(0))
+
+ def test_Property_to_string(self):
+ prop = _Property('prop', typecast=int)
+ self.assertEqual(['0'], prop.to_string(0))
+ self.assertEqual(['1'], prop.to_string(1))
+
+ prop = _Property('prop', typecast=float)
+ self.assertEqual(['0'], prop.to_string(0))
+ self.assertEqual(['1.1'], prop.to_string(1.1))
+
+ prop = _Property('prop', typecast=bool)
+ self.assertEqual(['1'], prop.to_string(True))
+ self.assertEqual(['0'], prop.to_string(False))
+
+ prop = _Property('prop', typecast=[int])
+ self.assertEqual(['0', '1'], prop.to_string([0, 1]))
+
+ prop = _Property('prop', typecast=[1, 2])
+ self.assertEqual(['2', '1'], prop.to_string([2, 1]))
+
+ prop = _Property('prop', typecast=[[True, 0, 'probe']])
+ self.assertEqual(['probe', '1', '0'], prop.to_string(['probe', True, 0]))
+
+ prop = _Property('prop', reprcast=lambda x: x.keys())
+ self.assertEqual(['a', '2'], prop.to_string({'a': 1, 2: 'b'}))
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/migrate.py b/tests/units/db/migrate.py
new file mode 100755
index 0000000..73b4794
--- /dev/null
+++ b/tests/units/db/migrate.py
@@ -0,0 +1,289 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import os
+from os.path import exists, lexists
+
+from __init__ import tests
+
+from sugar_network import db
+from sugar_network.db import document, env
+from sugar_network.db import directory as directory_
+from sugar_network.db.directory import Directory
+from sugar_network.db.index import IndexWriter
+
+
+class MigrateTest(tests.Test):
+
+ def test_To1(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='P', localized=True)
+ def prop(self, value):
+ return value
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ self.touch(
+ ('gu/guid/.seqno', ''),
+ ('gu/guid/guid', '"guid"'),
+ ('gu/guid/guid.seqno', ''),
+ ('gu/guid/ctime', '1'),
+ ('gu/guid/ctime.seqno', ''),
+ ('gu/guid/mtime', '1'),
+ ('gu/guid/mtime.seqno', ''),
+ ('gu/guid/prop', '"prop"'),
+ ('gu/guid/prop.seqno', ''),
+ ('gu/guid/blob', 'blob'),
+ ('gu/guid/blob.seqno', ''),
+ ('gu/guid/blob.sha1', 'digest'),
+ )
+ for i in os.listdir('gu/guid'):
+ os.utime('gu/guid/%s' % i, (1, 1))
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ for i in directory.populate():
+ pass
+ assert exists('layout')
+ self.assertEqual(str(directory_._LAYOUT_VERSION), file('layout').read())
+
+ assert not exists('gu/guid/.seqno')
+ assert not exists('gu/guid/guid.seqno')
+ assert not exists('gu/guid/ctime.seqno')
+ assert not exists('gu/guid/mtime.seqno')
+ assert not exists('gu/guid/prop.seqno')
+ assert not exists('gu/guid/blob.seqno')
+ assert not exists('gu/guid/blob.sha1')
+ assert exists('gu/guid/blob.blob')
+
+ def test_meta():
+ doc = directory.get('guid')
+ self.assertEqual(
+ {'value': 'guid', 'mtime': 1, 'seqno': 1},
+ doc.meta('guid'))
+ self.assertEqual(
+ {'value': 1, 'mtime': 1, 'seqno': 1},
+ doc.meta('ctime'))
+ self.assertEqual(
+ {'value': 1, 'mtime': 1, 'seqno': 1},
+ doc.meta('mtime'))
+ self.assertEqual(
+ {'value': {env.default_lang(): 'prop'}, 'mtime': 1, 'seqno': 1},
+ doc.meta('prop'))
+ self.assertEqual(
+ {'digest': 'digest', 'mtime': 1, 'seqno': 1, 'mime_type': 'application/octet-stream', 'path': tests.tmpdir + '/gu/guid/blob.blob'},
+ doc.meta('blob'))
+ self.assertEqual('blob', file('gu/guid/blob.blob').read())
+
+ test_meta()
+
+ directory.close()
+ with file('layout', 'w') as f:
+ f.write('*')
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ for i in directory.populate():
+ pass
+ self.assertEqual(str(directory_._LAYOUT_VERSION), file('layout').read())
+
+ test_meta()
+
+ def test_To1_MissedBlobs(self):
+
+ class Document(document.Document):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ self.touch(
+ ('1/1/.seqno', ''),
+ ('1/1/guid', '"guid"'),
+ ('1/1/guid.seqno', ''),
+ ('1/1/ctime', '1'),
+ ('1/1/ctime.seqno', ''),
+ ('1/1/mtime', '1'),
+ ('1/1/mtime.seqno', ''),
+ ('1/1/blob.seqno', ''),
+ ('1/1/blob.sha1', 'digest'),
+
+ ('2/2/.seqno', ''),
+ ('2/2/guid', '"guid"'),
+ ('2/2/guid.seqno', ''),
+ ('2/2/ctime', '1'),
+ ('2/2/ctime.seqno', ''),
+ ('2/2/mtime', '1'),
+ ('2/2/mtime.seqno', ''),
+ ('2/2/blob.seqno', ''),
+ ('2/2/blob.sha1', 'digest'),
+
+ ('3/3/.seqno', ''),
+ ('3/3/guid', '"guid"'),
+ ('3/3/guid.seqno', ''),
+ ('3/3/ctime', '1'),
+ ('3/3/ctime.seqno', ''),
+ ('3/3/mtime', '1'),
+ ('3/3/mtime.seqno', ''),
+ ('3/3/blob.seqno', ''),
+
+ ('4/4/.seqno', ''),
+ ('4/4/guid', '"guid"'),
+ ('4/4/guid.seqno', ''),
+ ('4/4/ctime', '1'),
+ ('4/4/ctime.seqno', ''),
+ ('4/4/mtime', '1'),
+ ('4/4/mtime.seqno', ''),
+ ('4/4/blob', 'blob'),
+ ('4/4/blob.seqno', ''),
+ )
+
+ for i in os.listdir('1/1'):
+ os.utime('1/1/%s' % i, (1, 1))
+ for i in os.listdir('2/2'):
+ os.utime('2/2/%s' % i, (2, 2))
+ for i in os.listdir('3/3'):
+ os.utime('3/3/%s' % i, (3, 3))
+ os.symlink('/foo', '3/3/blob')
+ for i in os.listdir('4/4'):
+ os.utime('4/4/%s' % i, (4, 4))
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ for i in directory.populate():
+ pass
+
+ assert not exists('1/1/blob')
+ assert not exists('1/1/blob.seqno')
+ assert not exists('1/1/blob.sha1')
+ assert not exists('1/1/blob.blob')
+ assert not exists('2/2/blob')
+ assert not exists('2/2/blob.seqno')
+ assert not exists('2/2/blob.sha1')
+ assert not exists('2/2/blob.blob')
+ assert not exists('3/3/blob')
+ assert not lexists('3/3/blob')
+ assert not exists('3/3/blob.seqno')
+ assert not exists('3/3/blob.sha1')
+ assert not exists('3/3/blob.blob')
+ assert not lexists('3/3/blob.blob')
+ assert exists('4/4/blob')
+ assert not exists('4/4/blob.seqno')
+ assert not exists('4/4/blob.sha1')
+ assert exists('4/4/blob.blob')
+
+ self.assertEqual(None, directory.get('1').meta('blob'))
+ self.assertEqual(None, directory.get('2').meta('blob'))
+ self.assertEqual(None, directory.get('3').meta('blob'))
+ self.assertEqual(
+ {'digest': '', 'mtime': 4, 'seqno': 4, 'mime_type': 'application/octet-stream', 'path': tests.tmpdir + '/4/4/blob.blob'},
+ directory.get('4').meta('blob'))
+ self.assertEqual('blob', file('4/4/blob.blob').read())
+
+ def test_To1_MissedValues(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='P')
+ def prop1(self, value):
+ return value
+
+ @db.indexed_property(prefix='A', default='default')
+ def prop2(self, value):
+ return value
+
+ self.touch(
+ ('gu/guid/.seqno', ''),
+ ('gu/guid/guid', '"guid"'),
+ ('gu/guid/guid.seqno', ''),
+ ('gu/guid/ctime', '1'),
+ ('gu/guid/ctime.seqno', ''),
+ ('gu/guid/mtime', '1'),
+ ('gu/guid/mtime.seqno', ''),
+ ('gu/guid/prop1.seqno', ''),
+ ('gu/guid/prop2.seqno', ''),
+ )
+ for i in os.listdir('gu/guid'):
+ os.utime('gu/guid/%s' % i, (1, 1))
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ for i in directory.populate():
+ pass
+
+ assert not exists('gu/guid/.seqno')
+ assert not exists('gu/guid/guid.seqno')
+ assert not exists('gu/guid/ctime.seqno')
+ assert not exists('gu/guid/mtime.seqno')
+ assert exists('gu/guid/prop1')
+ assert not exists('gu/guid/prop1.seqno')
+ assert exists('gu/guid/prop2')
+ assert not exists('gu/guid/prop2.seqno')
+
+ doc = directory.get('guid')
+ self.assertEqual(
+ {'value': None, 'seqno': 1, 'mtime': int(os.stat('gu/guid/prop1').st_mtime)},
+ doc.meta('prop1'))
+ assert int(os.stat('gu/guid/prop1').st_mtime) > 1
+ self.assertEqual(
+ {'value': 'default', 'seqno': 1, 'mtime': int(os.stat('gu/guid/prop1').st_mtime)},
+ doc.meta('prop2'))
+ assert int(os.stat('gu/guid/prop2').st_mtime) > 1
+
+ def test_MissedProps(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='P')
+ def prop1(self, value):
+ return value
+
+ @db.indexed_property(prefix='A', default='default')
+ def prop2(self, value):
+ return value
+
+ self.touch(
+ ('gu/guid/.seqno', ''),
+ ('gu/guid/guid', '"guid"'),
+ ('gu/guid/guid.seqno', ''),
+ ('gu/guid/ctime', '1'),
+ ('gu/guid/ctime.seqno', ''),
+ ('gu/guid/mtime', '1'),
+ ('gu/guid/mtime.seqno', ''),
+ )
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ for i in directory.populate():
+ pass
+
+ assert not exists('gu/guid/prop1')
+ assert exists('gu/guid/prop2')
+
+ doc = directory.get('guid')
+ self.assertEqual(
+ {'value': 'default', 'seqno': 0, 'mtime': int(os.stat('gu/guid/prop2').st_mtime)},
+ doc.meta('prop2'))
+
+ def test_ConvertFromJson(self):
+
+ class Document(document.Document):
+
+ @db.indexed_property(prefix='P', default='value')
+ def prop(self, value):
+ return value
+
+ guid_value = '{"value": "guid"}'
+ self.touch(('gu/guid/guid', guid_value))
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+ for i in directory.populate():
+ pass
+
+ doc = directory.get('guid')
+ self.assertEqual(
+ {'value': 'guid', 'mtime': int(os.stat('gu/guid/guid').st_mtime)},
+ doc.meta('guid'))
+ self.assertNotEqual(guid_value, file('gu/guid/guid').read())
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/storage.py b/tests/units/db/storage.py
new file mode 100755
index 0000000..8442f10
--- /dev/null
+++ b/tests/units/db/storage.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import os
+import time
+import hashlib
+import threading
+import cPickle as pickle
+from cStringIO import StringIO
+from os.path import exists
+
+from __init__ import tests
+
+from sugar_network.db import env
+from sugar_network.db.metadata import Metadata, StoredProperty
+from sugar_network.db.metadata import BlobProperty
+from sugar_network.db.storage import Storage
+from sugar_network.toolkit import BUFFER_SIZE, util
+
+
+class StorageTest(tests.Test):
+
+ def storage(self, props):
+
+ class Test(object):
+ pass
+
+ metadata = Metadata(Test)
+ for i in props:
+ metadata[i.name] = i
+ return Storage(tests.tmpdir, metadata)
+
+ def test_Record_get(self):
+ storage = self.storage([StoredProperty('prop')])
+
+ self.assertEqual(None, storage.get('guid').get('prop'))
+ self.touch(('gu/guid/prop', pickle.dumps({
+ 'value': 'value',
+ 'foo': 'bar',
+ })))
+ self.assertEqual({
+ 'value': 'value',
+ 'foo': 'bar',
+ 'mtime': int(os.stat('gu/guid/prop').st_mtime),
+ },
+ storage.get('guid').get('prop'))
+
+ def test_Record_set(self):
+ storage = self.storage([StoredProperty('prop')])
+
+ storage.get('guid').set('prop', value='value', foo='bar')
+ self.assertEqual({
+ 'value': 'value',
+ 'foo': 'bar',
+ 'mtime': int(os.stat('gu/guid/prop').st_mtime),
+ },
+ storage.get('guid').get('prop'))
+
+ def test_Record_set_blob_ByStream(self):
+ storage = self.storage([BlobProperty('prop')])
+
+ record = storage.get('guid1')
+ data = '!' * BUFFER_SIZE * 2
+ record.set_blob('prop', StringIO(data))
+ self.assertEqual({
+ 'path': tests.tmpdir + '/gu/guid1/prop.blob',
+ 'mtime': int(os.stat('gu/guid1/prop').st_mtime),
+ 'digest': hashlib.sha1(data).hexdigest(),
+ },
+ record.get('prop'))
+ self.assertEqual(data, file('gu/guid1/prop.blob').read())
+
+ record = storage.get('guid2')
+ record.set_blob('prop', StringIO('12345'), 1)
+ self.assertEqual({
+ 'path': tests.tmpdir + '/gu/guid2/prop.blob',
+ 'mtime': int(os.stat('gu/guid2/prop').st_mtime),
+ 'digest': hashlib.sha1('1').hexdigest(),
+ },
+ record.get('prop'))
+ self.assertEqual('1', file('gu/guid2/prop.blob').read())
+
+ def test_Record_set_blob_ByPath(self):
+ storage = self.storage([BlobProperty('prop')])
+
+ record = storage.get('guid1')
+ self.touch(('file', 'data'))
+ record.set_blob('prop', tests.tmpdir + '/file')
+ self.assertEqual({
+ 'path': tests.tmpdir + '/gu/guid1/prop.blob',
+ 'mtime': int(os.stat('gu/guid1/prop').st_mtime),
+ 'digest': hashlib.sha1('data').hexdigest(),
+ },
+ record.get('prop'))
+ self.assertEqual('data', file('gu/guid1/prop.blob').read())
+
+ record = storage.get('guid2')
+ self.touch(('directory/1', '1'))
+ self.touch(('directory/2/3', '3'))
+ self.touch(('directory/2/4/5', '5'))
+ record.set_blob('prop', tests.tmpdir + '/directory')
+ self.assertEqual({
+ 'path': tests.tmpdir + '/gu/guid2/prop.blob',
+ 'mtime': int(os.stat('gu/guid2/prop').st_mtime),
+ 'digest': hashlib.sha1(
+ '1' '1'
+ '2/3' '3'
+ '2/4/5' '5'
+ ).hexdigest(),
+ },
+ record.get('prop'))
+ util.assert_call('diff -r directory gu/guid2/prop.blob', shell=True)
+
+ def test_Record_set_blob_ByUrl(self):
+ storage = self.storage([BlobProperty('prop')])
+ record = storage.get('guid1')
+
+ record.set_blob('prop', url='http://sugarlabs.org')
+ self.assertEqual({
+ 'url': 'http://sugarlabs.org',
+ 'mtime': int(os.stat('gu/guid1/prop').st_mtime),
+ },
+ record.get('prop'))
+ assert not exists('gu/guid1/prop.blob')
+
+ def test_Record_set_blob_ByValue(self):
+ storage = self.storage([BlobProperty('prop')])
+ record = storage.get('guid')
+
+ record.set_blob('prop', '/foo/bar')
+ self.assertEqual({
+ 'path': tests.tmpdir + '/gu/guid/prop.blob',
+ 'mtime': int(os.stat('gu/guid/prop').st_mtime),
+ 'digest': hashlib.sha1('/foo/bar').hexdigest(),
+ },
+ record.get('prop'))
+ self.assertEqual('/foo/bar', file('gu/guid/prop.blob').read())
+
+ def test_delete(self):
+ storage = self.storage([StoredProperty('prop')])
+
+ assert not exists('ab/absent')
+ storage.delete('absent')
+
+ record = storage.get('guid')
+ self.touch(('directory/1/2/3', '3'))
+ record.set_blob('prop', 'directory')
+ record.set('prop', value='value')
+ assert exists('gu/guid')
+ storage.delete('guid')
+ assert not exists('gu/guid')
+
+ def test_Record_consistent(self):
+ storage = self.storage([
+ StoredProperty('guid'),
+ StoredProperty('prop'),
+ ])
+ record = storage.get('guid')
+
+ self.assertEqual(False, record.consistent)
+
+ record.set('prop', value='value')
+ self.assertEqual(False, record.consistent)
+
+ record.set('guid', value='value')
+ self.assertEqual(True, record.consistent)
+
+ def test_walk(self):
+ storage = self.storage([StoredProperty('guid')])
+
+ storage.get('guid1').set('guid', value=1, mtime=1)
+ storage.get('guid2').set('guid', value=2, mtime=2)
+ storage.get('guid3').set('guid', value=3, mtime=3)
+
+ self.assertEqual(
+ sorted(['guid1', 'guid2', 'guid3']),
+ sorted([i for i in storage.walk(0)]))
+
+ self.assertEqual(
+ sorted(['guid2', 'guid3']),
+ sorted([i for i in storage.walk(1)]))
+
+ self.assertEqual(
+ sorted(['guid3']),
+ sorted([i for i in storage.walk(2)]))
+
+ self.assertEqual(
+ sorted([]),
+ sorted([i for i in storage.walk(3)]))
+
+ def test_walk_SkipGuidLess(self):
+ storage = self.storage([
+ StoredProperty('guid'),
+ StoredProperty('prop'),
+ ])
+
+ record = storage.get('guid1')
+ record.set('guid', value=1)
+ record.set('prop', value=1)
+
+ record = storage.get('guid2')
+ record.set('prop', value=2)
+
+ record = storage.get('guid3')
+ record.set('guid', value=3)
+ record.set('prop', value=3)
+
+ self.assertEqual(
+ sorted(['guid1', 'guid3']),
+ sorted([i for i in storage.walk(0)]))
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/db/volume.py b/tests/units/db/volume.py
new file mode 100755
index 0000000..66ab9d6
--- /dev/null
+++ b/tests/units/db/volume.py
@@ -0,0 +1,969 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import os
+import sys
+import time
+import shutil
+import hashlib
+from cStringIO import StringIO
+from email.message import Message
+from os.path import dirname, join, abspath, exists
+
+src_root = abspath(dirname(__file__))
+
+from __init__ import tests
+
+from sugar_network import db
+from sugar_network.db import env
+from sugar_network.db.volume import VolumeCommands
+from sugar_network.toolkit import coroutine
+
+
+class VolumeTest(tests.Test):
+
+ def setUp(self):
+ tests.Test.setUp(self)
+ self.response = db.Response()
+
+ def test_Populate(self):
+ self.touch(
+ ('document/1/1/guid', '{"value": "1"}'),
+ ('document/1/1/ctime', '{"value": 1}'),
+ ('document/1/1/mtime', '{"value": 1}'),
+ ('document/1/1/seqno', '{"value": 0}'),
+
+ ('document/2/2/guid', '{"value": "2"}'),
+ ('document/2/2/ctime', '{"value": 2}'),
+ ('document/2/2/mtime', '{"value": 2}'),
+ ('document/2/2/seqno', '{"value": 0}'),
+ )
+
+ class Document(db.Document):
+ pass
+
+ with db.SingleVolume(tests.tmpdir, [Document]) as volume:
+ for cls in volume.values():
+ for __ in cls.populate():
+ pass
+ self.assertEqual(
+ sorted(['1', '2']),
+ sorted([i.guid for i in volume['document'].find()[0]]))
+
+ shutil.rmtree('document/index')
+
+ class Document(db.Document):
+ pass
+
+ with db.SingleVolume(tests.tmpdir, [Document]) as volume:
+ for cls in volume.values():
+ for __ in cls.populate():
+ pass
+ self.assertEqual(
+ sorted(['1', '2']),
+ sorted([i.guid for i in volume['document'].find()[0]]))
+
+ def test_Commands(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ self.volume['testdocument'].create(guid='guid')
+
+ self.assertEqual({
+ 'total': 1,
+ 'result': [
+ {'guid': 'guid', 'prop': ''},
+ ],
+ },
+ self.call('GET', document='testdocument', reply=['guid', 'prop']))
+
+ guid_1 = self.call('POST', document='testdocument', content={'prop': 'value_1'})
+ assert guid_1
+ guid_2 = self.call('POST', document='testdocument', content={'prop': 'value_2'})
+ assert guid_2
+
+ self.assertEqual(
+ sorted([
+ {'guid': 'guid', 'prop': ''},
+ {'guid': guid_1, 'prop': 'value_1'},
+ {'guid': guid_2, 'prop': 'value_2'},
+ ]),
+ sorted(self.call('GET', document='testdocument', reply=['guid', 'prop'])['result']))
+
+ self.call('PUT', document='testdocument', guid=guid_1, content={'prop': 'value_3'})
+
+ self.assertEqual(
+ sorted([
+ {'guid': 'guid', 'prop': ''},
+ {'guid': guid_1, 'prop': 'value_3'},
+ {'guid': guid_2, 'prop': 'value_2'},
+ ]),
+ sorted(self.call('GET', document='testdocument', reply=['guid', 'prop'])['result']))
+
+ self.call('DELETE', document='testdocument', guid=guid_2)
+
+ self.assertEqual(
+ sorted([
+ {'guid': 'guid', 'prop': ''},
+ {'guid': guid_1, 'prop': 'value_3'},
+ ]),
+ sorted(self.call('GET', document='testdocument', reply=['guid', 'prop'])['result']))
+
+ self.assertRaises(db.NotFound, self.call, 'GET', document='testdocument', guid=guid_2)
+
+ self.assertEqual(
+ {'guid': guid_1, 'prop': 'value_3'},
+ self.call('GET', document='testdocument', guid=guid_1, reply=['guid', 'prop']))
+
+ self.assertEqual(
+ 'value_3',
+ self.call('GET', document='testdocument', guid=guid_1, prop='prop'))
+
+ def test_SetBLOBs(self):
+
+ class TestDocument(db.Document):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={})
+
+ self.assertRaises(RuntimeError, self.call, 'PUT', document='testdocument', guid=guid, prop='blob', content={'path': '/'})
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content='blob1')
+ self.assertEqual('blob1', file(self.call('GET', document='testdocument', guid=guid, prop='blob')['path']).read())
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content_stream=StringIO('blob2'))
+ self.assertEqual('blob2', file(self.call('GET', document='testdocument', guid=guid, prop='blob')['path']).read())
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content=None)
+ self.assertRaises(db.NotFound, self.call, 'GET', document='testdocument', guid=guid, prop='blob')
+
+ def test_SetBLOBsWithMimeType(self):
+
+ class TestDocument(db.Document):
+
+ @db.blob_property(mime_type='default')
+ def blob(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={})
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content='blob1')
+ self.assertEqual('default', self.call('GET', document='testdocument', guid=guid, prop='blob')['mime_type'])
+ self.assertEqual('default', self.response.content_type)
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content='blob1', content_type='foo')
+ self.assertEqual('foo', self.call('GET', document='testdocument', guid=guid, prop='blob')['mime_type'])
+ self.assertEqual('foo', self.response.content_type)
+
+ def test_GetBLOBs(self):
+
+ class TestDocument(db.Document):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={})
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content='blob')
+
+ blob_path = tests.tmpdir + '/testdocument/%s/%s/blob' % (guid[:2], guid)
+ blob_meta = {
+ 'seqno': 2,
+ 'path': blob_path + '.blob',
+ 'digest': hashlib.sha1('blob').hexdigest(),
+ 'mime_type': 'application/octet-stream',
+ 'mtime': int(os.stat(blob_path).st_mtime),
+ }
+
+ self.assertEqual('blob', file(self.call('GET', document='testdocument', guid=guid, prop='blob')['path']).read())
+
+ self.assertEqual(
+ {'guid': guid, 'blob': blob_meta},
+ self.call('GET', document='testdocument', guid=guid, reply=['guid', 'blob']))
+
+ self.assertEqual([
+ {'guid': guid, 'blob': blob_meta},
+ ],
+ self.call('GET', document='testdocument', reply=['guid', 'blob'])['result'])
+
+ def test_CommandsGetAbsentBlobs(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+
+ guid = self.call('POST', document='testdocument', content={'prop': 'value'})
+ self.assertEqual('value', self.call('GET', document='testdocument', guid=guid, prop='prop'))
+ self.assertRaises(db.NotFound, self.call, 'GET', document='testdocument', guid=guid, prop='blob')
+ self.assertEqual(
+ {'blob': db.PropertyMetadata()},
+ self.call('GET', document='testdocument', guid=guid, reply=['blob']))
+
+ def test_Command_ReplyForGET(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={'prop': 'value'})
+
+ self.assertEqual(
+ ['guid', 'prop'],
+ self.call('GET', document='testdocument', guid=guid, reply=['guid', 'prop']).keys())
+
+ self.assertEqual(
+ ['guid'],
+ self.call('GET', document='testdocument')['result'][0].keys())
+
+ self.assertEqual(
+ sorted(['guid', 'prop']),
+ sorted(self.call('GET', document='testdocument', reply=['prop', 'guid'])['result'][0].keys()))
+
+ self.assertEqual(
+ sorted(['prop']),
+ sorted(self.call('GET', document='testdocument', reply=['prop'])['result'][0].keys()))
+
+ def test_DecodeBeforeSetting(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, typecast=int)
+ def prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+
+ guid = self.call(method='POST', document='testdocument', content={'prop': '-1'})
+ self.assertEqual(-1, self.call(method='GET', document='testdocument', guid=guid, prop='prop'))
+
+ def test_LocalizedSet(self):
+ env._default_lang = 'en'
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ directory = self.volume['testdocument']
+
+ guid = directory.create({'localized_prop': 'value_raw'})
+ self.assertEqual({'en': 'value_raw'}, directory.get(guid)['localized_prop'])
+ self.assertEqual(
+ [guid],
+ [i.guid for i in directory.find(0, 100, localized_prop='value_raw')[0]])
+
+ directory.update(guid, {'localized_prop': 'value_raw2'})
+ self.assertEqual({'en': 'value_raw2'}, directory.get(guid)['localized_prop'])
+ self.assertEqual(
+ [guid],
+ [i.guid for i in directory.find(0, 100, localized_prop='value_raw2')[0]])
+
+ guid = self.call('POST', document='testdocument', accept_language=['ru'], content={'localized_prop': 'value_ru'})
+ self.assertEqual({'ru': 'value_ru'}, directory.get(guid)['localized_prop'])
+ self.assertEqual(
+ [guid],
+ [i.guid for i in directory.find(0, 100, localized_prop='value_ru')[0]])
+
+ self.call('PUT', document='testdocument', guid=guid, accept_language=['en'], content={'localized_prop': 'value_en'})
+ self.assertEqual({'ru': 'value_ru', 'en': 'value_en'}, directory.get(guid)['localized_prop'])
+ self.assertEqual(
+ [guid],
+ [i.guid for i in directory.find(0, 100, localized_prop='value_ru')[0]])
+ self.assertEqual(
+ [guid],
+ [i.guid for i in directory.find(0, 100, localized_prop='value_en')[0]])
+
+ def test_LocalizedGet(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ directory = self.volume['testdocument']
+
+ guid = self.call('POST', document='testdocument', content={
+ 'localized_prop': {
+ 'ru': 'value_ru',
+ 'es': 'value_es',
+ 'en': 'value_en',
+ },
+ })
+
+ env._default_lang = 'en'
+
+ self.assertEqual(
+ {'localized_prop': 'value_en'},
+ self.call('GET', document='testdocument', guid=guid, reply=['localized_prop']))
+ self.assertEqual(
+ {'localized_prop': 'value_ru'},
+ self.call('GET', document='testdocument', guid=guid, accept_language=['ru'], reply=['localized_prop']))
+ self.assertEqual(
+ 'value_ru',
+ self.call('GET', document='testdocument', guid=guid, accept_language=['ru', 'es'], prop='localized_prop'))
+ self.assertEqual(
+ [{'localized_prop': 'value_ru'}],
+ self.call('GET', document='testdocument', accept_language=['foo', 'ru', 'es'], reply=['localized_prop'])['result'])
+
+ self.assertEqual(
+ {'localized_prop': 'value_ru'},
+ self.call('GET', document='testdocument', guid=guid, accept_language=['ru-RU'], reply=['localized_prop']))
+ self.assertEqual(
+ 'value_ru',
+ self.call('GET', document='testdocument', guid=guid, accept_language=['ru-RU', 'es'], prop='localized_prop'))
+ self.assertEqual(
+ [{'localized_prop': 'value_ru'}],
+ self.call('GET', document='testdocument', accept_language=['foo', 'ru-RU', 'es'], reply=['localized_prop'])['result'])
+
+ self.assertEqual(
+ {'localized_prop': 'value_es'},
+ self.call('GET', document='testdocument', guid=guid, accept_language=['es'], reply=['localized_prop']))
+ self.assertEqual(
+ 'value_es',
+ self.call('GET', document='testdocument', guid=guid, accept_language=['es', 'ru'], prop='localized_prop'))
+ self.assertEqual(
+ [{'localized_prop': 'value_es'}],
+ self.call('GET', document='testdocument', accept_language=['foo', 'es', 'ru'], reply=['localized_prop'])['result'])
+
+ self.assertEqual(
+ {'localized_prop': 'value_en'},
+ self.call('GET', document='testdocument', guid=guid, accept_language=['fr'], reply=['localized_prop']))
+ self.assertEqual(
+ 'value_en',
+ self.call('GET', document='testdocument', guid=guid, accept_language=['fr', 'za'], prop='localized_prop'))
+ self.assertEqual(
+ [{'localized_prop': 'value_en'}],
+ self.call('GET', document='testdocument', accept_language=['foo', 'fr', 'za'], reply=['localized_prop'])['result'])
+
+ env._default_lang = 'foo'
+ fallback_lang = sorted(['ru', 'es', 'en'])[0]
+
+ self.assertEqual(
+ {'localized_prop': 'value_%s' % fallback_lang},
+ self.call('GET', document='testdocument', guid=guid, accept_language=['fr'], reply=['localized_prop']))
+ self.assertEqual(
+ 'value_%s' % fallback_lang,
+ self.call('GET', document='testdocument', guid=guid, accept_language=['fr', 'za'], prop='localized_prop'))
+ self.assertEqual(
+ [{'localized_prop': 'value_%s' % fallback_lang}],
+ self.call('GET', document='testdocument', accept_language=['foo', 'fr', 'za'], reply=['localized_prop'])['result'])
+
+ def test_LazyOpen(self):
+
+ class Document1(db.Document):
+ pass
+
+ class Document2(db.Document):
+ pass
+
+ volume = db.SingleVolume('.', [Document1, Document2], lazy_open=True)
+ assert not exists('document1/index')
+ assert not exists('document2/index')
+ volume['document1'].find()
+ volume['document2'].find()
+ assert exists('document1/index')
+ assert exists('document2/index')
+ volume['document1'].find()
+ volume['document2'].find()
+ volume.close()
+
+ shutil.rmtree('document1')
+ shutil.rmtree('document2')
+
+ volume = db.SingleVolume('.', [Document1, Document2], lazy_open=False)
+ assert exists('document1/index')
+ assert exists('document2/index')
+ volume.close()
+
+ def test_OpenByModuleName(self):
+ self.touch(
+ ('foo/bar.py', [
+ 'from sugar_network import db',
+ 'class Bar(db.Document): pass',
+ ]),
+ ('foo/__init__.py', ''),
+ )
+ sys.path.insert(0, '.')
+
+ volume = db.SingleVolume('.', ['foo.bar'])
+ assert exists('bar/index')
+ volume['bar'].find()
+ volume.close()
+
+ def test_Command_GetBlobSetByUrl(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={})
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', url='http://sugarlabs.org')
+
+ self.assertEqual(
+ 'http://sugarlabs.org',
+ self.call('GET', document='testdocument', guid=guid, prop='blob')['url'])
+
+ def test_before_create(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+
+ ts = int(time.time())
+ guid = self.call(method='POST', document='testdocument', content={})
+ assert self.volume['testdocument'].get(guid)['ctime'] in range(ts - 1, ts + 1)
+ assert self.volume['testdocument'].get(guid)['mtime'] in range(ts - 1, ts + 1)
+
+ def test_before_create_Override(self):
+
+ class Commands(VolumeCommands):
+
+ def before_create(self, request, props):
+ props['prop'] = 'overriden'
+ VolumeCommands.before_create(self, request, props)
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ cp = Commands(volume)
+
+ request = db.Request(method='POST', document='testdocument')
+ request.content = {'prop': 'foo'}
+ guid = cp.call(request, db.Response())
+ self.assertEqual('overriden', volume['testdocument'].get(guid)['prop'])
+
+ request = db.Request(method='PUT', document='testdocument', guid=guid)
+ request.content = {'prop': 'bar'}
+ cp.call(request, db.Response())
+ self.assertEqual('bar', volume['testdocument'].get(guid)['prop'])
+
+ def test_before_update(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call(method='POST', document='testdocument', content={})
+ prev_mtime = self.volume['testdocument'].get(guid)['mtime']
+
+ time.sleep(1)
+
+ self.call(method='PUT', document='testdocument', guid=guid, content={'prop': 'probe'})
+ assert self.volume['testdocument'].get(guid)['mtime'] - prev_mtime >= 1
+
+ def test_before_update_Override(self):
+
+ class Commands(VolumeCommands):
+
+ def before_update(self, request, props):
+ props['prop'] = 'overriden'
+ VolumeCommands.before_update(self, request, props)
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ cp = Commands(volume)
+
+ request = db.Request(method='POST', document='testdocument')
+ request.content = {'prop': 'foo'}
+ guid = cp.call(request, db.Response())
+ self.assertEqual('foo', volume['testdocument'].get(guid)['prop'])
+
+ request = db.Request(method='PUT', document='testdocument', guid=guid)
+ request.content = {'prop': 'bar'}
+ cp.call(request, db.Response())
+ self.assertEqual('overriden', volume['testdocument'].get(guid)['prop'])
+
+ def test_DoNotPassGuidsForCreate(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ return value
+
+ @db.indexed_property(prefix='L', localized=True, default='')
+ def localized_prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ self.assertRaises(env.Forbidden, self.call, method='POST', document='testdocument', content={'guid': 'foo'})
+ guid = self.call(method='POST', document='testdocument', content={})
+ assert guid
+
+ def test_seqno(self):
+
+ class Document1(db.Document):
+ pass
+
+ class Document2(db.Document):
+ pass
+
+ volume = db.SingleVolume(tests.tmpdir, [Document1, Document2])
+
+ assert not exists('seqno')
+ self.assertEqual(0, volume.seqno.value)
+
+ volume['document1'].create(guid='1')
+ self.assertEqual(1, volume['document1'].get('1')['seqno'])
+ volume['document2'].create(guid='1')
+ self.assertEqual(2, volume['document2'].get('1')['seqno'])
+ volume['document1'].create(guid='2')
+ self.assertEqual(3, volume['document1'].get('2')['seqno'])
+ volume['document2'].create(guid='2')
+ self.assertEqual(4, volume['document2'].get('2')['seqno'])
+
+ self.assertEqual(4, volume.seqno.value)
+ assert not exists('seqno')
+ volume.seqno.commit()
+ assert exists('seqno')
+ volume = db.SingleVolume(tests.tmpdir, [Document1, Document2])
+ self.assertEqual(4, volume.seqno.value)
+
+ def test_Events(self):
+ env.index_flush_threshold.value = 0
+ env.index_flush_timeout.value = 0
+
+ class Document1(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ pass
+
+ class Document2(db.Document):
+
+ @db.indexed_property(slot=1, default='')
+ def prop(self, value):
+ pass
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ self.touch(
+ ('document1/1/1/guid', '{"value": "1"}'),
+ ('document1/1/1/ctime', '{"value": 1}'),
+ ('document1/1/1/mtime', '{"value": 1}'),
+ ('document1/1/1/prop', '{"value": ""}'),
+ ('document1/1/1/seqno', '{"value": 0}'),
+ )
+
+ events = []
+ volume = db.SingleVolume(tests.tmpdir, [Document1, Document2])
+ volume.connect(lambda event: events.append(event))
+
+ volume.populate()
+ self.assertEqual([
+ {'event': 'commit', 'document': 'document1'},
+ {'event': 'populate', 'document': 'document1'},
+ ],
+ events)
+ del events[:]
+
+ volume['document1'].create(guid='guid1')
+ volume['document2'].create(guid='guid2')
+ self.assertEqual([
+ {'event': 'create', 'document': 'document1', 'guid': 'guid1', 'props': {
+ 'ctime': 0,
+ 'mtime': 0,
+ 'seqno': 0,
+ 'prop': '',
+ 'guid': 'guid1',
+ }},
+ {'event': 'create', 'document': 'document2', 'guid': 'guid2', 'props': {
+ 'ctime': 0,
+ 'mtime': 0,
+ 'seqno': 0,
+ 'prop': '',
+ 'guid': 'guid2',
+ }},
+ ],
+ events)
+ del events[:]
+
+ volume['document1'].update('guid1', prop='foo')
+ volume['document2'].update('guid2', prop='bar')
+ self.assertEqual([
+ {'event': 'update', 'document': 'document1', 'guid': 'guid1', 'props': {
+ 'prop': 'foo',
+ }},
+ {'event': 'update', 'document': 'document2', 'guid': 'guid2', 'props': {
+ 'prop': 'bar',
+ }},
+ ],
+ events)
+ del events[:]
+
+ volume['document2'].set_blob('guid2', 'blob', StringIO('blob'))
+ self.assertEqual([
+ {'event': 'update', 'document': 'document2', 'guid': 'guid2', 'props': {
+ 'seqno': 5,
+ }},
+ ],
+ events)
+ del events[:]
+
+ volume['document1'].delete('guid1')
+ self.assertEqual([
+ {'event': 'delete', 'document': 'document1', 'guid': 'guid1'},
+ ],
+ events)
+ del events[:]
+
+ volume['document1'].commit()
+ volume['document2'].commit()
+
+ self.assertEqual([
+ {'event': 'commit', 'document': 'document1'},
+ {'event': 'commit', 'document': 'document2'},
+ ],
+ events)
+
+ def test_PermissionsNoWrite(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='', permissions=db.ACCESS_READ)
+ def prop(self, value):
+ pass
+
+ @db.blob_property(permissions=db.ACCESS_READ)
+ def blob(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={})
+
+ self.assertRaises(db.Forbidden, self.call, 'POST', document='testdocument', content={'prop': 'value'})
+ self.assertRaises(db.Forbidden, self.call, 'PUT', document='testdocument', guid=guid, content={'prop': 'value'})
+ self.assertRaises(db.Forbidden, self.call, 'PUT', document='testdocument', guid=guid, content={'blob': 'value'})
+ self.assertRaises(db.Forbidden, self.call, 'PUT', document='testdocument', guid=guid, prop='prop', content='value')
+ self.assertRaises(db.Forbidden, self.call, 'PUT', document='testdocument', guid=guid, prop='blob', content='value')
+
+ def test_BlobsWritePermissions(self):
+
+ class TestDocument(db.Document):
+
+ @db.blob_property(permissions=db.ACCESS_CREATE | db.ACCESS_WRITE)
+ def blob1(self, value):
+ return value
+
+ @db.blob_property(permissions=db.ACCESS_CREATE)
+ def blob2(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+
+ guid = self.call('POST', document='testdocument', content={})
+ self.call('PUT', document='testdocument', guid=guid, content={'blob1': 'value1', 'blob2': 'value2'})
+ self.call('PUT', document='testdocument', guid=guid, content={'blob1': 'value1'})
+ self.assertRaises(db.Forbidden, self.call, 'PUT', document='testdocument', guid=guid, content={'blob2': 'value2_'})
+
+ guid = self.call('POST', document='testdocument', content={})
+ self.call('PUT', document='testdocument', guid=guid, prop='blob1', content='value1')
+ self.call('PUT', document='testdocument', guid=guid, prop='blob2', content='value2')
+ self.call('PUT', document='testdocument', guid=guid, prop='blob1', content='value1_')
+ self.assertRaises(db.Forbidden, self.call, 'PUT', document='testdocument', guid=guid, prop='blob2', content='value2_')
+
+ def test_properties_OverrideGet(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='1')
+ def prop1(self, value):
+ return value
+
+ @db.indexed_property(slot=2, default='2')
+ def prop2(self, value):
+ return -1
+
+ @db.blob_property()
+ def blob(self, meta):
+ meta['path'] = 'new-blob'
+ return meta
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={})
+ self.touch(('new-blob', 'new-blob'))
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content='old-blob')
+
+ self.assertEqual(
+ 'new-blob',
+ self.call('GET', document='testdocument', guid=guid, prop='blob')['path'])
+ self.assertEqual(
+ '1',
+ self.call('GET', document='testdocument', guid=guid, prop='prop1'))
+ self.assertEqual(
+ -1,
+ self.call('GET', document='testdocument', guid=guid, prop='prop2'))
+ self.assertEqual(
+ {'prop1': '1', 'prop2': -1},
+ self.call('GET', document='testdocument', guid=guid, reply=['prop1', 'prop2']))
+
+ def test_properties_OverrideSet(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='1')
+ def prop(self, value):
+ return value
+
+ @prop.setter
+ def prop(self, value):
+ return '_%s' % value
+
+ @db.blob_property()
+ def blob1(self, meta):
+ return meta
+
+ @blob1.setter
+ def blob1(self, value):
+ return db.PropertyMetadata(url=value)
+
+ @db.blob_property()
+ def blob2(self, meta):
+ return meta
+
+ @blob2.setter
+ def blob2(self, value):
+ return ' %s ' % value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={})
+
+ self.assertEqual('1', self.call('GET', document='testdocument', guid=guid, prop='prop'))
+ self.assertRaises(db.NotFound, self.call, 'GET', document='testdocument', guid=guid, prop='blob1')
+
+ self.call('PUT', document='testdocument', guid=guid, prop='prop', content='2')
+ self.assertEqual('_2', self.call('GET', document='testdocument', guid=guid, prop='prop'))
+ self.assertRaises(db.NotFound, self.call, 'GET', document='testdocument', guid=guid, prop='blob1')
+
+ self.call('PUT', document='testdocument', guid=guid, content={'prop': 3})
+ self.assertEqual('_3', self.call('GET', document='testdocument', guid=guid, prop='prop'))
+ self.assertRaises(db.NotFound, self.call, 'GET', document='testdocument', guid=guid, prop='blob1')
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob1', content='blob2')
+ self.assertEqual('blob2', self.call('GET', document='testdocument', guid=guid, prop='blob1')['url'])
+
+ guid = self.call('POST', document='testdocument', content={'blob2': 'foo'})
+ self.assertEqual(' foo ', file(self.call('GET', document='testdocument', guid=guid, prop='blob2')['path']).read())
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob2', content='bar')
+ self.assertEqual(' bar ', file(self.call('GET', document='testdocument', guid=guid, prop='blob2')['path']).read())
+
+ def test_SubCall(self):
+
+ class TestDocument(db.Document):
+
+ @db.blob_property(mime_type='application/json')
+ def blob(self, value):
+ return value
+
+ @blob.setter
+ def blob(self, value):
+ if '!' not in value:
+ meta = self.meta('blob')
+ if meta:
+ value = file(meta['path']).read() + value
+ coroutine.spawn(self.post, value)
+ return value
+
+ def post(self, value):
+ self.request.call('PUT', document='testdocument', guid=self.guid, prop='blob', content=value + '!')
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+
+ guid = self.call('POST', document='testdocument', content={'blob': '0'})
+ coroutine.dispatch()
+ self.assertEqual('0!', file(self.call('GET', document='testdocument', guid=guid, prop='blob')['path']).read())
+
+ self.call('PUT', document='testdocument', guid=guid, prop='blob', content='1')
+ coroutine.dispatch()
+ self.assertEqual('0!1!', file(self.call('GET', document='testdocument', guid=guid, prop='blob')['path']).read())
+
+ def test_Group(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+
+ self.call('POST', document='testdocument', content={'prop': 1})
+ self.call('POST', document='testdocument', content={'prop': 2})
+ self.call('POST', document='testdocument', content={'prop': 1})
+
+ self.assertEqual(
+ sorted([{'prop': 1}, {'prop': 2}]),
+ sorted(self.call('GET', document='testdocument', reply='prop', group_by='prop')['result']))
+
+ def test_CallSetterEvenIfThereIsNoCreatePermissions(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, permissions=db.ACCESS_READ, default=0)
+ def prop(self, value):
+ return value
+
+ @prop.setter
+ def prop(self, value):
+ return value + 1
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+
+ self.assertRaises(db.Forbidden, self.call, 'POST', document='testdocument', content={'prop': 1})
+
+ guid = self.call('POST', document='testdocument', content={})
+ self.assertEqual(1, self.call('GET', document='testdocument', guid=guid, prop='prop'))
+
+ def test_ReturnDefaultsForMissedProps(self):
+
+ class TestDocument(db.Document):
+
+ @db.indexed_property(slot=1, default='default')
+ def prop(self, value):
+ return value
+
+ self.volume = db.SingleVolume(tests.tmpdir, [TestDocument])
+ guid = self.call('POST', document='testdocument', content={'prop': 'set'})
+
+ self.assertEqual(
+ [{'prop': 'set'}],
+ self.call('GET', document='testdocument', reply='prop')['result'])
+ self.assertEqual(
+ {'prop': 'set'},
+ self.call('GET', document='testdocument', guid=guid, reply='prop'))
+ self.assertEqual(
+ 'set',
+ self.call('GET', document='testdocument', guid=guid, prop='prop'))
+
+ os.unlink('testdocument/%s/%s/prop' % (guid[:2], guid))
+
+ self.assertEqual(
+ [{'prop': 'default'}],
+ self.call('GET', document='testdocument', reply='prop')['result'])
+ self.assertEqual(
+ {'prop': 'default'},
+ self.call('GET', document='testdocument', guid=guid, reply='prop'))
+ self.assertEqual(
+ 'default',
+ self.call('GET', document='testdocument', guid=guid, prop='prop'))
+
+ def call(self, method, document=None, guid=None, prop=None,
+ accept_language=None, content=None, content_stream=None,
+ content_type=None, if_modified_since=None, **kwargs):
+
+ class TestRequest(db.Request):
+
+ content_stream = None
+ content_length = 0
+
+ request = TestRequest(**kwargs)
+ request.content = content
+ request.content_stream = content_stream
+ request.content_type = content_type
+ request.accept_language = accept_language
+ request.if_modified_since = if_modified_since
+ request['method'] = method
+ if document:
+ request['document'] = document
+ if guid:
+ request['guid'] = guid
+ if prop:
+ request['prop'] = prop
+ if request.content_stream is not None:
+ request.content_length = len(request.content_stream.getvalue())
+
+ self.response = db.Response()
+ cp = VolumeCommands(self.volume)
+ return cp.call(request, self.response)
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/node/__init__.py b/tests/units/node/__init__.py
new file mode 100644
index 0000000..345c327
--- /dev/null
+++ b/tests/units/node/__init__.py
@@ -0,0 +1,9 @@
+# sugar-lint: disable
+
+import sys
+from os.path import dirname, join, abspath
+
+src_root = abspath(join(dirname(__file__), '..', '..', '..'))
+sys.path.insert(0, src_root)
+
+import tests
diff --git a/tests/units/node/__main__.py b/tests/units/node/__main__.py
new file mode 100644
index 0000000..cb7e793
--- /dev/null
+++ b/tests/units/node/__main__.py
@@ -0,0 +1,13 @@
+# sugar-lint: disable
+
+from __init__ import tests
+
+from auth import *
+from node import *
+from obs import *
+from stats import *
+#from sync_master import *
+#from sync_node import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/auth.py b/tests/units/node/auth.py
index 57458e7..13b2378 100755
--- a/tests/units/auth.py
+++ b/tests/units/node/auth.py
@@ -6,12 +6,12 @@ import cPickle as pickle
from __init__ import tests
-import active_document as ad
+from sugar_network import db
from sugar_network.node import auth
-from sugar_network import IPCClient, Client
+from sugar_network.client import IPCClient, Client
from sugar_network.toolkit.router import Request
from sugar_network.resources.user import User
-from active_toolkit import enforce
+from sugar_network.toolkit import enforce
class AuthTest(tests.Test):
@@ -31,13 +31,13 @@ class AuthTest(tests.Test):
request.principal = 'user_2'
self.assertEqual(False, auth.try_validate(request, 'role_2'))
- self.assertRaises(ad.Forbidden, auth.validate, request, 'role_2')
+ self.assertRaises(db.Forbidden, auth.validate, request, 'role_2')
request.principal = 'user_3'
self.assertEqual(False, auth.try_validate(request, 'role_1'))
self.assertEqual(False, auth.try_validate(request, 'role_2'))
- self.assertRaises(ad.Forbidden, auth.validate, request, 'role_1')
- self.assertRaises(ad.Forbidden, auth.validate, request, 'role_2')
+ self.assertRaises(db.Forbidden, auth.validate, request, 'role_1')
+ self.assertRaises(db.Forbidden, auth.validate, request, 'role_2')
def test_FullWriteForRoot(self):
client = Client()
@@ -140,15 +140,15 @@ class AuthTest(tests.Test):
def test_DefaultAuthorization(self):
- class Document(ad.Document):
+ class Document(db.Document):
- @ad.document_command(method='GET', cmd='probe1',
+ @db.document_command(method='GET', cmd='probe1',
mime_type='application/json')
def probe1(cls, directory):
return 'ok1'
- @ad.document_command(method='GET', cmd='probe2',
- permissions=ad.ACCESS_AUTH, mime_type='application/json')
+ @db.document_command(method='GET', cmd='probe2',
+ permissions=db.ACCESS_AUTH, mime_type='application/json')
def probe2(cls, directory):
return 'ok2'
diff --git a/tests/units/node.py b/tests/units/node/node.py
index e0d7838..61bcac7 100755
--- a/tests/units/node.py
+++ b/tests/units/node/node.py
@@ -6,8 +6,8 @@ from os.path import exists
from __init__ import tests
-import active_document as ad
-from sugar_network import node, Client
+from sugar_network import db, node
+from sugar_network.client import Client
from sugar_network.toolkit.rrd import Rrd
from sugar_network.toolkit.router import Unauthorized
from sugar_network.node import stats, obs
@@ -154,7 +154,7 @@ class NodeTest(tests.Test):
call(cp, method='DELETE', document='context', guid=guid, principal='principal')
assert exists(guid_path)
- self.assertRaises(ad.NotFound, call, cp, method='GET', document='context', guid=guid, reply=['guid', 'title'])
+ self.assertRaises(db.NotFound, call, cp, method='GET', document='context', guid=guid, reply=['guid', 'title'])
self.assertEqual(['deleted'], volume['context'].get(guid)['layer'])
def test_RegisterUser(self):
@@ -174,12 +174,12 @@ class NodeTest(tests.Test):
class Document(Resource):
- @ad.document_command(method='GET', cmd='probe1',
- permissions=ad.ACCESS_AUTH)
+ @db.document_command(method='GET', cmd='probe1',
+ permissions=db.ACCESS_AUTH)
def probe1(self, directory):
pass
- @ad.document_command(method='GET', cmd='probe2')
+ @db.document_command(method='GET', cmd='probe2')
def probe2(self, directory):
pass
@@ -193,23 +193,23 @@ class NodeTest(tests.Test):
class Document(Resource):
- @ad.document_command(method='GET', cmd='probe1',
- permissions=ad.ACCESS_AUTHOR)
+ @db.document_command(method='GET', cmd='probe1',
+ permissions=db.ACCESS_AUTHOR)
def probe1(self):
pass
- @ad.document_command(method='GET', cmd='probe2')
+ @db.document_command(method='GET', cmd='probe2')
def probe2(self):
pass
- class User(ad.Document):
+ class User(db.Document):
pass
cp = NodeCommands(Volume('db', [User, Document]))
guid = call(cp, method='POST', document='document', principal='principal', content={})
- self.assertRaises(ad.Forbidden, call, cp, method='GET', cmd='probe1', document='document', guid=guid)
- self.assertRaises(ad.Forbidden, call, cp, method='GET', cmd='probe1', document='document', guid=guid, principal='fake')
+ self.assertRaises(db.Forbidden, call, cp, method='GET', cmd='probe1', document='document', guid=guid)
+ self.assertRaises(db.Forbidden, call, cp, method='GET', cmd='probe1', document='document', guid=guid, principal='fake')
call(cp, method='GET', cmd='probe1', document='document', guid=guid, principal='principal')
call(cp, method='GET', cmd='probe2', document='document', guid=guid)
@@ -226,7 +226,7 @@ class NodeTest(tests.Test):
self.assertEqual('user1', call(cp, method='GET', document='user', guid=tests.UID, prop='name'))
self.assertRaises(Unauthorized, call, cp, method='PUT', document='user', guid=tests.UID, content={'name': 'user2'})
- self.assertRaises(ad.Forbidden, call, cp, method='PUT', document='user', guid=tests.UID, principal='fake', content={'name': 'user2'})
+ self.assertRaises(db.Forbidden, call, cp, method='PUT', document='user', guid=tests.UID, principal='fake', content={'name': 'user2'})
call(cp, method='PUT', document='user', guid=tests.UID, principal=tests.UID, content={'name': 'user2'})
self.assertEqual('user2', call(cp, method='GET', document='user', guid=tests.UID, prop='name'))
@@ -288,7 +288,7 @@ class NodeTest(tests.Test):
volume['context'].update(guid, layer=['deleted'])
- self.assertRaises(ad.NotFound, call, cp, method='GET', document='context', guid=guid)
+ self.assertRaises(db.NotFound, call, cp, method='GET', document='context', guid=guid)
self.assertEqual([], call(cp, method='GET', document='context')['result'])
def test_SetGuidOnMaster(self):
@@ -301,7 +301,7 @@ class NodeTest(tests.Test):
'description': 'description',
'implement': 'foo',
})
- self.assertRaises(ad.NotFound, call, cp1, method='GET', document='context', guid='foo')
+ self.assertRaises(db.NotFound, call, cp1, method='GET', document='context', guid='foo')
volume2 = Volume('db2')
self.touch('db2/master')
@@ -370,7 +370,7 @@ def call(cp, principal=None, content=None, **kwargs):
request.principal = principal
request.content = content
request.environ = {'HTTP_HOST': 'localhost'}
- return cp.call(request, ad.Response())
+ return cp.call(request, db.Response())
if __name__ == '__main__':
diff --git a/tests/units/obs.py b/tests/units/node/obs.py
index 288301b..288301b 100755
--- a/tests/units/obs.py
+++ b/tests/units/node/obs.py
diff --git a/tests/units/stats.py b/tests/units/node/stats.py
index 9cb80d1..9cb80d1 100755
--- a/tests/units/stats.py
+++ b/tests/units/node/stats.py
diff --git a/tests/units/sync_master.py b/tests/units/node/sync_master.py
index 989af92..14479b4 100755
--- a/tests/units/sync_master.py
+++ b/tests/units/node/sync_master.py
@@ -12,15 +12,14 @@ import rrdtool
from __init__ import tests
-import active_document as ad
-from active_document.directory import Directory
-from sugar_network import node
+from sugar_network.db.directory import Directory
+from sugar_network import db, node
from sugar_network.toolkit.sneakernet import InPacket, OutPacket, OutBufferPacket
from sugar_network.toolkit.files_sync import Seeder
from sugar_network.toolkit.router import Request
from sugar_network.node import sync_master
from sugar_network.resources.volume import Volume
-from active_toolkit import coroutine
+from sugar_network.toolkit import coroutine, util
CHUNK = 100000
@@ -31,7 +30,7 @@ class SyncMasterTest(tests.Test):
def setUp(self):
tests.Test.setUp(self)
self.uuid = 0
- self.override(ad, 'uuid', self.next_uuid)
+ self.override(db, 'uuid', self.next_uuid)
sync_master._PULL_QUEUE_SIZE = 256
def next_uuid(self):
@@ -40,7 +39,7 @@ class SyncMasterTest(tests.Test):
def test_push_MisaddressedPackets(self):
master = MasterCommands('master')
- response = ad.Response()
+ response = db.Response()
packet = OutBufferPacket()
request = Request()
@@ -81,7 +80,7 @@ class SyncMasterTest(tests.Test):
def test_push_ProcessPushes(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
packet = OutBufferPacket(src='node', dst='master')
packet.push(document='document', data=[
@@ -125,7 +124,7 @@ class SyncMasterTest(tests.Test):
request.content_stream = packet.pop()
request.content_length = len(request.content_stream.getvalue())
- response = ad.Response()
+ response = db.Response()
reply = master.push(request, response)
assert reply is None
self.assertEqual([
@@ -144,7 +143,7 @@ class SyncMasterTest(tests.Test):
request.content_stream = packet.pop()
request.content_length = len(request.content_stream.getvalue())
- response = ad.Response()
+ response = db.Response()
reply = master.push(request, response)
assert reply is None
self.assertEqual([
@@ -162,7 +161,7 @@ class SyncMasterTest(tests.Test):
request.content_stream = packet.pop()
request.content_length = len(request.content_stream.getvalue())
- response = ad.Response()
+ response = db.Response()
reply = master.push(request, response)
assert reply is None
self.assertEqual([
@@ -174,7 +173,7 @@ class SyncMasterTest(tests.Test):
def test_push_TweakPullAccordingToPush(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
packet = OutBufferPacket(src='node', dst='master')
packet.push(document='document', data=[
@@ -203,7 +202,7 @@ class SyncMasterTest(tests.Test):
def test_push_DoNotTweakPullAccordingToPushIfCookieWasPassed(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
packet = OutBufferPacket(src='node', dst='master')
packet.push(document='document', data=[
@@ -235,7 +234,7 @@ class SyncMasterTest(tests.Test):
def test_push_ProcessStatsPushes(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
ts = int(time.time()) - 1000
packet = OutBufferPacket(src='node', dst='master')
@@ -284,7 +283,7 @@ class SyncMasterTest(tests.Test):
def test_pull_ProcessPulls(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
master.volume['document'].create(guid='1')
master.volume['document'].create(guid='2')
@@ -301,7 +300,7 @@ class SyncMasterTest(tests.Test):
request = Request()
request.environ['HTTP_COOKIE'] = ';'.join(cookie)
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response)
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -331,7 +330,7 @@ class SyncMasterTest(tests.Test):
],
[i for i in packet])
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, sn_pull='[[1, null]]')
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -348,7 +347,7 @@ class SyncMasterTest(tests.Test):
def test_pull_AvoidEmptyPacketsOnPull(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, sn_pull='[[1, null]]')
assert reply is None
@@ -362,7 +361,7 @@ class SyncMasterTest(tests.Test):
request = Request()
request.environ['HTTP_COOKIE'] = ';'.join(cookie)
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response)
assert reply is None
self.assertEqual('application/x-tar', response.content_type)
@@ -379,7 +378,7 @@ class SyncMasterTest(tests.Test):
master.volume['document'].create(guid='2', prop='*' * CHUNK)
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, accept_length=CHUNK * 1.5, sn_pull='[[1, null]]')
assert reply is None
self.assertEqual(None, response.content_type)
@@ -391,7 +390,7 @@ class SyncMasterTest(tests.Test):
coroutine.sleep(1)
request = Request()
- response = ad.Response()
+ response = db.Response()
request.environ['HTTP_COOKIE'] = ';'.join(cookie)
reply = master.pull(request, response, accept_length=CHUNK * 1.5, sn_pull='[[1, null]]')
assert reply is not None
@@ -417,7 +416,7 @@ class SyncMasterTest(tests.Test):
[i for i in packet])
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, accept_length=CHUNK * 1.5, sn_pull='[[1, null]]')
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -436,7 +435,7 @@ class SyncMasterTest(tests.Test):
master._pull_queue.clear()
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, accept_length=CHUNK * 2.5, sn_pull='[[1, null]]')
assert reply is None
self.assertEqual(None, response.content_type)
@@ -448,7 +447,7 @@ class SyncMasterTest(tests.Test):
coroutine.sleep(1)
request = Request()
- response = ad.Response()
+ response = db.Response()
request.environ['HTTP_COOKIE'] = ';'.join(cookie)
reply = master.pull(request, response, accept_length=CHUNK * 2.5, sn_pull='[[1, null]]')
assert reply is not None
@@ -482,7 +481,7 @@ class SyncMasterTest(tests.Test):
def test_pull_ReusePullSeqFromCookies(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
master.volume['document'].create(guid='1')
@@ -501,7 +500,7 @@ class SyncMasterTest(tests.Test):
request = Request()
request.environ['HTTP_COOKIE'] = ';'.join(cookie)
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response)
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -536,7 +535,7 @@ class SyncMasterTest(tests.Test):
self.override(Directory, 'diff', diff)
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, sn_pull='[[1, null]]')
assert reply is None
self.assertEqual(None, response.content_type)
@@ -549,7 +548,7 @@ class SyncMasterTest(tests.Test):
coroutine.sleep(1)
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, sn_pull='[[1, null]]')
assert reply is None
self.assertEqual(None, response.content_type)
@@ -562,7 +561,7 @@ class SyncMasterTest(tests.Test):
def test_clone(self):
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
master.volume['document'].create(guid='1')
master.volume['document'].create(guid='2')
@@ -579,7 +578,7 @@ class SyncMasterTest(tests.Test):
request = Request()
request.environ['HTTP_COOKIE'] = ';'.join(cookie)
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response)
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -611,10 +610,10 @@ class SyncMasterTest(tests.Test):
def test_pull_ProcessFilePulls(self):
node.sync_dirs.value = ['files']
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
master = MasterCommands('master')
request = Request()
- response = ad.Response()
+ response = db.Response()
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
@@ -633,7 +632,7 @@ class SyncMasterTest(tests.Test):
request = Request()
request.environ['HTTP_COOKIE'] = ';'.join(cookie)
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response)
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -646,7 +645,7 @@ class SyncMasterTest(tests.Test):
self.assertEqual('master', packet.header['src'])
self.assertEqual(None, packet.header.get('dst'))
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, files='[[1, null]]')
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -672,7 +671,7 @@ class SyncMasterTest(tests.Test):
packet.push(data=[None])
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, sn_pull='[[1, null]]')
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -693,7 +692,7 @@ class SyncMasterTest(tests.Test):
packet.push(data=[None])
request = Request()
- response = ad.Response()
+ response = db.Response()
reply = master.pull(request, response, sn_pull='[[1, null]]')
assert reply is not None
self.assertEqual('application/x-tar', response.content_type)
@@ -712,7 +711,7 @@ class SyncMasterTest(tests.Test):
master.volume['document'].create(guid='1')
master.volume['document'].create(guid='2')
- response = ad.Response()
+ response = db.Response()
reply = master.pull(Request(), response, sn_pull='[[1, null]]')
cookie = [
'sugar_network_sync=%s; Max-Age=3600; HttpOnly' % base64.b64encode(json.dumps({'sn_pull': [[1, None]]})),
@@ -721,7 +720,7 @@ class SyncMasterTest(tests.Test):
self.assertEqual(cookie, response.get('Set-Cookie'))
assert exists(join('tmp', pull_hash({'sn_pull': [[1, None]]}) + '.pull'))
- response = ad.Response()
+ response = db.Response()
reply = master.pull(Request(), response, sn_pull='[[2, null]]')
cookie = [
'sugar_network_sync=%s; Max-Age=3600; HttpOnly' % base64.b64encode(json.dumps({'sn_pull': [[2, None]]})),
@@ -732,10 +731,10 @@ class SyncMasterTest(tests.Test):
assert exists(join('tmp', pull_hash({'sn_pull': [[2, None]]}) + '.pull'))
-class Request(ad.Request):
+class Request(db.Request):
def __init__(self, environ=None):
- ad.Request.__init__(self)
+ db.Request.__init__(self)
self.environ = environ or {}
@@ -752,9 +751,9 @@ class MasterCommands(sync_master.SyncCommands):
def new_volume(root):
- class Document(ad.Document):
+ class Document(db.Document):
- @ad.active_property(slot=1, default='')
+ @db.indexed_property(slot=1, default='')
def prop(self, value):
return value
diff --git a/tests/units/sync_node.py b/tests/units/node/sync_node.py
index f9da32d..90e5fa5 100755
--- a/tests/units/sync_node.py
+++ b/tests/units/node/sync_node.py
@@ -10,13 +10,12 @@ import rrdtool
from __init__ import tests
-import active_document as ad
+from sugar_network import db
from sugar_network.toolkit.sneakernet import InPacket, OutFilePacket
from sugar_network.client import api_url
from sugar_network.node import sync_node
-from sugar_network.toolkit import sneakernet
+from sugar_network.toolkit import sneakernet, coroutine
from sugar_network.resources.volume import Volume
-from active_toolkit import coroutine
class SyncNodeTest(tests.Test):
@@ -24,7 +23,7 @@ class SyncNodeTest(tests.Test):
def setUp(self):
tests.Test.setUp(self)
self.uuid = 0
- self.override(ad, 'uuid', self.next_uuid)
+ self.override(db, 'uuid', self.next_uuid)
def next_uuid(self):
self.uuid += 1
@@ -408,9 +407,9 @@ class SyncCommands(sync_node.SyncCommands):
def new_volume(root):
- class Document(ad.Document):
+ class Document(db.Document):
- @ad.active_property(slot=1, default='')
+ @db.indexed_property(slot=1, default='')
def prop(self, value):
return value
diff --git a/tests/units/resources/__init__.py b/tests/units/resources/__init__.py
new file mode 100644
index 0000000..345c327
--- /dev/null
+++ b/tests/units/resources/__init__.py
@@ -0,0 +1,9 @@
+# sugar-lint: disable
+
+import sys
+from os.path import dirname, join, abspath
+
+src_root = abspath(join(dirname(__file__), '..', '..', '..'))
+sys.path.insert(0, src_root)
+
+import tests
diff --git a/tests/units/resources/__main__.py b/tests/units/resources/__main__.py
new file mode 100644
index 0000000..4444f07
--- /dev/null
+++ b/tests/units/resources/__main__.py
@@ -0,0 +1,13 @@
+# sugar-lint: disable
+
+from __init__ import tests
+
+from comment import *
+from context import *
+from implementation import *
+from review import *
+from solution import *
+from volume import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/comment.py b/tests/units/resources/comment.py
index e9464b1..61f6cd0 100755
--- a/tests/units/comment.py
+++ b/tests/units/resources/comment.py
@@ -3,7 +3,7 @@
from __init__ import tests
-from sugar_network import Client
+from sugar_network.client import Client
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
from sugar_network.resources.review import Review
diff --git a/tests/units/context.py b/tests/units/resources/context.py
index 60ca83a..ed9d7d7 100755
--- a/tests/units/context.py
+++ b/tests/units/resources/context.py
@@ -4,8 +4,8 @@
from __init__ import tests
from sugar_network.node import obs
-from sugar_network import IPCClient, Client
-from active_toolkit import coroutine, enforce
+from sugar_network.client import IPCClient, Client
+from sugar_network.toolkit import coroutine, enforce
class ContextTest(tests.Test):
@@ -20,7 +20,7 @@ class ContextTest(tests.Test):
self.override(obs, 'presolve', lambda: None)
self.start_server()
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
guid = client.post(['context'], {
'type': 'activity',
@@ -63,7 +63,7 @@ class ContextTest(tests.Test):
self.override(obs, 'presolve', lambda: None)
self.start_server()
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
guid = client.post(['context'], {
'type': 'activity',
@@ -104,7 +104,7 @@ class ContextTest(tests.Test):
self.override(obs, 'presolve', lambda: None)
self.start_server()
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
events = []
def read_events():
@@ -138,7 +138,7 @@ class ContextTest(tests.Test):
def test_InvalidateSolutionsOnDependenciesChanges(self):
self.start_server()
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
events = []
def read_events():
diff --git a/tests/units/implementation.py b/tests/units/resources/implementation.py
index ba6cdf6..40d1d7e 100755
--- a/tests/units/implementation.py
+++ b/tests/units/resources/implementation.py
@@ -9,7 +9,7 @@ from sugar_network.toolkit.router import Request
from sugar_network.resources.volume import Volume
from sugar_network.resources.implementation import _encode_version, Implementation
from sugar_network.node.commands import NodeCommands
-from sugar_network import IPCClient
+from sugar_network.client import IPCClient
class ImplementationTest(tests.Test):
@@ -64,7 +64,7 @@ class ImplementationTest(tests.Test):
def test_SetMimeTypeForActivities(self):
self.start_server()
- client = IPCClient(mountpoint='~')
+ client = IPCClient(params={'mountpoint': '~'})
context = client.post(['context'], {
'type': 'content',
diff --git a/tests/units/review.py b/tests/units/resources/review.py
index 861675a..1a454ee 100755
--- a/tests/units/review.py
+++ b/tests/units/resources/review.py
@@ -3,7 +3,7 @@
from __init__ import tests
-from sugar_network import Client
+from sugar_network.client import Client
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
from sugar_network.resources.review import Review
diff --git a/tests/units/solution.py b/tests/units/resources/solution.py
index 896b160..9bad829 100755
--- a/tests/units/solution.py
+++ b/tests/units/resources/solution.py
@@ -3,7 +3,7 @@
from __init__ import tests
-from sugar_network import Client
+from sugar_network.client import Client
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
from sugar_network.resources.feedback import Feedback
diff --git a/tests/units/volume.py b/tests/units/resources/volume.py
index d795466..35722a6 100755
--- a/tests/units/volume.py
+++ b/tests/units/resources/volume.py
@@ -9,22 +9,21 @@ from os.path import exists
from __init__ import tests
-import active_document as ad
-from sugar_network import node, sugar
+from sugar_network import db, node
from sugar_network.toolkit.sneakernet import InPacket, OutBufferPacket, DiskFull
from sugar_network.resources.volume import Volume, Resource, Commands, VolumeCommands
from sugar_network.resources.user import User
from sugar_network.toolkit.router import Request
-from active_toolkit import coroutine
+from sugar_network.toolkit import coroutine, sugar, util
class VolumeTest(tests.Test):
def test_diff(self):
- class Document(ad.Document):
+ class Document(db.Document):
- @ad.active_property(slot=1)
+ @db.indexed_property(slot=1)
def prop(self, value):
return value
@@ -43,7 +42,7 @@ class VolumeTest(tests.Test):
return True
packet = Packet()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
{'document': 'document'},
@@ -70,9 +69,9 @@ class VolumeTest(tests.Test):
def test_diff_Partial(self):
- class Document(ad.Document):
+ class Document(db.Document):
- @ad.active_property(slot=1)
+ @db.indexed_property(slot=1)
def prop(self, value):
return value
@@ -93,7 +92,7 @@ class VolumeTest(tests.Test):
return True
packet = Packet()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
{'document': 'document'},
@@ -111,7 +110,7 @@ class VolumeTest(tests.Test):
return True
packet = Packet()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
{'document': 'document'},
@@ -130,9 +129,9 @@ class VolumeTest(tests.Test):
def test_diff_Collapsed(self):
- class Document(ad.Document):
+ class Document(db.Document):
- @ad.active_property(slot=1)
+ @db.indexed_property(slot=1)
def prop(self, value):
return value
@@ -156,7 +155,7 @@ class VolumeTest(tests.Test):
return True
packet = Packet()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
{'document': 'document'},
@@ -188,7 +187,7 @@ class VolumeTest(tests.Test):
return True
packet = Packet()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
{'document': 'document'},
@@ -223,13 +222,13 @@ class VolumeTest(tests.Test):
def test_diff_TheSameInSeqForAllDocuments(self):
- class Document1(ad.Document):
+ class Document1(db.Document):
pass
- class Document2(ad.Document):
+ class Document2(db.Document):
pass
- class Document3(ad.Document):
+ class Document3(db.Document):
pass
volume = Volume('db', [Document1, Document2, Document3])
@@ -250,7 +249,7 @@ class VolumeTest(tests.Test):
return True
packet = Packet()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
{'document': 'document1'},
@@ -284,13 +283,13 @@ class VolumeTest(tests.Test):
def test_merge_Create(self):
- class Document1(ad.Document):
+ class Document1(db.Document):
- @ad.active_property(slot=1)
+ @db.indexed_property(slot=1)
def prop(self, value):
return value
- class Document2(ad.Document):
+ class Document2(db.Document):
pass
volume = Volume('db', [Document1, Document2])
@@ -335,9 +334,9 @@ class VolumeTest(tests.Test):
def test_merge_Update(self):
- class Document(ad.Document):
+ class Document(db.Document):
- @ad.active_property(slot=1)
+ @db.indexed_property(slot=1)
def prop(self, value):
return value
@@ -396,7 +395,7 @@ class VolumeTest(tests.Test):
def test_merge_StopOnCommit(self):
- class Document(ad.Document):
+ class Document(db.Document):
pass
volume = Volume('db', [Document])
@@ -441,7 +440,7 @@ class VolumeTest(tests.Test):
class Document(Resource):
- @ad.active_property(slot=1)
+ @db.indexed_property(slot=1)
def prop(self, value):
return value
@@ -450,7 +449,7 @@ class VolumeTest(tests.Test):
events = []
def read_events():
- for event in cp.subscribe(Request(), ad.Response()):
+ for event in cp.subscribe(Request(), db.Response()):
if not event.strip():
continue
assert event.startswith('data: ')
@@ -484,7 +483,7 @@ class VolumeTest(tests.Test):
class Document(Resource):
- @ad.active_property(slot=1)
+ @db.indexed_property(slot=1)
def prop(self, value):
return value
@@ -493,7 +492,7 @@ class VolumeTest(tests.Test):
events = []
def read_events():
- for event in cp.subscribe(Request(), ad.Response(), only_commits=True):
+ for event in cp.subscribe(Request(), db.Response(), only_commits=True):
if not event.strip():
continue
assert event.startswith('data: ')
@@ -920,7 +919,7 @@ def call(cp, principal=None, content=None, **kwargs):
request.content = content
request.environ = {'HTTP_HOST': 'localhost'}
request.commands = cp
- return cp.call(request, ad.Response())
+ return cp.call(request, db.Response())
if __name__ == '__main__':
diff --git a/tests/units/toolkit/__init__.py b/tests/units/toolkit/__init__.py
new file mode 100644
index 0000000..345c327
--- /dev/null
+++ b/tests/units/toolkit/__init__.py
@@ -0,0 +1,9 @@
+# sugar-lint: disable
+
+import sys
+from os.path import dirname, join, abspath
+
+src_root = abspath(join(dirname(__file__), '..', '..', '..'))
+sys.path.insert(0, src_root)
+
+import tests
diff --git a/tests/units/toolkit/__main__.py b/tests/units/toolkit/__main__.py
new file mode 100644
index 0000000..30a7b47
--- /dev/null
+++ b/tests/units/toolkit/__main__.py
@@ -0,0 +1,13 @@
+# sugar-lint: disable
+
+from __init__ import tests
+
+#from files_sync import *
+from http import *
+from mountpoints import *
+from router import *
+from sneakernet import *
+from util import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/files_sync.py b/tests/units/toolkit/files_sync.py
index 66c2c6a..eaeb0c0 100755
--- a/tests/units/files_sync.py
+++ b/tests/units/toolkit/files_sync.py
@@ -9,7 +9,8 @@ from os.path import exists
from __init__ import tests
-import active_document as ad
+from sugar_network import db
+from sugar_network.toolkit import util
from sugar_network.toolkit.files_sync import Seeder, Leecher
from sugar_network.toolkit.sneakernet import OutBufferPacket, InPacket, DiskFull, OutFilePacket
@@ -22,21 +23,21 @@ class FilesSyncTest(tests.Test):
def setUp(self):
tests.Test.setUp(self)
self.uuid = 0
- self.override(ad, 'uuid', self.next_uuid)
+ self.override(db, 'uuid', self.next_uuid)
def next_uuid(self):
self.uuid += 1
return str(self.uuid)
def test_Seeder_pull_Populate(self):
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
seeder = Seeder('files', 'index', seqno)
os.utime('files', (1, 1))
- assert not seeder.pending(ad.Sequence([[1, None]]))
+ assert not seeder.pending(util.Sequence([[1, None]]))
packet = OutBufferPacket()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[1, None]], in_seq)
self.assertEqual(0, seqno.value)
@@ -49,8 +50,8 @@ class FilesSyncTest(tests.Test):
self.utime('files', 1)
os.utime('files', (1, 1))
- assert not seeder.pending(ad.Sequence([[1, None]]))
- in_seq = ad.Sequence([[1, None]])
+ assert not seeder.pending(util.Sequence([[1, None]]))
+ in_seq = util.Sequence([[1, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[1, None]], in_seq)
self.assertEqual(0, seqno.value)
@@ -60,8 +61,8 @@ class FilesSyncTest(tests.Test):
self.utime('files', 2)
os.utime('files', (2, 2))
- assert seeder.pending(ad.Sequence([[1, None]]))
- in_seq = ad.Sequence([[1, None]])
+ assert seeder.pending(util.Sequence([[1, None]]))
+ in_seq = util.Sequence([[1, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[4, None]], in_seq)
self.assertEqual(3, seqno.value)
@@ -84,16 +85,16 @@ class FilesSyncTest(tests.Test):
]),
read_records(packet))
- assert not seeder.pending(ad.Sequence([[4, None]]))
+ assert not seeder.pending(util.Sequence([[4, None]]))
packet = OutBufferPacket()
- in_seq = ad.Sequence([[4, None]])
+ in_seq = util.Sequence([[4, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[4, None]], in_seq)
self.assertEqual(3, seqno.value)
self.assertEqual(True, packet.empty)
def test_Seeder_pull_NotFull(self):
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
seeder = Seeder('files', 'index', seqno)
self.touch(('files/1', '1'))
@@ -104,7 +105,7 @@ class FilesSyncTest(tests.Test):
self.utime('files', 1)
out_packet = OutBufferPacket()
- in_seq = ad.Sequence([[2, 2], [4, 10], [20, None]])
+ in_seq = util.Sequence([[2, 2], [4, 10], [20, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[6, 10], [20,None]], in_seq)
self.assertEqual(
@@ -117,7 +118,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
def test_Seeder_pull_DiskFull(self):
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
seeder = Seeder('files', 'index', seqno)
self.touch(('files/1', '*' * CHUNK))
@@ -126,7 +127,7 @@ class FilesSyncTest(tests.Test):
self.utime('files', 1)
out_packet = OutBufferPacket(limit=CHUNK * 2.5)
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
try:
seeder.pull(in_seq, out_packet)
assert False
@@ -142,7 +143,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
def test_Seeder_pull_UpdateFiles(self):
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
seeder = Seeder('files', 'index', seqno)
self.touch(('files/1', '1'))
@@ -152,19 +153,19 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[1, None]]), out_packet)
+ seeder.pull(util.Sequence([[1, None]]), out_packet)
self.assertEqual(3, seqno.value)
os.utime('files/2', (2, 2))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[4, None]]), out_packet)
+ seeder.pull(util.Sequence([[4, None]]), out_packet)
self.assertEqual(3, seqno.value)
os.utime('files', (3, 3))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[4, None]]), out_packet)
+ seeder.pull(util.Sequence([[4, None]]), out_packet)
self.assertEqual(4, seqno.value)
self.assertEqual(
sorted([
@@ -178,7 +179,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (4, 4))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[5, None]]), out_packet)
+ seeder.pull(util.Sequence([[5, None]]), out_packet)
self.assertEqual(6, seqno.value)
self.assertEqual(
sorted([
@@ -189,7 +190,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[1, None]]), out_packet)
+ seeder.pull(util.Sequence([[1, None]]), out_packet)
self.assertEqual(6, seqno.value)
self.assertEqual(
sorted([
@@ -201,7 +202,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
def test_Seeder_pull_CreateFiles(self):
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
seeder = Seeder('files', 'index', seqno)
self.touch(('files/1', '1'))
@@ -211,7 +212,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[1, None]]), out_packet)
+ seeder.pull(util.Sequence([[1, None]]), out_packet)
self.assertEqual(3, seqno.value)
self.touch(('files/4', '4'))
@@ -219,14 +220,14 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[4, None]]), out_packet)
+ seeder.pull(util.Sequence([[4, None]]), out_packet)
self.assertEqual(3, seqno.value)
os.utime('files/4', (2, 2))
os.utime('files', (2, 2))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[4, None]]), out_packet)
+ seeder.pull(util.Sequence([[4, None]]), out_packet)
self.assertEqual(4, seqno.value)
self.assertEqual(
sorted([
@@ -242,7 +243,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (3, 3))
out_packet = OutBufferPacket()
- seeder.pull(ad.Sequence([[5, None]]), out_packet)
+ seeder.pull(util.Sequence([[5, None]]), out_packet)
self.assertEqual(6, seqno.value)
self.assertEqual(
sorted([
@@ -253,7 +254,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
def test_Seeder_pull_DeleteFiles(self):
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
seeder = Seeder('files', 'index', seqno)
self.touch(('files/1', '1'))
@@ -263,7 +264,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[4, None]], in_seq)
self.assertEqual(3, seqno.value)
@@ -271,9 +272,9 @@ class FilesSyncTest(tests.Test):
os.unlink('files/2')
os.utime('files', (2, 2))
- assert seeder.pending(ad.Sequence([[4, None]]))
+ assert seeder.pending(util.Sequence([[4, None]]))
out_packet = OutBufferPacket()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[2, 2], [5, None]], in_seq)
self.assertEqual(4, seqno.value)
@@ -290,9 +291,9 @@ class FilesSyncTest(tests.Test):
os.unlink('files/3')
os.utime('files', (3, 3))
- assert seeder.pending(ad.Sequence([[5, None]]))
+ assert seeder.pending(util.Sequence([[5, None]]))
out_packet = OutBufferPacket()
- in_seq = ad.Sequence([[1, None]])
+ in_seq = util.Sequence([[1, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[1, 3], [7, None]], in_seq)
self.assertEqual(6, seqno.value)
@@ -306,7 +307,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
out_packet = OutBufferPacket()
- in_seq = ad.Sequence([[4, None]])
+ in_seq = util.Sequence([[4, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[7, None]], in_seq)
self.assertEqual(6, seqno.value)
@@ -320,7 +321,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
def test_Leecher_push(self):
- seqno = ad.Seqno('seqno')
+ seqno = util.Seqno('seqno')
seeder = Seeder('src/files', 'src/index', seqno)
leecher = Leecher('dst/files', 'dst/sequence')
@@ -331,7 +332,7 @@ class FilesSyncTest(tests.Test):
os.utime('src/files', (1, 1))
with OutFilePacket('.') as packet:
- seeder.pull(ad.Sequence([[1, None]]), packet)
+ seeder.pull(util.Sequence([[1, None]]), packet)
self.assertEqual(3, seqno.value)
for i in InPacket(packet.path):
leecher.push(i)
@@ -353,7 +354,7 @@ class FilesSyncTest(tests.Test):
os.utime('src/files', (2, 2))
with OutFilePacket('.') as packet:
- seeder.pull(ad.Sequence([[4, None]]), packet)
+ seeder.pull(util.Sequence([[4, None]]), packet)
self.assertEqual(4, seqno.value)
for i in InPacket(packet.path):
leecher.push(i)
@@ -372,7 +373,7 @@ class FilesSyncTest(tests.Test):
os.utime('src/files', (3, 3))
with OutFilePacket('.') as packet:
- seeder.pull(ad.Sequence([[5, None]]), packet)
+ seeder.pull(util.Sequence([[5, None]]), packet)
self.assertEqual(7, seqno.value)
for i in InPacket(packet.path):
leecher.push(i)
diff --git a/tests/units/http.py b/tests/units/toolkit/http.py
index fde043f..871c7d5 100755
--- a/tests/units/http.py
+++ b/tests/units/toolkit/http.py
@@ -5,10 +5,8 @@ import select
from __init__ import tests
-import active_document as ad
-from sugar_network import client as local
-from sugar_network.toolkit import router, http
-from active_toolkit import coroutine
+from sugar_network import db, client as local
+from sugar_network.toolkit import coroutine, router, http
class HTTPTest(tests.Test):
@@ -26,7 +24,7 @@ class HTTPTest(tests.Test):
coroutine.sleep(.3)
yield Router.events.pop(0) + '\n'
- self.server = coroutine.WSGIServer(('localhost', local.ipc_port.value), Router(ad.CommandsProcessor()))
+ self.server = coroutine.WSGIServer(('localhost', local.ipc_port.value), Router(db.CommandsProcessor()))
coroutine.spawn(self.server.serve_forever)
coroutine.dispatch()
client = http.Client('http://localhost:%s' % local.ipc_port.value, sugar_auth=False)
diff --git a/tests/units/mountpoints.py b/tests/units/toolkit/mountpoints.py
index 3e7c4df..577bddd 100755
--- a/tests/units/mountpoints.py
+++ b/tests/units/toolkit/mountpoints.py
@@ -6,8 +6,7 @@ import shutil
from __init__ import tests
-from sugar_network.toolkit import mountpoints
-from active_toolkit import coroutine
+from sugar_network.toolkit import coroutine, mountpoints
class MountpointsTest(tests.Test):
diff --git a/tests/units/router.py b/tests/units/toolkit/router.py
index 87b16e7..55846eb 100755
--- a/tests/units/router.py
+++ b/tests/units/toolkit/router.py
@@ -12,12 +12,12 @@ from email.utils import formatdate
from cStringIO import StringIO
from os.path import exists
-from __init__ import tests
+from __init__ import tests, src_root
-import active_document as ad
-from sugar_network import node, sugar, static, Client
+from sugar_network import db, node, static
+from sugar_network.client import Client
from sugar_network.toolkit.router import Router, _Request, _parse_accept_language, Unauthorized, route, Redirect, NotModified, _filename
-from active_toolkit import util
+from sugar_network.toolkit import util, sugar
from sugar_network.resources.user import User
from sugar_network.resources.volume import Volume, Resource
from sugar_network import client as local
@@ -29,15 +29,15 @@ class RouterTest(tests.Test):
class Document(Resource):
- @ad.active_property(slot=1, prefix='A', full_text=True, default='')
+ @db.indexed_property(slot=1, prefix='A', full_text=True, default='')
def term(self, value):
return value
- @ad.active_property(ad.StoredProperty, default='')
+ @db.stored_property(default='')
def stored(self, value):
return value
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def blob(self, value):
return value
@@ -123,9 +123,9 @@ class RouterTest(tests.Test):
def test_StreamedResponse(self):
- class CommandsProcessor(ad.CommandsProcessor):
+ class CommandsProcessor(db.CommandsProcessor):
- @ad.volume_command()
+ @db.volume_command()
def get_stream(self, response):
return StringIO('stream')
@@ -141,17 +141,17 @@ class RouterTest(tests.Test):
def test_EmptyResponse(self):
- class CommandsProcessor(ad.CommandsProcessor):
+ class CommandsProcessor(db.CommandsProcessor):
- @ad.volume_command(cmd='1', mime_type='application/octet-stream')
+ @db.volume_command(cmd='1', mime_type='application/octet-stream')
def get_binary(self, response):
pass
- @ad.volume_command(cmd='2', mime_type='application/json')
+ @db.volume_command(cmd='2', mime_type='application/json')
def get_json(self, response):
pass
- @ad.volume_command(cmd='3')
+ @db.volume_command(cmd='3')
def no_get(self, response):
pass
@@ -186,11 +186,11 @@ class RouterTest(tests.Test):
class Document(Resource):
- @ad.active_property(slot=1, prefix='A', full_text=True, default='')
+ @db.indexed_property(slot=1, prefix='A', full_text=True, default='')
def term(self, value):
return value
- @ad.active_property(ad.StoredProperty, default='')
+ @db.stored_property(default='')
def stored(self, value):
return value
@@ -208,11 +208,11 @@ class RouterTest(tests.Test):
class Document(Resource):
- @ad.active_property(slot=1, prefix='A', full_text=True, default='')
+ @db.indexed_property(slot=1, prefix='A', full_text=True, default='')
def term(self, value):
return value
- @ad.active_property(ad.StoredProperty, default='')
+ @db.stored_property(default='')
def stored(self, value):
return value
@@ -222,7 +222,7 @@ class RouterTest(tests.Test):
self.waitpid(pid)
with Volume(tests.tmpdir + '/remote', [User]) as documents:
- cp = ad.VolumeCommands(documents)
+ cp = db.VolumeCommands(documents)
router = Router(cp)
request = _Request({
@@ -243,11 +243,11 @@ class RouterTest(tests.Test):
class Document2(Resource):
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def blob(self, value):
raise Redirect(URL)
- @ad.active_property(slot=1, prefix='A', full_text=True, default='')
+ @db.indexed_property(slot=1, prefix='A', full_text=True, default='')
def term(self, value):
return value
@@ -360,38 +360,38 @@ class RouterTest(tests.Test):
class TestDocument(Resource):
- @ad.active_property(slot=100, localized=True)
+ @db.indexed_property(slot=100, localized=True)
def prop(self, value):
return value
self.fork(self.restful_server, [User, TestDocument])
- self.override(ad, 'default_lang', lambda: 'en')
+ self.override(db, 'default_lang', lambda: 'en')
client = Client(local.api_url.value, sugar_auth=True)
guid = client.post(['testdocument'], {'prop': 'en'})
self.assertEqual('en', client.get(['testdocument', guid, 'prop']))
- self.override(ad, 'default_lang', lambda: 'ru')
+ self.override(db, 'default_lang', lambda: 'ru')
client = Client(local.api_url.value, sugar_auth=True)
self.assertEqual('en', client.get(['testdocument', guid, 'prop']))
client.put(['testdocument', guid, 'prop'], 'ru')
self.assertEqual('ru', client.get(['testdocument', guid, 'prop']))
- self.override(ad, 'default_lang', lambda: 'es')
+ self.override(db, 'default_lang', lambda: 'es')
client = Client(local.api_url.value, sugar_auth=True)
self.assertEqual('en', client.get(['testdocument', guid, 'prop']))
client.put(['testdocument', guid, 'prop'], 'es')
self.assertEqual('es', client.get(['testdocument', guid, 'prop']))
- self.override(ad, 'default_lang', lambda: 'ru')
+ self.override(db, 'default_lang', lambda: 'ru')
client = Client(local.api_url.value, sugar_auth=True)
self.assertEqual('ru', client.get(['testdocument', guid, 'prop']))
- self.override(ad, 'default_lang', lambda: 'en')
+ self.override(db, 'default_lang', lambda: 'en')
client = Client(local.api_url.value, sugar_auth=True)
self.assertEqual('en', client.get(['testdocument', guid, 'prop']))
- self.override(ad, 'default_lang', lambda: 'foo')
+ self.override(db, 'default_lang', lambda: 'foo')
client = Client(local.api_url.value, sugar_auth=True)
self.assertEqual('en', client.get(['testdocument', guid, 'prop']))
@@ -399,7 +399,7 @@ class RouterTest(tests.Test):
class TestDocument(Resource):
- @ad.active_property(slot=100, typecast=int)
+ @db.indexed_property(slot=100, typecast=int)
def prop(self, value):
if not self.request.if_modified_since or self.request.if_modified_since >= value:
return value
@@ -428,16 +428,16 @@ class RouterTest(tests.Test):
class TestDocument(Resource):
- @ad.active_property(slot=100, typecast=int)
+ @db.indexed_property(slot=100, typecast=int)
def prop1(self, value):
self.request.response.last_modified = value
return value
- @ad.active_property(slot=101, typecast=int)
+ @db.indexed_property(slot=101, typecast=int)
def prop2(self, value):
return value
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def prop3(self, value):
return value
@@ -465,7 +465,7 @@ class RouterTest(tests.Test):
client = Client(local.api_url.value, sugar_auth=True)
guid = client.post(['testdocument'], {})
- local_path = '../../../sugar_network/static/httpdocs/images/missing.png'
+ local_path = src_root + '/sugar_network/static/httpdocs/images/missing.png'
response = client.request('GET', ['static', 'images', 'missing.png'])
self.assertEqual(200, response.status_code)
assert file(local_path).read() == response.content
@@ -482,7 +482,7 @@ class RouterTest(tests.Test):
client = Client(local.api_url.value, sugar_auth=True)
guid = client.post(['testdocument'], {})
- mtime = os.stat('../../../sugar_network/static/httpdocs/images/missing.png').st_mtime
+ mtime = os.stat(src_root + '/sugar_network/static/httpdocs/images/missing.png').st_mtime
self.assertEqual(
304,
client.request('GET', ['static', 'images', 'missing.png'], headers={
@@ -503,7 +503,7 @@ class RouterTest(tests.Test):
class TestDocument(Resource):
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def blob(self, value):
return value
@@ -535,7 +535,7 @@ class RouterTest(tests.Test):
class Document(Resource):
- @ad.active_property(slot=1, prefix='A', full_text=True, default='')
+ @db.indexed_property(slot=1, prefix='A', full_text=True, default='')
def term(self, value):
return value
@@ -574,25 +574,25 @@ class RouterTest(tests.Test):
self.assertEqual('Foo-Bar.gif', _filename(['foo', 'bar'], 'image/gif'))
self.assertEqual('Fake', _filename('fake', 'foo/bar'))
- self.assertEqual('Eng', _filename({ad.DEFAULT_LANG: 'eng'}, None))
- self.assertEqual('Eng', _filename([{ad.DEFAULT_LANG: 'eng'}], None))
- self.assertEqual('Bar-1', _filename([{'lang': 'foo', ad.DEFAULT_LANG: 'bar'}, 1], None))
+ self.assertEqual('Eng', _filename({db.default_lang(): 'eng'}, None))
+ self.assertEqual('Eng', _filename([{db.default_lang(): 'eng'}], None))
+ self.assertEqual('Bar-1', _filename([{'lang': 'foo', db.default_lang(): 'bar'}, 1], None))
def test_ContentDisposition(self):
class TestDocument(Resource):
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def blob1(self, value):
if value:
value['name'] = 'foo'
return value
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def blob2(self, value):
return value
- @ad.active_property(ad.BlobProperty)
+ @db.blob_property()
def blob3(self, value):
if value:
value['filename'] = 'foo.bar'
diff --git a/tests/units/sneakernet.py b/tests/units/toolkit/sneakernet.py
index abab510..abab510 100755
--- a/tests/units/sneakernet.py
+++ b/tests/units/toolkit/sneakernet.py
diff --git a/tests/units/toolkit/util.py b/tests/units/toolkit/util.py
new file mode 100755
index 0000000..c5b1311
--- /dev/null
+++ b/tests/units/toolkit/util.py
@@ -0,0 +1,342 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import copy
+from os.path import exists
+
+from __init__ import tests
+
+from sugar_network.toolkit.util import Seqno, Sequence
+
+
+class UtilTest(tests.Test):
+
+ def test_Seqno_commit(self):
+ seqno = Seqno('seqno')
+
+ self.assertEqual(False, seqno.commit())
+ assert not exists('seqno')
+
+ seqno.next()
+ self.assertEqual(True, seqno.commit())
+ assert exists('seqno')
+ self.assertEqual(False, seqno.commit())
+ seqno.next()
+
+ seqno = Seqno('seqno')
+ self.assertEqual(1, seqno.value)
+ self.assertEqual(False, seqno.commit())
+
+ def test_Sequence_empty(self):
+ scale = Sequence(empty_value=[1, None])
+ self.assertEqual(
+ [[1, None]],
+ scale)
+ assert scale.empty
+ scale.exclude(1, 1)
+ assert not scale.empty
+
+ scale = Sequence()
+ self.assertEqual(
+ [],
+ scale)
+ assert scale.empty
+ scale.include(1, None)
+ assert not scale.empty
+
+ def test_Sequence_exclude(self):
+ scale = Sequence(empty_value=[1, None])
+ scale.exclude(1, 10)
+ self.assertEqual(
+ [[11, None]],
+ scale)
+
+ scale = Sequence(empty_value=[1, None])
+ scale.exclude(5, 10)
+ self.assertEqual(
+ [[1, 4], [11, None]],
+ scale)
+
+ scale.exclude(2, 2)
+ self.assertEqual(
+ [[1, 1], [3, 4], [11, None]],
+ scale)
+
+ scale.exclude(1, 1)
+ self.assertEqual(
+ [[3, 4], [11, None]],
+ scale)
+
+ scale.exclude(3, 3)
+ self.assertEqual(
+ [[4, 4], [11, None]],
+ scale)
+
+ scale.exclude(1, 20)
+ self.assertEqual(
+ [[21, None]],
+ scale)
+
+ scale.exclude(21, 21)
+ self.assertEqual(
+ [[22, None]],
+ scale)
+
+ def test_Sequence_include_JoinExistingItems(self):
+ scale = Sequence()
+
+ scale.include(1, None)
+ self.assertEqual(
+ [[1, None]],
+ scale)
+
+ scale.include(2, None)
+ self.assertEqual(
+ [[1, None]],
+ scale)
+
+ scale.include(4, 5)
+ self.assertEqual(
+ [[1, None]],
+ scale)
+
+ scale.exclude(2, 2)
+ scale.exclude(4, 4)
+ scale.exclude(6, 6)
+ scale.exclude(9, 9)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, 5],
+ [7, 8],
+ [10, None]],
+ scale)
+
+ scale.include(10, 20)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, 5],
+ [7, 8],
+ [10, None]],
+ scale)
+
+ scale.include(8, 20)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, 5],
+ [7, None]],
+ scale)
+
+ scale.include(5, None)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, None]],
+ scale)
+
+ scale.include(1, None)
+ self.assertEqual(
+ [[1, None]],
+ scale)
+
+ def test_Sequence_include_InsertNewItems(self):
+ scale = Sequence()
+
+ scale.include(8, 10)
+ scale.include(3, 3)
+ self.assertEqual(
+ [[3, 3],
+ [8, 10]],
+ scale)
+
+ scale.include(9, 11)
+ self.assertEqual(
+ [[3, 3],
+ [8, 11]],
+ scale)
+
+ scale.include(7, 12)
+ self.assertEqual(
+ [[3, 3],
+ [7, 12]],
+ scale)
+
+ scale.include(5, 5)
+ self.assertEqual(
+ [[3, 3],
+ [5, 5],
+ [7, 12]],
+ scale)
+
+ scale.include(4, 4)
+ self.assertEqual(
+ [[3, 5],
+ [7, 12]],
+ scale)
+
+ scale.include(1, 1)
+ self.assertEqual(
+ [[1, 1],
+ [3, 5],
+ [7, 12]],
+ scale)
+
+ scale.include(2, None)
+ self.assertEqual(
+ [[1, None]],
+ scale)
+
+ def test_Sequence_Invert(self):
+ scale_1 = Sequence(empty_value=[1, None])
+ scale_1.exclude(2, 2)
+ scale_1.exclude(5, 10)
+
+ scale_2 = copy.deepcopy(scale_1[:])
+ scale_2[-1][1] = 20
+
+ self.assertEqual(
+ [
+ [1, 1],
+ [3, 4],
+ [11, None],
+ ],
+ scale_1)
+ scale_1.exclude(scale_2)
+ self.assertEqual(
+ [[21, None]],
+ scale_1)
+
+ def test_Sequence_contains(self):
+ scale = Sequence(empty_value=[1, None])
+ scale.exclude(2, 2)
+ scale.exclude(5, 10)
+
+ assert 1 in scale
+ assert 2 not in scale
+ assert 3 in scale
+ assert 5 not in scale
+ assert 10 not in scale
+ assert 11 in scale
+
+ def test_Sequence_first(self):
+ scale = Sequence()
+ self.assertEqual(0, scale.first)
+
+ scale = Sequence(empty_value=[1, None])
+ self.assertEqual(1, scale.first)
+ scale.exclude(1, 3)
+ self.assertEqual(4, scale.first)
+
+ def test_Sequence_include(self):
+ rng = Sequence()
+ rng.include(2, 2)
+ self.assertEqual(
+ [[2, 2]],
+ rng)
+ rng.include(7, 10)
+ self.assertEqual(
+ [[2, 2], [7, 10]],
+ rng)
+ rng.include(5, 5)
+ self.assertEqual(
+ [[2, 2], [5, 5], [7, 10]],
+ rng)
+ rng.include(15, None)
+ self.assertEqual(
+ [[2, 2], [5, 5], [7, 10], [15, None]],
+ rng)
+ rng.include(3, 5)
+ self.assertEqual(
+ [[2, 5], [7, 10], [15, None]],
+ rng)
+ rng.include(11, 14)
+ self.assertEqual(
+ [[2, 5], [7, None]],
+ rng)
+
+ rng = Sequence()
+ rng.include(10, None)
+ self.assertEqual(
+ [[10, None]],
+ rng)
+ rng.include(7, 8)
+ self.assertEqual(
+ [[7, 8], [10, None]],
+ rng)
+ rng.include(2, 2)
+ self.assertEqual(
+ [[2, 2], [7, 8], [10, None]],
+ rng)
+
+ def test_Sequence_Union(self):
+ seq_1 = Sequence()
+ seq_1.include(1, 2)
+ seq_2 = Sequence()
+ seq_2.include(3, 4)
+ seq_1.include(seq_2)
+ self.assertEqual(
+ [[1, 4]],
+ seq_1)
+
+ seq_1 = Sequence()
+ seq_1.include(1, None)
+ seq_2 = Sequence()
+ seq_2.include(3, 4)
+ seq_1.include(seq_2)
+ self.assertEqual(
+ [[1, None]],
+ seq_1)
+
+ seq_2 = Sequence()
+ seq_2.include(1, None)
+ seq_1 = Sequence()
+ seq_1.include(3, 4)
+ seq_1.include(seq_2)
+ self.assertEqual(
+ [[1, None]],
+ seq_1)
+
+ seq_1 = Sequence()
+ seq_1.include(1, None)
+ seq_2 = Sequence()
+ seq_2.include(2, None)
+ seq_1.include(seq_2)
+ self.assertEqual(
+ [[1, None]],
+ seq_1)
+
+ seq_1 = Sequence()
+ seq_2 = Sequence()
+ seq_2.include(seq_1)
+ self.assertEqual([], seq_2)
+
+ seq_1 = Sequence()
+ seq_2 = Sequence()
+ seq_2.include(1, None)
+ seq_2.include(seq_1)
+ self.assertEqual([[1, None]], seq_2)
+
+ seq = Sequence()
+ seq.include(10, 11)
+ seq.include(None)
+ self.assertEqual([[10, 11]], seq)
+
+ def test_Sequence_last(self):
+ seq = Sequence()
+ self.assertEqual(None, seq.last)
+
+ seq = Sequence()
+ seq.include(10, None)
+ self.assertEqual(None, seq.last)
+
+ seq = Sequence()
+ seq.include(1, 1)
+ seq.include(3, 5)
+ seq.include(10, 11)
+ self.assertEqual(11, seq.last)
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/zerosugar/__init__.py b/tests/units/zerosugar/__init__.py
new file mode 100644
index 0000000..345c327
--- /dev/null
+++ b/tests/units/zerosugar/__init__.py
@@ -0,0 +1,9 @@
+# sugar-lint: disable
+
+import sys
+from os.path import dirname, join, abspath
+
+src_root = abspath(join(dirname(__file__), '..', '..', '..'))
+sys.path.insert(0, src_root)
+
+import tests
diff --git a/tests/units/zerosugar/__main__.py b/tests/units/zerosugar/__main__.py
new file mode 100644
index 0000000..feb487f
--- /dev/null
+++ b/tests/units/zerosugar/__main__.py
@@ -0,0 +1,10 @@
+# sugar-lint: disable
+
+from __init__ import tests
+
+from clones import *
+from injector import *
+from spec import *
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/clones.py b/tests/units/zerosugar/clones.py
index e616bae..60bb9cb 100755
--- a/tests/units/clones.py
+++ b/tests/units/zerosugar/clones.py
@@ -8,14 +8,12 @@ from os.path import abspath, lexists, exists
from __init__ import tests
-import active_document as ad
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
-from active_toolkit import coroutine, util
from sugar_network.client.mounts import HomeMount
from sugar_network.client.mountset import Mountset
from sugar_network.zerosugar import clones
-from sugar_network.toolkit import sugar
+from sugar_network.toolkit import coroutine, util, sugar
from sugar_network.resources.volume import Volume
@@ -305,7 +303,7 @@ class ImplementationsTest(tests.Test):
os.readlink('clones/context/org.sugarlabs.HelloWorld/' + hashed_path))
self.assertEqual(
- {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en': 'HelloWorld'}, 'favorite': False, 'clone': 2},
+ {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en-us': 'HelloWorld'}, 'favorite': False, 'clone': 2},
self.mounts.volume['context'].get('org.sugarlabs.HelloWorld').properties(['guid', 'title', 'favorite', 'clone']))
def test_Checkout(self):
diff --git a/tests/units/injector.py b/tests/units/zerosugar/injector.py
index 89aa285..f5299b6 100755
--- a/tests/units/injector.py
+++ b/tests/units/zerosugar/injector.py
@@ -12,15 +12,15 @@ from os.path import exists, dirname
from __init__ import tests
-from active_toolkit import coroutine, enforce
from sugar_network import zeroinstall
from sugar_network.client import journal
-from sugar_network.toolkit import pipe as pipe_
+from sugar_network.toolkit import coroutine, enforce, pipe as pipe_, lsb_release
from sugar_network.resources.user import User
from sugar_network.resources.context import Context
from sugar_network.resources.implementation import Implementation
-from sugar_network.zerosugar import lsb_release, packagekit, injector, clones
-from sugar_network import IPCClient, client as local
+from sugar_network.zerosugar import packagekit, injector, clones
+from sugar_network.client import IPCClient
+from sugar_network import client as local
class InjectorTest(tests.Test):
@@ -31,7 +31,7 @@ class InjectorTest(tests.Test):
def test_clone_Online(self):
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -124,7 +124,7 @@ class InjectorTest(tests.Test):
def test_clone_impl(self):
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -171,7 +171,7 @@ class InjectorTest(tests.Test):
def test_launch_Online(self):
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -310,7 +310,7 @@ class InjectorTest(tests.Test):
def test_InstallDeps(self):
self.touch('remote/master')
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -485,7 +485,7 @@ class InjectorTest(tests.Test):
def test_clone_SetExecPermissionsForActivities(self):
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -550,7 +550,7 @@ class InjectorTest(tests.Test):
def test_ProcessCommonDependencies(self):
self.touch('remote/master')
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -635,7 +635,7 @@ class InjectorTest(tests.Test):
def test_NoDepsClonning(self):
self.touch('remote/master')
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -673,7 +673,7 @@ class InjectorTest(tests.Test):
def test_LoadFeed_SetPackages(self):
self.touch('remote/master')
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
@@ -744,7 +744,7 @@ class InjectorTest(tests.Test):
self.touch('remote/master')
self.start_ipc_and_restful_server([User, Context, Implementation])
- remote = IPCClient(mountpoint='/')
+ remote = IPCClient()
context = remote.post(['context'], {
'type': 'activity',
diff --git a/tests/units/spec.py b/tests/units/zerosugar/spec.py
index 1a3745b..1a3745b 100755
--- a/tests/units/spec.py
+++ b/tests/units/zerosugar/spec.py