From 6447b7951a66021f288f5b2b0c8cc301257d36ee Mon Sep 17 00:00:00 2001 From: Aleksey Lim Date: Tue, 04 Feb 2014 12:19:15 +0000 Subject: Polish design The major points: * minimize number of resource types * use aggregated properties instead of resources * separate blob storage from resources db --- diff --git a/.gitignore b/.gitignore index 209a8d5..910b5eb 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ sugar_network/lib/zeroinstall/tests/** sugar_network/lib/requests/build/** sugar_network/lib/requests/docs/** sugar_network/lib/requests/tests/** +sugar_network/toolkit/languages.py diff --git a/TODO b/TODO index fd59b82..8213191 100644 --- a/TODO +++ b/TODO @@ -1,3 +1,9 @@ +- push local offline changes to the node on getting online +- diff/merge while checking in node context +- deliver spawn events only to local subscribers +- test/run presolve + + 0.10 ==== - Context prop for Update Hub sort diff --git a/sugar_network/static/httpdocs/images/activity.svg b/blobs/activity.svg index c5302fd..c5302fd 100644 --- a/sugar_network/static/httpdocs/images/activity.svg +++ b/blobs/activity.svg diff --git a/sugar_network/static/httpdocs/images/book.svg b/blobs/book.svg index 92fb811..92fb811 100644 --- a/sugar_network/static/httpdocs/images/book.svg +++ b/blobs/book.svg diff --git a/sugar_network/static/httpdocs/favicon.ico b/blobs/favicon.ico index 80e42ba..80e42ba 100644 --- a/sugar_network/static/httpdocs/favicon.ico +++ b/blobs/favicon.ico Binary files differ diff --git a/sugar_network/static/httpdocs/images/group.svg b/blobs/group.svg index c9a6b64..c9a6b64 100644 --- a/sugar_network/static/httpdocs/images/group.svg +++ b/blobs/group.svg diff --git a/sugar_network/static/httpdocs/images/missing-logo.png b/blobs/missing-logo.png index 98be121..98be121 100644 --- a/sugar_network/static/httpdocs/images/missing-logo.png +++ b/blobs/missing-logo.png Binary files differ diff --git a/sugar_network/static/httpdocs/images/missing.png b/blobs/missing.png index 91a65a8..91a65a8 100644 --- a/sugar_network/static/httpdocs/images/missing.png +++ b/blobs/missing.png Binary files differ diff --git a/sugar_network/static/httpdocs/images/missing.svg b/blobs/missing.svg index 7e6a568..7e6a568 100644 --- a/sugar_network/static/httpdocs/images/missing.svg +++ b/blobs/missing.svg diff --git a/sugar_network/static/httpdocs/images/package-logo.png b/blobs/package-logo.png index c6cf086..c6cf086 100644 --- a/sugar_network/static/httpdocs/images/package-logo.png +++ b/blobs/package-logo.png Binary files differ diff --git a/sugar_network/static/httpdocs/images/package.png b/blobs/package.png index 24bd5ac..24bd5ac 100644 --- a/sugar_network/static/httpdocs/images/package.png +++ b/blobs/package.png Binary files differ diff --git a/sugar_network/static/httpdocs/images/package.svg b/blobs/package.svg index a5fd32d..a5fd32d 100644 --- a/sugar_network/static/httpdocs/images/package.svg +++ b/blobs/package.svg diff --git a/doc/objects.dia b/doc/objects.dia index f232524..815102a 100644 --- a/doc/objects.dia +++ b/doc/objects.dia @@ -2,10 +2,10 @@ - + - + @@ -37,6 +37,9 @@ + + + @@ -53,7 +56,7 @@ - + @@ -61,59 +64,42 @@ + + + + + + + + + + + + + + + + + + + - - ## - - - - - - - - - - - - ## - - - ## - - - - - - - - - ## - - - #*# - - - - - - - - + - + - - - - + + + + @@ -123,18 +109,6 @@ - - - - - - - - - - - - ## @@ -171,55 +145,41 @@ - - - - - - - - - - - - - - - - - - - + + - - + + - + + + + - + - - + + - + - + - + - + #Context# @@ -260,17 +220,20 @@ + + + - + - + - + @@ -541,6 +504,29 @@ + #releases# + + + #[] 
[R WA]# + + + #[]# + + + #List on Context releases; see Wiki for details# + + + + + + + + + + + + + #downloads# @@ -592,18 +578,18 @@ - + - + - + @@ -647,17 +633,20 @@ + + + - + - + - + @@ -707,7 +696,7 @@ ## - #User name (the same as Sugar nickname); the only users friendly identifier# + #Users friendly person identifier (the same as Sugar nickname); is not assumed to be unique, look for guid for uniqueness# @@ -776,7 +765,7 @@ ## - #DSA public key for the key generated by Sugar Shell in user's profile# + #RSA public key to authenticate the user# @@ -795,7 +784,7 @@ - + @@ -850,17 +839,20 @@ + + + - + - + - + @@ -1044,7 +1036,30 @@ - + + + + + + + + + + + + + + + + + + + + + + + + ## @@ -1081,55 +1096,41 @@ - - - - - - - - - - - - - - - - - - - + + - - + + - + + + + - + - - + + - + - + - + - + #Post# @@ -1170,17 +1171,20 @@ + + + - + - + - + @@ -1385,13 +1389,13 @@ #comments# - #[object] [R W S]# + #[str] [R W F I]# #[]# - #List of JSON objects with users' comments; see Wiki for details# + #Users' comments# @@ -1502,21 +1506,21 @@ - + - + - + - + - + - + #Report# @@ -1525,7 +1529,7 @@ #Resource# - #Reports about issues with Releases# + #Failure reports about issues with the Context# @@ -1557,17 +1561,20 @@ + + + - + - + - + @@ -1631,16 +1638,16 @@ - #release# + #version# - #Release [R WN]# + #str [R WN]# #""# - #Implementation the Report belongs to; if empty, there is no way to detect what exact Implementation is affected# + #Release version introduced the fail; if empty, there is no way to detect what exact version is affected# @@ -1654,16 +1661,16 @@ - #environ# + #error# - #dict [R WN]# + #str [R WN F]# ## - #Dictionary with useful information about failure environment# + #Error string generated by the application# @@ -1677,16 +1684,85 @@ - #data# + #uname# - #blob [R WN]# + #str [R WN F]# ## - #Tarball with logs# + #Output of the `uname -a` command# + + + + + + + + + + + + + + #lsb_release# + + + #dict [R WN F]# + + + ## + + + #Outpout of the `lsb_release -a` command# + + + + + + + + + + + + + + #solution# + + + #[] [R WN F]# + + + #[]# + + + #Dependency solution which was used while launching# + + + + + + + + + + + + + + #logs# + + + #[blob] [R WN F]# + + + ## + + + #All affected log files# @@ -1706,7 +1782,8 @@ - + + @@ -1726,7 +1803,7 @@ - + @@ -1735,12 +1812,12 @@ - + - + @@ -1757,7 +1834,7 @@ - + @@ -1768,12 +1845,12 @@ - + - + @@ -1790,7 +1867,7 @@ - + @@ -1801,12 +1878,12 @@ - + - + @@ -1823,7 +1900,7 @@ - + @@ -1834,12 +1911,12 @@ - + - + @@ -1856,7 +1933,7 @@ - + @@ -1867,12 +1944,12 @@ - + - + @@ -1889,7 +1966,7 @@ - + @@ -1900,12 +1977,12 @@ - + - + @@ -1922,7 +1999,7 @@ - + @@ -1933,12 +2010,12 @@ - + - + @@ -1955,7 +2032,7 @@ - + @@ -1966,12 +2043,12 @@ - + - + @@ -1988,7 +2065,7 @@ - + @@ -1999,12 +2076,12 @@ - + - + @@ -2021,7 +2098,7 @@ - + @@ -2032,12 +2109,12 @@ - + - + @@ -2054,7 +2131,7 @@ - + @@ -2065,12 +2142,12 @@ - + - + @@ -2087,7 +2164,7 @@ - + @@ -2099,277 +2176,5 @@ - - - - - - - - - - - - - - - - - - #Release# - - - #Resource# - - - #Implementation of the Context, e.g., Sugar activity bundle or .xol content# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #context# - - - #Context [R WN]# - - - ## - - - #The Context this Version belongs to# - - - - - - - - - - - - - - #license# - - - #[enum] [R WN F]# - - - ## - - - #List of licenses that the Implementation is covered by; see the Wiki for details# - - - - - - - - - - - - - - #version# - - - 
#str [R WN S]# - - - ## - - - #Version number# - - - - - - - - - - - - - - #stability# - - - #enum [R WN F]# - - - ## - - - #Version stability level in Zero Install notation; see the Wiki for details# - - - - - - - - - - - - - - #notes# - - - #markdown [R WN F I]# - - - ## - - - #Release notes# - - - - - - - - - - - - - - #requires# - - - #[str] [R]# - - - ## - - - #List of dependencies; see Wiki for details# - - - - - - - - - - - - - - #data# - - - #blob [R WN]# - - - ## - - - #Contain implementation data# - - - - - - - - - - - - - - - - - - diff --git a/misc/aslo-sync b/misc/aslo-sync index f736681..581581a 100755 --- a/misc/aslo-sync +++ b/misc/aslo-sync @@ -527,7 +527,7 @@ class Application(application.Application): (sugar_min, sugar_max), 'license': alicense, }), - bundle_path) as impl: + file(bundle_path, 'rb')) as (impl, data): impl['guid'] = version_id if 'notes' not in impl: impl['notes'] = self.get_i18n_field(releasenotes) @@ -538,9 +538,9 @@ class Application(application.Application): 'order': 0, 'role': 3, 'name': fullname, }} impl['layer'] = layers - impl['data']['url'] = \ + data['url'] = \ '/'.join([DOWNLOAD_URL, str(addon_id), filename]) - impl['data']['blob_size'] = os.stat(bundle_path).st_size + data['size'] = os.stat(bundle_path).st_size except Exception, error: print '-- Failed to sync %r[%s]' % (filename, version_id) traceback.print_exception(*sys.exc_info()) diff --git a/sugar-network b/sugar-network index cb81d79..818721e 100755 --- a/sugar-network +++ b/sugar-network @@ -21,6 +21,7 @@ import sys import shlex import types import locale +import gettext from json import dumps, loads from os.path import join, exists, isfile @@ -35,6 +36,9 @@ from sugar_network.toolkit import application, coroutine from sugar_network.toolkit import Option, BUFFER_SIZE, enforce +gettext.textdomain('sugar-network') + + quiet = Option( 'turn off any output', default=False, type_cast=Option.bool_cast, action='store_true', @@ -71,7 +75,7 @@ _LIST_RE = re.compile(r'\s*[;,:]+\s*') class ClientRouter(Router, ClientRoutes): def __init__(self): - home = db.Volume(client.path('db'), RESOURCES, lazy_open=True) + home = db.Volume(client.path('db'), RESOURCES) Router.__init__(self, self) ClientRoutes.__init__(self, home, client.api_url.value if not offline.value else None, diff --git a/sugar-network-client b/sugar-network-client index 5147168..6e0d772 100755 --- a/sugar-network-client +++ b/sugar-network-client @@ -19,6 +19,7 @@ import os import errno import signal import locale +import gettext import logging from os.path import join, abspath, exists @@ -34,6 +35,9 @@ from sugar_network.toolkit import mountpoints, printf, application from sugar_network.toolkit import Option, coroutine +gettext.textdomain('sugar-network') + + class Application(application.Daemon): def __init__(self, **kwargs): @@ -96,7 +100,7 @@ class Application(application.Daemon): self.cmd_start() def run(self): - volume = db.Volume(client.path('db'), RESOURCES, lazy_open=True) + volume = db.Volume(client.path('db'), RESOURCES) routes = CachedClientRoutes(volume, client.api_url.value if not client.server_mode.value else None) router = Router(routes, allow_spawn=True) diff --git a/sugar-network-node b/sugar-network-node index 07bd96d..6721337 100755 --- a/sugar-network-node +++ b/sugar-network-node @@ -17,23 +17,24 @@ import os import locale +import gettext import logging from os.path import exists, join from gevent import monkey -import sugar_network_webui as webui -from sugar_network import db, node, client, toolkit -from 
sugar_network.node import stats_node, stats_user, obs +from sugar_network import db, node, client, toolkit, model +from sugar_network.node import stats_user, obs, master, slave +from sugar_network.node import model as master_model from sugar_network.node.routes import generate_node_stats -from sugar_network.node.master import MasterRoutes -from sugar_network.node.slave import SlaveRoutes -from sugar_network.model import RESOURCES from sugar_network.toolkit.http import Connection from sugar_network.toolkit.router import Router, Request, Response from sugar_network.toolkit import coroutine, application, Option, enforce +gettext.textdomain('sugar-network') + + class Application(application.Daemon): jobs = coroutine.Pool() @@ -54,18 +55,21 @@ class Application(application.Daemon): if node.certfile.value: ssl_args['certfile'] = node.certfile.value - volume = db.Volume(node.data_root.value, RESOURCES) - self.jobs.spawn(volume.populate) - master_path = join(node.data_root.value, 'master') if exists(master_path): with file(master_path) as f: - guid = f.read().strip() - logging.info('Start %s node in master mode', guid) - cp = MasterRoutes(guid, volume) + node_key = f.read().strip() + node_class = master.MasterRoutes + resources = master_model.RESOURCES + logging.info('Start %s node in master mode', node_key) else: + node_key = join(node.data_root.value, 'node.key') + node_class = slave.SlaveRoutes + resources = model.RESOURCES logging.info('Start slave node') - cp = SlaveRoutes(join(node.data_root.value, 'node.key'), volume) + volume = db.Volume(node.data_root.value, resources) + cp = node_class(node_key, volume, find_limit=node.find_limit.value) + self.jobs.spawn(volume.populate) logging.info('Listening for requests on %s:%s', node.host.value, node.port.value) @@ -74,27 +78,6 @@ class Application(application.Daemon): self.jobs.spawn(server.serve_forever) self.accept() - if webui.webui.value: - # XXX Until implementing regular web users - from sugar_network.client.routes import ClientRoutes - - client.login.value = 'demo' - # Point client API to volume directly - client.mounts_root.value = None - - home = db.Volume(join(application.rundir.value, 'db'), RESOURCES) - client_routes = ClientRoutes(home, - api_url='http://localhost:%s' % node.port.value) - client_app = Router(client_routes) - host = (node.host.value, webui.webui_port.value) - logging.info('Start Web server on %s:%s port', *host) - server = coroutine.WSGIServer(host, - webui.get_app( - lambda **kwargs: client_app.call(Request(**kwargs), - Response()), - client.api_url.value, True)) - self.jobs.spawn(server.serve_forever) - try: self.jobs.join() finally: @@ -123,7 +106,7 @@ class Application(application.Daemon): 're-generate node statistics', name='restat') def restat(self): enforce(not self.check_for_instance(), 'Shutdown the server at first') - volume = db.Volume(node.data_root.value, RESOURCES) + volume = db.Volume(node.data_root.value, model.RESOURCES) volume.populate() generate_node_stats(volume, join(node.stats_root.value, 'node')) @@ -143,10 +126,8 @@ locale.setlocale(locale.LC_ALL, '') Option.seek('main', application) Option.seek('main', [toolkit.cachedir]) -Option.seek('webui', webui) Option.seek('client', [client.api_url]) Option.seek('node', node) -Option.seek('node-stats', stats_node) Option.seek('user-stats', stats_user) Option.seek('obs', obs) Option.seek('db', db) diff --git a/sugar_network/client/cache.py b/sugar_network/client/cache.py index e13ccb6..df76a29 100644 --- a/sugar_network/client/cache.py +++ 
b/sugar_network/client/cache.py @@ -17,9 +17,10 @@ import os import sys import time import logging -from os.path import exists, basename +from os.path import exists from sugar_network import client +from sugar_network.db import files from sugar_network.toolkit import pylru, enforce @@ -30,8 +31,7 @@ _logger = logging.getLogger('cache') class Cache(object): - def __init__(self, volume): - self._volume = volume + def __init__(self): self._pool = None self._du = 0 self._acquired = {} @@ -71,14 +71,18 @@ class Cache(object): self.checkin(guid, acquired[1]) del self._acquired[guid] - def checkin(self, guid, size): + def checkin(self, digest, size): self._ensure_open() - if guid in self._pool: - self._pool.__getitem__(guid) + if digest in self._pool: + self._pool.__getitem__(digest) return + + + _logger.debug('Checkin %r %d bytes long', guid, size) - mtime = os.stat(self._volume['release'].path(guid)).st_mtime - self._pool[guid] = (size, mtime) + + mtime = os.stat(files.get(digest).path).st_mtime + self._pool[digest] = (size, mtime) self._du += size def checkout(self, guid, *args): @@ -112,17 +116,25 @@ class Cache(object): _logger.debug('Open releases pool') pool = [] - impls = self._volume['release'] - for res in impls.find(not_layer=['local'])[0]: - meta = res.meta('data') - if not meta or 'blob_size' not in meta: - continue - clone = self._volume['context'].path(res['context'], '.clone') - if exists(clone) and basename(os.readlink(clone)) == res.guid: + for release in self._volume['release'].find(not_layer=['local'])[0]: + meta = files.get(release['data']) + if not meta: continue + + """ + TODO + + solution_path = client.path('solutions', release['context']) + if exists(solution_path): + with file(path) as f: + cached_api_url, cached_stability, solution = json.load(f) + if solution[0]['guid'] == release['guid']: + continue + + """ pool.append(( - os.stat(impls.path(res.guid)).st_mtime, - res.guid, + os.stat(meta.path).st_mtime, + release.guid, meta.get('unpack_size') or meta['blob_size'], )) diff --git a/sugar_network/client/journal.py b/sugar_network/client/journal.py index ee2a2f3..0dcae12 100644 --- a/sugar_network/client/journal.py +++ b/sugar_network/client/journal.py @@ -19,8 +19,8 @@ import logging from shutil import copyfileobj from tempfile import NamedTemporaryFile -from sugar_network import client -from sugar_network.toolkit.router import Blob, route, Request +from sugar_network import client, toolkit +from sugar_network.toolkit.router import route, Request from sugar_network.toolkit import enforce @@ -105,15 +105,13 @@ class Routes(object): @route('GET', ['journal', None, 'preview']) def journal_get_preview(self, request, response): - return Blob({ - 'blob': _prop_path(request.guid, 'preview'), + return toolkit.File(_prop_path(request.guid, 'preview'), { 'mime_type': 'image/png', }) @route('GET', ['journal', None, 'data']) def journal_get_data(self, request, response): - return Blob({ - 'blob': _ds_path(request.guid, 'data'), + return toolkit.File(_ds_path(request.guid, 'data'), { 'mime_type': get(request.guid, 'mime_type') or 'application/octet', }) diff --git a/sugar_network/client/releases.py b/sugar_network/client/releases.py index ff35d16..c93a91a 100644 --- a/sugar_network/client/releases.py +++ b/sugar_network/client/releases.py @@ -32,7 +32,8 @@ from sugar_network.client.cache import Cache from sugar_network.client import journal, packagekit from sugar_network.toolkit.router import Request, Response, route from sugar_network.toolkit.bundle import Bundle -from 
sugar_network.toolkit import http, coroutine, enforce +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit import i18n, http, coroutine, enforce _MIMETYPE_DEFAULTS_KEY = '/desktop/sugar/journal/defaults' @@ -43,22 +44,20 @@ _logger = logging.getLogger('releases') class Routes(object): - def __init__(self, local_volume): - self._volume = local_volume + def __init__(self): self._node_mtime = None self._call = lambda **kwargs: \ self._map_exceptions(self.fallback, **kwargs) - self._cache = Cache(local_volume) + self._cache = Cache() def invalidate_solutions(self, mtime): self._node_mtime = mtime @route('GET', ['context', None], cmd='path') def path(self, request): - clone_path = self._volume['context'].path(request.guid, '.clone') - enforce(exists(clone_path), http.NotFound) - clone_impl = basename(os.readlink(clone_path)) - return self._volume['release'].path(clone_impl, 'data') + clone = self._solve(request) + enforce(clone is not None, http.NotFound, 'No clones') + return clone['path'] @route('GET', ['context', None], cmd='launch', arguments={'args': list}, mime_type='text/event-stream') @@ -75,18 +74,18 @@ class Routes(object): acquired = [] try: - impl = self._solve_impl(context, request) + impl = self._solve(request, context['type']) if 'activity' not in context['type']: app = request.get('context') or \ _mimetype_context(impl['data']['mime_type']) enforce(app, 'Cannot find proper application') - acquired += self._checkin_impl( + acquired += self._checkin( context, request, self._cache.acquire) request = Request(path=['context', app], object_id=impl['path'], session=request.session) for context in self._checkin_context(request): - impl = self._solve_impl(context, request) - acquired += self._checkin_impl( + impl = self._solve(request, context['type']) + acquired += self._checkin( context, request, self._cache.acquire) child = _exec(context, request, impl) @@ -105,19 +104,15 @@ class Routes(object): enforce(not request.content or self.inline(), http.ServiceUnavailable, 'Not available in offline') for context in self._checkin_context(request, 'clone'): - cloned_path = context.path('.clone') if request.content: - impl = self._solve_impl(context, request) - self._checkin_impl(context, request, self._cache.checkout) - impl_path = relpath(dirname(impl['path']), context.path()) - os.symlink(impl_path, cloned_path) + impl = self._solve(request, context['type']) + self._checkin(context, request, self._cache.checkout) yield {'event': 'ready'} else: - cloned_impl = basename(os.readlink(cloned_path)) - meta = self._volume['release'].get(cloned_impl).meta('data') + clone = self._solve(request) + meta = this.volume['release'].get(clone['guid']).meta('data') size = meta.get('unpack_size') or meta['blob_size'] - self._cache.checkin(cloned_impl, size) - os.unlink(cloned_path) + self._cache.checkin(clone['guid'], size) @route('GET', ['context', None], cmd='clone', arguments={'requires': list}) @@ -147,18 +142,14 @@ class Routes(object): raise http.ServiceUnavailable, error, sys.exc_info()[2] def _checkin_context(self, request, layer=None): - contexts = self._volume['context'] + contexts = this.volume['context'] guid = request.guid if layer and not request.content and not contexts.exists(guid): return if not contexts.exists(guid): - context = self._call(method='GET', path=['context', guid]) - contexts.create(context, setters=True) - for prop in ('icon', 'artifact_icon', 'logo'): - blob = self._call(method='GET', path=['context', guid, prop]) - if blob is not None: - 
contexts.update(guid, {prop: {'blob': blob}}) + patch = self._call(method='GET', path=['context', guid], cmd='diff') + contexts.merge(guid, patch) context = contexts.get(guid) if layer and bool(request.content) == (layer in context['layer']): return @@ -171,14 +162,9 @@ class Routes(object): else: layer_value = set(context['layer']) - set([layer]) contexts.update(guid, {'layer': list(layer_value)}) - self.broadcast({ - 'event': 'update', - 'resource': 'context', - 'guid': guid, - }) _logger.debug('Checked %r in: %r', guid, layer_value) - def _solve_impl(self, context, request): + def _solve(self, request, force_type=None): stability = request.get('stability') or \ client.stability(request.guid) @@ -193,9 +179,11 @@ class Routes(object): solution, stale = self._cache_solution_get(request.guid, stability) if stale is False: _logger.debug('Reuse cached %r solution', request.guid) - elif solution is not None and not self.inline(): - _logger.debug('Reuse stale %r in offline', request.guid) - elif 'activity' in context['type']: + elif solution is not None and (not force_type or not self.inline()): + _logger.debug('Reuse stale %r solution', request.guid) + elif not force_type: + return None + elif 'activity' in force_type: from sugar_network.client import solver solution = self._map_exceptions(solver.solve, self.fallback, request.guid, stability) @@ -203,16 +191,18 @@ class Routes(object): response = Response() blob = self._call(method='GET', path=['context', request.guid], cmd='clone', stability=stability, response=response) - response.meta['data']['blob'] = blob - solution = [response.meta] + release = response.meta + release['mime_type'] = response.content_type + release['size'] = response.content_length + files.post(blob, digest=release['spec']['*-*']['bundle']) + solution = [release] request.session['solution'] = solution return solution[0] - def _checkin_impl(self, context, request, cache_call): + def _checkin(self, context, request, cache_call): if 'clone' in context['layer']: cache_call = self._cache.checkout - impls = self._volume['release'] if 'activity' in context['type']: to_install = [] @@ -226,49 +216,42 @@ class Routes(object): def cache_impl(sel): guid = sel['guid'] - data = sel['data'] - sel['path'] = impls.path(guid, 'data') - size = data.get('unpack_size') or data['blob_size'] - - blob = None - if 'blob' in data: - blob = data.pop('blob') - - if impls.exists(guid): - return cache_call(guid, size) - - if blob is None: - blob = self._call(method='GET', path=['release', guid, 'data']) - - blob_dir = dirname(sel['path']) - if not exists(blob_dir): - os.makedirs(blob_dir) - - with toolkit.mkdtemp(dir=blob_dir) as blob_dir: - if 'activity' in context['type']: - self._cache.ensure(size, data['blob_size']) - with toolkit.TemporaryFile() as tmp_file: - shutil.copyfileobj(blob, tmp_file) - tmp_file.seek(0) - with Bundle(tmp_file, 'application/zip') as bundle: - bundle.extractall(blob_dir, prefix=bundle.rootdir) - for exec_dir in ('bin', 'activity'): - bin_path = join(blob_dir, exec_dir) - if not exists(bin_path): - continue - for filename in os.listdir(bin_path): - os.chmod(join(bin_path, filename), 0755) - blob = blob_dir - else: - self._cache.ensure(size) - with file(join(blob_dir, 'data'), 'wb') as f: - shutil.copyfileobj(blob, f) - blob = f.name - impl = deepcopy(sel) - impl['mtime'] = impl['ctime'] - impl['data']['blob'] = blob - impls.create(impl) - return cache_call(guid, size) + + + + + data = files.get(guid) + + if data is not None: + return cache_call(guid, data['unpack_size']) 
+ + response = Response() + blob = self._call(method='GET', path=['release', guid, 'data'], + response=response) + + if 'activity' not in context['type']: + self._cache.ensure(response.content_length) + files.post(blob, response.meta, sel['data']) + return cache_call(guid, response.content_length) + + with toolkit.mkdtemp(dir=files.path(sel['data'])) as blob_dir: + self._cache.ensure( + response.meta['unpack_size'], + response.content_length) + with toolkit.TemporaryFile() as tmp_file: + shutil.copyfileobj(blob, tmp_file) + tmp_file.seek(0) + with Bundle(tmp_file, 'application/zip') as bundle: + bundle.extractall(blob_dir, prefix=bundle.rootdir) + for exec_dir in ('bin', 'activity'): + bin_path = join(blob_dir, exec_dir) + if not exists(bin_path): + continue + for filename in os.listdir(bin_path): + os.chmod(join(bin_path, filename), 0755) + + files.update(sel['data'], response.meta) + return cache_call(guid, response.meta['unpack_size']) result = [] for sel in request.session['solution']: @@ -278,11 +261,8 @@ class Routes(object): request.session['stability'], request.session['solution']) return result - def _cache_solution_path(self, guid): - return client.path('solutions', guid[:2], guid) - def _cache_solution_get(self, guid, stability): - path = self._cache_solution_path(guid) + path = client.path('solutions', guid) solution = None if exists(path): try: @@ -305,7 +285,7 @@ class Routes(object): def _cache_solution_set(self, guid, stability, solution): if isinstance(solution, _CachedSolution): return - path = self._cache_solution_path(guid) + path = client.path('solutions', guid) if not exists(dirname(path)): os.makedirs(dirname(path)) with file(path, 'w') as f: @@ -315,13 +295,12 @@ class Routes(object): for context in self._checkin_context(request): if 'clone' not in context['layer']: return self._map_exceptions(self.fallback, request, response) - guid = basename(os.readlink(context.path('.clone'))) - impl = self._volume['release'].get(guid) - response.meta = impl.properties([ + release = this.volume['release'].get(self._solve(request)['guid']) + response.meta = release.properties([ 'guid', 'ctime', 'layer', 'author', 'tags', 'context', 'version', 'stability', 'license', 'notes', 'data', ]) - return impl.meta('data') + return release.meta('data') def _activity_id_new(): @@ -397,7 +376,7 @@ def _exec(context, request, sel): environ['SUGAR_BUNDLE_PATH'] = impl_path environ['SUGAR_BUNDLE_ID'] = context.guid environ['SUGAR_BUNDLE_NAME'] = \ - toolkit.gettext(context['title']).encode('utf8') + i18n.decode(context['title']).encode('utf8') environ['SUGAR_BUNDLE_VERSION'] = sel['version'] environ['SUGAR_ACTIVITY_ROOT'] = datadir environ['SUGAR_LOCALEDIR'] = join(impl_path, 'locale') diff --git a/sugar_network/client/routes.py b/sugar_network/client/routes.py index c6ea6d2..50d8632 100644 --- a/sugar_network/client/routes.py +++ b/sugar_network/client/routes.py @@ -24,6 +24,7 @@ from sugar_network import db, client, node, toolkit, model from sugar_network.client import journal, releases from sugar_network.node.slave import SlaveRoutes from sugar_network.toolkit import netlink, mountpoints +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit.router import ACL, Request, Response, Router from sugar_network.toolkit.router import route, fallbackroute from sugar_network.toolkit import zeroconf, coroutine, http, exception, enforce @@ -189,44 +190,38 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes): yield {'event': 'done', 'guid': guid} @fallbackroute() 
- def fallback(self, request=None, response=None, method=None, path=None, - cmd=None, content=None, content_stream=None, content_type=None, - **kwargs): + def fallback(self, request=None, response=None, **kwargs): if request is None: - request = Request(method=method, path=path, cmd=cmd, - content=content, content_stream=content_stream, - content_type=content_type) + request = Request(**kwargs) if response is None: response = Response() - request.update(kwargs) - if self._inline.is_set(): - if client.layers.value and \ - request.resource in ('context', 'release'): - request.add('layer', *client.layers.value) - request.principal = self._auth.login - try: - reply = self._node.call(request, response) - if hasattr(reply, 'read'): - if response.relocations: - return reply - else: - return _ResponseStream(reply, self._restart_online) - else: - return reply - except (http.ConnectionError, IncompleteRead): + + if not self._inline.is_set(): + return self._local.call(request, response) + + if client.layers.value and request.resource in ('context', 'release'): + request.add('layer', *client.layers.value) + request.principal = self._auth.login + try: + reply = self._node.call(request, response) + if hasattr(reply, 'read'): if response.relocations: - raise - self._restart_online() - return self._local.call(request, response) - else: + return reply + else: + return _ResponseStream(reply, self._restart_online) + else: + return reply + except (http.ConnectionError, IncompleteRead): + if response.relocations: + raise + self._restart_online() return self._local.call(request, response) def _got_online(self): enforce(not self._inline.is_set()) _logger.debug('Got online on %r', self._node) self._inline.set() - self.broadcast({'event': 'inline', 'state': 'online'}) - self._local.volume.broadcast = None + this.localcast({'event': 'inline', 'state': 'online'}) def _got_offline(self, force=False): if not force and not self._inline.is_set(): @@ -235,9 +230,8 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes): self._node.close() if self._inline.is_set(): _logger.debug('Got offline on %r', self._node) - self.broadcast({'event': 'inline', 'state': 'offline'}) + this.localcast({'event': 'inline', 'state': 'offline'}) self._inline.clear() - self._local.volume.broadcast = self.broadcast def _restart_online(self): _logger.debug('Lost %r connection, try to reconnect in %s seconds', @@ -266,16 +260,19 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes): mtime = event.get('mtime') if mtime: self.invalidate_solutions(mtime) - self.broadcast(event) + this.broadcast(event) def handshake(url): _logger.debug('Connecting to %r node', url) self._node = client.Connection(url, auth=self._auth) status = self._node.get(cmd='status') self._auth.allow_basic_auth = (status.get('level') == 'master') + """ + TODO switch to seqno impl_info = status['resources'].get('release') if impl_info: self.invalidate_solutions(impl_info['mtime']) + """ if self._inline.is_set(): _logger.info('Reconnected to %r node', url) else: @@ -284,7 +281,7 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes): def connect(): timeout = _RECONNECT_TIMEOUT while True: - self.broadcast({'event': 'inline', 'state': 'connecting'}) + this.localcast({'event': 'inline', 'state': 'connecting'}) for url in self._remote_urls: while True: try: @@ -329,8 +326,7 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes): profile['guid'] = self._auth.login volume['user'].create(profile) - self._node = 
_NodeRoutes(join(db_path, 'node'), volume, - self.broadcast) + self._node = _NodeRoutes(join(db_path, 'node'), volume) self._jobs.spawn(volume.populate) logging.info('Start %r node on %s port', volume.root, node.port.value) @@ -364,6 +360,11 @@ class CachedClientRoutes(ClientRoutes): ClientRoutes._got_offline(self, force) def _push(self): + # TODO should work using regular pull/push + return + + + pushed_seq = toolkit.Sequence() skiped_seq = toolkit.Sequence() volume = self._local.volume @@ -388,24 +389,11 @@ class CachedClientRoutes(ClientRoutes): diff_seq = toolkit.Sequence() post_requests = [] for prop, meta, seqno in patch: - if 'blob' in meta: - request = Request(method='PUT', path=[res, guid, prop]) - request.content_type = meta['mime_type'] - request.content_length = os.stat(meta['blob']).st_size - request.content_stream = \ - toolkit.iter_file(meta['blob']) - post_requests.append((request, seqno)) - elif 'url' in meta: - request = Request(method='PUT', path=[res, guid, prop]) - request.content_type = 'application/json' - request.content = meta - post_requests.append((request, seqno)) - else: - value = meta['value'] - if prop == 'layer': - value = list(set(value) - _LOCAL_LAYERS) - diff[prop] = value - diff_seq.include(seqno, seqno) + value = meta['value'] + if prop == 'layer': + value = list(set(value) - _LOCAL_LAYERS) + diff[prop] = value + diff_seq.include(seqno, seqno) if not diff: continue if 'guid' in diff: @@ -426,7 +414,6 @@ class CachedClientRoutes(ClientRoutes): if not pushed_seq: if not self._push_seq.mtime: self._push_seq.commit() - self.broadcast({'event': 'push'}) return _logger.info('Pushed %r local cache', pushed_seq) @@ -441,38 +428,32 @@ class CachedClientRoutes(ClientRoutes): volume['report'].wipe() self._push_seq.commit() - self.broadcast({'event': 'push'}) -class _LocalRoutes(model.VolumeRoutes, Router): +class _LocalRoutes(db.Routes, Router): def __init__(self, volume): - model.VolumeRoutes.__init__(self, volume) + db.Routes.__init__(self, volume) Router.__init__(self, self) - def on_create(self, request, props, event): + def on_create(self, request, props): props['layer'] = tuple(props['layer']) + ('local',) - model.VolumeRoutes.on_create(self, request, props, event) + db.Routes.on_create(self, request, props) class _NodeRoutes(SlaveRoutes, Router): - def __init__(self, key_path, volume, localcast): + def __init__(self, key_path, volume): SlaveRoutes.__init__(self, key_path, volume) Router.__init__(self, self) self.api_url = 'http://127.0.0.1:%s' % node.port.value - self._localcast = localcast self._mounts = toolkit.Pool() self._jobs = coroutine.Pool() mountpoints.connect(_SYNC_DIRNAME, self.__found_mountcb, self.__lost_mount_cb) - def broadcast(self, event=None, request=None): - SlaveRoutes.broadcast(self, event, request) - self._localcast(event) - def close(self): self.volume.close() @@ -481,27 +462,27 @@ class _NodeRoutes(SlaveRoutes, Router): (self.volume.root, self.api_url) def _sync_mounts(self): - self._localcast({'event': 'sync_start'}) + this.localcast({'event': 'sync_start'}) for mountpoint in self._mounts: - self._localcast({'event': 'sync_next', 'path': mountpoint}) + this.localcast({'event': 'sync_next', 'path': mountpoint}) try: self._offline_session = self._offline_sync( join(mountpoint, _SYNC_DIRNAME), **(self._offline_session or {})) except Exception, error: _logger.exception('Failed to complete synchronization') - self._localcast({'event': 'sync_abort', 'error': str(error)}) + this.localcast({'event': 'sync_abort', 'error': str(error)}) 
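The hunks around this point replace the per-instance broadcast callbacks with the coroutine-local `this` object from sugar_network.toolkit.coroutine. A minimal sketch of the pattern as it is used in this patch; the split between locally delivered and generally propagated events is inferred from the call sites and from the new TODO entry about delivering spawn events only to local subscribers:

    from sugar_network.toolkit.coroutine import this

    def notify_sync_progress(mountpoint):
        # UI progress events go to local subscribers via the coroutine-local
        # `this` object; no broadcast callback is plumbed through constructors
        this.localcast({'event': 'sync_next', 'path': mountpoint})

    def notify_commit(resource, mtime):
        # Regular resource events keep flowing through this.broadcast()
        this.broadcast({'event': 'commit', 'resource': resource, 'mtime': mtime})
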
self._offline_session = None raise if self._offline_session is None: _logger.debug('Synchronization completed') - self._localcast({'event': 'sync_complete'}) + this.localcast({'event': 'sync_complete'}) else: _logger.debug('Postpone synchronization with %r session', self._offline_session) - self._localcast({'event': 'sync_paused'}) + this.localcast({'event': 'sync_paused'}) def __found_mountcb(self, path): self._mounts.add(path) diff --git a/sugar_network/client/solver.py b/sugar_network/client/solver.py index 67350b6..84eb9cf 100644 --- a/sugar_network/client/solver.py +++ b/sugar_network/client/solver.py @@ -20,6 +20,7 @@ import logging from os.path import isabs, join, dirname from sugar_network.client import packagekit +from sugar_network.toolkit.router import ACL from sugar_network.toolkit.spec import parse_version from sugar_network.toolkit import http, lsb_release @@ -191,12 +192,10 @@ def _load_feed(context): feed.name = context return feed - feed_content = None + releases = None try: - feed_content = _call(method='GET', path=['context', context], - cmd='feed', layer='origin', stability=_stability, - distro=lsb_release.distributor_id()) - _logger.trace('[%s] Found feed: %r', context, feed_content) + releases = _call(method='GET', path=['context', context, 'releases']) + _logger.trace('[%s] Found feed: %r', context, releases) except http.ServiceUnavailable: _logger.trace('[%s] Failed to fetch the feed', context) raise @@ -204,13 +203,33 @@ def _load_feed(context): _logger.exception('[%s] Failed to fetch the feed', context) return None + """ + for digest, release in releases: + if [i for i in release['author'].values() + if i['role'] & ACL.ORIGINAL] and \ + release['stability'] == _stability and \ + f + + + + + + stability=_stability, + distro=lsb_release.distributor_id()) + """ + + for impl in feed_content['releases']: + feed.implement(impl) + + + # XXX 0install fails on non-ascii `name` values feed.name = context feed.to_resolve = feed_content.get('packages') if not feed.to_resolve: _logger.trace('[%s] No compatible packages', context) - for impl in feed_content['releases']: - feed.implement(impl) + + if not feed.to_resolve and not feed.implementations: _logger.trace('[%s] No releases', context) diff --git a/sugar_network/db/__init__.py b/sugar_network/db/__init__.py index 2f22a36..b2ceb67 100644 --- a/sugar_network/db/__init__.py +++ b/sugar_network/db/__init__.py @@ -350,8 +350,8 @@ Volume """ from sugar_network.db.metadata import \ - indexed_property, stored_property, blob_property, \ - Property, StoredProperty, BlobProperty, IndexedProperty, AggregatedType + stored_property, indexed_property, Property, Numeric, Boolean, Dict, \ + Enum, List, Aggregated, Blob, Localized from sugar_network.db.index import index_flush_timeout, \ index_flush_threshold, index_write_queue from sugar_network.db.resource import Resource diff --git a/sugar_network/db/directory.py b/sugar_network/db/directory.py index 944f73a..c6957d7 100644 --- a/sugar_network/db/directory.py +++ b/sugar_network/db/directory.py @@ -1,4 +1,4 @@ -# Copyright (C) 2011-2013 Aleksey Lim +# Copyright (C) 2011-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -16,15 +16,12 @@ import os import shutil import logging -from cStringIO import StringIO from os.path import exists, join from sugar_network import toolkit -from sugar_network.toolkit.router import ACL from sugar_network.db.storage import Storage -from 
sugar_network.db.metadata import BlobProperty, Metadata, GUID_PREFIX -from sugar_network.db.metadata import IndexedProperty, StoredProperty -from sugar_network.db.metadata import AggregatedType +from sugar_network.db.metadata import Metadata, Guid +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit import http, exception, enforce @@ -36,8 +33,7 @@ _logger = logging.getLogger('db.directory') class Directory(object): - def __init__(self, root, resource_class, index_class, - broadcast=None, seqno=None): + def __init__(self, root, resource_class, index_class, seqno=None): """ :param index_class: what class to use to access to indexes, for regular casses @@ -51,12 +47,10 @@ class Directory(object): if resource_class.metadata is None: # Metadata cannot be recreated resource_class.metadata = Metadata(resource_class) - resource_class.metadata['guid'] = IndexedProperty('guid', - slot=0, prefix=GUID_PREFIX, acl=ACL.CREATE | ACL.READ) + resource_class.metadata['guid'] = Guid() self.metadata = resource_class.metadata self.resource_class = resource_class - self.broadcast = broadcast or (lambda event: None) self._index_class = index_class self._root = root self._seqno = _SessionSeqno() if seqno is None else seqno @@ -65,25 +59,6 @@ class Directory(object): self._open() - @property - def mtime(self): - return self._index.mtime - - def checkpoint(self): - ts = self._index.checkpoint() - self.broadcast({'event': 'populate', 'mtime': ts}) - - def path(self, guid, *args): - record = self._storage.get(guid) - if not args: - return record.path() - prop = args[0] - if prop in self.metadata and \ - isinstance(self.metadata[prop], BlobProperty): - return record.blob_path(*args) - else: - return record.path(*args) - def wipe(self): self.close() _logger.debug('Wipe %r directory', self.metadata.name) @@ -102,7 +77,7 @@ class Directory(object): """Flush pending chnages to disk.""" self._index.commit() - def create(self, props, event=None, setters=False): + def create(self, props): """Create new document. If `guid` property is not specified, it will be auto set. @@ -116,24 +91,12 @@ class Directory(object): guid = props.get('guid') if not guid: guid = props['guid'] = toolkit.uuid() - if setters: - # XXX Setters are being proccessed on routes level, but, - # while creating resources gotten from routes, it is important - # to call setters as well, e.g., `author` property - doc = self.resource_class(guid, None, props) - for key, value in props.items(): - prop = self.metadata.get(key) - if prop is not None and prop.on_set is not None: - props[key] = prop.on_set(doc, value) _logger.debug('Create %s[%s]: %r', self.metadata.name, guid, props) - post_event = {'event': 'create', 'guid': guid} - if event: - post_event.update(event) - self._index.store(guid, props, self._pre_store, self._post_store, - post_event) + event = {'event': 'create', 'guid': guid} + self._index.store(guid, props, self._prestore, self._broadcast, event) return guid - def update(self, guid, props, event=None): + def update(self, guid, props): """Update properties for an existing document. 
:param guid: @@ -143,11 +106,8 @@ class Directory(object): """ _logger.debug('Update %s[%s]: %r', self.metadata.name, guid, props) - post_event = {'event': 'update', 'guid': guid} - if event: - post_event.update(event) - self._index.store(guid, props, self._pre_store, self._post_store, - post_event) + event = {'event': 'update', 'guid': guid} + self._index.store(guid, props, self._prestore, self._broadcast, event) def delete(self, guid): """Delete document. @@ -158,7 +118,7 @@ class Directory(object): """ _logger.debug('Delete %s[%s]', self.metadata.name, guid) event = {'event': 'delete', 'guid': guid} - self._index.delete(guid, self._post_delete, event) + self._index.delete(guid, self._postdelete, guid, event) def exists(self, guid): return self._storage.get(guid).consistent @@ -171,6 +131,9 @@ class Directory(object): guid, self.metadata.name) return self.resource_class(guid, record, cached_props) + def __getitem__(self, guid): + return self.get(guid) + def find(self, **kwargs): mset = self._index.find(**kwargs) @@ -195,9 +158,9 @@ class Directory(object): """ found = False - migrate = (self.mtime == 0) + migrate = (self._index.mtime == 0) - for guid in self._storage.walk(self.mtime): + for guid in self._storage.walk(self._index.mtime): if not found: _logger.info('Start populating %r index', self.metadata.name) found = True @@ -208,9 +171,7 @@ class Directory(object): record = self._storage.get(guid) try: props = {} - for name, prop in self.metadata.items(): - if not isinstance(prop, StoredProperty): - continue + for name in self.metadata: meta = record.get(name) if meta is not None: props[name] = meta['value'] @@ -224,33 +185,11 @@ class Directory(object): if found: self._save_layout() self.commit() - self.checkpoint() - - def patch(self, guid, props, accept_language=None): - if not accept_language: - accept_language = toolkit.default_lang() - orig = self.get(guid) - patch = {} - for prop, value in (props or {}).items(): - if orig[prop] == value: - continue - if isinstance(self.metadata[prop], StoredProperty) and \ - self.metadata[prop].localized: - if isinstance(value, dict): - if value == dict([(i, orig[prop].get(i)) for i in value]): - continue - elif orig.get(prop, accept_language) == value: - continue - elif isinstance(self.metadata[prop], BlobProperty) and \ - isinstance(value, dict) and \ - value.get('digest') == orig[prop].get('digest'): - continue - patch[prop] = value - return patch def diff(self, seq, exclude_seq=None, **params): - if exclude_seq is None: - exclude_seq = [] + if exclude_seq is not None: + for start, end in exclude_seq: + seq.exclude(start, end) if 'group_by' in params: # Pickup only most recent change params['order_by'] = '-seqno' @@ -263,82 +202,30 @@ class Directory(object): if end: query += str(end) documents, __ = self.find(query=query, **params) - for doc in documents: + yield doc.guid, doc.diff(seq) - def patch(): - for name, prop in self.metadata.items(): - if name == 'seqno' or prop.acl & ACL.CALC: - continue - meta = doc.meta(name) - if meta is None: - continue - seqno = meta.get('seqno') - if seqno not in seq or seqno in exclude_seq: - continue - if isinstance(prop, BlobProperty): - del meta['seqno'] - else: - value = meta.get('value') - if prop.typecast is AggregatedType: - value_ = {} - for key, agg in value.items(): - aggseqno = agg.pop('seqno') - if aggseqno >= start and \ - (not end or aggseqno <= end): - value_[key] = agg - value = value_ - meta = {'mtime': meta['mtime'], 'value': value} - yield name, meta, seqno - - yield doc.guid, patch() - 
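The reworked diff() above, together with the merge() rewrite that follows, is what node synchronization builds on: diff() yields per-document patches for a requested seqno range, merge() applies them on the other side and reports the resulting seqno. A hedged sketch of the round trip; the replicate() helper is illustrative only and assumes doc.diff() returns a property-to-metadata mapping in the form merge() expects:

    from sugar_network import toolkit


    def replicate(src, dst, in_seq):
        # `src` and `dst` are Directory objects for the same resource type
        merged = toolkit.Sequence()
        for guid, patch in src.diff(in_seq):
            seqno, applied = dst.merge(guid, patch)
            if applied:
                merged.include(seqno, seqno)
        return merged
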
- def merge(self, guid, diff, shift_seqno=True, op=None, **kwargs): + def merge(self, guid, diff): """Apply changes for documents.""" - record = self._storage.get(guid) - seqno = None - merge = {} - patch = {} + doc = self.resource_class(guid, self._storage.get(guid)) for prop, meta in diff.items(): - orig_meta = record.get(prop) - if orig_meta is not None and orig_meta['mtime'] >= meta['mtime']: + orig_meta = doc.meta(prop) + if orig_meta and orig_meta['mtime'] >= meta['mtime']: continue - if shift_seqno: - if not seqno: - seqno = self._seqno.next() - meta['seqno'] = seqno - else: - meta['seqno'] = (orig_meta or {}).get('seqno') or 0 - meta.update(kwargs) - if self.metadata.get(prop).typecast is AggregatedType: - for agg in meta['value'].values(): - agg['seqno'] = meta['seqno'] - if orig_meta: - orig_meta['value'].update(meta['value']) - meta['value'] = orig_meta['value'] - merge[prop] = meta - if op is not None: - patch[prop] = meta.get('value') - - if not merge: - return seqno, False - - if op is not None: - op(patch) - for prop, meta in merge.items(): - is_blob = isinstance(self.metadata.get(prop), BlobProperty) - record.set(prop, cleanup_blob=is_blob, **meta) - - if record.consistent: - props = {} - if seqno: - props['seqno'] = seqno + if doc.post_seqno is None: + doc.post_seqno = self._seqno.next() + doc.post(prop, **meta) + + if doc.post_seqno is None: + return None, False + + if doc.exists: # No need in after-merge event, further commit event - # is enough to avoid events flow on nodes synchronization - self._index.store(guid, props, self._pre_store, self._post_store) + # is enough to avoid increasing events flow + self._index.store(guid, doc.props, self._preindex) - return seqno, True + return doc.post_seqno, True def _open(self): if not exists(self._root): @@ -352,63 +239,37 @@ class Directory(object): self._save_layout() self._storage = Storage(self._root, self.metadata) self._index = self._index_class(index_path, self.metadata, - self._post_commit) + self._postcommit) _logger.debug('Open %r resource', self.resource_class) - def _pre_store(self, guid, changes, event=None): - seqno = changes.get('seqno') - if event is not None and not seqno: - seqno = changes['seqno'] = self._seqno.next() + def _broadcast(self, event): + event['resource'] = self.metadata.name + this.broadcast(event) + + def _preindex(self, guid, changes): + doc = self.resource_class(guid, self._storage.get(guid), changes) + for prop in self.metadata: + enforce(doc[prop] is not None, 'Empty %r property', prop) + return doc.props + + def _prestore(self, guid, changes, event): + doc = self.resource_class(guid, self._storage.get(guid)) + doc.post_seqno = self._seqno.next() + for prop in self.metadata.keys(): + value = changes.get(prop) + if value is None: + enforce(doc[prop] is not None, 'Empty %r property', prop) + else: + doc.post(prop, value) + return doc.props - record = self._storage.get(guid) - existed = record.exists - - for name, prop in self.metadata.items(): - value = changes.get(name) - if isinstance(prop, BlobProperty): - if isinstance(value, dict): - record.set(name, seqno=seqno, cleanup_blob=True, **value) - elif isinstance(value, basestring): - record.set(name, seqno=seqno, blob=StringIO(value)) - elif isinstance(prop, StoredProperty): - if value is None: - enforce(existed or prop.default is not None, - 'Value is not specified for %r property', name) - meta = record.get(name) - if meta is not None: - value = meta['value'] - changes[name] = prop.default if value is None else value - else: - if 
prop.typecast is AggregatedType: - for aggvalue in value.values(): - aggvalue['seqno'] = seqno - if existed: - value_ = record.get(name)['value'] - value_.update(value) - value = value_ - elif prop.localized: - if not isinstance(value, dict): - value = {toolkit.default_lang(): value} - if existed and \ - type(value) is dict: # TODO To reset `value` - meta = record.get(name) - if meta is not None: - meta['value'].update(value) - value = meta['value'] - changes[name] = value - record.set(name, value=value, seqno=seqno) - - def _post_store(self, guid, changes, event=None): - if event is not None: - self.broadcast(event) - - def _post_delete(self, guid, event): + def _postdelete(self, guid, event): self._storage.delete(guid) - self.broadcast(event) + self._broadcast(event) - def _post_commit(self): + def _postcommit(self): self._seqno.commit() - self.broadcast({'event': 'commit', 'mtime': self.mtime}) + self._broadcast({'event': 'commit', 'mtime': self._index.mtime}) def _save_layout(self): path = join(self._root, 'layout') diff --git a/sugar_network/db/files.py b/sugar_network/db/files.py new file mode 100644 index 0000000..a675ea3 --- /dev/null +++ b/sugar_network/db/files.py @@ -0,0 +1,146 @@ +# Copyright (C) 2014 Aleksey Lim +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
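This new module is the "separate blob storage from resources db" part of the commit: BLOBs leave the resource records and are stored by content digest. Every function below is still a stub in this commit; the sketch here only illustrates the shape post() seems to be heading for, adapted from the commented-out legacy code kept at the bottom of the file. Digest and path() refer to this module's own definitions, and the on-disk layout behind path() is an assumption:

    import os
    import hashlib

    from sugar_network import toolkit


    def post(content, meta=None):
        # Hypothetical implementation: read a file-like `content`, hash it,
        # and store it under its SHA-1 digest
        sha1 = hashlib.sha1()
        tmp = toolkit.NamedTemporaryFile(delete=False)
        try:
            while True:
                chunk = content.read(toolkit.BUFFER_SIZE)
                if not chunk:
                    break
                sha1.update(chunk)
                tmp.write(chunk)
        finally:
            tmp.close()
        digest = Digest(sha1.hexdigest())
        os.rename(tmp.name, path(digest))
        return digest
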
+ +from sugar_network import toolkit +from sugar_network.toolkit import http, enforce + + +class Digest(str): + pass + + +def post(content, meta=None): + # if fileobj is tmp then move files + pass + + +def update(digest, meta): + pass + + +def get(digest): + pass + + +def delete(digest): + pass + + +def path(digest): + pass + + + + + + +""" + +def diff(volume, in_seq, out_seq=None, exclude_seq=None, layer=None, + fetch_blobs=False, ignore_documents=None, **kwargs): + + if 'blob' in meta: + blob_path = meta.pop('blob') + yield {'guid': guid, + 'diff': {prop: meta}, + 'blob_size': meta['blob_size'], + 'blob': toolkit.iter_file(blob_path), + } + elif fetch_blobs and 'url' in meta: + url = meta.pop('url') + try: + blob = connection.request('GET', url, + allow_redirects=True, + # We need uncompressed size + headers={'Accept-Encoding': ''}) + except Exception: + _logger.exception('Cannot fetch %r for %s:%s:%s', + url, resource, guid, prop) + is_the_only_seq = False + continue + yield {'guid': guid, + 'diff': {prop: meta}, + 'blob_size': + int(blob.headers['Content-Length']), + 'blob': blob.iter_content(toolkit.BUFFER_SIZE), + } + else: + + + + + + + + + + + + + + + + + + 'digest': hashlib.sha1(png.getvalue()).hexdigest(), + + + + + if value is None: + value = {'blob': None} + elif isinstance(value, basestring) or hasattr(value, 'read'): + value = _read_blob(request, prop, value) + blobs.append(value['blob']) + elif isinstance(value, dict): + enforce('url' in value or 'blob' in value, 'No bundle') + else: + raise RuntimeError('Incorrect BLOB value') + +def _read_blob(request, prop, value): + digest = hashlib.sha1() + dst = toolkit.NamedTemporaryFile(delete=False) + + try: + if isinstance(value, basestring): + digest.update(value) + dst.write(value) + else: + size = request.content_length or sys.maxint + while size > 0: + chunk = value.read(min(size, toolkit.BUFFER_SIZE)) + if not chunk: + break + dst.write(chunk) + size -= len(chunk) + digest.update(chunk) + except Exception: + os.unlink(dst.name) + raise + finally: + dst.close() + + if request.prop and request.content_type: + mime_type = request.content_type + else: + mime_type = prop.mime_type + + return {'blob': dst.name, + 'digest': digest.hexdigest(), + 'mime_type': mime_type, + } + +) +""" diff --git a/sugar_network/db/index.py b/sugar_network/db/index.py index 7ff43bb..b44bdfb 100644 --- a/sugar_network/db/index.py +++ b/sugar_network/db/index.py @@ -22,8 +22,7 @@ from os.path import exists, join import xapian -from sugar_network import toolkit -from sugar_network.db.metadata import IndexedProperty, GUID_PREFIX, LIST_TYPES +from sugar_network.db.metadata import GUID_PREFIX from sugar_network.toolkit import Option, coroutine, exception, enforce @@ -65,7 +64,7 @@ class IndexReader(object): self._commit_cb = commit_cb for name, prop in self.metadata.items(): - if isinstance(prop, IndexedProperty): + if prop.indexed: self._props[name] = prop @property @@ -73,11 +72,6 @@ class IndexReader(object): """UNIX seconds of the last `commit()` call.""" return int(os.stat(self._mtime_path).st_mtime) - def checkpoint(self): - ts = time.time() - os.utime(self._mtime_path, (ts, ts)) - return int(ts) - def ensure_open(self): if not exists(self._mtime_path): with file(self._mtime_path, 'w'): @@ -200,8 +194,7 @@ class IndexReader(object): else: parser.add_prefix(name, prop.prefix) parser.add_prefix('', prop.prefix) - if prop.slot is not None and \ - prop.sortable_serialise is not None: + if prop.slot is not None: value_range = 
xapian.NumberValueRangeProcessor( prop.slot, name + ':') parser.add_valuerangeprocessor(value_range) @@ -230,9 +223,7 @@ class IndexReader(object): for needle in value if type(value) in (tuple, list) else [value]: if needle is None: continue - if prop.parse is not None: - needle = prop.parse(needle) - needle = next(_fmt_prop_value(prop, needle)) + needle = prop.decode(needle) queries.append(xapian.Query(_term(prop.prefix, needle))) if len(sub_queries) == 1: all_queries.append(sub_queries[0]) @@ -313,7 +304,7 @@ class IndexReader(object): query = query[:exact_term.start()] + query[exact_term.end():] term, __, value = exact_term.groups() prop = self.metadata.get(term) - if isinstance(prop, IndexedProperty) and prop.prefix: + if prop.indexed and prop.prefix: props[term] = value return query @@ -345,7 +336,7 @@ class IndexWriter(IndexReader): self.ensure_open() if pre_cb is not None: - pre_cb(guid, properties, *args) + properties = pre_cb(guid, properties, *args) _logger.debug('Index %r object: %r', self.metadata.name, properties) @@ -359,17 +350,10 @@ class IndexWriter(IndexReader): else properties.get(name, prop.default) if prop.slot is not None: - if prop.sortable_serialise is not None: - slotted_value = xapian.sortable_serialise( - prop.sortable_serialise(value)) - elif prop.localized: - slotted_value = toolkit.gettext(value) or '' - else: - slotted_value = next(_fmt_prop_value(prop, value)) - doc.add_value(prop.slot, slotted_value) + doc.add_value(prop.slot, prop.slotting(value)) if prop.prefix or prop.full_text: - for value_ in _fmt_prop_value(prop, value): + for value_ in prop.encode(value): if prop.prefix: if prop.boolean: doc.add_boolean_term(_term(prop.prefix, value_)) @@ -383,7 +367,7 @@ class IndexWriter(IndexReader): self._pending_updates += 1 if post_cb is not None: - post_cb(guid, properties, *args) + post_cb(*args) self._check_for_commit() @@ -397,7 +381,7 @@ class IndexWriter(IndexReader): self._pending_updates += 1 if post_cb is not None: - post_cb(guid, *args) + post_cb(*args) self._check_for_commit() @@ -433,10 +417,13 @@ class IndexWriter(IndexReader): self._db.commit() else: self._db.flush() - ts = self.checkpoint() - ts + + checkpoint = time.time() + os.utime(self._mtime_path, (checkpoint, checkpoint)) self._pending_updates = 0 - _logger.debug('Commit to %r took %s seconds', self.metadata.name, ts) + _logger.debug('Commit to %r took %s seconds', + self.metadata.name, checkpoint - ts) if self._commit_cb is not None: self._commit_cb() @@ -461,20 +448,3 @@ class IndexWriter(IndexReader): def _term(prefix, value): return _EXACT_PREFIX + prefix + str(value).split('\n')[0][:243] - - -def _fmt_prop_value(prop, value): - - def fmt(value): - if type(value) is unicode: - yield value.encode('utf8') - elif isinstance(value, basestring): - yield value - elif type(value) in LIST_TYPES: - for i in value: - for j in fmt(i): - yield j - elif value is not None: - yield str(value) - - return fmt(value if prop.fmt is None else prop.fmt(value)) diff --git a/sugar_network/db/metadata.py b/sugar_network/db/metadata.py index 55942a7..5282fd1 100644 --- a/sugar_network/db/metadata.py +++ b/sugar_network/db/metadata.py @@ -1,4 +1,4 @@ -# Copyright (C) 2011-2013 Aleksey Lim +# Copyright (C) 2011-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -13,20 +13,20 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
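The index.py changes above stop interpreting property values themselves and call prop.encode(), prop.decode() and prop.slotting() instead; the metadata.py hunks below define that new Property contract in place of the old typecast/parse/fmt/sortable_serialise knobs. A sketch of a numeric property under the new contract; it is meant to mirror the likely shape of the db.Numeric class exported from db/__init__.py, which is not shown in this patch, so treat the method bodies as assumptions:

    import xapian

    from sugar_network.db.metadata import Property


    class Numeric(Property):

        def typecast(self, value):
            # Store integers, whatever string form the request carried
            return int(value)

        def encode(self, value):
            # Plain string terms for exact matching
            yield str(value)

        def decode(self, value):
            # Query needles have to match what encode() yields
            return str(int(value))

        def slotting(self, value):
            # Slot values feed xapian.NumberValueRangeProcessor, so they
            # need to be sortable-serialised
            return xapian.sortable_serialise(int(value))
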
-import types +import xapian from sugar_network import toolkit +from sugar_network.db import files from sugar_network.toolkit.router import ACL -from sugar_network.toolkit import http, enforce +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit import i18n, http, enforce #: Xapian term prefix for GUID value GUID_PREFIX = 'I' -LIST_TYPES = (list, tuple, frozenset, types.GeneratorType) - -def indexed_property(property_class=None, *args, **kwargs): +def stored_property(klass=None, *args, **kwargs): def getter(func, self): value = self[func.__name__] @@ -34,7 +34,7 @@ def indexed_property(property_class=None, *args, **kwargs): def decorate_setter(func, attr): attr.prop.setter = lambda self, value: \ - self.set(attr.name, func(self, value)) + self._set(attr.name, func(self, value)) attr.prop.on_set = func return attr @@ -46,20 +46,18 @@ def indexed_property(property_class=None, *args, **kwargs): # pylint: disable-msg=W0212 attr._is_db_property = True attr.name = func.__name__ - attr.prop = (property_class or IndexedProperty)( - attr.name, *args, **kwargs) + attr.prop = (klass or Property)(*args, name=attr.name, **kwargs) attr.prop.on_get = func return attr return decorate_getter -stored_property = lambda ** kwargs: indexed_property(StoredProperty, **kwargs) -blob_property = lambda ** kwargs: indexed_property(BlobProperty, **kwargs) - - -class AggregatedType(dict): - pass +def indexed_property(klass=None, *args, **kwargs): + enforce('slot' in kwargs or 'prefix' in kwargs or 'full_text' in kwargs, + "None of 'slot', 'prefix' or 'full_text' was specified " + 'for indexed property') + return stored_property(klass, *args, **kwargs) class Metadata(dict): @@ -83,7 +81,6 @@ class Metadata(dict): for attr in [getattr(cls, i) for i in dir(cls)]: if not hasattr(attr, '_is_db_property'): continue - prop = attr.prop if hasattr(prop, 'slot'): @@ -117,52 +114,73 @@ class Metadata(dict): class Property(object): - """Basic class to collect information about document property.""" + """Collect information about document properties.""" - def __init__(self, name, acl=ACL.PUBLIC, typecast=None, - parse=None, fmt=None, default=None, sortable_serialise=None): + def __init__(self, name=None, + slot=None, prefix=None, full_text=False, boolean=False, + acl=ACL.PUBLIC, default=None): """ :param name: property name; :param acl: access to the property, might be an ORed composition of `db.ACCESS_*` constants; - :param typecast: - cast property value before storing in the system; - supported values are `None` (strings), `int` (intergers), - `float` (floats), `bool` (booleans repesented by symbols - `0` and `1`), a sequence of strings (property value should - confirm one of values from the sequencei); - :param parse: - parse property value from a string; - :param fmt: - format property value to a string or a list of strings; :param default: - default property value or None; - :param sortable_serialise: - cast property value before storing as a srotable value. 
+ default property value; + :param slot: + Xapian document's slot number to add property value to; + :param prefix: + Xapian serach term prefix, if `None`, property is not a term; + :param full_text: + the property takes part in full-text search; + :param boolean: + Xapian will use boolean search for this property; """ - if typecast is bool: - if fmt is None: - fmt = lambda x: '1' if x else '0' - if parse is None: - parse = lambda x: str(x).lower() in ('true', '1', 'on', 'yes') - if sortable_serialise is None and typecast in [int, float, bool]: - sortable_serialise = typecast + enforce(name == 'guid' or slot != 0, + "Slot '0' is reserved for internal needs in %r", + name) + enforce(name == 'guid' or prefix != GUID_PREFIX, + 'Prefix %r is reserved for internal needs in %r', + GUID_PREFIX, name) self.setter = None self.on_get = lambda self, x: x self.on_set = None self.name = name self.acl = acl - self.typecast = typecast - self.parse = parse - self.fmt = fmt self.default = default - self.sortable_serialise = sortable_serialise + self.indexed = slot is not None or prefix is not None or full_text + self.slot = slot + self.prefix = prefix + self.full_text = full_text + self.boolean = boolean - def assert_access(self, mode): + def typecast(self, value): + """Convert input values to types stored in the system.""" + return value + + def reprcast(self, value): + """Convert output values before returning out of the system.""" + return self.default if value is None else value + + def encode(self, value): + """Convert stored value to strings capable for indexing.""" + yield toolkit.ascii(value) + + def decode(self, value): + """Make input string capable for indexing.""" + return toolkit.ascii(value) + + def slotting(self, value): + """Convert stored value to xapian.NumberValueRangeProcessor values.""" + return next(self.encode(value)) + + def teardown(self, value): + """Cleanup property value on resetting.""" + pass + + def assert_access(self, mode, value=None): """Is access to the property permitted. 
If there are no permissions, function should raise @@ -178,106 +196,255 @@ class Property(object): ACL.NAMES[mode], self.name) -class StoredProperty(Property): - """Property to save only in persistent storage, no index.""" +class Boolean(Property): - def __init__(self, name, localized=False, typecast=None, fmt=None, - **kwargs): - """ - :param: localized: - property value will be stored per locale; - :param: **kwargs - :class:`.Property` arguments + def typecast(self, value): + if isinstance(value, basestring): + return value.lower() in ('true', '1', 'on', 'yes') + return bool(value) - """ - self.localized = localized + def encode(self, value): + yield '1' if value else '0' - if localized: - enforce(typecast is None, - 'typecast should be None for localized properties') - enforce(fmt is None, - 'fmt should be None for localized properties') - typecast = _localized_typecast - fmt = _localized_fmt + def decode(self, value): + return '1' if self.typecast(value) else '0' - Property.__init__(self, name, typecast=typecast, fmt=fmt, **kwargs) + def slotting(self, value): + return xapian.sortable_serialise(value) -class IndexedProperty(StoredProperty): - """Property which needs to be indexed.""" +class Numeric(Property): - def __init__(self, name, slot=None, prefix=None, full_text=False, - boolean=False, **kwargs): - """ - :param slot: - Xapian document's slot number to add property value to; - :param prefix: - Xapian serach term prefix, if `None`, property is not a term; - :param full_text: - property takes part in full-text search; - :param boolean: - Xapian will use boolean search for this property; - :param: **kwargs - :class:`.StoredProperty` arguments + def typecast(self, value): + return int(value) - """ - enforce(name == 'guid' or slot != 0, - "For %r property, slot '0' is reserved for internal needs", - name) - enforce(name == 'guid' or prefix != GUID_PREFIX, - 'For %r property, prefix %r is reserved for internal needs', - name, GUID_PREFIX) - enforce(slot is not None or prefix or full_text, - 'For %r property, either slot, prefix or full_text ' - 'need to be set', - name) - enforce(slot is None or _is_sloted_prop(kwargs.get('typecast')) or - kwargs.get('sortable_serialise'), - 'Slot can be set only for properties for str, int, float, ' - 'bool types, or, for list of these types') + def encode(self, value): + yield str(value) - StoredProperty.__init__(self, name, **kwargs) - self.slot = slot - self.prefix = prefix - self.full_text = full_text - self.boolean = boolean + def decode(self, value): + return str(int(value)) + def slotting(self, value): + return xapian.sortable_serialise(value) -class BlobProperty(Property): - """Binary large objects which needs to be fetched alone, no index.""" - def __init__(self, name, acl=ACL.PUBLIC, - mime_type='application/octet-stream'): - """ - :param mime_type: - MIME type for BLOB content; - by default, MIME type is application/octet-stream; - :param: **kwargs - :class:`.Property` arguments +class List(Property): - """ - Property.__init__(self, name, acl=acl) - self.mime_type = mime_type + def __init__(self, subtype=None, **kwargs): + Property.__init__(self, **kwargs) + self._subtype = subtype or Property() + + def typecast(self, value): + if value is None: + return [] + if type(value) not in (list, tuple): + return [self._subtype.typecast(value)] + return [self._subtype.typecast(i) for i in value] + + def encode(self, value): + for i in value: + for j in self._subtype.encode(i): + yield j + def decode(self, value): + return self._subtype.decode(value) 
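# A quick sanity sketch of the per-type hooks defined above: each property
# class now owns its own casting and index encoding, replacing the removed
# typecast/parse/fmt/sortable_serialise keyword arguments.  The 'favorite'
# and 'downloads' names are illustrative only.
flag = Boolean(name='favorite', prefix='F')
flag.typecast('yes')                # -> True
list(flag.encode(True))             # -> ['1']  (term value for the index)
flag.decode('on')                   # -> '1'   (normalized query needle)

count = Numeric(name='downloads', slot=2, prefix='D')
count.typecast('10')                # -> 10
count.slotting(10)                  # == xapian.sortable_serialise(10)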
-def _is_sloted_prop(typecast): - if typecast in [None, int, float, bool, str]: - return True - if type(typecast) in LIST_TYPES: - if typecast and [i for i in typecast - if type(i) in [None, int, float, bool, str]]: - return True +class Dict(Property): -def _localized_typecast(value): - if isinstance(value, dict): + def __init__(self, subtype=None, **kwargs): + Property.__init__(self, **kwargs) + self._subtype = subtype or Property() + + def typecast(self, value): + for key, value_ in value.items(): + value[key] = self._subtype.typecast(value_) return value - else: - return {toolkit.default_lang(): value} + def encode(self, items): + for i in items.values(): + for j in self._subtype.encode(i): + yield j + + +class Enum(Property): + + def __init__(self, items, **kwargs): + enforce(items, 'Enum should not be empty') + Property.__init__(self, **kwargs) + self._items = items + if type(next(iter(items))) in (int, long): + self._subtype = Numeric() + else: + self._subtype = Property() + + def typecast(self, value): + value = self._subtype.typecast(value) + enforce(value in self._items, ValueError, + "Value %r is not in '%s' enum", + value, ', '.join([str(i) for i in self._items])) + return value -def _localized_fmt(value): - if isinstance(value, dict): - return value.values() - else: - return [value] + def slotting(self, value): + return self._subtype.slotting(value) + + +class Blob(Property): + + def __init__(self, mime_type='application/octet-stream', default='', + **kwargs): + Property.__init__(self, default=default, **kwargs) + self.mime_type = mime_type + + def typecast(self, value): + if isinstance(value, toolkit.File): + return value.digest + if isinstance(value, files.Digest): + return value + + enforce(value is None or isinstance(value, basestring) or \ + isinstance(value, dict) and value or hasattr(value, 'read'), + 'Inappropriate blob value') + + if not value: + return '' + + if not isinstance(value, dict): + return files.post(value, { + 'mime_type': this.request.content_type or self.mime_type, + }).digest + + digest = this.resource[self.name] if self.name else None + if digest: + meta = files.get(digest) + enforce('digest' not in value or value.pop('digest') == digest, + "Inappropriate 'digest' value") + enforce(meta.path or 'url' in meta or 'url' in value, + 'Blob points to nothing') + if 'url' in value and meta.path: + files.delete(digest) + meta.update(value) + value = meta + else: + enforce('url' in value, 'Blob points to nothing') + enforce('digest' in value, "Missed 'digest' value") + if 'mime_type' not in value: + value['mime_type'] = self.mime_type + digest = value.pop('digest') + + files.update(digest, value) + return digest + + def reprcast(self, value): + if not value: + return toolkit.File.AWAY + meta = files.get(value) + if 'url' not in meta: + meta['url'] = '%s/blobs/%s' % (this.request.static_prefix, value) + meta['size'] = meta.size + meta['mtime'] = meta.mtime + meta['digest'] = value + return meta + + def teardown(self, value): + if value: + files.delete(value) + + def assert_access(self, mode, value=None): + if mode == ACL.WRITE and not value: + mode = ACL.CREATE + Property.assert_access(self, mode, value) + + +class Composite(Property): + pass + + +class Localized(Composite): + + def typecast(self, value): + if isinstance(value, dict): + return value + return {this.request.accept_language[0]: value} + + def reprcast(self, value): + if value is None: + return self.default + return i18n.decode(value, this.request.accept_language) + + def encode(self, value): + for i 
in value.values(): + yield toolkit.ascii(i) + + def slotting(self, value): + # TODO Multilingual sorting + return i18n.decode(value) or '' + + +class Aggregated(Composite): + + def __init__(self, subtype=None, acl=ACL.READ | ACL.INSERT | ACL.REMOVE, + **kwargs): + enforce(not (acl & (ACL.CREATE | ACL.WRITE)), + 'ACL.CREATE|ACL.WRITE not allowed for aggregated properties') + Property.__init__(self, acl=acl, default={}, **kwargs) + self._subtype = subtype or Property() + + def subtypecast(self, value): + return self._subtype.typecast(value) + + def subteardown(self, value): + self._subtype.teardown(value) + + def typecast(self, value): + return dict(value) + + def encode(self, items): + for agg in items.values(): + if 'value' in agg: + for j in self._subtype.encode(agg['value']): + yield j + + +class Guid(Property): + + def __init__(self): + Property.__init__(self, name='guid', slot=0, prefix=GUID_PREFIX, + acl=ACL.CREATE | ACL.READ) + + +class Authors(Dict): + + def typecast(self, value): + if type(value) not in (list, tuple): + return dict(value) + result = {} + for order, author in enumerate(value): + user = author.pop('guid') + author['order'] = order + result[user] = author + return result + + def reprcast(self, value): + result = [] + for guid, props in sorted(value.items(), + cmp=lambda x, y: cmp(x[1]['order'], y[1]['order'])): + if 'name' in props: + result.append({ + 'guid': guid, + 'name': props['name'], + 'role': props['role'], + }) + else: + result.append({ + 'name': guid, + 'role': props['role'], + }) + return result + + def encode(self, value): + for guid, props in value.items(): + if 'name' in props: + yield props['name'] + if not (props['role'] & ACL.INSYSTEM): + yield guid diff --git a/sugar_network/db/resource.py b/sugar_network/db/resource.py index 207824e..2636dca 100644 --- a/sugar_network/db/resource.py +++ b/sugar_network/db/resource.py @@ -1,4 +1,4 @@ -# Copyright (C) 2011-2012 Aleksey Lim +# Copyright (C) 2011-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -13,10 +13,11 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
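# For orientation, an illustrative resource declaration in the reworked
# style (the hypothetical Wiki class is not part of the patch): the property
# type class is passed as the first argument to stored_property or
# indexed_property instead of the old typecast/fmt keywords, and blobs are
# plain stored properties backed by the files module.
from sugar_network import db

class Wiki(db.Resource):

    @db.indexed_property(db.Localized, slot=1, prefix='N', full_text=True)
    def title(self, value):
        return value

    @db.stored_property(db.Blob, mime_type='text/html')
    def page(self, value):
        return value

    @db.stored_property(db.Aggregated, subtype=db.Localized())
    def comments(self, value):
        return value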
-from sugar_network import toolkit -from sugar_network.db.metadata import indexed_property -from sugar_network.db.metadata import StoredProperty, BlobProperty -from sugar_network.toolkit.router import Blob, ACL +from sugar_network.db.metadata import indexed_property, Localized +from sugar_network.db.metadata import Numeric, List, Authors +from sugar_network.db.metadata import Composite, Aggregated +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit.router import ACL class Resource(object): @@ -25,85 +26,69 @@ class Resource(object): #: `Metadata` object that describes the document metadata = None - def __init__(self, guid, record, cached_props=None, request=None): + def __init__(self, guid, record, cached_props=None): self.props = cached_props or {} self.guid = guid self.is_new = not bool(guid) - self._record = record - self.request = request - self._modifies = set() + self.record = record + self._post_seqno = None @property - def volume(self): - return self.request.routes.volume + def post_seqno(self): + return self._post_seqno - @property - def directory(self): - return self.volume[self.metadata.name] + @post_seqno.setter + def post_seqno(self, value): + if self._post_seqno is None: + self._post_seqno = value + self.post('seqno', value) - @indexed_property(slot=1000, prefix='RC', typecast=int, default=0, - acl=ACL.READ) - def ctime(self, value): + @indexed_property(Numeric, slot=1000, prefix='RS', acl=0) + def seqno(self, value): return value - @indexed_property(slot=1001, prefix='RM', typecast=int, default=0, - acl=ACL.READ) - def mtime(self, value): + @indexed_property(Numeric, slot=1001, prefix='RC', default=0, acl=ACL.READ) + def ctime(self, value): return value - @indexed_property(slot=1002, prefix='RS', typecast=int, default=0, acl=0) - def seqno(self, value): + @indexed_property(Numeric, slot=1002, prefix='RM', default=0, acl=ACL.READ) + def mtime(self, value): return value - @indexed_property(prefix='RA', typecast=dict, full_text=True, default={}, - fmt=lambda x: _fmt_authors(x), acl=ACL.READ) + @indexed_property(Authors, prefix='RA', default={}, full_text=True, + acl=ACL.READ) def author(self, value): - result = [] - for guid, props in sorted(value.items(), - cmp=lambda x, y: cmp(x[1]['order'], y[1]['order'])): - if 'name' in props: - result.append({ - 'guid': guid, - 'name': props['name'], - 'role': props['role'], - }) - else: - result.append({ - 'name': guid, - 'role': props['role'], - }) - return result + return value - @author.setter - def author(self, value): - if type(value) not in (list, tuple): - return value - result = {} - for order, author in enumerate(value): - user = author.pop('guid') - author['order'] = order - result[user] = author - return result + @indexed_property(List, prefix='RL', default=[]) + def layer(self, value): + return value - @indexed_property(prefix='RL', typecast=[], default=[]) + @layer.setter def layer(self, value): + orig = self['layer'] + if 'deleted' in value: + if this.request.method != 'POST' and 'deleted' not in orig: + self.deleted() + elif this.request.method != 'POST' and 'deleted' in orig: + self.restored() return value - @indexed_property(prefix='RT', full_text=True, default=[], typecast=[]) + @indexed_property(List, prefix='RT', full_text=True, default=[]) def tags(self, value): return value - def path(self, *args): - if not args: - return self._record.path() - prop = args[0] - if prop in self.metadata and \ - isinstance(self.metadata[prop], BlobProperty): - return self._record.blob_path(*args) - else: - 
return self._record.path(*args) - - def get(self, prop, accept_language=None): + @property + def exists(self): + return self.record is not None and self.record.consistent + + def deleted(self): + pass + + def restored(self): + pass + + def get(self, prop): """Get document's property value. :param prop: @@ -113,57 +98,83 @@ class Resource(object): """ prop = self.metadata[prop] - value = self.props.get(prop.name) - if value is None and self._record is not None: - meta = self._record.get(prop.name) - if isinstance(prop, StoredProperty): - if meta is not None: - value = meta.get('value') - else: - value = prop.default + if value is None and self.record is not None: + meta = self.record.get(prop.name) + if meta is not None: + value = meta.get('value') else: - value = meta or Blob() + value = prop.default self.props[prop.name] = value - - if value is not None and accept_language: - if isinstance(prop, StoredProperty) and prop.localized: - value = toolkit.gettext(value, accept_language) - return value - def properties(self, props, accept_language=None): + def properties(self, props): result = {} for i in props: - result[i] = self.get(i, accept_language) + result[i] = self.get(i) return result def meta(self, prop): - return self._record.get(prop) + if self.record is not None: + return self.record.get(prop) + + def diff(self, seq): + for name, prop in self.metadata.items(): + if name == 'seqno' or prop.acl & ACL.CALC: + continue + meta = self.meta(name) + if meta is None: + continue + seqno = meta.get('seqno') + if seqno not in seq: + continue + value = meta.get('value') + if isinstance(prop, Aggregated): + value_ = {} + for key, agg in value.items(): + if agg.pop('seqno') in seq: + value_[key] = agg + value = value_ + meta = {'mtime': meta['mtime'], 'value': value} + yield name, meta, seqno + + def patch(self, props): + if not props: + return {} + patch = {} + for prop, value in props.items(): + if self[prop] == value: + continue + orig_value = self[prop] + if orig_value and isinstance(self.metadata[prop], Localized): + for lang, subvalue in value.items(): + if orig_value.get(lang) != subvalue: + break + else: + continue + patch[prop] = value + return patch - def modified(self, prop): - return prop in self._modifies + def post(self, prop, value, **meta): + prop = self.metadata[prop] + if prop.on_set is not None: + value = prop.on_set(self, value) + if isinstance(prop, Aggregated): + for agg in value.values(): + agg['seqno'] = self.post_seqno + if isinstance(prop, Composite): + old_value = self[prop.name] + if old_value: + old_value.update(value) + value = old_value + self.record.set(prop.name, value=value, seqno=self.post_seqno, **meta) + self.props[prop.name] = value + + def _set(self, prop, value): + self.props[prop] = value def __contains__(self, prop): - return self.get(prop) + return prop in self.props def __getitem__(self, prop): return self.get(prop) - - def __setitem__(self, prop, value): - self.props[prop] = value - self._modifies.add(prop) - - -def _fmt_authors(value): - if isinstance(value, dict): - for guid, props in value.items(): - if not isinstance(props, dict): - yield guid - else: - if 'name' in props: - yield props['name'] - if not (props['role'] & ACL.INSYSTEM): - yield guid - else: - yield value diff --git a/sugar_network/db/routes.py b/sugar_network/db/routes.py index 19ad26c..2f8fc69 100644 --- a/sugar_network/db/routes.py +++ b/sugar_network/db/routes.py @@ -1,4 +1,4 @@ -# Copyright (C) 2011-2013 Aleksey Lim +# Copyright (C) 2011-2014 Aleksey Lim # # This program is free 
software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -13,21 +13,17 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import os import re -import sys import time import json -import types -import hashlib import logging from contextlib import contextmanager -from os.path import exists from sugar_network import toolkit -from sugar_network.db.metadata import AggregatedType -from sugar_network.db.metadata import BlobProperty, StoredProperty, LIST_TYPES -from sugar_network.toolkit.router import Blob, ACL, route +from sugar_network.db import files +from sugar_network.db.metadata import Aggregated +from sugar_network.toolkit.router import ACL, route, preroute, fallbackroute +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit import http, enforce @@ -38,62 +34,86 @@ _logger = logging.getLogger('db.routes') class Routes(object): - def __init__(self, volume): + def __init__(self, volume, find_limit=None): self.volume = volume + self._find_limit = find_limit + this.volume = self.volume - @route('POST', [None], - acl=ACL.AUTH, mime_type='application/json') + @preroute + def __preroute__(self, op, request, response): + this.request = request + this.response = response + + @route('POST', [None], acl=ACL.AUTH, mime_type='application/json') def create(self, request): - with self._post(request, ACL.CREATE) as (directory, doc): - event = {} - self.on_create(request, doc.props, event) - if 'guid' not in doc.props: - doc.props['guid'] = toolkit.uuid() - doc.guid = doc.props['guid'] - directory.create(doc.props, event) - return doc.guid + with self._post(request, ACL.CREATE) as doc: + self.on_create(request, doc.props) + self.volume[request.resource].create(doc.props) + self.after_post(doc) + return doc['guid'] @route('GET', [None], - arguments={'offset': int, 'limit': int, 'reply': ('guid',)}, + arguments={ + 'offset': int, + 'limit': int, + 'layer': [], + 'reply': ('guid',), + }, mime_type='application/json') - def find(self, request, reply): + def find(self, request, reply, limit, layer): self._preget(request) - documents, total = self.volume[request.resource].find(**request) - result = [self._get_props(i, request, reply) for i in documents] + if self._find_limit: + if limit <= 0: + request['limit'] = self._find_limit + elif limit > self._find_limit: + _logger.warning('The find limit is restricted to %s', + self._find_limit) + request['limit'] = self._find_limit + if 'deleted' in layer: + _logger.warning('Requesting "deleted" layer, will ignore') + layer.remove('deleted') + documents, total = self.volume[request.resource].find( + not_layer='deleted', **request) + result = [self._postget(request, i, reply) for i in documents] return {'total': total, 'result': result} - @route('GET', [None, None], cmd='exists', - mime_type='application/json') + @route('GET', [None, None], cmd='exists', mime_type='application/json') def exists(self, request): directory = self.volume[request.resource] return directory.exists(request.guid) - @route('PUT', [None, None], - acl=ACL.AUTH | ACL.AUTHOR) + @route('PUT', [None, None], acl=ACL.AUTH | ACL.AUTHOR) def update(self, request): - with self._post(request, ACL.WRITE) as (directory, doc): + with self._post(request, ACL.WRITE) as doc: if not doc.props: return - event = {} - self.on_update(request, doc.props, event) - directory.update(doc.guid, doc.props, event) - - @route('PUT', [None, None, None], - acl=ACL.AUTH | ACL.AUTHOR) 
- def update_prop(self, request, url=None): - if url: - value = Blob({'url': url}) - elif request.content is None: + self.on_update(request, doc.props) + self.volume[request.resource].update(doc.guid, doc.props) + self.after_post(doc) + + @route('GET', [None, None], cmd='diff', mime_type='application/json') + def diff(self, request): + result = {} + res = self.volume[request.resource][request.guid] + for prop, meta, __ in res.diff(toolkit.Sequence([[0, None]])): + result[prop] = meta + return result + + @route('PUT', [None, None, None], acl=ACL.AUTH | ACL.AUTHOR) + def update_prop(self, request): + if request.content is None: value = request.content_stream else: value = request.content request.content = {request.prop: value} self.update(request) - @route('DELETE', [None, None], - acl=ACL.AUTH | ACL.AUTHOR) + @route('DELETE', [None, None], acl=ACL.AUTH | ACL.AUTHOR) def delete(self, request): - self.volume[request.resource].delete(request.guid) + # Node data should not be deleted immediately + # to make master-slave synchronization possible + request.content = {'layer': 'deleted'} + self.update(request) @route('GET', [None, None], arguments={'reply': list}, mime_type='application/json') @@ -101,65 +121,56 @@ class Routes(object): if not reply: reply = [] for prop in self.volume[request.resource].metadata.values(): - if prop.acl & ACL.READ and not (prop.acl & ACL.LOCAL): + if prop.acl & ACL.READ and not (prop.acl & ACL.LOCAL) and \ + not isinstance(prop, Aggregated): reply.append(prop.name) self._preget(request) doc = self.volume[request.resource].get(request.guid) - return self._get_props(doc, request, reply) + enforce('deleted' not in doc['layer'], http.NotFound, 'Deleted') + return self._postget(request, doc, reply) @route('GET', [None, None, None], mime_type='application/json') def get_prop(self, request, response): - return self._prop_meta(request, response) + directory = self.volume[request.resource] + doc = directory.get(request.guid) + + prop = directory.metadata[request.prop] + prop.assert_access(ACL.READ) + + meta = doc.meta(prop.name) or {} + if 'value' in meta: + value = _get_prop(doc, prop, meta.pop('value')) + enforce(value is not toolkit.File.AWAY, http.NotFound, 'No blob') + else: + value = prop.default + + response.meta = meta + response.last_modified = meta.get('mtime') + if isinstance(value, toolkit.File): + response.content_length = value.get('size') or 0 + else: + response.content_length = len(json.dumps(value)) + + return value @route('HEAD', [None, None, None]) def get_prop_meta(self, request, response): - self._prop_meta(request, response) + self.get_prop(request, response) @route('POST', [None, None, None], acl=ACL.AUTH, mime_type='application/json') def insert_to_aggprop(self, request): - content = request.content or {} - enforce(isinstance(content, dict), http.BadRequest, 'Invalid value') - - directory = self.volume[request.resource] - prop = directory.metadata[request.prop] + return self._aggpost(request, ACL.INSERT) - enforce(prop.typecast is AggregatedType, http.BadRequest, - 'Property is not aggregated') - prop.assert_access(ACL.INSERT) - self.on_aggprop_update(request, prop, None) - - if request.principal: - authors = content['author'] = {} - self._useradd(authors, request.principal, ACL.ORIGINAL) - guid = content.pop('guid') if 'guid' in content else toolkit.uuid() - props = {request.prop: {guid: content}} - event = {} - self.on_update(request, props, event) - directory.update(request.guid, props, event) - - return guid + @route('PUT', [None, None, None, 
None], + acl=ACL.AUTH, mime_type='application/json') + def update_aggprop(self, request): + self._aggpost(request, ACL.REPLACE, request.key) @route('DELETE', [None, None, None, None], acl=ACL.AUTH, mime_type='application/json') def remove_from_aggprop(self, request): - directory = self.volume[request.resource] - doc = directory.get(request.guid) - prop = directory.metadata[request.prop] - - enforce(prop.typecast is AggregatedType, http.BadRequest, - 'Property is not aggregated') - prop.assert_access(ACL.REMOVE) - - guid = request.path[3] - enforce(guid in doc[request.prop], http.NotFound, - 'No such aggregated item') - self.on_aggprop_update(request, prop, doc[request.prop][guid]) - - props = {request.prop: {guid: {}}} - event = {} - self.on_update(request, props, event) - directory.update(request.guid, props, event) + self._aggpost(request, ACL.REMOVE, request.key) @route('PUT', [None, None], cmd='useradd', arguments={'role': 0}, acl=ACL.AUTH | ACL.AUTHOR) @@ -180,7 +191,11 @@ class Routes(object): del authors[user] directory.update(request.guid, {'author': authors}) - def on_create(self, request, props, event): + @fallbackroute('GET', ['blobs']) + def blobs(self, request): + return files.get(request.guid) + + def on_create(self, request, props): ts = int(time.time()) props['ctime'] = ts props['mtime'] = ts @@ -189,7 +204,7 @@ class Routes(object): authors = props['author'] = {} self._useradd(authors, request.principal, ACL.ORIGINAL) - def on_update(self, request, props, event): + def on_update(self, request, props): props['mtime'] = int(time.time()) def on_aggprop_update(self, request, prop, value): @@ -200,103 +215,57 @@ class Routes(object): @contextmanager def _post(self, request, access): - content = request.content or {} - enforce(isinstance(content, dict), 'Invalid value') - + content = request.content + enforce(isinstance(content, dict), http.BadRequest, 'Invalid value') directory = self.volume[request.resource] - if request.guid: - doc = directory.get(request.guid) - else: - doc = directory.resource_class(None, {}) - doc.request = request - blobs = [] - - for name, value in content.items(): - prop = directory.metadata[name] - if isinstance(prop, BlobProperty): - prop.assert_access(ACL.CREATE if - access == ACL.WRITE and doc.meta(name) is None - else access) - if value is None: - value = {'blob': None} - elif isinstance(value, basestring) or hasattr(value, 'read'): - value = _read_blob(request, prop, value) - blobs.append(value['blob']) - elif isinstance(value, dict): - enforce('url' in value or 'blob' in value, 'No bundle') - else: - raise RuntimeError('Incorrect BLOB value') - else: - prop.assert_access(access) - if prop.localized and isinstance(value, basestring): - value = {request.accept_language[0]: value} - try: - value = _typecast_prop_value(prop.typecast, value) - except Exception, error: - error = 'Value %r for %r property is invalid: %s' % \ - (value, prop.name, error) - toolkit.exception(error) - raise RuntimeError(error) - doc[name] = value if access == ACL.CREATE: - for name, prop in directory.metadata.items(): - if not isinstance(prop, BlobProperty) and \ - content.get(name) is None and \ - (prop.default is not None or prop.on_set is not None): - doc[name] = prop.default - if doc['guid']: + doc = directory.resource_class(None, None) + if 'guid' in content: # TODO Temporal security hole, see TODO - enforce(not self.volume[request.resource].exists(doc['guid']), - '%s already exists', doc['guid']) - enforce(_GUID_RE.match(doc['guid']) is not None, - 'Malformed %s 
GUID', doc['guid']) + guid = content['guid'] + enforce(not directory.exists(guid), + http.BadRequest, '%s already exists', guid) + enforce(_GUID_RE.match(guid) is not None, + http.BadRequest, 'Malformed %s GUID', guid) else: - doc['guid'] = toolkit.uuid() + doc.props['guid'] = toolkit.uuid() + for name, prop in directory.metadata.items(): + if name not in content and prop.default is not None: + doc.props[name] = prop.default + orig = None + this.resource = doc + else: + doc = directory.get(request.guid) + orig = directory.get(request.guid) + this.resource = orig - try: - for name, value in doc.props.items(): + def teardown(new): + if orig is None: + return + for name, orig_value in orig.props.items(): + if doc[name] == orig_value: + continue prop = directory.metadata[name] - if prop.on_set is not None: - doc.props[name] = prop.on_set(doc, value) - yield directory, doc - finally: - for path in blobs: - if exists(path): - os.unlink(path) + prop.teardown(doc[name] if new else orig_value) - self.after_post(doc) - - def _prop_meta(self, request, response): - directory = self.volume[request.resource] - prop = directory.metadata[request.prop] - doc = directory.get(request.guid) - doc.request = request - - prop.assert_access(ACL.READ) - - if isinstance(prop, StoredProperty): - meta = doc.meta(prop.name) or {} - if 'value' in meta: - del meta['value'] - value = doc.get(prop.name, request.accept_language) - value = prop.on_get(doc, value) - response.content_length = len(json.dumps(value)) + try: + for name, value in content.items(): + prop = directory.metadata[name] + prop.assert_access(access, orig[name] if orig else None) + try: + doc.props[name] = prop.typecast(value) + except Exception, error: + error = 'Value %r for %r property is invalid: %s' % \ + (value, prop.name, error) + toolkit.exception(error) + raise http.BadRequest(error) + yield doc + except Exception: + teardown(True) + raise else: - value = prop.on_get(doc, doc.meta(prop.name)) - enforce(value is not None and ('blob' in value or 'url' in value), - http.NotFound, 'BLOB does not exist') - if 'blob' in value: - meta = value.copy() - meta.pop('blob') - else: - meta = value - - response.meta = meta - response.last_modified = meta.get('mtime') - response.content_length = meta.get('blob_size') or 0 - - return value + teardown(False) def _preget(self, request): reply = request.get('reply') @@ -307,27 +276,11 @@ class Routes(object): for prop in reply: directory.metadata[prop].assert_access(ACL.READ) - def _get_props(self, doc, request, props): + def _postget(self, request, doc, props): result = {} - metadata = doc.metadata - doc.request = request for name in props: - prop = metadata[name] - value = prop.on_get(doc, doc.get(name, request.accept_language)) - if value is None: - value = prop.default - elif isinstance(value, Blob): - for key in ('mtime', 'seqno', 'blob'): - if key in value: - del value[key] - url = value.get('url') - if url is None: - value['url'] = '/'.join([ - request.static_prefix, metadata.name, doc.guid, name, - ]) - elif url.startswith('/'): - value['url'] = request.static_prefix + url - result[name] = value + prop = doc.metadata[name] + result[name] = _get_prop(doc, prop, doc.get(name)) return result def _useradd(self, authors, user, role): @@ -351,83 +304,48 @@ class Routes(object): props['order'] = 0 authors[user] = props + def _aggpost(self, request, acl, aggid=None): + doc = this.resource = self.volume[request.resource][request.guid] + prop = doc.metadata[request.prop] + enforce(isinstance(prop, Aggregated), 
http.BadRequest, + 'Property is not aggregated') + prop.assert_access(acl) -def _read_blob(request, prop, value): - digest = hashlib.sha1() - dst = toolkit.NamedTemporaryFile(delete=False) - - try: - if isinstance(value, basestring): - digest.update(value) - dst.write(value) - else: - size = request.content_length or sys.maxint - while size > 0: - chunk = value.read(min(size, toolkit.BUFFER_SIZE)) - if not chunk: - break - dst.write(chunk) - size -= len(chunk) - digest.update(chunk) - except Exception: - os.unlink(dst.name) - raise - finally: - dst.close() - - if request.prop and request.content_type: - mime_type = request.content_type - else: - mime_type = prop.mime_type - - return {'blob': dst.name, - 'digest': digest.hexdigest(), - 'mime_type': mime_type, - } - - -def _typecast_prop_value(typecast, value): - if typecast is None: - return value - enforce(value is not None, ValueError, 'Property value cannot be None') - - def cast(typecast, value): - if isinstance(typecast, types.FunctionType): - return typecast(value) - elif typecast is unicode: - return value.encode('utf-8') - elif typecast is str: - return str(value) - elif typecast is int: - return int(value) - elif typecast is float: - return float(value) - elif typecast is bool: - return bool(value) - elif typecast is dict: - return dict(value) + if aggid and aggid in doc[request.prop]: + aggvalue = doc[request.prop][aggid] + self.on_aggprop_update(request, prop, aggvalue) + prop.subteardown(aggvalue['value']) else: - raise ValueError('Unknown typecast') + enforce(acl != ACL.REMOVE, http.NotFound, 'No aggregated item') + self.on_aggprop_update(request, prop, None) + + aggvalue = {} + if acl != ACL.REMOVE: + value = prop.subtypecast( + request.content_stream if request.content is None + else request.content) + if type(value) is tuple: + aggid_, value = value + enforce(not aggid or aggid == aggid_, http.BadRequest, + 'Wrong aggregated id') + aggid = aggid_ + elif not aggid: + aggid = toolkit.uuid() + aggvalue['value'] = value + + if request.principal: + authors = aggvalue['author'] = {} + role = ACL.ORIGINAL if request.principal in doc['author'] else 0 + self._useradd(authors, request.principal, role) + props = {request.prop: {aggid: aggvalue}} + self.on_update(request, props) + self.volume[request.resource].update(request.guid, props) + + return aggid - if type(typecast) in LIST_TYPES: - if typecast: - first = iter(typecast).next() - else: - first = None - if first is not None and type(first) is not type and \ - type(first) not in LIST_TYPES: - value = cast(type(first), value) - enforce(value in typecast, ValueError, - "Value %r is not in '%s' list", - value, ', '.join([str(i) for i in typecast])) - else: - enforce(len(typecast) <= 1, ValueError, - 'List values should contain values of the same type') - if type(value) not in LIST_TYPES: - value = (value,) - typecast, = typecast or [str] - value = tuple([_typecast_prop_value(typecast, i) for i in value]) - else: - value = cast(typecast, value) +def _get_prop(doc, prop, value): + value = prop.reprcast(value) + if prop.on_get is not None: + value = prop.on_get(doc, value) return value diff --git a/sugar_network/db/storage.py b/sugar_network/db/storage.py index a280a13..72cbcf7 100644 --- a/sugar_network/db/storage.py +++ b/sugar_network/db/storage.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2013 Aleksey Lim +# Copyright (C) 2012-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ 
-20,10 +20,6 @@ import shutil from os.path import exists, join, isdir, basename from sugar_network import toolkit -from sugar_network.toolkit.router import Blob - - -_BLOB_SUFFIX = '.blob' class Storage(object): @@ -67,9 +63,8 @@ class Storage(object): :param mtime: return entities that were modified after `mtime` :returns: - generator returns (guid, properties) typle for all found - documents; the properties dictionary will contain only - `StoredProperty` properties + generator returns (guid, properties) tuple for all found + documents """ if not exists(self._root): @@ -113,9 +108,6 @@ class Record(object): def path(self, *args): return join(self._root, *args) - def blob_path(self, prop, *args): - return join(self._root, prop + _BLOB_SUFFIX, *args) - def invalidate(self): guid_path = join(self._root, 'guid') if exists(guid_path): @@ -126,32 +118,14 @@ class Record(object): if not exists(path): return None with file(path) as f: - meta = Blob(json.load(f)) - blob_path = path + _BLOB_SUFFIX - if exists(blob_path): - meta['blob'] = blob_path - if 'blob_size' not in meta: - meta['blob_size'] = os.stat(blob_path).st_size + meta = json.load(f) meta['mtime'] = int(os.stat(path).st_mtime) return meta - def set(self, prop, mtime=None, cleanup_blob=False, blob=None, **meta): + def set(self, prop, mtime=None, **meta): if not exists(self._root): os.makedirs(self._root) meta_path = join(self._root, prop) - dst_blob_path = meta_path + _BLOB_SUFFIX - - if (cleanup_blob or blob is not None) and exists(dst_blob_path): - os.unlink(dst_blob_path) - - if blob is not None: - if hasattr(blob, 'read'): - with toolkit.new_file(dst_blob_path) as f: - shutil.copyfileobj(blob, f) - elif blob is not None: - os.rename(blob, dst_blob_path) - elif exists(dst_blob_path): - os.unlink(dst_blob_path) with toolkit.new_file(meta_path) as f: json.dump(meta, f) diff --git a/sugar_network/db/volume.py b/sugar_network/db/volume.py index 3080eb8..6457b93 100644 --- a/sugar_network/db/volume.py +++ b/sugar_network/db/volume.py @@ -1,4 +1,4 @@ -# Copyright (C) 2011-2013 Aleksey Lim +# Copyright (C) 2011-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -30,11 +30,9 @@ class Volume(dict): _flush_pool = [] - def __init__(self, root, documents, broadcast=None, index_class=None, - lazy_open=False): + def __init__(self, root, documents, index_class=None): Volume._flush_pool.append(self) self.resources = {} - self.broadcast = broadcast or (lambda event: None) self._populators = coroutine.Pool() if index_class is None: @@ -46,7 +44,8 @@ class Volume(dict): if not exists(root): os.makedirs(root) self._index_class = index_class - self.seqno = toolkit.Seqno(join(self._root, 'seqno')) + self.seqno = toolkit.Seqno(join(self._root, 'db.seqno')) + self.releases_seqno = toolkit.Seqno(join(self._root, 'releases.seqno')) for document in documents: if isinstance(document, basestring): @@ -54,20 +53,11 @@ class Volume(dict): else: name = document.__name__.lower() self.resources[name] = document - if not lazy_open: - self[name] = self._open(name, document) @property def root(self): return self._root - def mtime(self, name): - path = join(self._root, name, 'index', 'mtime') - if exists(path): - return int(os.stat(path).st_mtime) - else: - return 0 - def close(self): """Close operations with the server.""" _logger.info('Closing documents in %r', self._root) @@ -75,6 +65,7 @@ class Volume(dict): while self: __, cls = self.popitem() cls.close() 
+ self.releases_seqno.commit() def populate(self): for cls in self.values(): @@ -92,25 +83,18 @@ class Volume(dict): if directory is None: enforce(name in self.resources, http.BadRequest, 'Unknown %r resource', name) - directory = self[name] = self._open(name, self.resources[name]) - return directory - - def _open(self, name, resource): - if isinstance(resource, basestring): - mod = __import__(resource, fromlist=[name]) - cls = getattr(mod, name.capitalize()) - else: - cls = resource - directory = Directory(join(self._root, name), cls, self._index_class, - lambda event: self._broadcast(name, event), self.seqno) - self._populators.spawn(self._populate, directory) + resource = self.resources[name] + if isinstance(resource, basestring): + mod = __import__(resource, fromlist=[name]) + cls = getattr(mod, name.capitalize()) + else: + cls = resource + directory = Directory(join(self._root, name), cls, + self._index_class, self.seqno) + self._populators.spawn(self._populate, directory) + self[name] = directory return directory def _populate(self, directory): for __ in directory.populate(): coroutine.dispatch() - - def _broadcast(self, resource, event): - if self.broadcast is not None: - event['resource'] = resource - self.broadcast(event) diff --git a/sugar_network/model/__init__.py b/sugar_network/model/__init__.py index 167eb30..7278d10 100644 --- a/sugar_network/model/__init__.py +++ b/sugar_network/model/__init__.py @@ -13,34 +13,283 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from sugar_network.model.routes import VolumeRoutes, FrontRoutes +import os +import gettext +import logging +from os.path import join + +import xapian + +from sugar_network import toolkit, db +from sugar_network.db import files +from sugar_network.model.routes import FrontRoutes +from sugar_network.toolkit.spec import parse_version, parse_requires +from sugar_network.toolkit.spec import EMPTY_LICENSE +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit.bundle import Bundle +from sugar_network.toolkit.router import ACL +from sugar_network.toolkit import i18n, http, exception, enforce CONTEXT_TYPES = [ 'activity', 'group', 'package', 'book', ] + POST_TYPES = [ - 'review', # Review the Context - 'object', # Object generated by Context application - 'question', # Q&A request - 'answer', # Q&A response - 'issue', # Propblem with the Context - 'announce', # General announcement - 'update', # Auto-generated Post for updates within the Context - 'feedback', # Review parent Post - 'comment', # Dependent Post + 'review', # Review the Context + 'object', # Object generated by Context application + 'question', # Q&A request + 'answer', # Q&A response + 'issue', # Propblem with the Context + 'announce', # General announcement + 'notification', # Auto-generated Post for updates within the Context + 'feedback', # Review parent Post + 'post', # General purpose dependent Post ] STABILITIES = [ 'insecure', 'buggy', 'developer', 'testing', 'stable', ] -RATINGS = [0, 1, 2, 3, 4, 5] - RESOURCES = ( 'sugar_network.model.context', 'sugar_network.model.post', - 'sugar_network.model.release', 'sugar_network.model.report', 'sugar_network.model.user', ) + +_logger = logging.getLogger('model') + + +class Rating(db.List): + + def __init__(self, **kwargs): + db.List.__init__(self, db.Numeric(), default=[0, 0], **kwargs) + + def slotting(self, value): + rating = float(value[1]) / value[0] if value[0] else 0 + return xapian.sortable_serialise(rating) + + 
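# The rating value introduced above is stored as the pair
# [vote_count, vote_sum]; slotting() serialises the average, so ordering a
# query by this slot sorts resources by average vote.  A small sketch:
rating = Rating(name='rating', slot=3)
rating.slotting([4, 14])    # average 3.5, serialised for the Xapian slot
rating.slotting([0, 0])     # no votes yet, sorts as 0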
+class Release(object): + + def typecast(self, rel): + if this.resource.exists and \ + 'activity' not in this.resource['type'] and \ + 'book' not in this.resource['type']: + return rel + if not isinstance(rel, dict): + __, rel = load_bundle(files.post(rel), context=this.request.guid) + return rel['spec']['*-*']['bundle'], rel + + def teardown(self, rel): + if this.resource.exists and \ + 'activity' not in this.resource['type'] and \ + 'book' not in this.resource['type']: + return + for spec in rel['spec'].values(): + files.delete(spec['bundle']) + + def encode(self, value): + return [] + + +def generate_node_stats(volume): + + def calc_rating(**kwargs): + rating = [0, 0] + alldocs, __ = volume['post'].find(**kwargs) + for post in alldocs: + if post['vote']: + rating[0] += 1 + rating[1] += post['vote'] + return rating + + alldocs, __ = volume['context'].find() + for context in alldocs: + rating = calc_rating(type='review', context=context.guid) + volume['context'].update(context.guid, {'rating': rating}) + + alldocs, __ = volume['post'].find(topic='') + for topic in alldocs: + rating = calc_rating(type='feedback', topic=topic.guid) + volume['post'].update(topic.guid, {'rating': rating}) + + +def populate_context_images(props, svg): + if 'guid' in props: + from sugar_network.toolkit.sugar import color_svg + svg = color_svg(svg, props['guid']) + props['artifact_icon'] = files.post( + svg, + {'mime_type': 'image/svg+xml'}, + ).digest + props['icon'] = files.post( + toolkit.svg_to_png(svg, 55, 55), + {'mime_type': 'image/png'}, + ).digest + props['logo'] = files.post( + toolkit.svg_to_png(svg, 140, 140), + {'mime_type': 'image/png'}, + ).digest + + +def load_bundle(blob, context=None, initial=False, extra_deps=None): + contexts = this.volume['context'] + context_type = None + context_meta = None + release_notes = None + release = {} + blob_meta = {} + + try: + bundle = Bundle(blob.path, mime_type='application/zip') + except Exception: + context_type = 'book' + if not context: + context = this.request['context'] + release['version'] = this.request['version'] + if 'license' in this.request: + release['license'] = this.request['license'] + if isinstance(release['license'], basestring): + release['license'] = [release['license']] + release['spec'] = {'*-*': { + 'bundle': blob.digest, + }} + blob_meta['mime_type'] = this.request.content_type + else: + context_type = 'activity' + unpack_size = 0 + + with bundle: + changelog = join(bundle.rootdir, 'CHANGELOG') + for arcname in bundle.get_names(): + if changelog and arcname == changelog: + with bundle.extractfile(changelog) as f: + release_notes = f.read() + changelog = None + unpack_size += bundle.getmember(arcname).size + spec = bundle.get_spec() + context_meta = _load_context_metadata(bundle, spec) + + if not context: + context = spec['context'] + else: + enforce(context == spec['context'], + http.BadRequest, 'Wrong context') + if extra_deps: + spec.requires.update(parse_requires(extra_deps)) + + release['version'] = spec['version'] + release['stability'] = spec['stability'] + if spec['license'] is not EMPTY_LICENSE: + release['license'] = spec['license'] + release['requires'] = requires = [] + for dep_name, dep in spec.requires.items(): + found = False + for version in dep.versions_range(): + requires.append('%s-%s' % (dep_name, version)) + found = True + if not found: + requires.append(dep_name) + release['spec'] = {'*-*': { + 'bundle': blob.digest, + 'commands': spec.commands, + 'requires': spec.requires, + }} + release['unpack_size'] = 
unpack_size + blob_meta['mime_type'] = 'application/vnd.olpc-sugar' + + enforce(context, http.BadRequest, 'Context is not specified') + enforce(release['version'], http.BadRequest, 'Version is not specified') + release['release'] = parse_version(release['version']) + if initial and not contexts.exists(context): + enforce(context_meta, http.BadRequest, 'No way to initate context') + context_meta['guid'] = context + context_meta['type'] = [context_type] + this.call(method='POST', path=['context'], content=context_meta) + else: + enforce(context_type in contexts[context]['type'], + http.BadRequest, 'Inappropriate bundle type') + context_obj = contexts[context] + + releases = context_obj['releases'] + if 'license' not in release: + enforce(releases, http.BadRequest, 'License is not specified') + recent = max(releases, key=lambda x: releases[x]['release']) + release['license'] = releases[recent]['license'] + + _logger.debug('Load %r release: %r', context, release) + + if this.request.principal in context_obj['author']: + diff = context_obj.patch(context_meta) + if diff: + this.call(method='PUT', path=['context', context], content=diff) + context_obj.props.update(diff) + # TRANS: Release notes title + title = i18n._('%(name)s %(version)s release') + else: + # TRANS: 3rd party release notes title + title = i18n._('%(name)s %(version)s third-party release') + release['announce'] = this.call(method='POST', path=['post'], + content={ + 'context': context, + 'type': 'notification', + 'title': i18n.encode(title, + name=context_obj['title'], + version=release['version'], + ), + 'message': release_notes or '', + }, + content_type='application/json') + + filename = ''.join(i18n.decode(context_obj['title']).split()) + blob_meta['name'] = '%s-%s' % (filename, release['version']) + files.update(blob.digest, blob_meta) + + return context, release + + +def _load_context_metadata(bundle, spec): + result = {} + for prop in ('homepage', 'mime_types'): + if spec[prop]: + result[prop] = spec[prop] + result['guid'] = spec['context'] + + try: + icon_file = bundle.extractfile(join(bundle.rootdir, spec['icon'])) + populate_context_images(result, icon_file.read()) + icon_file.close() + except Exception: + exception(_logger, 'Failed to load icon') + + msgids = {} + for prop, confname in [ + ('title', 'name'), + ('summary', 'summary'), + ('description', 'description'), + ]: + if spec[confname]: + msgids[prop] = spec[confname] + result[prop] = {'en': spec[confname]} + with toolkit.mkdtemp() as tmpdir: + for path in bundle.get_names(): + if not path.endswith('.mo'): + continue + mo_path = path.strip(os.sep).split(os.sep) + if len(mo_path) != 5 or mo_path[1] != 'locale': + continue + lang = mo_path[2] + bundle.extract(path, tmpdir) + try: + translation = gettext.translation(spec['context'], + join(tmpdir, *mo_path[:2]), [lang]) + for prop, value in msgids.items(): + msgstr = translation.gettext(value).decode('utf8') + if lang == 'en' or msgstr != value: + result[prop][lang] = msgstr + except Exception: + exception(_logger, 'Gettext failed to read %r', mo_path[-1]) + + return result diff --git a/sugar_network/model/context.py b/sugar_network/model/context.py index 1763d65..6bac120 100644 --- a/sugar_network/model/context.py +++ b/sugar_network/model/context.py @@ -13,42 +13,33 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
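# For reference, the approximate shape of one release entry assembled by
# load_bundle() above; all values are illustrative placeholders, and
# activity bundles additionally carry 'commands' and 'requires' inside the
# '*-*' spec, mirroring whatever the bundle's activity spec declares.
from sugar_network.toolkit.spec import parse_version

release = {
    'version': '2',
    'stability': 'stable',
    'license': ['GPLv3+'],
    'requires': ['sugar-0.94'],
    'release': parse_version('2'),      # parsed form used for ordering
    'unpack_size': 1024,
    'announce': 'post-guid',            # guid of the auto-created Post
    'spec': {'*-*': {'bundle': 'blob-digest'}},
}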
-import hashlib -from cStringIO import StringIO - -from sugar_network import db, model, static, toolkit -from sugar_network.toolkit.router import Blob, ACL +from sugar_network import db, model +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit.router import ACL class Context(db.Resource): - @db.indexed_property(prefix='T', full_text=True, - typecast=[model.CONTEXT_TYPES]) + @db.indexed_property(db.List, prefix='T', full_text=True, + subtype=db.Enum(model.CONTEXT_TYPES)) def type(self, value): return value @type.setter def type(self, value): - if value and 'package' in value and 'common' not in self['layer']: - self['layer'] = tuple(self['layer']) + ('common',) - if 'artifact_icon' not in self: - for name in ('activity', 'book', 'group'): - if name not in self.type: - continue - with file(static.path('images', name + '.svg')) as f: - Context.populate_images(self, f.read()) - break - return value - - @db.indexed_property(slot=1, prefix='S', full_text=True, localized=True) + if 'package' in value and 'common' not in self['layer']: + self.post('layer', self['layer'] + ['common']) + return value + + @db.indexed_property(db.Localized, slot=1, prefix='S', full_text=True) def title(self, value): return value - @db.indexed_property(prefix='R', full_text=True, localized=True) + @db.indexed_property(db.Localized, prefix='R', full_text=True) def summary(self, value): return value - @db.indexed_property(prefix='D', full_text=True, localized=True) + @db.indexed_property(db.Localized, prefix='D', full_text=True) def description(self, value): return value @@ -56,72 +47,49 @@ class Context(db.Resource): def homepage(self, value): return value - @db.indexed_property(prefix='Y', default=[], typecast=[], full_text=True) + @db.indexed_property(db.List, prefix='Y', default=[], full_text=True) def mime_types(self, value): return value - @db.blob_property(mime_type='image/png') + @db.stored_property(db.Blob, mime_type='image/png', default='missing.png') def icon(self, value): - if value: - return value - if 'package' in self['type']: - return Blob({ - 'url': '/static/images/package.png', - 'blob': static.path('images', 'package.png'), - 'mime_type': 'image/png', - }) - else: - return Blob({ - 'url': '/static/images/missing.png', - 'blob': static.path('images', 'missing.png'), - 'mime_type': 'image/png', - }) - - @db.blob_property(mime_type='image/svg+xml') + return value + + @db.stored_property(db.Blob, mime_type='image/svg+xml', + default='missing.svg') def artifact_icon(self, value): - if value: - return value - if 'package' in self['type']: - return Blob({ - 'url': '/static/images/package.svg', - 'blob': static.path('images', 'package.svg'), - 'mime_type': 'image/png', - }) - else: - return Blob({ - 'url': '/static/images/missing.svg', - 'blob': static.path('images', 'missing.svg'), - 'mime_type': 'image/svg+xml', - }) - - @db.blob_property(mime_type='image/png') + return value + + @db.stored_property(db.Blob, mime_type='image/png', + default='missing-logo.png') def logo(self, value): - if value: - return value - if 'package' in self['type']: - return Blob({ - 'url': '/static/images/package-logo.png', - 'blob': static.path('images', 'package-logo.png'), - 'mime_type': 'image/png', - }) - else: - return Blob({ - 'url': '/static/images/missing-logo.png', - 'blob': static.path('images', 'missing-.png'), - 'mime_type': 'image/png', - }) - - @db.indexed_property(slot=2, default=0, acl=ACL.READ | ACL.CALC) - def downloads(self, value): return value - @db.indexed_property(slot=3, 
typecast=[], default=[0, 0], - sortable_serialise=lambda x: float(x[1]) / x[0] if x[0] else 0, + @db.stored_property(db.Aggregated, subtype=db.Blob()) + def previews(self, value): + return value + + @db.stored_property(db.Aggregated, subtype=model.Release(), + acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE) + def releases(self, value): + return value + + @releases.setter + def releases(self, value): + if value or this.request.method != 'POST': + self.invalidate_solutions() + return value + + @db.indexed_property(db.Numeric, slot=2, default=0, acl=ACL.READ | ACL.CALC) + def downloads(self, value): + return value + + @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.CALC) def rating(self, value): return value - @db.stored_property(typecast=[], default=[], acl=ACL.PUBLIC | ACL.LOCAL) + @db.stored_property(db.List, default=[], acl=ACL.PUBLIC | ACL.LOCAL) def dependencies(self, value): """Software dependencies. @@ -131,32 +99,20 @@ class Context(db.Resource): """ return value - @db.stored_property(typecast=dict, default={}, - acl=ACL.PUBLIC | ACL.LOCAL) - def aliases(self, value): - return value - - @db.stored_property(typecast=dict, default={}, acl=ACL.PUBLIC | ACL.LOCAL) - def packages(self, value): + @dependencies.setter + def dependencies(self, value): + if value or this.request.method != 'POST': + self.invalidate_solutions() return value - @staticmethod - def populate_images(props, svg): - if 'guid' in props: - from sugar_network.toolkit.sugar import color_svg - svg = color_svg(svg, props['guid']) + def deleted(self): + self.invalidate_solutions() - def convert(w, h): - png = toolkit.svg_to_png(svg, w, h) - return {'blob': png, - 'mime_type': 'image/png', - 'digest': hashlib.sha1(png.getvalue()).hexdigest(), - } + def restored(self): + self.invalidate_solutions() - props['artifact_icon'] = { - 'blob': StringIO(svg), - 'mime_type': 'image/svg+xml', - 'digest': hashlib.sha1(svg).hexdigest(), - } - props['icon'] = convert(55, 55) - props['logo'] = convert(140, 140) + def invalidate_solutions(self): + this.broadcast({ + 'event': 'release', + 'seqno': this.volume.releases_seqno.next(), + }) diff --git a/sugar_network/model/post.py b/sugar_network/model/post.py index 88c6956..107f354 100644 --- a/sugar_network/model/post.py +++ b/sugar_network/model/post.py @@ -13,39 +13,31 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
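# Context's 'type' property above is now validated through a composed
# List(subtype=Enum(...)) instead of the old typecast=[model.CONTEXT_TYPES]
# form; a small sketch of what the composed cast accepts:
from sugar_network import db, model

prop = db.List(name='type', subtype=db.Enum(model.CONTEXT_TYPES))
prop.typecast('activity')               # -> ['activity']
prop.typecast(['book', 'group'])        # -> ['book', 'group']
# prop.typecast('bundle')               # rejected: not in CONTEXT_TYPES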
-from sugar_network import db, model, static -from sugar_network.toolkit.router import Blob, ACL +from sugar_network import db, model +from sugar_network.toolkit.router import ACL +from sugar_network.toolkit.coroutine import this class Post(db.Resource): - @db.indexed_property(prefix='C', - acl=ACL.CREATE | ACL.READ) + @db.indexed_property(prefix='C', acl=ACL.CREATE | ACL.READ) def context(self, value): return value - @db.indexed_property(prefix='A', default='', - acl=ACL.CREATE | ACL.READ) + @db.indexed_property(prefix='A', default='', acl=ACL.CREATE | ACL.READ) def topic(self, value): return value - @topic.setter - def topic(self, value): - if value and not self['context']: - post = self.volume['post'].get(value) - self['context'] = post['context'] - return value - - @db.indexed_property(prefix='T', typecast=model.POST_TYPES) + @db.indexed_property(db.Enum, prefix='T', items=model.POST_TYPES) def type(self, value): return value - @db.indexed_property(slot=1, prefix='N', full_text=True, localized=True, + @db.indexed_property(db.Localized, slot=1, prefix='N', full_text=True, acl=ACL.CREATE | ACL.READ) def title(self, value): return value - @db.indexed_property(prefix='M', full_text=True, localized=True, + @db.indexed_property(db.Localized, prefix='M', full_text=True, acl=ACL.CREATE | ACL.READ) def message(self, value): return value @@ -54,40 +46,45 @@ class Post(db.Resource): def solution(self, value): return value - @db.indexed_property(prefix='V', typecast=model.RATINGS, default=0, + @db.indexed_property(db.Enum, prefix='V', items=range(5), default=0, acl=ACL.CREATE | ACL.READ) def vote(self, value): return value - @db.indexed_property(prefix='D', typecast=db.AggregatedType, - full_text=True, default=db.AggregatedType(), - fmt=lambda x: [i.get('message') for i in x.values()], - acl=ACL.READ | ACL.INSERT | ACL.REMOVE) + @vote.setter + def vote(self, value): + if value: + if self['topic']: + resource = this.volume['post'] + guid = self['topic'] + else: + resource = this.volume['context'] + guid = self['context'] + orig = resource[guid]['rating'] + resource.update(guid, {'rating': [orig[0] + 1, orig[1] + value]}) + return value + + @db.indexed_property(db.Aggregated, prefix='D', full_text=True, + subtype=db.Localized()) def comments(self, value): return value - @db.blob_property(mime_type='image/png') + @db.stored_property(db.Blob, mime_type='image/png', + default='missing-logo.png') def preview(self, value): - if value: - return value - return Blob({ - 'url': '/static/images/missing-logo.png', - 'blob': static.path('images', 'missing-logo.png'), - 'mime_type': 'image/png', - }) - - @db.blob_property() - def data(self, value): + return value + + @db.stored_property(db.Aggregated, subtype=db.Blob()) + def attachments(self, value): if value: value['name'] = self['title'] return value - @db.indexed_property(slot=2, default=0, acl=ACL.READ | ACL.CALC) + @db.indexed_property(db.Numeric, slot=2, default=0, + acl=ACL.READ | ACL.CALC) def downloads(self, value): return value - @db.indexed_property(slot=3, typecast=[], default=[0, 0], - sortable_serialise=lambda x: float(x[1]) / x[0] if x[0] else 0, - acl=ACL.READ | ACL.CALC) + @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.CALC) def rating(self, value): return value diff --git a/sugar_network/model/release.py b/sugar_network/model/release.py deleted file mode 100644 index 46eeaae..0000000 --- a/sugar_network/model/release.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (C) 2012-2013 Aleksey Lim -# -# This program is free software: you 
can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import xapian - -from sugar_network import db, model -from sugar_network.toolkit.router import ACL -from sugar_network.toolkit.licenses import GOOD_LICENSES -from sugar_network.toolkit.spec import parse_version - - -class Release(db.Resource): - - @db.indexed_property(prefix='C', - acl=ACL.CREATE | ACL.READ) - def context(self, value): - return value - - @context.setter - def context(self, value): - if self.request.principal: - authors = self.volume['context'].get(value)['author'] - if self.request.principal in authors: - self['layer'] = ('origin',) + tuple(self.layer) - return value - - @db.indexed_property(prefix='L', full_text=True, typecast=[GOOD_LICENSES], - acl=ACL.CREATE | ACL.READ) - def license(self, value): - return value - - @db.indexed_property(slot=1, prefix='V', fmt=lambda x: _fmt_version(x), - acl=ACL.CREATE | ACL.READ) - def version(self, value): - return value - - @db.indexed_property(prefix='S', default='stabile', - acl=ACL.CREATE | ACL.READ, typecast=model.STABILITIES) - def stability(self, value): - return value - - @db.indexed_property(prefix='N', full_text=True, localized=True, - default='', acl=ACL.CREATE | ACL.READ) - def notes(self, value): - return value - - @db.indexed_property(prefix='R', typecast=[], default=[], - acl=ACL.CREATE | ACL.READ) - def requires(self, value): - return value - - @db.blob_property() - def data(self, value): - return value - - -def _fmt_version(version): - version = parse_version(version) - # Convert to [(`version`, `modifier`)] - version = zip(*([iter(version)] * 2)) - major, modifier = version.pop(0) - - result = sum([(rank % 10000) * pow(10000, 3 - i) - for i, rank in enumerate((major + [0, 0])[:3])]) - result += (5 + modifier) * 1000 - if modifier and version: - minor, __ = version.pop(0) - if minor: - result += (minor[0] % 1000) - - return xapian.sortable_serialise(result) diff --git a/sugar_network/model/report.py b/sugar_network/model/report.py index 84db43a..980c3ff 100644 --- a/sugar_network/model/report.py +++ b/sugar_network/model/report.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2013 Aleksey Lim +# Copyright (C) 2012-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -17,6 +17,19 @@ from sugar_network import db from sugar_network.toolkit.router import ACL +class _Solution(db.Property): + + def __init__(self, **kwargs): + db.Property.__init__(self, default=[], **kwargs) + + def typecast(self, value): + return [] if value is None else list(value) + + def encode(self, value): + for i in value: + yield i[0] + + class Report(db.Resource): @db.indexed_property(prefix='C', acl=ACL.CREATE | ACL.READ) @@ -24,28 +37,27 @@ class Report(db.Resource): return value @db.indexed_property(prefix='V', default='', acl=ACL.CREATE | ACL.READ) - def release(self, value): + def version(self, value): return value - @release.setter - def release(self, value): - if value and 
'version' not in self.props and 'release' in value: - version = self.volume['release'].get(value) - self['version'] = version['version'] + @db.indexed_property(prefix='E', full_text=True, acl=ACL.CREATE | ACL.READ) + def error(self, value): return value - @db.stored_property(default='', acl=ACL.CREATE | ACL.READ) - def version(self, value): + @db.indexed_property(prefix='U', full_text=True, acl=ACL.CREATE | ACL.READ) + def uname(self, value): return value - @db.stored_property(typecast=dict, default={}, acl=ACL.CREATE | ACL.READ) - def environ(self, value): + @db.indexed_property(db.Dict, prefix='L', full_text=True, + acl=ACL.CREATE | ACL.READ) + def lsb_release(self, value): return value - @db.indexed_property(prefix='T', acl=ACL.CREATE | ACL.READ) - def error(self, value): + @db.indexed_property(_Solution, prefix='S', full_text=True, + acl=ACL.CREATE | ACL.READ) + def solution(self, value): return value - @db.blob_property() - def data(self, value): + @db.stored_property(db.Aggregated, subtype=db.Blob()) + def logs(self, value): return value diff --git a/sugar_network/model/routes.py b/sugar_network/model/routes.py index c8f8da6..ff0377f 100644 --- a/sugar_network/model/routes.py +++ b/sugar_network/model/routes.py @@ -1,4 +1,4 @@ -# Copyright (C) 2013 Aleksey Lim +# Copyright (C) 2013-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -14,55 +14,21 @@ # along with this program. If not, see . import logging -import mimetypes -from os.path import split -from sugar_network import static, db -from sugar_network.toolkit.router import route, fallbackroute, Blob, ACL +from sugar_network.db import files +from sugar_network.toolkit.router import route +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit import coroutine _logger = logging.getLogger('model.routes') -class VolumeRoutes(db.Routes): - - @route('GET', ['context', None], cmd='feed', - mime_type='application/json') - def feed(self, request, distro): - context = self.volume['context'].get(request.guid) - releases = self.volume['release'] - versions = [] - - impls, __ = releases.find(context=context.guid, - not_layer='deleted', **request) - for impl in impls: - version = impl.properties([ - 'guid', 'ctime', 'layer', 'author', 'tags', - 'version', 'stability', 'license', 'notes', - ]) - if context['dependencies']: - requires = version.setdefault('requires', {}) - for i in context['dependencies']: - requires.setdefault(i, {}) - version['data'] = data = impl.meta('data') - for key in ('mtime', 'seqno', 'blob'): - if key in data: - del data[key] - versions.append(version) - - result = {'releases': versions} - if distro: - aliases = context['aliases'].get(distro) - if aliases and 'binary' in aliases: - result['packages'] = aliases['binary'] - return result - - class FrontRoutes(object): def __init__(self): - self._pooler = _Pooler() + self._spooler = coroutine.Spooler() + this.broadcast = self._broadcast @route('GET', mime_type='text/html') def hello(self): @@ -80,34 +46,14 @@ class FrontRoutes(object): response.content_length = 0 @route('GET', cmd='subscribe', mime_type='text/event-stream') - def subscribe(self, request=None, response=None, ping=False, **condition): + def subscribe(self, request=None, response=None, **condition): """Subscribe to Server-Sent Events.""" if request is not None and not condition: condition = request if response is not None: response.content_type = 'text/event-stream' 
response['Cache-Control'] = 'no-cache'
-        return self._pull_events(request, ping, condition)
-
-    @route('POST', cmd='broadcast',
-            mime_type='application/json', acl=ACL.LOCAL)
-    def broadcast(self, event=None, request=None):
-        if request is not None:
-            event = request.content
-        _logger.debug('Broadcast event: %r', event)
-        self._pooler.notify_all(event)
-
-    @fallbackroute('GET', ['static'])
-    def get_static(self, request):
-        path = static.path(*request.path[1:])
-        if not mimetypes.inited:
-            mimetypes.init()
-        mime_type = mimetypes.types_map.get('.' + path.rsplit('.', 1)[-1])
-        return Blob({
-            'blob': path,
-            'filename': split(path)[-1],
-            'mime_type': mime_type,
-            })
+        return self._pull_events(request, condition)
 
     @route('GET', ['robots.txt'], mime_type='text/plain')
     def robots(self, request, response):
@@ -115,34 +61,29 @@ class FrontRoutes(object):
 
     @route('GET', ['favicon.ico'])
     def favicon(self, request, response):
-        return Blob({
-            'blob': static.path('favicon.ico'),
-            'mime_type': 'image/x-icon',
-            })
-
-    def _pull_events(self, request, ping, condition):
-        _logger.debug('Start subscription, total=%s', self._pooler.waiters + 1)
-
-        if ping:
-            # XXX The whole commands' kwargs handling should be redesigned
-            if 'ping' in condition:
-                condition.pop('ping')
-            # If non-greenlet application needs only to initiate
-            # a subscription and do not stuck in waiting for the first event,
-            # it should pass `ping` argument to return fake event to unblock
-            # `GET /?cmd=subscribe` call.
-            yield {'event': 'pong'}
-
-        rfile = None
+        return files.get('favicon.ico')
+
+    def _broadcast(self, event):
+        _logger.debug('Broadcast event: %r', event)
+        self._spooler.notify_all(event)
+
+    def _pull_events(self, request, condition):
+        _logger.debug('Start %s-th subscription', self._spooler.waiters + 1)
+
+        # Unblock `GET /?cmd=subscribe` call to let a non-greenlet application
+        # initiate a subscription without getting stuck waiting for the 1st event
+        yield {'event': 'pong'}
+
+        subscription = None
         if request is not None:
-            rfile = request.content_stream
-        if rfile is not None:
-            coroutine.spawn(self._waiter_for_closing, rfile)
+            subscription = request.content_stream
+        if subscription is not None:
+            coroutine.spawn(self._wait_for_closing, subscription)
 
         while True:
-            event = self._pooler.wait()
+            event = self._spooler.wait()
             if not isinstance(event, dict):
-                if event is rfile:
+                if event is subscription:
                     break
                 else:
                     continue
@@ -155,48 +96,13 @@ class FrontRoutes(object):
             else:
                 yield event
 
-        _logger.debug('Stop subscription, total=%s', self._pooler.waiters)
+        _logger.debug('Stop %s-th subscription', self._spooler.waiters)
 
-    def _waiter_for_closing(self, rfile):
+    def _wait_for_closing(self, rfile):
         try:
             coroutine.select([rfile.fileno()], [], [])
         finally:
-            self._pooler.notify_all(rfile)
-
-
-class _Pooler(object):
-    """One-producer-to-many-consumers events delivery."""
-
-    def __init__(self):
-        self._value = None
-        self._waiters = 0
-        self._ready = coroutine.Event()
-        self._open = coroutine.Event()
-        self._open.set()
-
-    @property
-    def waiters(self):
-        return self._waiters
-
-    def wait(self):
-        self._open.wait()
-        self._waiters += 1
-        try:
-            self._ready.wait()
-        finally:
-            self._waiters -= 1
-            if self._waiters == 0:
-                self._ready.clear()
-                self._open.set()
-        return self._value
-
-    def notify_all(self, value=None):
-        self._open.wait()
-        if not self._waiters:
-            return
-        self._open.clear()
-        self._value = value
-        self._ready.set()
+            self._spooler.notify_all(rfile)
 
 
 _HELLO_HTML = """\
diff --git 
a/sugar_network/model/user.py b/sugar_network/model/user.py index 69d0d42..b44093e 100644 --- a/sugar_network/model/user.py +++ b/sugar_network/model/user.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2013 Aleksey Lim +# Copyright (C) 2012-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -27,10 +27,10 @@ class User(db.Resource): def location(self, value): return value - @db.indexed_property(slot=2, prefix='B', default=0, typecast=int) + @db.indexed_property(db.Numeric, slot=2, prefix='B', default=0) def birthday(self, value): return value - @db.blob_property(acl=ACL.CREATE, mime_type='text/plain') + @db.stored_property(db.Blob, acl=ACL.CREATE, mime_type='text/plain') def pubkey(self, value): return value diff --git a/sugar_network/node/master.py b/sugar_network/node/master.py index 19a8cf1..c7c22e0 100644 --- a/sugar_network/node/master.py +++ b/sugar_network/node/master.py @@ -20,12 +20,19 @@ from Cookie import SimpleCookie from os.path import join from sugar_network import node, toolkit -from sugar_network.node import sync, stats_user, files, volume, downloads, obs +from sugar_network.node import sync, stats_user, files, model, downloads, obs from sugar_network.node.routes import NodeRoutes from sugar_network.toolkit.router import route, ACL -from sugar_network.toolkit import http, coroutine, enforce +from sugar_network.toolkit import http, enforce +RESOURCES = ( + 'sugar_network.node.model', + 'sugar_network.model.post', + 'sugar_network.model.report', + 'sugar_network.model.user', + ) + _ONE_WAY_DOCUMENTS = ['report'] _logger = logging.getLogger('node.master') @@ -33,12 +40,12 @@ _logger = logging.getLogger('node.master') class MasterRoutes(NodeRoutes): - def __init__(self, guid, volume_): - NodeRoutes.__init__(self, guid, volume_) + def __init__(self, guid, volume, **kwargs): + NodeRoutes.__init__(self, guid, volume=volume, **kwargs) self._pulls = { 'pull': lambda **kwargs: - ('diff', None, volume.diff(self.volume, + ('diff', None, model.diff(self.volume, ignore_documents=_ONE_WAY_DOCUMENTS, **kwargs)), 'files_pull': lambda **kwargs: ('files_diff', None, self._files.diff(**kwargs)), @@ -50,7 +57,7 @@ class MasterRoutes(NodeRoutes): if node.files_root.value: self._files = files.Index(node.files_root.value, - join(volume_.root, 'files.index'), volume_.seqno) + join(volume.root, 'files.index'), volume.seqno) @route('POST', cmd='sync', acl=ACL.AUTH) @@ -137,25 +144,13 @@ class MasterRoutes(NodeRoutes): enforce(node.files_root.value, http.BadRequest, 'Disabled') aliases = self.volume['context'].get(request.guid)['aliases'] enforce(aliases, http.BadRequest, 'Nothing to presolve') - return obs.presolve(aliases, node.files_root.value) + return obs.presolve(None, aliases, node.files_root.value) def status(self): result = NodeRoutes.status(self) result['level'] = 'master' return result - def after_post(self, doc): - if doc.metadata.name == 'context': - shift_releases = doc.modified('dependencies') - if doc.modified('aliases'): - # TODO Already launched job should be killed - coroutine.spawn(self._resolve_aliases, doc) - shift_releases = True - if shift_releases and not doc.is_new: - # Shift checkpoint to invalidate solutions - self.volume['release'].checkpoint() - NodeRoutes.after_post(self, doc) - def _push(self, stream): reply = [] cookie = _Cookie() @@ -172,8 +167,7 @@ class MasterRoutes(NodeRoutes): if self._files is not None: cookie['files_pull'].include(packet['sequence']) elif 
packet.name == 'diff': - seq, ack_seq = volume.merge(self.volume, packet, - stats=self._stats) + seq, ack_seq = model.merge(self.volume, packet) reply.append(('ack', { 'ack': ack_seq, 'sequence': seq, @@ -189,43 +183,6 @@ class MasterRoutes(NodeRoutes): return reply, cookie - def _resolve_aliases(self, doc): - packages = {} - for repo in obs.get_repos(): - alias = doc['aliases'].get(repo['distributor_id']) - if not alias: - continue - package = packages[repo['name']] = {} - for kind in ('binary', 'devel'): - obs_fails = [] - for to_resolve in alias.get(kind) or []: - if not to_resolve: - continue - try: - for arch in repo['arches']: - obs.resolve(repo['name'], arch, to_resolve) - except Exception, error: - _logger.warning('Failed to resolve %r on %s', - to_resolve, repo['name']) - obs_fails.append(str(error)) - continue - package[kind] = to_resolve - break - else: - package['status'] = '; '.join(obs_fails) - break - else: - if 'binary' in package: - package['status'] = 'success' - else: - package['status'] = 'no packages to resolve' - - if packages != doc['packages']: - self.volume['context'].update(doc.guid, {'packages': packages}) - - if node.files_root.value: - obs.presolve(doc['aliases'], node.files_root.value) - class _Cookie(list): diff --git a/sugar_network/node/model.py b/sugar_network/node/model.py new file mode 100644 index 0000000..2681b2d --- /dev/null +++ b/sugar_network/node/model.py @@ -0,0 +1,177 @@ +# Copyright (C) 2012-2014 Aleksey Lim +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+
+import logging
+
+from sugar_network import db, toolkit
+from sugar_network.model import Release, context
+from sugar_network.node import obs
+from sugar_network.toolkit.router import ACL
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import http, coroutine, enforce
+
+
+_logger = logging.getLogger('node.model')
+_presolve_queue = None
+
+
+class _Release(Release):
+
+    _package_cast = db.Dict(db.List())
+
+    def typecast(self, value):
+        if not this.resource.exists or 'package' not in this.resource['type']:
+            return Release.typecast(self, value)
+
+        value = self._package_cast.typecast(value)
+        enforce(value.get('binary'), http.BadRequest, 'No binary aliases')
+
+        distro = this.request.key
+        if distro == '*':
+            lsb_id = None
+            lsb_release = None
+        elif '-' in this.request.key:
+            lsb_id, lsb_release = distro.split('-', 1)
+        else:
+            lsb_id = distro
+            lsb_release = None
+        releases = this.resource.record.get('releases')
+        statuses = releases['value'].setdefault('status', {})
+        to_presolve = []
+
+        for repo in obs.get_repos():
+            if lsb_id and lsb_id != repo['lsb_id'] or \
+                    lsb_release and lsb_release != repo['lsb_release']:
+                continue
+            # Make sure there are no alias overrides
+            if not lsb_id and repo['lsb_id'] in releases['value'] or \
+                    not lsb_release and repo['name'] in releases['value']:
+                continue
+            pkgs = sum([value.get(i, []) for i in ('binary', 'devel')], [])
+            try:
+                for arch in repo['arches']:
+                    obs.resolve(repo['name'], arch, pkgs)
+            except Exception, error:
+                _logger.warning('Failed to resolve %r on %s',
+                        pkgs, repo['name'])
+                status = str(error)
+            else:
+                to_presolve.append((repo['name'], pkgs))
+                status = 'success'
+            statuses[repo['name']] = status
+
+        if to_presolve and _presolve_queue is not None:
+            _presolve_queue.put(to_presolve)
+        if statuses:
+            this.resource.record.set('releases', **releases)
+
+        return value
+
+    def teardown(self, value):
+        if 'package' not in this.resource['type']:
+            return Release.teardown(self, value)
+        # TODO Delete presolved files
+
+
+class Context(context.Context):
+
+    @db.stored_property(db.Aggregated, subtype=_Release(),
+            acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE)
+    def releases(self, value):
+        return value
+
+    @releases.setter
+    def releases(self, value):
+        if value or this.request.method != 'POST':
+            self.invalidate_solutions()
+        return value
+
+
+def diff(volume, in_seq, out_seq=None, exclude_seq=None, layer=None,
+        ignore_documents=None, **kwargs):
+    if out_seq is None:
+        out_seq = toolkit.Sequence([])
+    is_the_only_seq = not out_seq
+    if layer:
+        if isinstance(layer, basestring):
+            layer = [layer]
+        layer.append('common')
+    try:
+        for resource, directory in volume.items():
+            if ignore_documents and resource in ignore_documents:
+                continue
+            coroutine.dispatch()
+            directory.commit()
+            yield {'resource': resource}
+            for guid, patch in directory.diff(in_seq, exclude_seq,
+                    layer=layer if resource == 'context' else None):
+                adiff = {}
+                adiff_seq = toolkit.Sequence()
+                for prop, meta, seqno in patch:
+                    adiff[prop] = meta
+                    adiff_seq.include(seqno, seqno)
+                if adiff:
+                    yield {'guid': guid, 'diff': adiff}
+                out_seq.include(adiff_seq)
+        if is_the_only_seq:
+            # There is only one diff, so, we can stretch it to remove all holes
+            out_seq.stretch()
+    except StopIteration:
+        pass
+
+    yield {'commit': out_seq}
+
+
+def merge(volume, records):
+    directory = None
+    commit_seq = toolkit.Sequence()
+    merged_seq = toolkit.Sequence()
+    synced = False
+
+    for record in records:
+        resource_ = record.get('resource')
+        
if resource_: + resource = resource_ + directory = volume[resource_] + continue + + if 'guid' in record: + guid = record['guid'] + existed = directory.exists(guid) + if existed: + layer = directory.get(guid)['layer'] + seqno, merged = directory.merge(**record) + synced = synced or merged + if seqno is not None: + merged_seq.include(seqno, seqno) + continue + + commit = record.get('commit') + if commit is not None: + commit_seq.include(commit) + continue + + if synced: + this.broadcast({'event': 'sync'}) + + return commit_seq, merged_seq + + +def presolve(presolve_path): + global _presolve_queue + _presolve_queue = coroutine.Queue() + + for repo_name, pkgs in _presolve_queue: + obs.presolve(repo_name, pkgs, presolve_path) diff --git a/sugar_network/node/obs.py b/sugar_network/node/obs.py index 1d8a547..6ef9e55 100644 --- a/sugar_network/node/obs.py +++ b/sugar_network/node/obs.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2013 Aleksey Lim +# Copyright (C) 2012-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -37,7 +37,7 @@ obs_presolve_project = Option( default='presolve') _logger = logging.getLogger('node.obs') -_client = None +_conn = None _repos = {} @@ -45,82 +45,68 @@ def get_repos(): return _get_repos(obs_project.value) -def resolve(repo, arch, names): - for package in names: - _request('GET', ['resolve'], params={ - 'project': obs_project.value, - 'repository': repo, - 'arch': arch, - 'package': package, - }) +def resolve(repo, arch, packages): + _request('GET', ['resolve'], params={ + 'project': obs_project.value, + 'repository': repo, + 'arch': arch, + 'package': packages, + }) -def presolve(aliases, dst_path): +def presolve(repo_name, packages, dst_path): for repo in _get_repos(obs_presolve_project.value): - # Presolves make sense only for XO, thus, for Fedora - alias = aliases.get('Fedora') - if not alias: - continue - - name_variants = alias['binary'] - while name_variants: - names = name_variants.pop() - presolves = [] + dst_dir = join(dst_path, 'packages', + obs_presolve_project.value, repo['name']) + result = {} + to_download = [] + + for package in packages: + files = result.setdefault(package, {}) try: - for arch in repo['arches']: - for package in names: - response = _request('GET', ['resolve'], params={ - 'project': obs_presolve_project.value, - 'repository': repo['name'], - 'arch': arch, - 'package': package, - 'withdeps': '1', - 'exclude': 'sweets-sugar', - }) - binaries = [] - for pkg in response.findall('binary'): - binaries.append(dict(pkg.items())) - presolves.append((package, binaries)) + for repo_arch in repo['arches']: + response = _request('GET', ['resolve'], params={ + 'project': obs_presolve_project.value, + 'repository': '%(lsb_id)s-%(lsb_release)s' % repo, + 'arch': repo_arch, + 'package': package, + 'withdeps': '1', + 'exclude': 'sweets-sugar', + }) + for binary in response.findall('binary'): + binary = dict(binary.items()) + arch = binary.pop('arch') + url = binary.pop('url') + filename = binary['path'] = basename(url) + path = join(dst_dir, filename) + if not exists(path): + to_download.append((url, path)) + files.setdefault(arch, []).append(binary) except Exception: toolkit.exception(_logger, 'Failed to presolve %r on %s', - names, repo['name']) + packages, repo['name']) continue - _logger.debug('Presolve %r on %s', names, repo['name']) - - dst_dir = join(dst_path, 'packages', - obs_presolve_project.value, repo['name']) - if not exists(dst_dir): 
- os.makedirs(dst_dir) - result = {} - - for package, binaries in presolves: - files = [] - for binary in binaries: - arch = binary.pop('arch') - if not files: - result.setdefault(package, {})[arch] = files - url = binary.pop('url') - filename = binary['path'] = basename(url) - path = join(dst_dir, filename) - if not exists(path): - _client.download(url, path) - files.append(binary) + _logger.debug('Presolve %r on %s', packages, repo['name']) - for package, info in result.items(): - with toolkit.new_file(join(dst_dir, package)) as f: - json.dump(info, f) + if not exists(dst_dir): + os.makedirs(dst_dir) + for url, path in to_download: + _conn.download(url, path) + for package, info in result.items(): + with toolkit.new_file(join(dst_dir, package)) as f: + json.dump(info, f) - return {'repo': repo['name'], 'packages': result} + return {'repo': repo['name'], 'packages': result} def _request(*args, **kwargs): - global _client + global _conn - if _client is None: - _client = http.Connection(obs_url.value) + if _conn is None: + _conn = http.Connection(obs_url.value) - response = _client.request(*args, allowed=(400, 404), **kwargs) + response = _conn.request(*args, allowed=(400, 404), **kwargs) enforce(response.headers.get('Content-Type') == 'text/xml', 'Irregular OBS response') reply = ElementTree.fromstring(response.content) @@ -144,8 +130,10 @@ def _get_repos(project): for repo in _request('GET', ['build', project]).findall('entry'): repo = repo.get('name') arches = _request('GET', ['build', project, repo]) + lsb_id, lsb_release = repo.split('-', 1) repos.append({ - 'distributor_id': repo.split('-', 1)[0], + 'lsb_id': lsb_id, + 'lsb_release': lsb_release, 'name': repo, 'arches': [i.get('name') for i in arches.findall('entry')], }) diff --git a/sugar_network/node/routes.py b/sugar_network/node/routes.py index eb48c70..6323cbc 100644 --- a/sugar_network/node/routes.py +++ b/sugar_network/node/routes.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2013 Aleksey Lim +# Copyright (C) 2012-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -15,23 +15,21 @@ import os import time -import shutil -import gettext import logging import hashlib -from contextlib import contextmanager from ConfigParser import ConfigParser from os.path import join, isdir, exists -from sugar_network import node, toolkit, model -from sugar_network.node import stats_node, stats_user -from sugar_network.model.context import Context +from sugar_network import db, node, toolkit, model +from sugar_network.db import files +from sugar_network.node import stats_user # pylint: disable-msg=W0611 from sugar_network.toolkit.router import route, preroute, postroute, ACL from sugar_network.toolkit.router import Unauthorized, Request, fallbackroute -from sugar_network.toolkit.spec import EMPTY_LICENSE from sugar_network.toolkit.spec import parse_requires, ensure_requires +from sugar_network.toolkit.spec import parse_version from sugar_network.toolkit.bundle import Bundle +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit import pylru, http, coroutine, exception, enforce @@ -41,28 +39,16 @@ _AUTH_POOL_SIZE = 1024 _logger = logging.getLogger('node.routes') -class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): +class NodeRoutes(db.Routes, model.FrontRoutes): - def __init__(self, guid, volume): - model.VolumeRoutes.__init__(self, volume) + def __init__(self, guid, **kwargs): + db.Routes.__init__(self, **kwargs) 
model.FrontRoutes.__init__(self) - volume.broadcast = self.broadcast - self._guid = guid - self._stats = None self._auth_pool = pylru.lrucache(_AUTH_POOL_SIZE) self._auth_config = None self._auth_config_mtime = 0 - if stats_node.stats_node.value: - stats_path = join(node.stats_root.value, 'node') - self._stats = stats_node.Sniffer(volume, stats_path) - coroutine.spawn(self._commit_stats) - - def close(self): - if self._stats is not None: - self._stats.suspend() - @property def guid(self): return self._guid @@ -80,33 +66,12 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): @route('GET', cmd='status', mime_type='application/json') def status(self): - documents = {} - for name, directory in self.volume.items(): - documents[name] = {'mtime': directory.mtime} - return {'guid': self._guid, 'resources': documents} - - @route('GET', cmd='stats', arguments={ - 'start': int, 'end': int, 'records': int, 'source': list}, - mime_type='application/json') - def stats(self, start, end, records, source): - enforce(self._stats is not None, 'Node stats is disabled') - if not source: - return {} - - if records > _MAX_STAT_RECORDS: - _logger.debug('Decrease %d stats records number to %d', - records, _MAX_STAT_RECORDS) - records = _MAX_STAT_RECORDS - elif records <= 0: - records = _MAX_STAT_RECORDS / 10 - - stats = {} - for i in source: - enforce('.' in i, 'Misnamed source') - db_name, ds_name = i.split('.', 1) - stats.setdefault(db_name, []).append(ds_name) - - return self._stats.report(stats, start, end, records) + return {'guid': self._guid, + 'seqno': { + 'db': self.volume.seqno.value, + 'releases': self.volume.releases_seqno.value, + }, + } @route('POST', ['user'], mime_type='application/json') def register(self, request): @@ -149,23 +114,19 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): response.content_type = 'application/json' return result - @route('POST', ['release'], cmd='submit', + @route('POST', ['context'], cmd='submit', arguments={'initial': False}, mime_type='application/json', acl=ACL.AUTH) - def submit_release(self, request, document): - with toolkit.NamedTemporaryFile() as blob: - shutil.copyfileobj(request.content_stream, blob) - blob.flush() - with load_bundle(self.volume, request, blob.name) as impl: - impl['data']['blob'] = blob.name - return impl['guid'] - - @route('DELETE', [None, None], acl=ACL.AUTH | ACL.AUTHOR) - def delete(self, request): - # Servers data should not be deleted immediately - # to let master-slave synchronization possible - request.call(method='PUT', path=request.path, - content={'layer': ['deleted']}) + def submit_release(self, request, initial): + blob = files.post(request.content_stream) + try: + context, release = model.load_bundle(blob, initial=initial) + except Exception: + files.delete(blob.digest) + raise + this.call(method='POST', path=['context', context, 'releases'], + content_type='application/json', content=release) + return blob.digest @route('PUT', [None, None], cmd='attach', acl=ACL.AUTH | ACL.SUPERUSER) def attach(self, request): @@ -186,43 +147,37 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): @route('GET', ['context', None], cmd='clone', arguments={'requires': list}) def get_clone(self, request, response): - return self._get_clone(request, response) + deps = {} + if 'requires' in request: + for i in request['requires']: + deps.update(parse_requires(i)) + version = request.get('version') + if version: + version = parse_version(version)[0] + stability = request.get('stability') or 'stable' + + recent = None + 
context = self.volume['context'][request.guid] + for release in context['releases'].values(): + release = release.get('value') + if not release: + continue + spec = release['spec']['*-*'] + if version and version != release['release'][0] or \ + stability and stability != release['stability'] or \ + deps and not ensure_requires(spec['requires'], deps): + continue + if recent is None or release['release'] > recent['release']: + recent = release + enforce(recent, http.NotFound, 'No releases found') + + response.meta = recent + return files.get(recent['spec']['*-*']['bundle']) @route('HEAD', ['context', None], cmd='clone', arguments={'requires': list}) def head_clone(self, request, response): - self._get_clone(request, response) - - @route('GET', ['context', None], cmd='deplist', - mime_type='application/json', arguments={'requires': list}) - def deplist(self, request, repo): - """List of native packages context is dependening on. - - Command return only GNU/Linux package names and ignores - Sugar Network dependencies. - - :param repo: - OBS repository name to get package names for, e.g., - Fedora-14 - :returns: - list of package names - - """ - enforce(repo, 'Argument %r should be set', 'repo') - - spec = self._solve(request).meta('data')['spec']['*-*'] - common_deps = self.volume['context'].get(request.guid)['dependencies'] - result = [] - - for package in set(spec.get('requires') or []) | set(common_deps): - if package == 'sugar': - continue - dep = self.volume['context'].get(package) - enforce(repo in dep['packages'], - 'No packages for %r on %r', package, repo) - result.extend(dep['packages'][repo].get('binary') or []) - - return result + self.get_clone(request, response) @route('GET', ['user', None], cmd='stats-info', mime_type='application/json', acl=ACL.AUTH) @@ -246,15 +201,6 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): for timestamp, values in values: rrd[name].put(values, timestamp) - @route('GET', ['report', None], cmd='log', mime_type='text/html') - def log(self, request): - # In further implementations, `data` might be a tarball - data = self.volume[request.resource].get(request.guid).meta('data') - if data and 'blob' in data: - return file(data['blob'], 'rb') - else: - return '' - @preroute def preroute(self, op, request, response): if op.acl & ACL.AUTH and request.principal is None: @@ -277,22 +223,11 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): enforce(self.authorize(request.principal, 'root'), http.Forbidden, 'Operation is permitted only for superusers') - @postroute - def postroute(self, request, response, result, error): - if error is None or isinstance(error, http.StatusPass): - if self._stats is not None: - self._stats.log(request) - - def on_create(self, request, props, event): + def on_create(self, request, props): if request.resource == 'user': - with file(props['pubkey']['blob']) as f: + with file(files.get(props['pubkey']).path) as f: props['guid'] = str(hashlib.sha1(f.read()).hexdigest()) - model.VolumeRoutes.on_create(self, request, props, event) - - def on_update(self, request, props, event): - model.VolumeRoutes.on_update(self, request, props, event) - if 'deleted' in props.get('layer', []): - event['event'] = 'delete' + db.Routes.on_create(self, request, props) def on_aggprop_update(self, request, prop, value): if prop.acl & ACL.AUTHOR: @@ -300,27 +235,6 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): elif value is not None: self._enforce_authority(request, value.get('author')) - def find(self, request, reply): - limit = 
request.get('limit') - if limit is None or limit < 0: - request['limit'] = node.find_limit.value - elif limit > node.find_limit.value: - _logger.warning('The find limit is restricted to %s', - node.find_limit.value) - request['limit'] = node.find_limit.value - layer = request.setdefault('layer', []) - if 'deleted' in layer: - _logger.warning('Requesting "deleted" layer') - layer.remove('deleted') - request.add('not_layer', 'deleted') - return model.VolumeRoutes.find(self, request, reply) - - def get(self, request, reply): - doc = self.volume[request.resource].get(request.guid) - enforce('deleted' not in doc['layer'], http.NotFound, - 'Resource deleted') - return model.VolumeRoutes.get(self, request, reply) - def authenticate(self, auth): enforce(auth.scheme == 'sugar', http.BadRequest, 'Unknown authentication scheme') @@ -329,8 +243,9 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): from M2Crypto import RSA + pubkey = self.volume['user'][auth.login]['pubkey'] + key = RSA.load_pub_key(files.get(pubkey).path) data = hashlib.sha1('%s:%s' % (auth.login, auth.nonce)).digest() - key = RSA.load_pub_key(self.volume['user'].path(auth.login, 'pubkey')) enforce(key.verify(data, auth.signature.decode('hex')), http.Forbidden, 'Bad credentials') @@ -356,52 +271,6 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): return self._auth_config.get(user, role).strip().lower() in \ ('true', 'on', '1', 'allow') - def _commit_stats(self): - while True: - coroutine.sleep(stats_node.stats_node_step.value) - self._stats.commit() - - def _solve(self, request): - requires = {} - if 'requires' in request: - for i in request['requires']: - requires.update(parse_requires(i)) - request.pop('requires') - else: - request['limit'] = 1 - - if 'stability' not in request: - request['stability'] = 'stable' - - impls, __ = self.volume['release'].find( - context=request.guid, order_by='-version', not_layer='deleted', - **request) - impl = None - for impl in impls: - if requires: - impl_deps = impl.meta('data')['spec']['*-*']['requires'] - if not ensure_requires(impl_deps, requires): - continue - break - else: - raise http.NotFound('No releases found') - return impl - - def _get_clone(self, request, response): - impl = self._solve(request) - result = request.call(method=request.method, - path=['release', impl['guid'], 'data'], - response=response) - response.meta = impl.properties([ - 'guid', 'ctime', 'layer', 'author', 'tags', - 'context', 'version', 'stability', 'license', 'notes', - ]) - response.meta['data'] = data = impl.meta('data') - for key in ('mtime', 'seqno', 'blob'): - if key in data: - del data[key] - return result - def _enforce_authority(self, request, author=None): if request.resource == 'user': allowed = (request.principal == request.guid) @@ -412,222 +281,3 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes): allowed = request.principal in author enforce(allowed or self.authorize(request.principal, 'root'), http.Forbidden, 'Operation is permitted only for authors') - - -def generate_node_stats(volume, path): - tmp_path = toolkit.mkdtemp() - new_stats = stats_node.Sniffer(volume, tmp_path, True) - old_stats = stats_node.Sniffer(volume, path) - - def timeline(ts): - ts = long(ts) - end = long(time.time()) - step = None - - archives = {} - for rra in stats_node.stats_node_rras.value: - a_step, a_size = [long(i) for i in rra.split(':')[-2:]] - a_step *= stats_node.stats_node_step.value - a_start = end - min(end, a_step * a_size) - if archives.setdefault(a_start, a_step) > a_step: - 
archives[a_start] = a_step - archives = list(sorted(archives.items())) - - try: - while ts <= end: - while not step or archives and ts >= archives[0][0]: - archive_start, step = archives.pop(0) - ts = max(ts / step * step, archive_start) - yield ts, ts + step - 1, step - ts += step - except GeneratorExit: - shutil.rmtree(tmp_path, ignore_errors=True) - - start = next(volume['context'].find(limit=1, order_by='ctime')[0])['ctime'] - for left, right, step in timeline(start): - for resource, props in [ - ('user', []), - ('context', []), - ('release', ['context']), - ('report', ['context', 'release']), - ('post', ['context', 'topic', 'type', 'vote']), - ]: - objs, __ = volume[resource].find( - query='ctime:%s..%s' % (left, right)) - for obj in objs: - request = Request(method='POST', path=[resource], - content=obj.properties(props)) - new_stats.log(request) - for resource, props in [ - ('user', ['layer']), - ('context', ['layer']), - ('release', ['layer']), - ('report', ['layer']), - ('post', ['layer']), - ]: - objs, __ = volume[resource].find( - query='mtime:%s..%s' % (left, right)) - for obj in objs: - if 'deleted' in obj['layer']: - request = Request(method='DELETE', - path=[resource, obj.guid]) - else: - request = Request(method='PUT', path=[resource, obj.guid], - content=obj.properties(props)) - new_stats.log(request) - downloaded = {} - for resource in ('context', 'post'): - stats = old_stats.report( - {resource: ['downloaded']}, left - step, right, 1) - if not stats.get(resource): - continue - stats = stats[resource][-1][1].get('downloaded') - if stats: - downloaded[resource] = {'downloaded': stats} - new_stats.commit(left + (right - left) / 2, downloaded) - - new_stats.commit_objects(True) - shutil.rmtree(path) - shutil.move(tmp_path, path) - - -@contextmanager -def load_bundle(volume, request, bundle_path): - impl = request.copy() - initial = False - if 'initial' in impl: - initial = impl.pop('initial') - data = impl.setdefault('data', {}) - contexts = volume['context'] - context = impl.get('context') - context_meta = None - impls = volume['release'] - - try: - bundle = Bundle(bundle_path, mime_type='application/zip') - except Exception: - _logger.debug('Load unrecognized bundle from %r', bundle_path) - context_type = 'book' - else: - _logger.debug('Load Sugar Activity bundle from %r', bundle_path) - context_type = 'activity' - unpack_size = 0 - - with bundle: - changelog = join(bundle.rootdir, 'CHANGELOG') - for arcname in bundle.get_names(): - if changelog and arcname == changelog: - with bundle.extractfile(changelog) as f: - impl['notes'] = f.read() - changelog = None - unpack_size += bundle.getmember(arcname).size - spec = bundle.get_spec() - context_meta = _load_context_metadata(bundle, spec) - if 'requires' in impl: - spec.requires.update(parse_requires(impl.pop('requires'))) - - context = impl['context'] = spec['context'] - impl['version'] = spec['version'] - impl['stability'] = spec['stability'] - if spec['license'] is not EMPTY_LICENSE: - impl['license'] = spec['license'] - requires = impl['requires'] = [] - for dep_name, dep in spec.requires.items(): - found = False - for version in dep.versions_range(): - requires.append('%s-%s' % (dep_name, version)) - found = True - if not found: - requires.append(dep_name) - - data['spec'] = {'*-*': { - 'commands': spec.commands, - 'requires': spec.requires, - }} - data['unpack_size'] = unpack_size - data['mime_type'] = 'application/vnd.olpc-sugar' - - if initial and not contexts.exists(context): - context_meta['type'] = 'activity' - 
request.call(method='POST', path=['context'], content=context_meta) - context_meta = None - - enforce(context, 'Context is not specified') - enforce('version' in impl, 'Version is not specified') - enforce(context_type in contexts.get(context)['type'], - http.BadRequest, 'Inappropriate bundle type') - if 'license' not in impl: - existing, total = impls.find( - context=context, order_by='-version', not_layer='deleted') - enforce(total, 'License is not specified') - impl['license'] = next(existing)['license'] - - digest = hashlib.sha1() - with file(bundle_path, 'rb') as f: - while True: - chunk = f.read(toolkit.BUFFER_SIZE) - if not chunk: - break - digest.update(chunk) - data['digest'] = digest.hexdigest() - - yield impl - - existing, __ = impls.find( - context=context, version=impl['version'], not_layer='deleted') - if 'url' not in data: - data['blob'] = bundle_path - impl['guid'] = request.call(method='POST', path=['release'], content=impl) - for i in existing: - layer = i['layer'] + ['deleted'] - impls.update(i.guid, {'layer': layer}) - - if 'origin' in impls.get(impl['guid']).layer: - diff = contexts.patch(context, context_meta) - if diff: - request.call(method='PUT', path=['context', context], content=diff) - - -def _load_context_metadata(bundle, spec): - result = {} - for prop in ('homepage', 'mime_types'): - if spec[prop]: - result[prop] = spec[prop] - result['guid'] = spec['context'] - - try: - icon_file = bundle.extractfile(join(bundle.rootdir, spec['icon'])) - Context.populate_images(result, icon_file.read()) - icon_file.close() - except Exception: - exception(_logger, 'Failed to load icon') - - msgids = {} - for prop, confname in [ - ('title', 'name'), - ('summary', 'summary'), - ('description', 'description'), - ]: - if spec[confname]: - msgids[prop] = spec[confname] - result[prop] = {'en': spec[confname]} - with toolkit.mkdtemp() as tmpdir: - for path in bundle.get_names(): - if not path.endswith('.mo'): - continue - mo_path = path.strip(os.sep).split(os.sep) - if len(mo_path) != 5 or mo_path[1] != 'locale': - continue - lang = mo_path[2] - bundle.extract(path, tmpdir) - try: - i18n = gettext.translation(spec['context'], - join(tmpdir, *mo_path[:2]), [lang]) - for prop, value in msgids.items(): - msgstr = i18n.gettext(value).decode('utf8') - if lang == 'en' or msgstr != value: - result[prop][lang] = msgstr - except Exception: - exception(_logger, 'Gettext failed to read %r', mo_path[-1]) - - return result diff --git a/sugar_network/node/slave.py b/sugar_network/node/slave.py index 69584be..2d60ea8 100644 --- a/sugar_network/node/slave.py +++ b/sugar_network/node/slave.py @@ -23,9 +23,10 @@ from gettext import gettext as _ from sugar_network import node, toolkit from sugar_network.client import api_url -from sugar_network.node import sync, stats_user, files, volume +from sugar_network.node import sync, stats_user, files, model from sugar_network.node.routes import NodeRoutes from sugar_network.toolkit.router import route, ACL +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit import http, enforce @@ -55,7 +56,7 @@ class SlaveRoutes(NodeRoutes): # loosing payload after authentication conn.get(cmd='logon') - push = [('diff', None, volume.diff(self.volume, self._push_seq))] + push = [('diff', None, model.diff(self.volume, self._push_seq))] if not no_pull: push.extend([ ('pull', { @@ -119,7 +120,7 @@ class SlaveRoutes(NodeRoutes): }, None)) push.append(('files_pull', {'sequence': self._files_seq}, None)) - self.broadcast({ + this.broadcast({ 'event': 
'sync_progress', 'progress': _('Reading sneakernet packages'), }) @@ -129,14 +130,14 @@ class SlaveRoutes(NodeRoutes): if exists(offline_script): shutil.copy(offline_script, path) - self.broadcast({ + this.broadcast({ 'event': 'sync_progress', 'progress': _('Generating new sneakernet package'), }) diff_seq = toolkit.Sequence([]) push.append(('diff', None, - volume.diff(self.volume, push_seq, diff_seq))) + model.diff(self.volume, push_seq, diff_seq))) if stats_user.stats_user.value: push.append(('stats_diff', None, stats_user.diff(stats_seq))) complete = sync.sneakernet_encode(push, root=path, @@ -156,7 +157,7 @@ class SlaveRoutes(NodeRoutes): if packet.name == 'diff': _logger.debug('Processing %r', packet) - seq, __ = volume.merge(self.volume, packet, shift_seqno=False) + seq, __ = model.merge(self.volume, packet, shift_seqno=False) if from_master and seq: self._pull_seq.exclude(seq) self._pull_seq.commit() diff --git a/sugar_network/node/stats_node.py b/sugar_network/node/stats_node.py deleted file mode 100644 index d37819b..0000000 --- a/sugar_network/node/stats_node.py +++ /dev/null @@ -1,311 +0,0 @@ -# Copyright (C) 2012-2014 Aleksey Lim -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import os -import time -import json -import logging -from os.path import exists, join - -from sugar_network.toolkit.rrd import Rrd -from sugar_network.toolkit import Option - - -stats_node = Option( - 'collect unpersonalized node statistics', - default=False, type_cast=Option.bool_cast, action='store_true') - -stats_node_step = Option( - 'step interval in seconds for node RRD databases', - default=60 * 5, type_cast=int) - -stats_node_rras = Option( - 'comma separated list of RRAs for node RRD databases', - default=[ - 'RRA:AVERAGE:0.5:1:864', # 3d with 5min step - 'RRA:AVERAGE:0.5:288:3660', # 10y with 1d step - 'RRA:AVERAGE:0.5:2880:366', # 10y with 10d step - 'RRA:AVERAGE:0.5:8640:122', # 10y with 30d step - 'RRA:AVERAGE:0.5:105408:10', # 10y with 1y step - ], - type_cast=Option.list_cast, type_repr=Option.list_repr) - -_HEARTBEAT = 60 * 60 * 24 * 365 - -_logger = logging.getLogger('node.stats_node') - - -class Sniffer(object): - - def __init__(self, volume, path, reset=False): - _logger.info('Collect node stats in %r', path) - - self._volume = volume - self._rrd = Rrd(path, stats_node_step.value, stats_node_rras.value) - self._stats = {} - self._suspend_path = join(path, '.suspend') - self._last = int(time.time()) - - for name, cls in _STATS.items(): - stats = self._stats[name] = cls(self._stats, volume) - fields = {} - for field in stats: - fields[field] = 'DS:%s:GAUGE:%s:U:U' % (field, _HEARTBEAT) - if fields: - if not reset: - stats.update(self._rrd[name].last_ds) - stats['total'] = volume[name].find(limit=0)[1] - self._rrd[name].fields = fields - - if exists(self._suspend_path): - with file(self._suspend_path) as f: - suspend = json.load(f) - for name, stats in self._stats.items(): - if name not in suspend['state']: - continue - 
total_stats, stats.objects = suspend['state'][name] - stats.update(total_stats) - if suspend['timestamp'] < int(time.time()): - self.commit(suspend['timestamp']) - self.commit_objects() - os.unlink(self._suspend_path) - - def __getitem__(self, name): - return self._rrd[name] - - def suspend(self): - state = dict([(i, (j, j.objects)) for i, j in self._stats.items()]) - with file(self._suspend_path, 'w') as f: - json.dump({ - 'timestamp': self._last + stats_node_step.value, - 'state': state, - }, f) - - def log(self, request): - if request.cmd or request.resource not in _STATS: - return - self._stats[request.resource].log(request) - - def commit(self, timestamp=None, extra_values=None): - _logger.trace('Commit node stats') - - for resource, stats in self._stats.items(): - if resource not in self._rrd: - continue - values = stats.copy() - if extra_values and resource in extra_values: - values.update(extra_values[resource]) - if values: - self._rrd[resource].put(values, timestamp=timestamp) - - self._last = timestamp or int(time.time()) - - def commit_objects(self, reset=False): - _logger.trace('Commit object stats') - - for resource, stats in self._stats.items(): - old = { - 'downloads': 0, - 'rating': (0, 0), - } - directory = self._volume[resource] - for guid, new in stats.objects.items(): - if not directory.exists(guid): - _logger.warning('Ignore stats for missed %r %s', - guid, resource) - continue - if not reset: - old = directory.get(guid) - patch = {} - if 'downloads' in new: - patch['downloads'] = new['downloads'] + old['downloads'] - if 'votes' in new: - votes, rating = old['rating'] - votes += new['votes'] - rating += new['rating'] - patch['rating'] = [votes, rating] - directory.update(guid, patch) - stats.objects.clear() - - def report(self, dbs, start, end, records): - result = {} - - rdbs = [self._rrd[i] for i in dbs if i in self._rrd] - if not rdbs: - return result - - if not start: - start = min([i.first for i in rdbs]) or 0 - if not end: - end = max([i.last for i in rdbs]) or 0 - resolution = max(1, (end - start) / records) - - _logger.debug('Report start=%s end=%s resolution=%s dbs=%r', - start, end, resolution, dbs) - - for rdb in rdbs: - info = result[rdb.name] = [] - for ts, ds_values in rdb.get(start, end, resolution): - values = {} - for name in dbs[rdb.name]: - values[name] = ds_values.get(name) - info.append((ts, values)) - - return result - - -class _Stats(dict): - - RESOURCE = None - PARENTS = [] - - def __init__(self, stats, volume): - self.objects = {} - self._stats = stats - self._volume = volume - - def inc(self, guid, prop, value=1): - obj = self.objects.setdefault(guid, {}) - if prop not in obj: - obj[prop] = value - else: - obj[prop] += value - - def log(self, request): - pass - - -class _ResourceStats(_Stats): - - def __init__(self, stats, volume): - _Stats.__init__(self, stats, volume) - self['total'] = 0 - - def log(self, request): - if request.method == 'POST': - self['total'] += 1 - elif request.method == 'DELETE': - self['total'] -= 1 - - def parse_context(self, request): - context = None - directory = self._volume[self.RESOURCE] - - def parse_context(props): - for prop, resource in self.PARENTS: - guid = props.get(prop) - if not guid: - continue - if resource == 'context': - return guid - else: - return self._volume[resource].get(guid)['context'] - - if request.method == 'GET': - if not request.guid: - context = parse_context(request) - elif self.RESOURCE == 'context': - context = request.guid - elif self.RESOURCE != 'user': - context = 
directory.get(request.guid)['context'] - elif request.method == 'PUT': - if self.RESOURCE == 'context': - context = request.guid - else: - context = request.content.get('context') - if not context: - context = directory.get(request.guid)['context'] - elif request.method == 'POST': - context = parse_context(request.content) - - return context - - -class _UserStats(_ResourceStats): - - RESOURCE = 'user' - - -class _ContextStats(_ResourceStats): - - RESOURCE = 'context' - - def __init__(self, stats, volume): - _ResourceStats.__init__(self, stats, volume) - self['released'] = 0 - self['failed'] = 0 - self['downloaded'] = 0 - - -class _ReleaseStats(_Stats): - - RESOURCE = 'release' - PARENTS = [('context', 'context')] - - def log(self, request): - if request.method == 'GET': - if request.prop == 'data': - context = self._volume[self.RESOURCE].get(request.guid) - self._stats['context'].inc(context.context, 'downloads') - self._stats['context']['downloaded'] += 1 - elif request.method == 'POST': - self._stats['context']['released'] += 1 - - -class _ReportStats(_Stats): - - RESOURCE = 'report' - PARENTS = [('context', 'context'), ('release', 'release')] - - def log(self, request): - if request.method == 'POST': - self._stats['context']['failed'] += 1 - - -class _PostStats(_ResourceStats): - - RESOURCE = 'post' - PARENTS = [('context', 'context'), ('topic', 'post')] - - def __init__(self, stats, volume): - _ResourceStats.__init__(self, stats, volume) - self['downloaded'] = 0 - - def log(self, request): - _ResourceStats.log(self, request) - - if request.method == 'POST': - stats = None - if request.content['type'] == 'review': - stats = self._stats['context'] - guid = request.content['context'] - elif request.content['type'] == 'feedback': - stats = self._stats['post'] - guid = request.content['topic'] - if stats: - stats.inc(guid, 'votes') - stats.inc(guid, 'rating', request.content.get('vote') or 0) - - elif request.method == 'GET' and request.prop == 'data': - self.inc(request.guid, 'downloads') - self['downloaded'] += 1 - - -_STATS = {_UserStats.RESOURCE: _UserStats, - _ContextStats.RESOURCE: _ContextStats, - _ReleaseStats.RESOURCE: _ReleaseStats, - _ReportStats.RESOURCE: _ReportStats, - _PostStats.RESOURCE: _PostStats, - } diff --git a/sugar_network/node/sync.py b/sugar_network/node/sync.py index b0a20bf..f5b946c 100644 --- a/sugar_network/node/sync.py +++ b/sugar_network/node/sync.py @@ -199,7 +199,7 @@ def _encode(limit, packets, download_blobs, header, status): pos = (yield chunk) or 0 blob_size -= len(chunk) enforce(blob_size == 0, EOFError, - 'Blob size is not the same as declared') + 'File size is not the same as declared') record = next(content) except StopIteration: diff --git a/sugar_network/node/volume.py b/sugar_network/node/volume.py deleted file mode 100644 index 0c254f7..0000000 --- a/sugar_network/node/volume.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (C) 2012-2013 Aleksey Lim -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. 
If not, see . - -import logging - -from sugar_network import toolkit -from sugar_network.toolkit.router import Request -from sugar_network.toolkit import http, coroutine, enforce - - -# Apply node level layer for these documents -_LIMITED_RESOURCES = ('context', 'release') - -_logger = logging.getLogger('node.volume') - - -def diff(volume, in_seq, out_seq=None, exclude_seq=None, layer=None, - fetch_blobs=False, ignore_documents=None, **kwargs): - connection = http.Connection() - if out_seq is None: - out_seq = toolkit.Sequence([]) - is_the_only_seq = not out_seq - if layer: - if isinstance(layer, basestring): - layer = [layer] - layer.append('common') - try: - for resource, directory in volume.items(): - if ignore_documents and resource in ignore_documents: - continue - coroutine.dispatch() - directory.commit() - yield {'resource': resource} - for guid, patch in directory.diff(in_seq, exclude_seq, - layer=layer if resource in _LIMITED_RESOURCES else None): - adiff = {} - adiff_seq = toolkit.Sequence() - for prop, meta, seqno in patch: - if 'blob' in meta: - blob_path = meta.pop('blob') - yield {'guid': guid, - 'diff': {prop: meta}, - 'blob_size': meta['blob_size'], - 'blob': toolkit.iter_file(blob_path), - } - elif fetch_blobs and 'url' in meta: - url = meta.pop('url') - try: - blob = connection.request('GET', url, - allow_redirects=True, - # We need uncompressed size - headers={'Accept-Encoding': ''}) - except Exception: - _logger.exception('Cannot fetch %r for %s:%s:%s', - url, resource, guid, prop) - is_the_only_seq = False - continue - yield {'guid': guid, - 'diff': {prop: meta}, - 'blob_size': - int(blob.headers['Content-Length']), - 'blob': blob.iter_content(toolkit.BUFFER_SIZE), - } - else: - adiff[prop] = meta - adiff_seq.include(seqno, seqno) - if adiff: - yield {'guid': guid, 'diff': adiff} - out_seq.include(adiff_seq) - if is_the_only_seq: - # There is only one diff, so, we can stretch it to remove all holes - out_seq.stretch() - except StopIteration: - pass - - yield {'commit': out_seq} - - -def merge(volume, records, shift_seqno=True, stats=None): - resource = None - directory = None - commit_seq = toolkit.Sequence() - merged_seq = toolkit.Sequence() - synced = False - - for record in records: - resource_ = record.get('resource') - if resource_: - resource = resource_ - directory = volume[resource_] - continue - - if 'guid' in record: - guid = record['guid'] - layer = [] - existed = directory.exists(guid) - if existed: - layer = directory.get(guid)['layer'] - - def update_stats(upd): - method = 'PUT' if existed else 'POST' - if ('deleted' in layer) != ('deleted' in upd.get('layer', [])): - if 'deleted' in layer: - # TODO - enforce(not 'supported yet') - else: - method = 'DELETE' - stats.log(Request( - method=method, - path=[resource, guid], - content=upd, - )) - - if stats is not None: - record['op'] = update_stats - seqno, merged = directory.merge(shift_seqno=shift_seqno, **record) - synced = synced or merged - if seqno is not None: - merged_seq.include(seqno, seqno) - continue - - commit = record.get('commit') - if commit is not None: - commit_seq.include(commit) - continue - - if synced: - volume.broadcast({'event': 'sync'}) - - return commit_seq, merged_seq diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py index a32d87f..4088e07 100644 --- a/sugar_network/toolkit/__init__.py +++ b/sugar_network/toolkit/__init__.py @@ -1,4 +1,4 @@ -# Copyright (C) 2011-2013 Aleksey Lim +# Copyright (C) 2011-2014 Aleksey Lim # # This program is free software: 
you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -114,84 +114,12 @@ def exception(*args): logger.debug('\n'.join(tb)) -def default_lang(): - """Default language to fallback for localized strings. - - :returns: - string in format of HTTP's Accept-Language - - """ - return default_langs()[0] - - -def default_langs(): - """Default languages list, i.e., including all secondory languages. - - :returns: - list of strings in format of HTTP's Accept-Language - - """ - global _default_langs - - if _default_langs is None: - locales = os.environ.get('LANGUAGE') - if locales: - locales = [i for i in locales.split(':') if i.strip()] - else: - from locale import getdefaultlocale - locales = [getdefaultlocale()[0]] - if not locales: - _default_langs = ['en'] - else: - _default_langs = [] - for locale in locales: - lang = locale.strip().split('.')[0].lower() - if lang == 'c': - lang = 'en' - elif '_' in lang: - lang, region = lang.split('_') - if lang != region: - lang = '-'.join([lang, region]) - _default_langs.append(lang) - _logger.info('Default languages are %r', _default_langs) - - return _default_langs - - -def gettext(value, accept_language=None): - if not value: - return '' - if not isinstance(value, dict): - return value - - if accept_language is None: - accept_language = [default_lang()] - elif isinstance(accept_language, basestring): - accept_language = [accept_language] - accept_language.append('en') - - stripped_value = None - for lang in accept_language: - result = value.get(lang) - if result is not None: - return result - - prime_lang = lang.split('-')[0] - if prime_lang != lang: - result = value.get(prime_lang) - if result is not None: - return result - - if stripped_value is None: - stripped_value = {} - for k, v in value.items(): - if '-' in k: - stripped_value[k.split('-', 1)[0]] = v - result = stripped_value.get(prime_lang) - if result is not None: - return result - - return value[min(value.keys())] +def ascii(value): + if not isinstance(value, basestring): + return str(value) + if isinstance(value, unicode): + return value.encode('utf8') + return value def uuid(): @@ -484,12 +412,12 @@ def unique_filename(root, filename): class mkdtemp(str): - def __new__(cls, **kwargs): - if cachedir.value and 'dir' not in kwargs: - if not exists(cachedir.value): - os.makedirs(cachedir.value) + def __new__(cls, *args, **kwargs): + if 'dir' not in kwargs: kwargs['dir'] = cachedir.value - result = tempfile.mkdtemp(**kwargs) + if not exists(kwargs['dir']): + os.makedirs(kwargs['dir']) + result = tempfile.mkdtemp(*args, **kwargs) return str.__new__(cls, result) def __enter__(self): @@ -522,21 +450,60 @@ def svg_to_png(data, w, h): return result +class File(dict): + + AWAY = None + + def __init__(self, path=None, meta=None, digest=None): + self.path = path + self.digest = digest + dict.__init__(self, meta or {}) + self._stat = None + self._name = self.get('filename') + + @property + def size(self): + if self._stat is None: + self._stat = os.stat(self.path) + return self._stat.st_size + + @property + def mtime(self): + if self._stat is None: + self._stat = os.stat(self.path) + return int(self._stat.st_mtime) + + @property + def name(self): + if self._name is None: + self._name = self.get('name') or self.digest or 'blob' + mime_type = self.get('mime_type') + if mime_type: + import mimetypes + if not mimetypes.inited: + mimetypes.init() + self._name += mimetypes.guess_extension(mime_type) or '' + return self._name + + def __repr__(self): + return 
'' % (self.path, self.digest) + + def TemporaryFile(*args, **kwargs): - if cachedir.value and 'dir' not in kwargs: - if not exists(cachedir.value): - os.makedirs(cachedir.value) + if 'dir' not in kwargs: kwargs['dir'] = cachedir.value + if not exists(kwargs['dir']): + os.makedirs(kwargs['dir']) return tempfile.TemporaryFile(*args, **kwargs) class NamedTemporaryFile(object): def __init__(self, *args, **kwargs): - if cachedir.value and 'dir' not in kwargs: - if not exists(cachedir.value): - os.makedirs(cachedir.value) + if 'dir' not in kwargs: kwargs['dir'] = cachedir.value + if not exists(kwargs['dir']): + os.makedirs(kwargs['dir']) self._file = tempfile.NamedTemporaryFile(*args, **kwargs) def close(self): @@ -567,11 +534,9 @@ class Seqno(object): """ self._path = path self._value = 0 - if exists(path): with file(path) as f: self._value = int(f.read().strip()) - self._orig_value = self._value @property @@ -610,7 +575,7 @@ class Sequence(list): """List of sorted and non-overlapping ranges. List items are ranges, [`start`, `stop']. If `start` or `stop` - is `None`, it means the beginning or ending of the entire scale. + is `None`, it means the beginning or ending of the entire sequence. """ @@ -880,5 +845,4 @@ def _nb_read(stream): fcntl.fcntl(fd, fcntl.F_SETFL, orig_flags) -_default_lang = None -_default_langs = None +File.AWAY = File() diff --git a/sugar_network/toolkit/coroutine.py b/sugar_network/toolkit/coroutine.py index 170f445..1913bda 100644 --- a/sugar_network/toolkit/coroutine.py +++ b/sugar_network/toolkit/coroutine.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012-2013 Aleksey Lim +# Copyright (C) 2012-2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -23,6 +23,7 @@ import logging import gevent import gevent.pool import gevent.hub +from gevent.queue import Empty from sugar_network.toolkit import enforce @@ -36,27 +37,27 @@ sleep = gevent.sleep #: Wait for the spawned events to finish. 
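# A minimal usage sketch of the File wrapper added to sugar_network/toolkit
# above; the path, digest and metadata below are hypothetical, and the snippet
# assumes the file actually exists on disk:
#
#     from sugar_network.toolkit import File
#
#     blob = File('blobs/abc123', meta={'mime_type': 'image/png'}, digest='abc123')
#     blob.size    # lazily calls os.stat() once and caches the result
#     blob.mtime   # integer mtime taken from the same cached stat
#     blob.name    # 'abc123.png': digest plus extension guessed from mime_type
#     File.AWAY    # module-level sentinel, an empty File() instance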
joinall = gevent.joinall +#: Access to greenlet-local storage +this = None + gevent.hub.Hub.resolver_class = 'gevent.resolver_ares.Resolver' -_group = gevent.pool.Group() +_all_jobs = None _logger = logging.getLogger('coroutine') _wsgi_logger = logging.getLogger('wsgi') def spawn(*args, **kwargs): - return _group.spawn(*args, **kwargs) + return _all_jobs.spawn(*args, **kwargs) def spawn_later(seconds, *args, **kwargs): - job = _group.greenlet_class(*args, **kwargs) - job.start_later(seconds) - _group.add(job) - return job + return _all_jobs.spawn_later(*args, **kwargs) def shutdown(): - _group.kill() - return _group.join() + _all_jobs.kill() + return _all_jobs.join() def reset_resolver(): @@ -168,10 +169,6 @@ class ThreadResult(object): return self._value -class Empty(Exception): - pass - - class AsyncQueue(object): def __init__(self): @@ -216,30 +213,30 @@ class AsyncQueue(object): self._queue.put(*args, **kwargs) def _get(self): - from Queue import Empty as empty - try: - return self._queue.get_nowait() - except empty: - raise Empty() + return self._queue.get_nowait() class Pool(gevent.pool.Pool): def spawn(self, *args, **kwargs): - job = gevent.pool.Pool.spawn(self, *args, **kwargs) - _group.add(job) + job = self.greenlet_class(*args, **kwargs) + job.local = _Local() + if self is not _all_jobs: + _all_jobs.add(job) + self.start(job) return job def spawn_later(self, seconds, *args, **kwargs): job = self.greenlet_class(*args, **kwargs) + job.local = _Local() + if self is not _all_jobs: + _all_jobs.add(job) job.start_later(seconds) self.add(job) - _group.add(job) return job # pylint: disable-msg=W0221 def kill(self, *args, **kwargs): - from gevent.queue import Empty try: gevent.pool.Pool.kill(self, *args, **kwargs) except Empty: @@ -253,6 +250,71 @@ class Pool(gevent.pool.Pool): self.kill() +class Spooler(object): + """One-producer many-consumers events delivery. + + The delivery process supports lossless events feeding with guaranty that + every consumer proccessed every event producer pushed. 
+ + """ + + def __init__(self): + self._value = None + self._waiters = 0 + self._ready = Event() + self._notifying_done = Event() + self._notifying_done.set() + + @property + def waiters(self): + return self._waiters + + def wait(self): + self._notifying_done.wait() + self._waiters += 1 + try: + self._ready.wait() + value = self._value + finally: + self._waiters -= 1 + if self._waiters == 0: + self._ready.clear() + self._notifying_done.set() + return value + + def notify_all(self, value=None): + while not self._notifying_done.is_set(): + self._notifying_done.wait() + if not self._waiters: + return + self._notifying_done.clear() + self._value = value + self._ready.set() + + +class _Local(object): + + def __init__(self): + self.attrs = set() + + if hasattr(gevent.getcurrent(), 'local'): + current = gevent.getcurrent().local + for attr in current.attrs: + self.attrs.add(attr) + setattr(self, attr, getattr(current, attr)) + + +class _LocalAccess(object): + + def __getattr__(self, name): + return getattr(gevent.getcurrent().local, name) + + def __setattr__(self, name, value): + local = gevent.getcurrent().local + local.attrs.add(name) + return setattr(local, name, value) + + class _Child(object): def __init__(self, pid): @@ -317,4 +379,7 @@ def _print_exception(context, klass, value, tb): _logger.error('\n'.join([error, context, tb_repr])) +_all_jobs = Pool() gevent.hub.get_hub().print_exception = _print_exception +gevent.getcurrent().local = gevent.get_hub().local = _Local() +this = _LocalAccess() diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py index d1b2fe7..8d913ae 100644 --- a/sugar_network/toolkit/http.py +++ b/sugar_network/toolkit/http.py @@ -22,7 +22,7 @@ import logging from os.path import join, dirname, exists, expanduser, abspath from sugar_network import toolkit -from sugar_network.toolkit import enforce +from sugar_network.toolkit import i18n, enforce _REDIRECT_CODES = frozenset([301, 302, 303, 307, 308]) @@ -316,7 +316,7 @@ class Connection(object): self._session = Connection._Session() self._session.headers['accept-language'] = \ - ','.join(toolkit.default_langs()) + ','.join(i18n.default_langs()) for arg, value in self._session_args.items(): setattr(self._session, arg, value) self._session.stream = True diff --git a/sugar_network/toolkit/i18n.py b/sugar_network/toolkit/i18n.py new file mode 100644 index 0000000..86d3cae --- /dev/null +++ b/sugar_network/toolkit/i18n.py @@ -0,0 +1,134 @@ +# Copyright (C) 2014 Aleksey Lim +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import os +import logging +from gettext import translation + + +# To let `encode()` working properly, avoid msgids gettext'izing +# but still populate .po files parsing the source code +_ = lambda x: x + +_logger = logging.getLogger('i18n') +_i18n = {} + + +def default_lang(): + """Default language to fallback for localized strings. 
+ + :returns: + string in format of HTTP's Accept-Language + + """ + return default_langs()[0] + + +def default_langs(): + """Default languages list, i.e., including all secondory languages. + + :returns: + list of strings in format of HTTP's Accept-Language + + """ + global _default_langs + + if _default_langs is None: + locales = os.environ.get('LANGUAGE') + if locales: + locales = [i for i in locales.split(':') if i.strip()] + else: + from locale import getdefaultlocale + locales = [getdefaultlocale()[0]] + if not locales: + _default_langs = ['en'] + else: + _default_langs = [] + for locale in locales: + lang = locale.strip().split('.')[0].lower() + if lang == 'c': + lang = 'en' + elif '_' in lang: + lang, region = lang.split('_') + if lang != region: + lang = '-'.join([lang, region]) + _default_langs.append(lang) + _logger.info('Default languages are %r', _default_langs) + + return _default_langs + + +def decode(value, accept_language=None): + if not value: + return '' + if not isinstance(value, dict): + return value + + if accept_language is None: + accept_language = default_langs() + elif isinstance(accept_language, basestring): + accept_language = [accept_language] + accept_language.append('en') + + stripped_value = None + for lang in accept_language: + result = value.get(lang) + if result is not None: + return result + + prime_lang = lang.split('-')[0] + if prime_lang != lang: + result = value.get(prime_lang) + if result is not None: + return result + + if stripped_value is None: + stripped_value = {} + for k, v in value.items(): + if '-' in k: + stripped_value[k.split('-', 1)[0]] = v + result = stripped_value.get(prime_lang) + if result is not None: + return result + + return value[min(value.keys())] + + +def encode(msgid, *args, **kwargs): + if not _i18n: + from sugar_network.toolkit.languages import LANGUAGES + for lang in LANGUAGES: + _i18n[lang] = translation('sugar-network', languages=[lang]) + result = {} + + for lang, trans in _i18n.items(): + msgstr = trans.gettext(msgid) + if args: + msgargs = [] + for arg in args: + msgargs.append(decode(arg, lang)) + msgstr = msgstr % tuple(msgargs) + elif kwargs: + msgargs = {} + for key, value in kwargs.items(): + msgargs[key] = decode(value, lang) + msgstr = msgstr % msgargs + result[lang] = msgstr + + return result + + +_default_lang = None +_default_langs = None diff --git a/sugar_network/static/__init__.py b/sugar_network/toolkit/languages.py.in index 4295e38..2542821 100644 --- a/sugar_network/static/__init__.py +++ b/sugar_network/toolkit/languages.py.in @@ -1,4 +1,4 @@ -# Copyright (C) 2012 Aleksey Lim +# Copyright (C) 2014 Aleksey Lim # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -13,10 +13,4 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
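# A short sketch of how the i18n helpers introduced above are meant to be used;
# the strings are hypothetical, and encode() additionally assumes that compiled
# 'sugar-network' gettext catalogs exist for every language listed in
# sugar_network.toolkit.languages.LANGUAGES:
#
#     from sugar_network.toolkit import i18n
#
#     i18n.decode({'en': 'Hello', 'es': 'Hola'}, 'es')   # -> 'Hola'
#     i18n.decode({'en': 'Hello', 'es': 'Hola'})         # falls back to default_langs(), then 'en'
#     i18n.encode('Hello %s', {'en': 'World', 'es': 'Mundo'})
#     # -> a {lang: msgstr} dict with one formatted entry per language in LANGUAGES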
-from os.path import dirname, join - -PATH = join(dirname(__file__), 'httpdocs') - - -def path(*args): - return join(PATH, *args) +LANGUAGES = [%LANGUAGES%] diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py index df57ff3..b37eee4 100644 --- a/sugar_network/toolkit/router.py +++ b/sugar_network/toolkit/router.py @@ -20,16 +20,16 @@ import time import types import logging import calendar -import mimetypes from base64 import b64decode from bisect import bisect_left from urllib import urlencode from urlparse import parse_qsl, urlsplit from email.utils import parsedate, formatdate -from os.path import isfile, split, splitext +from os.path import isfile from sugar_network import toolkit -from sugar_network.toolkit import http, coroutine, enforce +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit import i18n, http, coroutine, enforce _SIGNATURE_LIFETIME = 600 @@ -84,14 +84,15 @@ class ACL(object): DELETE = 1 << 5 INSERT = 1 << 6 REMOVE = 1 << 7 + REPLACE = 1 << 8 PUBLIC = CREATE | WRITE | READ | DELETE | INSERT | REMOVE - AUTH = 1 << 8 - AUTHOR = 1 << 9 - SUPERUSER = 1 << 10 + AUTH = 1 << 10 + AUTHOR = 1 << 11 + SUPERUSER = 1 << 12 - LOCAL = 1 << 11 - CALC = 1 << 12 + LOCAL = 1 << 13 + CALC = 1 << 14 NAMES = { CREATE: 'Create', @@ -100,6 +101,7 @@ class ACL(object): DELETE: 'Delete', INSERT: 'Insert', REMOVE: 'Remove', + REPLACE: 'Replace', } @@ -114,18 +116,16 @@ class Unauthorized(http.Unauthorized): class Request(dict): - principal = None - subcall = lambda *args: enforce(False) - def __init__(self, environ=None, method=None, path=None, cmd=None, content=None, content_stream=None, content_type=None, session=None, - **kwargs): + principal=None, **kwargs): dict.__init__(self) self.path = [] self.cmd = None self.environ = {} self.session = session or {} + self.principal = principal self._content = _NOT_SET self._dirty_query = False @@ -252,6 +252,11 @@ class Request(dict): return self.path[2] @property + def key(self): + if len(self.path) > 3: + return self.path[3] + + @property def static_prefix(self): http_host = self.environ.get('HTTP_HOST') if http_host: @@ -326,23 +331,6 @@ class Request(dict): else: existing_value = self[key] = [existing_value, value] - def call(self, response=None, **kwargs): - environ = {} - for key in ('HTTP_HOST', - 'HTTP_ACCEPT_LANGUAGE', - 'HTTP_ACCEPT_ENCODING', - 'HTTP_IF_MODIFIED_SINCE', - 'HTTP_AUTHORIZATION', - ): - if key in self.environ: - environ[key] = self.environ[key] - request = Request(environ, **kwargs) - if response is None: - response = Response() - request.principal = self.principal - request.subcall = self.subcall - return self.subcall(request, response) - def ensure_content(self): if self._content is not _NOT_SET: return @@ -400,9 +388,9 @@ class Response(dict): for key, value in dict.items(self): if type(value) in (list, tuple): for i in value: - result.append((_to_ascii(key), _to_ascii(i))) + result.append((toolkit.ascii(key), toolkit.ascii(i))) else: - result.append((_to_ascii(key), _to_ascii(value))) + result.append((toolkit.ascii(key), toolkit.ascii(value))) return result def __repr__(self): @@ -428,10 +416,6 @@ class Response(dict): dict.__delitem__(self, key) -class Blob(dict): - pass - - class Router(object): def __init__(self, routes_model, allow_spawn=False): @@ -441,8 +425,8 @@ class Router(object): self._invalid_origins = set() self._host = None self._routes = _Routes() - self._preroutes = set() - self._postroutes = set() + self._preroutes = [] + self._postroutes = [] processed = 
set() cls = type(routes_model) @@ -452,10 +436,14 @@ class Router(object): if name in processed: continue if hasattr(attr, 'is_preroute'): - self._preroutes.add(getattr(routes_model, name)) + route_ = getattr(routes_model, name) + if route_ not in self._preroutes: + self._preroutes.append(route_) continue elif hasattr(attr, 'is_postroute'): - self._postroutes.add(getattr(routes_model, name)) + route_ = getattr(routes_model, name) + if route_ not in self._postroutes: + self._postroutes.append(route_) continue elif not hasattr(attr, 'route'): continue @@ -481,44 +469,75 @@ class Router(object): processed.add(name) cls = cls.__base__ - def call(self, request, response): - request.subcall = self.call - result = self._call_route(request, response) - - if isinstance(result, Blob): - if 'url' in result: - raise http.Redirect(result['url']) - - path = result['blob'] - enforce(isfile(path), 'No such file') - - mtime = result.get('mtime') or int(os.stat(path).st_mtime) - if request.if_modified_since and mtime and \ - mtime <= request.if_modified_since: - raise http.NotModified() - response.last_modified = mtime - - response.content_type = result.get('mime_type') or \ - 'application/octet-stream' - - filename = result.get('filename') - if not filename: - filename = _filename(result.get('name') or - splitext(split(path)[-1])[0], - response.content_type) - response['Content-Disposition'] = \ - 'attachment; filename="%s"' % filename - - result = file(path, 'rb') - - if hasattr(result, 'read'): - if hasattr(result, 'fileno'): - response.content_length = os.fstat(result.fileno()).st_size - elif hasattr(result, 'seek'): - result.seek(0, 2) - response.content_length = result.tell() - result.seek(0) - result = _stream_reader(result) + this.call = self.call + + def call(self, request=None, response=None, environ=None, principal=None, + **kwargs): + if request is None: + if this.request is not None: + if not environ: + environ = {} + for key in ('HTTP_HOST', + 'HTTP_ACCEPT_LANGUAGE', + 'HTTP_ACCEPT_ENCODING', + 'HTTP_IF_MODIFIED_SINCE', + 'HTTP_AUTHORIZATION', + ): + if key in this.request.environ: + environ[key] = this.request.environ[key] + if not principal: + principal = this.request.principal + request = Request(environ=environ, principal=principal, **kwargs) + if response is None: + response = Response() + + route_ = self._resolve_route(request) + + for arg, cast in route_.arguments.items(): + value = request.get(arg) + if value is None: + if not hasattr(cast, '__call__'): + request[arg] = cast + continue + if not hasattr(cast, '__call__'): + cast = type(cast) + try: + request[arg] = _typecast(cast, value) + except Exception, error: + raise http.BadRequest( + 'Cannot typecast %r argument: %s' % (arg, error)) + kwargs = {} + for arg in route_.kwarg_names: + if arg == 'request': + kwargs[arg] = request + elif arg == 'response': + kwargs[arg] = response + elif arg not in kwargs: + kwargs[arg] = request.get(arg) + + for i in self._preroutes: + i(route_, request, response) + result = None + exception = None + try: + result = route_.callback(**kwargs) + if route_.mime_type == 'text/event-stream' and \ + self._allow_spawn and 'spawn' in request: + _logger.debug('Spawn event stream for %r', request) + request.ensure_content() + coroutine.spawn(self._event_stream, request, result) + result = None + except Exception, exception: + raise + else: + if not response.content_type: + if isinstance(result, toolkit.File): + response.content_type = result.get('mime_type') + if not response.content_type: + 
response.content_type = route_.mime_type + finally: + for i in self._postroutes: + i(request, response, result, exception) return result @@ -533,7 +552,7 @@ class Router(object): if 'callback' in request: js_callback = request.pop('callback') - result = None + content = None try: if 'HTTP_ORIGIN' in request.environ: enforce(self._assert_origin(request.environ), http.Forbidden, @@ -541,7 +560,34 @@ class Router(object): request.environ['HTTP_ORIGIN']) response['Access-Control-Allow-Origin'] = \ request.environ['HTTP_ORIGIN'] + result = self.call(request, response) + + if isinstance(result, toolkit.File): + if 'url' in result: + raise http.Redirect(result['url']) + enforce(isfile(result.path), 'No such file') + if request.if_modified_since and result.mtime and \ + result.mtime <= request.if_modified_since: + raise http.NotModified() + response.last_modified = result.mtime + response.content_type = result.get('mime_type') or \ + 'application/octet-stream' + response['Content-Disposition'] = \ + 'attachment; filename="%s"' % result.name + result = file(result.path, 'rb') + + if not hasattr(result, 'read'): + content = result + else: + if hasattr(result, 'fileno'): + response.content_length = os.fstat(result.fileno()).st_size + elif hasattr(result, 'seek'): + result.seek(0, 2) + response.content_length = result.tell() + result.seek(0) + content = _stream_reader(result) + except http.StatusPass, error: response.status = error.status if error.headers: @@ -557,100 +603,46 @@ class Router(object): if request.method == 'HEAD': response.meta['error'] = str(error) else: - result = {'error': str(error), - 'request': request.url, - } + content = {'error': str(error), 'request': request.url} response.content_type = 'application/json' - result_streamed = isinstance(result, types.GeneratorType) + streamed_content = isinstance(content, types.GeneratorType) if request.method == 'HEAD': - result_streamed = False - result = None + streamed_content = False + content = None elif js_callback: - if result_streamed: - result = ''.join(result) - result_streamed = False - result = '%s(%s);' % (js_callback, json.dumps(result)) - response.content_length = len(result) - elif not result_streamed: + if streamed_content: + content = ''.join(content) + streamed_content = False + content = '%s(%s);' % (js_callback, json.dumps(content)) + response.content_length = len(content) + elif not streamed_content: if response.content_type == 'application/json': - result = json.dumps(result) + content = json.dumps(content) if 'content-length' not in response: - response.content_length = len(result) if result else 0 + response.content_length = len(content) if content else 0 for key, value in response.meta.items(): - response.set('X-SN-%s' % _to_ascii(key), json.dumps(value)) + response.set('X-SN-%s' % toolkit.ascii(key), json.dumps(value)) - if request.method == 'HEAD' and result is not None: + if request.method == 'HEAD' and content is not None: _logger.warning('Content from HEAD response is ignored') - result = None + content = None - _logger.trace('%s call: request=%s response=%r result=%r', - self, request.environ, response, repr(result)[:256]) + _logger.trace('%s call: request=%s response=%r content=%r', + self, request.environ, response, repr(content)[:256]) start_response(response.status, response.items()) - if result_streamed: + if streamed_content: if response.content_type == 'text/event-stream': - for event in _event_stream(request, result): + for event in _event_stream(request, content): yield 'data: %s\n\n' % 
json.dumps(event) else: - for i in result: + for i in content: yield i - elif result is not None: - yield result - - def _call_route(self, request, response): - route_ = self._resolve_route(request) - request.routes = self._routes_model - - for arg, cast in route_.arguments.items(): - value = request.get(arg) - if value is None: - if not hasattr(cast, '__call__'): - request[arg] = cast - continue - if not hasattr(cast, '__call__'): - cast = type(cast) - try: - request[arg] = _typecast(cast, value) - except Exception, error: - raise http.BadRequest( - 'Cannot typecast %r argument: %s' % (arg, error)) - kwargs = {} - for arg in route_.kwarg_names: - if arg == 'request': - kwargs[arg] = request - elif arg == 'response': - kwargs[arg] = response - elif arg not in kwargs: - kwargs[arg] = request.get(arg) - - for i in self._preroutes: - i(route_, request, response) - result = None - exception = None - try: - result = route_.callback(**kwargs) - if route_.mime_type == 'text/event-stream' and \ - self._allow_spawn and 'spawn' in request: - _logger.debug('Spawn event stream for %r', request) - request.ensure_content() - coroutine.spawn(self._event_stream, request, result) - result = None - except Exception, exception: - raise - else: - if not response.content_type: - if isinstance(result, Blob): - response.content_type = result.get('mime_type') - if not response.content_type: - response.content_type = route_.mime_type - finally: - for i in self._postroutes: - i(request, response, result, exception) - - return result + elif content is not None: + yield content def _resolve_route(self, request): found_path = [False] @@ -695,9 +687,19 @@ class Router(object): commons['guid'] = request.guid if request.prop: commons['prop'] = request.prop - for event in _event_stream(request, stream): + try: + for event in _event_stream(request, stream): + event.update(commons) + this.localcast(event) + except Exception, error: + _logger.exception('Event stream %r failed', request) + event = {'event': 'failure', + 'exception': type(error).__name__, + 'error': str(error), + } + event.update(request.session) event.update(commons) - self._routes_model.broadcast(event) + this.localcast(event) def _assert_origin(self, environ): origin = environ['HTTP_ORIGIN'] @@ -747,22 +749,6 @@ class _ContentStream(object): return result -def _filename(names, mime_type): - if type(names) not in (list, tuple): - names = [names] - parts = [] - for name in names: - if isinstance(name, dict): - name = toolkit.gettext(name) - parts.append(''.join([i.capitalize() for i in name.split()])) - result = '-'.join(parts) - if mime_type: - if not mimetypes.inited: - mimetypes.init() - result += mimetypes.guess_extension(mime_type) or '' - return result.replace(os.sep, '') - - def _stream_reader(stream): try: while True: @@ -783,15 +769,8 @@ def _event_stream(request, stream): event[0].update(i) event = event[0] yield event - except Exception, error: - _logger.exception('Event stream %r failed', request) - event = {'event': 'failure', - 'exception': type(error).__name__, - 'error': str(error), - } - event.update(request.session) - yield event - _logger.debug('Event stream %r exited', request) + finally: + _logger.debug('Event stream %r exited', request) def _typecast(cast, value): @@ -817,7 +796,7 @@ def _typecast(cast, value): def _parse_accept_language(value): if not value: - return [toolkit.default_lang()] + return [i18n.default_lang()] langs = [] qualities = [] for chunk in value.split(','): @@ -836,14 +815,6 @@ def _parse_accept_language(value): 
return langs -def _to_ascii(value): - if not isinstance(value, basestring): - return str(value) - if isinstance(value, unicode): - return value.encode('utf8') - return value - - class _Routes(dict): def __init__(self, parent=None): diff --git a/sweets.recipe b/sweets.recipe index 8a84de9..b377508 100644 --- a/sweets.recipe +++ b/sweets.recipe @@ -6,7 +6,7 @@ project = SweetsDistribution:Factory implement = %(name)s summary = Sugar Network license = GPLv3+ -homepage = http://wiki.sugarlabs.org/go/Platform_Team/Sugar_Network +homepage = http://wiki.sugarlabs.org/go/Sugar_Network version = 0.9 stability = developer @@ -18,7 +18,10 @@ replaces = sugar-network-server; sweets-recipe; active-document pylru; requests [Build] -make = sed -i s/node-devel.sugarlabs.org/node-testing.sugarlabs.org/ sugar_network/client/__init__.py +make = sed -i s/node-devel.sugarlabs.org/node-testing.sugarlabs.org/ sugar_network/client/__init__.py && + cp sugar_network/toolkit/languages.py{.in,} && + langs=$(for i in `ls po/*.po`; do echo -n "'`basename $i .po`',"; done); sed -i "s/%LANGUAGES%/$langs/" sugar_network/toolkit/languages.py && + true install = install -m 0755 -d %(DESTDIR)s/%(PYTHONSITEDIR)s && cp -r sugar_network %(DESTDIR)s/%(PYTHONSITEDIR)s/ && install -m 0755 -D sugar-network %(DESTDIR)s/%(BINDIR)s/sugar-network && diff --git a/tests/__init__.py b/tests/__init__.py index 2e058f5..b93e388 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -8,6 +8,7 @@ import shutil import hashlib import logging import zipfile +import gettext import unittest import tempfile import subprocess @@ -16,17 +17,19 @@ from os.path import dirname, join, exists, abspath, isfile from M2Crypto import DSA from gevent import monkey -from sugar_network.toolkit import coroutine, http, mountpoints, Option, gbus +from sugar_network.toolkit import coroutine, http, mountpoints, Option, gbus, i18n, languages from sugar_network.toolkit.router import Router +from sugar_network.toolkit.coroutine import this +from sugar_network.db import files from sugar_network.client import IPCConnection, journal, routes as client_routes from sugar_network.client.routes import ClientRoutes, _Auth from sugar_network import db, client, node, toolkit, model from sugar_network.client import solver from sugar_network.model.user import User from sugar_network.model.context import Context -from sugar_network.model.release import Release +from sugar_network.model.post import Post from sugar_network.node.master import MasterRoutes -from sugar_network.node import stats_user, stats_node, obs, slave, downloads +from sugar_network.node import stats_user, obs, slave, downloads from requests import adapters @@ -41,6 +44,9 @@ monkey.patch_select() monkey.patch_ssl() monkey.patch_time() +gettext._default_localedir = join(root, 'data', 'locale') +languages.LANGUAGES = ['en', 'es', 'fr'] + def main(): shutil.rmtree(tmproot, ignore_errors=True) @@ -57,7 +63,7 @@ class Test(unittest.TestCase): os.environ['LANG'] = 'en_US' os.environ['LANGUAGE'] = 'en_US' - toolkit._default_langs = None + i18n._default_langs = None global tmpdir tmpdir = join(tmp_root or tmproot, '.'.join(self.id().split('.')[1:])) @@ -102,13 +108,10 @@ class Test(unittest.TestCase): mountpoints._connects.clear() mountpoints._found.clear() mountpoints._COMPLETE_MOUNT_TIMEOUT = .1 - stats_node.stats_node.value = False - stats_node.stats_node_step.value = 1 - stats_node.stats_node_rras.value = ['RRA:AVERAGE:0.5:1:60'] stats_user.stats_user.value = False stats_user.stats_user_step.value = 1 
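# A minimal sketch of the greenlet-local `this` accessor that the router code
# above relies on; handle() and its request value are hypothetical, and the
# snippet assumes it runs inside a greenlet spawned via coroutine.spawn()
# after a Router has been instantiated (Router.__init__ sets this.call):
#
#     from sugar_network.toolkit.coroutine import this
#
#     def handle(request):
#         this.request = request      # visible to this greenlet and to the
#                                     # greenlets it spawns later
#         this.call(method='GET', path=['context'])   # re-enter the router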
stats_user._user_cache.clear() - obs._client = None + obs._conn = None obs._repos = {'base': [], 'presolve': []} http._RECONNECTION_NUMBER = 0 toolkit.cachedir.value = tmpdir + '/tmp' @@ -122,8 +125,7 @@ class Test(unittest.TestCase): db.Volume.model = [ 'sugar_network.model.user', 'sugar_network.model.context', - 'sugar_network.model.artifact', - 'sugar_network.model.release', + 'sugar_network.model.post', 'sugar_network.model.report', ] @@ -137,10 +139,16 @@ class Test(unittest.TestCase): self.node = None self.client = None - self.forks = [] self.fork_num = fork_num + this.request = None + this.volume = None + this.call = None + this.broadcast = lambda x: x + + self.override_files() + def tearDown(self): self.stop_nodes() while db.Volume._flush_pool: @@ -150,6 +158,46 @@ class Test(unittest.TestCase): setattr(mod, name, old_handler) sys.stdout.flush() + def override_files(self): + os.makedirs('blobs') + self.blobs = {} + + def files_post(content, meta=None, digest_to_assert=None): + if hasattr(content, 'read'): + content = content.read() + digest = files.Digest(hash(content)) + if digest_to_assert: + assert digest == digest_to_assert + path = join('blobs', digest) + with file(path, 'w') as f: + f.write(content) + self.blobs[digest] = meta or {} + return toolkit.File(path, meta=meta, digest=digest) + + def files_update(digest, meta): + self.blobs.setdefault(digest, {}).update(meta) + + def files_get(digest): + if digest not in self.blobs: + return None + meta = toolkit.File(meta=self.blobs[digest]) + path = join('blobs', digest) + if exists(path): + meta.path = path + return meta + + def files_delete(digest): + path = join('blobs', digest) + if exists(path): + os.unlink(path) + if digest in self.blobs: + del self.blobs[digest] + + self.override(files, 'post', files_post) + self.override(files, 'update', files_update) + self.override(files, 'get', files_get) + self.override(files, 'delete', files_delete) + def stop_nodes(self): if self.client is not None: self.client.close() @@ -267,17 +315,20 @@ class Test(unittest.TestCase): def start_master(self, classes=None, routes=MasterRoutes): if classes is None: - classes = [User, Context, Release] + classes = [User, Context, Post] self.node_volume = db.Volume('master', classes) self.node_routes = routes('guid', self.node_volume) - self.node = coroutine.WSGIServer(('127.0.0.1', 8888), Router(self.node_routes)) + self.node_router = Router(self.node_routes) + self.node = coroutine.WSGIServer(('127.0.0.1', 8888), self.node_router) coroutine.spawn(self.node.serve_forever) coroutine.dispatch(.1) + this.volume = self.node_volume + this.call = self.node_router.call return self.node_volume def fork_master(self, classes=None, routes=MasterRoutes): if classes is None: - classes = [User, Context, Release] + classes = [User, Context] def node(): volume = db.Volume('master', classes) @@ -291,18 +342,19 @@ class Test(unittest.TestCase): def start_client(self, classes=None, routes=ClientRoutes): if classes is None: - classes = [User, Context, Release] + classes = [User, Context] volume = db.Volume('client', classes) self.client_routes = routes(volume, client.api_url.value) self.client = coroutine.WSGIServer( ('127.0.0.1', client.ipc_port.value), Router(self.client_routes)) coroutine.spawn(self.client.serve_forever) coroutine.dispatch() + this.volume = volume return volume def start_online_client(self, classes=None): if classes is None: - classes = [User, Context, Release] + classes = [User, Context] self.start_master(classes) volume = db.Volume('client', 
classes) self.client_routes = ClientRoutes(volume, client.api_url.value) @@ -311,6 +363,7 @@ class Test(unittest.TestCase): ('127.0.0.1', client.ipc_port.value), Router(self.client_routes)) coroutine.spawn(self.client.serve_forever) coroutine.dispatch() + this.volume = volume return volume def start_offline_client(self, resources=None): @@ -319,6 +372,7 @@ class Test(unittest.TestCase): server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(self.client_routes)) coroutine.spawn(server.serve_forever) coroutine.dispatch() + this.volume = self.home_volume return IPCConnection() def restful_server(self, classes=None): @@ -337,7 +391,7 @@ class Test(unittest.TestCase): node.find_limit.value = 1024 db.index_write_queue.value = 10 - volume = db.Volume('remote', classes or [User, Context, Release]) + volume = db.Volume('remote', classes or [User, Context]) self.node_routes = MasterRoutes('guid', volume) httpd = coroutine.WSGIServer(('127.0.0.1', 8888), Router(self.node_routes)) try: diff --git a/tests/data/locale/en/LC_MESSAGES/sugar-network.mo b/tests/data/locale/en/LC_MESSAGES/sugar-network.mo new file mode 100644 index 0000000..c601536 --- /dev/null +++ b/tests/data/locale/en/LC_MESSAGES/sugar-network.mo Binary files differ diff --git a/tests/data/locale/es/LC_MESSAGES/sugar-network.mo b/tests/data/locale/es/LC_MESSAGES/sugar-network.mo new file mode 100644 index 0000000..d39d878 --- /dev/null +++ b/tests/data/locale/es/LC_MESSAGES/sugar-network.mo Binary files differ diff --git a/tests/data/locale/fr/LC_MESSAGES/sugar-network.mo b/tests/data/locale/fr/LC_MESSAGES/sugar-network.mo new file mode 100644 index 0000000..dc4a83e --- /dev/null +++ b/tests/data/locale/fr/LC_MESSAGES/sugar-network.mo Binary files differ diff --git a/tests/units/client/cache.py b/tests/units/client/cache.py index e980549..51245ee 100755 --- a/tests/units/client/cache.py +++ b/tests/units/client/cache.py @@ -12,7 +12,6 @@ from __init__ import tests from sugar_network import db from sugar_network.model.context import Context -from sugar_network.model.release import Release from sugar_network.client import cache_limit, cache_limit_percent, cache_lifetime, IPCConnection from sugar_network.client.cache import Cache from sugar_network.toolkit import http @@ -32,7 +31,7 @@ class CacheTest(tests.Test): self.override(os, 'statvfs', lambda *args: statvfs()) def test_open(self): - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) volume['release'].create({ 'guid': '1', @@ -83,7 +82,7 @@ class CacheTest(tests.Test): self.assertEqual(['5', '4', '1'], [i for i in cache]) def test_open_IgnoreClones(self): - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) volume['context'].create({ 'guid': 'context', @@ -109,7 +108,7 @@ class CacheTest(tests.Test): self.assertEqual([], [i for i in cache]) def test_ensure_AfterOpen(self): - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) volume['release'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'}) os.utime('db/release/1/1', (1, 1)) @@ -143,7 +142,7 @@ class CacheTest(tests.Test): self.assertRaises(RuntimeError, cache.ensure, 2, 0) def test_ensure_Live(self): - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) cache = Cache(volume) # To initiate the cache @@ -159,7 +158,7 @@ class CacheTest(tests.Test): self.assertRaises(RuntimeError, cache.ensure, 1, 0) def 
test_ensure_ConsiderTmpSize(self): - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) volume['release'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'}) cache = Cache(volume) @@ -175,7 +174,7 @@ class CacheTest(tests.Test): def test_recycle(self): ts = time.time() - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) volume['release'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'}) os.utime('db/release/1/1', (ts - 1.5 * 86400, ts - 1.5 * 86400)) volume['release'].create({'data': {'blob_size': 1}, 'guid': '2', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'}) @@ -205,7 +204,7 @@ class CacheTest(tests.Test): cache.recycle() def test_checkin(self): - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) cache = Cache(volume) volume['release'].create({'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'}) @@ -229,7 +228,7 @@ class CacheTest(tests.Test): conn = IPCConnection() self.statvfs.f_blocks = 0 - impl1 = conn.upload(['release'], StringIO(self.zips(['TestActivity/activity/activity.info', [ + bundle = self.zips(['TestActivity/activity/activity.info', [ '[Activity]', 'name = TestActivity', 'bundle_id = context', @@ -238,9 +237,12 @@ class CacheTest(tests.Test): 'activity_version = 1', 'license = Public Domain', 'stability = stable', - ]])), cmd='submit', initial=True) - + ]]) + impl1 = conn.upload(['release'], StringIO(bundle), cmd='submit', initial=True) + print self.blobs[str(hash(bundle))] conn.put(['context', 'context'], True, cmd='clone') + print self.blobs[str(hash(bundle))] + return self.assertEqual([], [i for i in self.client_routes._cache]) assert local_volume['release'].exists(impl1) @@ -271,7 +273,7 @@ class CacheTest(tests.Test): assert local_volume['release'].exists(impl2) def test_Acquiring(self): - volume = db.Volume('db', [Context, Release]) + volume = db.Volume('db', [Context]) cache = Cache(volume) volume['release'].create({'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'}) diff --git a/tests/units/client/routes.py b/tests/units/client/routes.py index 0b757f5..9fad249 100755 --- a/tests/units/client/routes.py +++ b/tests/units/client/routes.py @@ -16,7 +16,7 @@ from sugar_network.client.routes import ClientRoutes, CachedClientRoutes from sugar_network.model.user import User from sugar_network.model.report import Report from sugar_network.toolkit.router import Router, Request, Response -from sugar_network.toolkit import coroutine +from sugar_network.toolkit import coroutine, i18n import requests @@ -420,28 +420,28 @@ class RoutesTest(tests.Test): 'description': '', }) - toolkit._default_langs = None + i18n._default_langs = None os.environ['LANGUAGE'] = 'es:ru:en' ipc = IPCConnection() self.assertEqual('3', ipc.get(['context', guid1, 'title'])) self.assertEqual('2', ipc.get(['context', guid2, 'title'])) self.assertEqual('1', ipc.get(['context', guid3, 'title'])) - toolkit._default_langs = None + i18n._default_langs = None os.environ['LANGUAGE'] = 'ru:en' ipc = IPCConnection() self.assertEqual('2', ipc.get(['context', guid1, 'title'])) self.assertEqual('2', ipc.get(['context', guid2, 'title'])) self.assertEqual('1', ipc.get(['context', guid3, 'title'])) - toolkit._default_langs = None + i18n._default_langs = 
None os.environ['LANGUAGE'] = 'en' ipc = IPCConnection() self.assertEqual('1', ipc.get(['context', guid1, 'title'])) self.assertEqual('1', ipc.get(['context', guid2, 'title'])) self.assertEqual('1', ipc.get(['context', guid3, 'title'])) - toolkit._default_langs = None + i18n._default_langs = None os.environ['LANGUAGE'] = 'foo' ipc = IPCConnection() self.assertEqual('1', ipc.get(['context', guid1, 'title'])) diff --git a/tests/units/db/__main__.py b/tests/units/db/__main__.py index fc91d7c..3b1b9ec 100644 --- a/tests/units/db/__main__.py +++ b/tests/units/db/__main__.py @@ -2,11 +2,12 @@ from __init__ import tests -from resource import * -from index import * -#from migrate import * +from metadata import * from storage import * +from index import * +from resource import * from routes import * +#from migrate import * if __name__ == '__main__': tests.main() diff --git a/tests/units/db/files.py b/tests/units/db/files.py new file mode 100755 index 0000000..0d806df --- /dev/null +++ b/tests/units/db/files.py @@ -0,0 +1,320 @@ + + def test_diff_WithBlobsSetByUrl(self): + URL = 'http://src.sugarlabs.org/robots.txt' + URL_content = urllib2.urlopen(URL).read() + + class Document(db.Resource): + + @db.blob_property() + def blob(self, value): + return value + + directory = Directory(tests.tmpdir, Document, IndexWriter) + + directory.create({'guid': '1', 'ctime': 1, 'mtime': 1}) + directory.update('1', {'blob': {'url': URL}}) + self.utime('1/1', 1) + + out_seq = Sequence() + self.assertEqual([ + {'guid': '1', 'diff': { + 'guid': {'value': '1', 'mtime': 1}, + 'ctime': {'value': 1, 'mtime': 1}, + 'mtime': {'value': 1, 'mtime': 1}, + 'blob': { + 'url': URL, + 'mtime': 1, + }, + }}, + ], + [i for i in diff(directory, [[0, None]], out_seq)]) + self.assertEqual([[1, 2]], out_seq) + + def test_merge_AvoidCalculatedBlobs(self): + + class Document(db.Resource): + + @db.blob_property() + def blob(self, value): + return {'url': 'http://foo/bar', 'mime_type': 'image/png'} + + directory1 = Directory('document1', Document, IndexWriter) + directory1.create({'guid': 'guid', 'ctime': 1, 'mtime': 1}) + for i in os.listdir('document1/gu/guid'): + os.utime('document1/gu/guid/%s' % i, (1, 1)) + + directory2 = Directory('document2', Document, IndexWriter) + for patch in diff(directory1, [[0, None]], Sequence()): + directory2.merge(**patch) + + doc = directory2.get('guid') + self.assertEqual(1, doc.get('seqno')) + self.assertEqual(1, doc.meta('guid')['mtime']) + assert not exists('document2/gu/guid/blob') + + def test_merge_Blobs(self): + + class Document(db.Resource): + + @db.blob_property() + def blob(self, value): + return value + + directory = Directory('document', Document, IndexWriter) + self.touch(('blob', 'blob-1')) + directory.merge('1', { + 'guid': {'mtime': 1, 'value': '1'}, + 'ctime': {'mtime': 2, 'value': 2}, + 'mtime': {'mtime': 3, 'value': 3}, + 'blob': {'mtime': 4, 'blob': 'blob'}, + }) + + self.assertEqual( + [(2, 3, '1')], + [(i['ctime'], i['mtime'], i['guid']) for i in directory.find()[0]]) + + doc = directory.get('1') + self.assertEqual(1, doc.get('seqno')) + self.assertEqual(1, doc.meta('guid')['mtime']) + self.assertEqual(2, doc.meta('ctime')['mtime']) + self.assertEqual(3, doc.meta('mtime')['mtime']) + self.assertEqual(4, doc.meta('blob')['mtime']) + self.assertEqual('blob-1', file('document/1/1/blob.blob').read()) + + self.touch(('blob', 'blob-2')) + directory.merge('1', { + 'blob': {'mtime': 5, 'blob': 'blob'}, + }) + + self.assertEqual(5, doc.meta('blob')['mtime']) + self.assertEqual('blob-2', 
file('document/1/1/blob.blob').read()) + + + def test_DeleteOldBlobOnUpdate(self): + + class Document(db.Resource): + + @db.blob_property() + def blob(self, value): + return value + + directory = Directory(tests.tmpdir, Document, IndexWriter) + + directory.create({'guid': 'guid', 'blob': 'foo'}) + assert exists('gu/guid/blob.blob') + directory.update('guid', {'blob': {'url': 'foo'}}) + assert not exists('gu/guid/blob.blob') + + directory.update('guid', {'blob': 'foo'}) + assert exists('gu/guid/blob.blob') + directory.update('guid', {'blob': {}}) + assert not exists('gu/guid/blob.blob') + + def test_diff_Blobs(self): + + class Document(db.Resource): + + @db.blob_property() + def prop(self, value): + return value + + volume = db.Volume('db', [Document]) + cp = NodeRoutes('guid', volume) + + guid = call(cp, method='POST', document='document', content={}) + call(cp, method='PUT', document='document', guid=guid, content={'prop': 'payload'}) + self.utime('db', 0) + + patch = diff(volume, toolkit.Sequence([[1, None]])) + self.assertEqual( + {'resource': 'document'}, + next(patch)) + record = next(patch) + self.assertEqual('payload', ''.join([i for i in record.pop('blob')])) + self.assertEqual( + {'guid': guid, 'blob_size': len('payload'), 'diff': { + 'prop': { + 'digest': hashlib.sha1('payload').hexdigest(), + 'blob_size': len('payload'), + 'mime_type': 'application/octet-stream', + 'mtime': 0, + }, + }}, + record) + self.assertEqual( + {'guid': guid, 'diff': { + 'guid': {'value': guid, 'mtime': 0}, + 'author': {'mtime': 0, 'value': {}}, + 'layer': {'mtime': 0, 'value': []}, + 'tags': {'mtime': 0, 'value': []}, + 'mtime': {'value': 0, 'mtime': 0}, + 'ctime': {'value': 0, 'mtime': 0}, + }}, + next(patch)) + self.assertEqual( + {'commit': [[1, 2]]}, + next(patch)) + self.assertRaises(StopIteration, next, patch) + + def test_diff_BlobUrls(self): + url = 'http://src.sugarlabs.org/robots.txt' + blob = urllib2.urlopen(url).read() + + class Document(db.Resource): + + @db.blob_property() + def prop(self, value): + return value + + volume = db.Volume('db', [Document]) + cp = NodeRoutes('guid', volume) + + guid = call(cp, method='POST', document='document', content={}) + call(cp, method='PUT', document='document', guid=guid, content={'prop': {'url': url}}) + self.utime('db', 1) + + self.assertEqual([ + {'resource': 'document'}, + {'guid': guid, + 'diff': { + 'guid': {'value': guid, 'mtime': 1}, + 'author': {'mtime': 1, 'value': {}}, + 'layer': {'mtime': 1, 'value': []}, + 'tags': {'mtime': 1, 'value': []}, + 'mtime': {'value': 0, 'mtime': 1}, + 'ctime': {'value': 0, 'mtime': 1}, + 'prop': {'url': url, 'mtime': 1}, + }, + }, + {'commit': [[1, 2]]}, + ], + [i for i in diff(volume, toolkit.Sequence([[1, None]]))]) + + patch = diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True) + self.assertEqual( + {'resource': 'document'}, + next(patch)) + record = next(patch) + self.assertEqual(blob, ''.join([i for i in record.pop('blob')])) + self.assertEqual( + {'guid': guid, 'blob_size': len(blob), 'diff': {'prop': {'mtime': 1}}}, + record) + self.assertEqual( + {'guid': guid, 'diff': { + 'guid': {'value': guid, 'mtime': 1}, + 'author': {'mtime': 1, 'value': {}}, + 'layer': {'mtime': 1, 'value': []}, + 'tags': {'mtime': 1, 'value': []}, + 'mtime': {'value': 0, 'mtime': 1}, + 'ctime': {'value': 0, 'mtime': 1}, + }}, + next(patch)) + self.assertEqual( + {'commit': [[1, 2]]}, + next(patch)) + self.assertRaises(StopIteration, next, patch) + + def test_diff_SkipBrokenBlobUrls(self): + + class Document(db.Resource): 
+ + @db.blob_property() + def prop(self, value): + return value + + volume = db.Volume('db', [Document]) + cp = NodeRoutes('guid', volume) + + guid1 = call(cp, method='POST', document='document', content={}) + call(cp, method='PUT', document='document', guid=guid1, content={'prop': {'url': 'http://foo/bar'}}) + guid2 = call(cp, method='POST', document='document', content={}) + self.utime('db', 1) + + self.assertEqual([ + {'resource': 'document'}, + {'guid': guid1, + 'diff': { + 'guid': {'value': guid1, 'mtime': 1}, + 'author': {'mtime': 1, 'value': {}}, + 'layer': {'mtime': 1, 'value': []}, + 'tags': {'mtime': 1, 'value': []}, + 'mtime': {'value': 0, 'mtime': 1}, + 'ctime': {'value': 0, 'mtime': 1}, + 'prop': {'url': 'http://foo/bar', 'mtime': 1}, + }, + }, + {'guid': guid2, + 'diff': { + 'guid': {'value': guid2, 'mtime': 1}, + 'author': {'mtime': 1, 'value': {}}, + 'layer': {'mtime': 1, 'value': []}, + 'tags': {'mtime': 1, 'value': []}, + 'mtime': {'value': 0, 'mtime': 1}, + 'ctime': {'value': 0, 'mtime': 1}, + }, + }, + {'commit': [[1, 3]]}, + ], + [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=False)]) + + self.assertEqual([ + {'resource': 'document'}, + {'guid': guid1, + 'diff': { + 'guid': {'value': guid1, 'mtime': 1}, + 'author': {'mtime': 1, 'value': {}}, + 'layer': {'mtime': 1, 'value': []}, + 'tags': {'mtime': 1, 'value': []}, + 'mtime': {'value': 0, 'mtime': 1}, + 'ctime': {'value': 0, 'mtime': 1}, + }, + }, + {'guid': guid2, + 'diff': { + 'guid': {'value': guid2, 'mtime': 1}, + 'author': {'mtime': 1, 'value': {}}, + 'layer': {'mtime': 1, 'value': []}, + 'tags': {'mtime': 1, 'value': []}, + 'mtime': {'value': 0, 'mtime': 1}, + 'ctime': {'value': 0, 'mtime': 1}, + }, + }, + {'commit': [[1, 3]]}, + ], + [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)]) + + def test_merge_Blobs(self): + + class Document(db.Resource): + + @db.blob_property() + def prop(self, value): + return value + + volume = db.Volume('db', [Document]) + + merge(volume, [ + {'resource': 'document'}, + {'guid': '1', 'diff': { + 'guid': {'value': '1', 'mtime': 1.0}, + 'ctime': {'value': 2, 'mtime': 2.0}, + 'mtime': {'value': 3, 'mtime': 3.0}, + 'prop': { + 'blob': StringIO('payload'), + 'blob_size': len('payload'), + 'digest': hashlib.sha1('payload').hexdigest(), + 'mime_type': 'foo/bar', + 'mtime': 1, + }, + }}, + {'commit': [[1, 1]]}, + ]) + + assert volume['document'].exists('1') + blob = volume['document'].get('1')['prop'] + self.assertEqual(1, blob['mtime']) + self.assertEqual('foo/bar', blob['mime_type']) + self.assertEqual(hashlib.sha1('payload').hexdigest(), blob['digest']) + self.assertEqual(tests.tmpdir + '/db/document/1/1/prop.blob', blob['blob']) + self.assertEqual('payload', file(blob['blob']).read()) + diff --git a/tests/units/db/index.py b/tests/units/db/index.py index 9d996b0..cb144c6 100755 --- a/tests/units/db/index.py +++ b/tests/units/db/index.py @@ -12,23 +12,22 @@ from __init__ import tests from sugar_network import toolkit from sugar_network.db import index -from sugar_network.db.index import _fmt_prop_value -from sugar_network.db.metadata import Metadata, IndexedProperty, GUID_PREFIX, Property +from sugar_network.db.metadata import Metadata, Property, GUID_PREFIX, Boolean, Enum, List, Localized, Numeric from sugar_network.toolkit.router import ACL -from sugar_network.toolkit import coroutine +from sugar_network.toolkit import coroutine, i18n class IndexTest(tests.Test): def test_Term_AvoidCollisionsWithGuid(self): - self.assertRaises(RuntimeError, 
IndexedProperty, 'key', 0, 'I') - self.assertRaises(RuntimeError, IndexedProperty, 'key', 0, 'K') - self.assertRaises(RuntimeError, IndexedProperty, 'key', 1, 'I') - IndexedProperty('key', 1, 'K') - IndexedProperty('guid', 0, 'I') + self.assertRaises(RuntimeError, Property, 'key', 0, 'I') + self.assertRaises(RuntimeError, Property, 'key', 0, 'K') + self.assertRaises(RuntimeError, Property, 'key', 1, 'I') + Property('key', 1, 'K') + Property('guid', 0, 'I') def test_Create(self): - db = Index({'key': IndexedProperty('key', 1, 'K')}) + db = Index({'key': Property('key', 1, 'K')}) self.assertEqual( ([], 0), @@ -47,8 +46,8 @@ class IndexTest(tests.Test): def test_update(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A'), - 'var_2': IndexedProperty('var_2', 2, 'B'), + 'var_1': Property('var_1', 1, 'A'), + 'var_2': Property('var_2', 2, 'B'), }) db.store('1', {'var_1': 'value_1', 'var_2': 'value_2'}) @@ -62,7 +61,7 @@ class IndexTest(tests.Test): db._find(reply=['var_1', 'var_2'])) def test_delete(self): - db = Index({'key': IndexedProperty('key', 1, 'K')}) + db = Index({'key': Property('key', 1, 'K')}) db.store('1', {'key': 'value'}) self.assertEqual( @@ -74,8 +73,17 @@ class IndexTest(tests.Test): ([], 0), db._find(reply=['key'])) - def test_IndexByFmt(self): - db = Index({'key': IndexedProperty('key', 1, 'K', fmt=lambda x: "foo" + x)}) + def test_IndexCalculatedValue(self): + + class Property2(Property): + + def encode(self, value): + yield "foo" + value + + def decode(self, value): + return "foo" + value + + db = Index({'key': Property2('key', 1, 'K')}) db.store('1', {'key': 'bar'}) @@ -92,15 +100,17 @@ class IndexTest(tests.Test): [], db._find(key='fake', reply=['key'])[0]) - def test_IndexByFmtGenerator(self): + def test_IndexCalculatedValues(self): - def iterate(value): - if value != 'fake': - yield 'foo' - yield 'bar' - yield value + class Property2(Property): - db = Index({'key': IndexedProperty('key', 1, 'K', fmt=iterate)}) + def encode(self, value): + if value != 'fake': + yield 'foo' + yield 'bar' + yield value + + db = Index({'key': Property2('key', 1, 'K')}) db.store('1', {'key': 'value'}) self.assertEqual( @@ -118,9 +128,9 @@ class IndexTest(tests.Test): def test_find(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A', full_text=True), - 'var_2': IndexedProperty('var_2', 2, 'B', full_text=True), - 'var_3': IndexedProperty('var_3', 3, 'C', full_text=True), + 'var_1': Property('var_1', 1, 'A', full_text=True), + 'var_2': Property('var_2', 2, 'B', full_text=True), + 'var_3': Property('var_3', 3, 'C', full_text=True), }) db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}) @@ -149,7 +159,7 @@ class IndexTest(tests.Test): def test_find_NoneFilters(self): db = Index({ - 'prop': IndexedProperty('prop', 1, 'P', full_text=True), + 'prop': Property('prop', 1, 'P', full_text=True), }) db.store('guid', {'prop': 'value'}) @@ -164,9 +174,9 @@ class IndexTest(tests.Test): [{'guid': 'guid', 'prop': 'value'}], db._find(guid=None, reply=['prop'])[0]) - def test_find_WithTypeCast(self): + def test_find_DecodeArgs(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A', typecast=bool), + 'var_1': Boolean('var_1', 1, 'A'), }) db.store('1', {'var_1': True}) @@ -181,9 +191,9 @@ class IndexTest(tests.Test): def test_find_WithProps(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A', full_text=True), - 'var_2': IndexedProperty('var_2', 2, 'B', full_text=True), - 'var_3': IndexedProperty('var_3', 3, 'C', full_text=True), + 'var_1': Property('var_1', 1, 'A', 
full_text=True), + 'var_2': Property('var_2', 2, 'B', full_text=True), + 'var_3': Property('var_3', 3, 'C', full_text=True), }) db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}) @@ -209,9 +219,9 @@ class IndexTest(tests.Test): def test_find_WithAllBooleanProps(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A', boolean=True, full_text=True), - 'var_2': IndexedProperty('var_2', 2, 'B', boolean=True, full_text=True), - 'var_3': IndexedProperty('var_3', 3, 'C', boolean=True, full_text=True), + 'var_1': Property('var_1', 1, 'A', boolean=True, full_text=True), + 'var_2': Property('var_2', 2, 'B', boolean=True, full_text=True), + 'var_3': Property('var_3', 3, 'C', boolean=True, full_text=True), }) db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}) @@ -232,9 +242,9 @@ class IndexTest(tests.Test): def test_find_WithBooleanProps(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A', boolean=True, full_text=True), - 'var_2': IndexedProperty('var_2', 2, 'B', boolean=False, full_text=True), - 'var_3': IndexedProperty('var_3', 3, 'C', boolean=True, full_text=True), + 'var_1': Property('var_1', 1, 'A', boolean=True, full_text=True), + 'var_2': Property('var_2', 2, 'B', boolean=False, full_text=True), + 'var_3': Property('var_3', 3, 'C', boolean=True, full_text=True), }) db.store('1', {'var_1': '1', 'var_2': 'у', 'var_3': 'г'}) @@ -254,7 +264,7 @@ class IndexTest(tests.Test): db._find(query='б', var_1='1', var_2='у', var_3='г', reply=['var_1'])) def test_find_ExactQuery(self): - db = Index({'key': IndexedProperty('key', 1, 'K', full_text=True)}) + db = Index({'key': Property('key', 1, 'K', full_text=True)}) db.store('1', {'key': 'фу'}) db.store('2', {'key': 'фу бар'}) @@ -280,7 +290,7 @@ class IndexTest(tests.Test): def test_find_ExactQueryTerms(self): term = 'azAZ09_' - db = Index({term: IndexedProperty(term, 1, 'T', full_text=True)}) + db = Index({term: Property(term, 1, 'T', full_text=True)}) db.store('1', {term: 'test'}) db.store('2', {term: 'test fail'}) @@ -290,7 +300,7 @@ class IndexTest(tests.Test): db._find(query='%s:=test' % term, reply=['guid'])) def test_find_ReturnPortions(self): - db = Index({'key': IndexedProperty('key', 1, 'K')}) + db = Index({'key': Property('key', 1, 'K')}) db.store('1', {'key': '1'}) db.store('2', {'key': '2'}) @@ -311,8 +321,8 @@ class IndexTest(tests.Test): def test_find_OrderBy(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A'), - 'var_2': IndexedProperty('var_2', 2, 'B'), + 'var_1': Property('var_1', 1, 'A'), + 'var_2': Property('var_2', 2, 'B'), }) db.store('1', {'var_1': '1', 'var_2': '3'}) @@ -341,15 +351,15 @@ class IndexTest(tests.Test): def test_find_GroupBy(self): db = Index({ - 'var_1': IndexedProperty('var_1', 1, 'A'), - 'var_2': IndexedProperty('var_2', 2, 'B'), - 'var_3': IndexedProperty('var_3', 3, 'C'), - 'var_4': IndexedProperty('var_4', 4, 'D'), + 'var_1': Property('var_1', 1, 'A'), + 'var_2': Property('var_2', 2, 'B'), + 'var_3': Property('var_3', 3, 'C'), + 'var_4': Property('var_4', 4, 'D'), }) - db.store('1', {'var_1': '1', 'var_2': '1', 'var_3': '3', 'var_4': 0}) - db.store('2', {'var_1': '2', 'var_2': '1', 'var_3': '4', 'var_4': 0}) - db.store('3', {'var_1': '3', 'var_2': '2', 'var_3': '4', 'var_4': 0}) + db.store('1', {'var_1': '1', 'var_2': '1', 'var_3': '3', 'var_4': '0'}) + db.store('2', {'var_1': '2', 'var_2': '1', 'var_3': '4', 'var_4': '0'}) + db.store('3', {'var_1': '3', 'var_2': '2', 'var_3': '4', 'var_4': '0'}) self.assertEqual( [{'guid': '1', 'var_1': '1'}, {'guid': '3', 'var_1': 
'3'}], @@ -366,7 +376,7 @@ class IndexTest(tests.Test): def test_MultipleValues(self): db = Index({ - 'prop': IndexedProperty('prop', prefix='B', typecast=[1, 2], full_text=True), + 'prop': List(name='prop', prefix='B', subtype=Enum([1, 2, 3]), full_text=True), }) db.store('1', {'prop': [1, 2]}) db.store('2', {'prop': [2, 3]}) @@ -385,7 +395,7 @@ class IndexTest(tests.Test): db.close() db = Index({ - 'prop': IndexedProperty('prop', prefix='B', typecast=[], full_text=True), + 'prop': List(name='prop', prefix='B', full_text=True), }) db.store('1', {'prop': ['a', 'b']}) db.store('2', {'prop': ['b', 'c']}) @@ -448,7 +458,7 @@ class IndexTest(tests.Test): db.close() def test_find_OrderByGUIDAllTime(self): - db = Index({'prop': IndexedProperty('prop', 1, 'P')}) + db = Index({'prop': Property('prop', 1, 'P')}) db.store('3', {'prop': '1'}) db.store('2', {'prop': '1'}) @@ -469,7 +479,7 @@ class IndexTest(tests.Test): def test_find_Region(self): term = 'azAZ09_' - db = Index({term: IndexedProperty(term, 1, 'T', full_text=True)}) + db = Index({term: Property(term, 1, 'T', full_text=True)}) db.store('1', {term: 'test'}) db.store('2', {term: 'test fail'}) @@ -479,7 +489,7 @@ class IndexTest(tests.Test): db._find(query='%s:=test' % term, reply=['guid'])) def test_find_WithListProps(self): - db = Index({'prop': IndexedProperty('prop', None, 'A', full_text=True, typecast=[])}) + db = Index({'prop': List(name='prop', prefix='A', full_text=True)}) db.store('1', {'prop': ('a', )}) db.store('2', {'prop': ('a', 'aa')}) @@ -571,10 +581,10 @@ class IndexTest(tests.Test): self.assertEqual(1, len(commits)) def test_SortLocalizedProps(self): - toolkit._default_langs = ['default_lang'] + i18n._default_langs = ['default_lang'] current_lang = locale.getdefaultlocale()[0].replace('_', '-') - db = Index({'prop': IndexedProperty('prop', 1, 'A', localized=True)}) + db = Index({'prop': Localized(name='prop', slot=1, prefix='A')}) db.store('0', {'prop': {'foo': '5'}}) db.store('1', {'prop': {current_lang: '4', 'default_lang': '1', 'foo': '3'}}) @@ -598,7 +608,7 @@ class IndexTest(tests.Test): db._find(order_by='-prop')[0]) def test_SearchByLocalizedProps(self): - db = Index({'prop': IndexedProperty('prop', 1, 'A', localized=True, full_text=True)}) + db = Index({'prop': Localized(name='prop', slot=1, prefix='A', full_text=True)}) db.store('1', {'prop': {'a': 'ё'}}) db.store('2', {'prop': {'a': 'ё', 'b': 'ю'}}) @@ -635,7 +645,7 @@ class IndexTest(tests.Test): sorted(db._find(query='prop:я')[0])) def test_find_MultipleFilter(self): - db = Index({'prop': IndexedProperty('prop', 1, 'A')}) + db = Index({'prop': Property('prop', 1, 'A')}) db.store('1', {'prop': 'a'}) db.store('2', {'prop': 'b'}) @@ -677,7 +687,7 @@ class IndexTest(tests.Test): db._find(prop=['b', 'foo', 'bar'], reply=['guid'])[0]) def test_find_AndNotFilter(self): - db = Index({'prop': IndexedProperty('prop', 1, 'A')}) + db = Index({'prop': Property('prop', 1, 'A')}) db.store('1', {'prop': 'a'}) db.store('2', {'prop': 'b'}) @@ -721,24 +731,21 @@ class IndexTest(tests.Test): ]), sorted(db._find(prop=['a', 'c'], reply=['guid'], **{'!prop': 'b'})[0])) - def test_fmt_prop_value(self): - prop = Property('prop') - self.assertEqual(['0'], [i for i in _fmt_prop_value(prop, 0)]) - self.assertEqual(['1'], [i for i in _fmt_prop_value(prop, 1)]) - self.assertEqual(['0'], [i for i in _fmt_prop_value(prop, 0)]) - self.assertEqual(['1.1'], [i for i in _fmt_prop_value(prop, 1.1)]) - self.assertEqual(['0', '1'], [i for i in _fmt_prop_value(prop, [0, 1])]) - self.assertEqual(['2', 
'1'], [i for i in _fmt_prop_value(prop, [2, 1])]) - self.assertEqual(['probe', 'True', '0'], [i for i in _fmt_prop_value(prop, ['probe', True, 0])]) - self.assertEqual(['True'], [i for i in _fmt_prop_value(prop, True)]) - self.assertEqual(['False'], [i for i in _fmt_prop_value(prop, False)]) + def test_find_CustomEncode(self): + db = Index({'trait': Numeric('trait', 1, 'A')}) + + db.store('1', {'trait': 1}) + db.store('2', {'trait': 2}) + db.store('11', {'trait': 11}) + + self.assertEqual([{'guid': '1'}], db._find(trait='1')[0]) + self.assertEqual([{'guid': '1'}], db._find(trait=1)[0]) - prop = Property('prop', typecast=bool) - self.assertEqual(['1'], [i for i in _fmt_prop_value(prop, True)]) - self.assertEqual(['0'], [i for i in _fmt_prop_value(prop, False)]) + self.assertEqual([{'guid': '2'}], db._find(trait='2')[0]) + self.assertEqual([{'guid': '2'}], db._find(trait=2)[0]) - prop = Property('prop', fmt=lambda x: x.keys()) - self.assertEqual(['a', '2'], [i for i in _fmt_prop_value(prop, {'a': 1, 2: 'b'})]) + self.assertEqual([{'guid': '11'}], db._find(trait='11')[0]) + self.assertEqual([{'guid': '11'}], db._find(trait=11)[0]) class Index(index.IndexWriter): @@ -750,7 +757,7 @@ class Index(index.IndexWriter): metadata = Metadata(Index) metadata.update(props) - metadata['guid'] = IndexedProperty('guid', + metadata['guid'] = Property('guid', acl=ACL.CREATE | ACL.READ, slot=0, prefix=GUID_PREFIX) diff --git a/tests/units/db/metadata.py b/tests/units/db/metadata.py new file mode 100755 index 0000000..a0ba512 --- /dev/null +++ b/tests/units/db/metadata.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# sugar-lint: disable + +from __init__ import tests + +from sugar_network import db + + +class MetadataTest(tests.Test): + + def test_Typecast(self): + prop = db.Numeric() + self.assertEqual(1, prop.typecast(1)) + self.assertEqual(1, prop.typecast(1.1)) + self.assertEqual(1, prop.typecast('1')) + self.assertRaises(ValueError, prop.typecast, '1.0') + self.assertRaises(ValueError, prop.typecast, '') + self.assertRaises(TypeError, prop.typecast, None) + + prop = db.Boolean() + self.assertEqual(False, prop.typecast(0)) + self.assertEqual(True, prop.typecast(1)) + self.assertEqual(True, prop.typecast(1.1)) + self.assertEqual(True, prop.typecast('1')) + self.assertEqual(False, prop.typecast('false')) + self.assertEqual(True, prop.typecast(True)) + self.assertEqual(False, prop.typecast(False)) + self.assertEqual(False, prop.typecast('False')) + self.assertEqual(False, prop.typecast('0')) + self.assertEqual(False, prop.typecast('')) + self.assertEqual(False, prop.typecast(None)) + + prop = db.List(subtype=db.Numeric()) + self.assertEqual([1], prop.typecast(1)) + self.assertEqual([], prop.typecast(None)) + self.assertRaises(ValueError, prop.typecast, '') + self.assertEqual([], prop.typecast([])) + self.assertEqual([123], prop.typecast('123')) + self.assertRaises(ValueError, prop.typecast, 'a') + self.assertEqual([123, 4, 5], prop.typecast(['123', 4, 5.6])) + + prop = db.Enum(items=[1, 2]) + self.assertRaises(ValueError, prop.typecast, 0) + self.assertRaises(TypeError, prop.typecast, None) + self.assertRaises(ValueError, prop.typecast, '') + self.assertRaises(ValueError, prop.typecast, 'A') + self.assertRaises(ValueError, prop.typecast, '3') + self.assertEqual(1, prop.typecast(1)) + self.assertEqual(2, prop.typecast(2)) + self.assertEqual(1, prop.typecast('1')) + + prop = db.List() + self.assertEqual([], prop.typecast(None)) + self.assertEqual([''], prop.typecast('')) + self.assertEqual([''], 
prop.typecast([''])) + self.assertEqual([], prop.typecast([])) + self.assertEqual([0], prop.typecast(0)) + self.assertEqual([''], prop.typecast('')) + self.assertEqual(['foo'], prop.typecast('foo')) + + prop = db.List(subtype=db.Enum(['A', 'B', 'C'])) + self.assertRaises(ValueError, prop.typecast, '') + self.assertRaises(ValueError, prop.typecast, ['']) + self.assertEqual([], prop.typecast([])) + self.assertEqual(['A', 'B', 'C'], prop.typecast(['A', 'B', 'C'])) + self.assertRaises(ValueError, prop.typecast, ['a']) + self.assertRaises(ValueError, prop.typecast, ['A', 'x']) + + +if __name__ == '__main__': + tests.main() diff --git a/tests/units/db/resource.py b/tests/units/db/resource.py index d09010e..ef305ec 100755 --- a/tests/units/db/resource.py +++ b/tests/units/db/resource.py @@ -23,11 +23,16 @@ from sugar_network.db import directory as directory_ from sugar_network.db.directory import Directory from sugar_network.db.index import IndexWriter from sugar_network.toolkit.router import ACL +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit import http, Sequence class ResourceTest(tests.Test): + def setUp(self, fork_num=0): + tests.Test.setUp(self, fork_num) + this.broadcast = lambda x: x + def test_ActiveProperty_Slotted(self): class Document(db.Resource): @@ -345,31 +350,31 @@ class ResourceTest(tests.Test): return value directory = Directory(tests.tmpdir, Document, IndexWriter) + guid = directory.create({'guid': '1', 'prop1': '1', 'prop2': '2'}) + doc = directory.get(guid) - self.assertRaises(http.NotFound, directory.patch, 'absent', {}) - - directory.create({'guid': '1', 'prop1': '1', 'prop2': '2'}) - self.assertEqual({}, directory.patch('1', {})) - self.assertEqual({}, directory.patch('1', {'prop1': '1', 'prop2': '2'})) - self.assertEqual({'prop1': '1_'}, directory.patch('1', {'prop1': '1_', 'prop2': '2'})) - self.assertEqual({'prop1': '1_', 'prop2': '2_'}, directory.patch('1', {'prop1': '1_', 'prop2': '2_'})) + self.assertEqual({}, doc.patch({})) + self.assertEqual({}, doc.patch({'prop1': '1', 'prop2': '2'})) + self.assertEqual({'prop1': '1_'}, doc.patch({'prop1': '1_', 'prop2': '2'})) + self.assertEqual({'prop1': '1_', 'prop2': '2_'}, doc.patch({'prop1': '1_', 'prop2': '2_'})) def test_patch_LocalizedProps(self): class Document(db.Resource): - @db.indexed_property(slot=1, localized=True) + @db.indexed_property(db.Localized, slot=1) def prop(self, value): return value directory = Directory(tests.tmpdir, Document, IndexWriter) + guid = directory.create({'guid': '1', 'prop': {'ru': 'ru'}}) + doc = directory.get(guid) - directory.create({'guid': '1', 'prop': {'ru': 'ru'}}) - self.assertEqual({}, directory.patch('1', {'prop': 'ru'})) - self.assertEqual({'prop': {'ru': 'ru_'}}, directory.patch('1', {'prop': {'ru': 'ru_'}})) - self.assertEqual({'prop': {'en': 'en'}}, directory.patch('1', {'prop': {'en': 'en'}})) - self.assertEqual({'prop': {'ru': 'ru', 'en': 'en'}}, directory.patch('1', {'prop': {'ru': 'ru', 'en': 'en'}})) - self.assertEqual({'prop': {'ru': 'ru_', 'en': 'en'}}, directory.patch('1', {'prop': {'ru': 'ru_', 'en': 'en'}})) + self.assertEqual({}, doc.patch({'prop': {'ru': 'ru'}})) + self.assertEqual({'prop': {'ru': 'ru_'}}, doc.patch({'prop': {'ru': 'ru_'}})) + self.assertEqual({'prop': {'en': 'en'}}, doc.patch({'prop': {'en': 'en'}})) + self.assertEqual({'prop': {'ru': 'ru', 'en': 'en'}}, doc.patch({'prop': {'ru': 'ru', 'en': 'en'}})) + self.assertEqual({'prop': {'ru': 'ru_', 'en': 'en'}}, doc.patch({'prop': {'ru': 'ru_', 'en': 'en'}})) def 
test_diff(self): @@ -379,21 +384,13 @@ class ResourceTest(tests.Test): def prop(self, value): return value - @db.blob_property() - def blob(self, value): - return value - directory = Directory(tests.tmpdir, Document, IndexWriter) - self.touch(('blob', '1')) directory.create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}) - directory.update('1', {'blob': {'blob': 'blob'}}) for i in os.listdir('1/1'): os.utime('1/1/%s' % i, (1, 1)) - self.touch(('blob', '2')) directory.create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2}) - directory.update('2', {'blob': {'blob': 'blob'}}) for i in os.listdir('2/2'): os.utime('2/2/%s' % i, (2, 2)) @@ -408,22 +405,12 @@ class ResourceTest(tests.Test): 'ctime': {'value': 1, 'mtime': 1}, 'prop': {'value': '1', 'mtime': 1}, 'mtime': {'value': 1, 'mtime': 1}, - 'blob': { - 'mtime': 1, - 'blob': tests.tmpdir + '/1/1/blob.blob', - 'blob_size': 1, - }, }}, {'guid': '2', 'diff': { 'guid': {'value': '2', 'mtime': 2}, 'ctime': {'value': 2, 'mtime': 2}, 'prop': {'value': '2', 'mtime': 2}, 'mtime': {'value': 2, 'mtime': 2}, - 'blob': { - 'mtime': 2, - 'blob': tests.tmpdir + '/2/2/blob.blob', - 'blob_size': 1, - }, }}, {'guid': '3', 'diff': { 'guid': {'value': '3', 'mtime': 3}, @@ -433,7 +420,7 @@ class ResourceTest(tests.Test): }}, ], [i for i in diff(directory, [[0, None]], out_seq)]) - self.assertEqual([[1, 5]], out_seq) + self.assertEqual([[1, 3]], out_seq) out_seq = Sequence() self.assertEqual([ @@ -442,26 +429,15 @@ class ResourceTest(tests.Test): 'ctime': {'value': 2, 'mtime': 2}, 'prop': {'value': '2', 'mtime': 2}, 'mtime': {'value': 2, 'mtime': 2}, - 'blob': { - 'mtime': 2, - 'blob': tests.tmpdir + '/2/2/blob.blob', - 'blob_size': 1, - }, }}, ], - [i for i in diff(directory, [[3, 4]], out_seq)]) - self.assertEqual([[3, 4]], out_seq) - - out_seq = Sequence() - self.assertEqual([ - ], - [i for i in diff(directory, [[3, 3]], out_seq)]) - self.assertEqual([], out_seq) + [i for i in diff(directory, [[2, 2]], out_seq)]) + self.assertEqual([[2, 2]], out_seq) out_seq = Sequence() self.assertEqual([ ], - [i for i in diff(directory, [[6, 100]], out_seq)]) + [i for i in diff(directory, [[4, 100]], out_seq)]) self.assertEqual([], out_seq) directory.update('2', {'prop': '22'}) self.assertEqual([ @@ -469,8 +445,8 @@ class ResourceTest(tests.Test): 'prop': {'value': '22', 'mtime': int(os.stat('2/2/prop').st_mtime)}, }}, ], - [i for i in diff(directory, [[6, 100]], out_seq)]) - self.assertEqual([[6, 6]], out_seq) + [i for i in diff(directory, [[4, 100]], out_seq)]) + self.assertEqual([[4, 4]], out_seq) def test_diff_IgnoreCalcProps(self): @@ -535,37 +511,6 @@ class ResourceTest(tests.Test): self.assertEqual([[1, 1], [4, 4]], out_seq) - def test_diff_WithBlobsSetByUrl(self): - URL = 'http://src.sugarlabs.org/robots.txt' - URL_content = urllib2.urlopen(URL).read() - - class Document(db.Resource): - - @db.blob_property() - def blob(self, value): - return value - - directory = Directory(tests.tmpdir, Document, IndexWriter) - - directory.create({'guid': '1', 'ctime': 1, 'mtime': 1}) - directory.update('1', {'blob': {'url': URL}}) - self.utime('1/1', 1) - - out_seq = Sequence() - self.assertEqual([ - {'guid': '1', 'diff': { - 'guid': {'value': '1', 'mtime': 1}, - 'ctime': {'value': 1, 'mtime': 1}, - 'mtime': {'value': 1, 'mtime': 1}, - 'blob': { - 'url': URL, - 'mtime': 1, - }, - }}, - ], - [i for i in diff(directory, [[0, None]], out_seq)]) - self.assertEqual([[1, 2]], out_seq) - def test_diff_Filter(self): class Document(db.Resource): @@ -626,7 +571,7 @@ class 
ResourceTest(tests.Test): class Document(db.Resource): - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType()) + @db.stored_property(db.Aggregated) def prop(self, value): return value @@ -768,21 +713,13 @@ class ResourceTest(tests.Test): def prop(self, value): return value - @db.blob_property() - def blob(self, value): - return value - directory1 = Directory('document1', Document, IndexWriter) directory1.create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}) - self.touch(('blob', '1')) - directory1.update('1', {'blob': {'blob': 'blob'}}) for i in os.listdir('document1/1/1'): os.utime('document1/1/1/%s' % i, (1, 1)) directory1.create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2}) - self.touch(('blob', '2')) - directory1.update('2', {'blob': {'blob': 'blob'}}) for i in os.listdir('document1/2/2'): os.utime('document1/2/2/%s' % i, (2, 2)) @@ -808,7 +745,6 @@ class ResourceTest(tests.Test): self.assertEqual(1, doc.meta('ctime')['mtime']) self.assertEqual(1, doc.meta('prop')['mtime']) self.assertEqual(1, doc.meta('mtime')['mtime']) - self.assertEqual(1, doc.meta('blob')['mtime']) doc = directory2.get('2') self.assertEqual(2, doc.get('seqno')) @@ -816,7 +752,6 @@ class ResourceTest(tests.Test): self.assertEqual(2, doc.meta('ctime')['mtime']) self.assertEqual(2, doc.meta('prop')['mtime']) self.assertEqual(2, doc.meta('mtime')['mtime']) - self.assertEqual(2, doc.meta('blob')['mtime']) doc = directory2.get('3') self.assertEqual(3, doc.get('seqno')) @@ -824,28 +759,25 @@ class ResourceTest(tests.Test): self.assertEqual(3, doc.meta('ctime')['mtime']) self.assertEqual(3, doc.meta('prop')['mtime']) self.assertEqual(3, doc.meta('mtime')['mtime']) - self.assertEqual(None, doc.meta('blob')) def test_merge_Update(self): class Document(db.Resource): - @db.blob_property() - def blob(self, value): + @db.stored_property(default='') + def prop(self, value): return value directory1 = Directory('document1', Document, IndexWriter) directory2 = Directory('document2', Document, IndexWriter) directory1.create({'guid': 'guid', 'ctime': 1, 'mtime': 1}) - self.touch(('blob', '1')) - directory1.update('guid', {'blob': {'blob': 'blob'}}) + directory1.update('guid', {'prop': '1'}) for i in os.listdir('document1/gu/guid'): os.utime('document1/gu/guid/%s' % i, (1, 1)) directory2.create({'guid': 'guid', 'ctime': 2, 'mtime': 2}) - self.touch(('blob', '2')) - directory2.update('guid', {'blob': {'blob': 'blob'}}) + directory2.update('guid', {'prop': '2'}) for i in os.listdir('document2/gu/guid'): os.utime('document2/gu/guid/%s' % i, (2, 2)) @@ -858,8 +790,8 @@ class ResourceTest(tests.Test): self.assertEqual(2, doc.meta('guid')['mtime']) self.assertEqual(2, doc.meta('ctime')['mtime']) self.assertEqual(2, doc.meta('mtime')['mtime']) - self.assertEqual(2, doc.meta('blob')['mtime']) - self.assertEqual('2', file('document2/gu/guid/blob.blob').read()) + self.assertEqual(2, doc.meta('prop')['mtime']) + self.assertEqual('2', doc.meta('prop')['value']) for patch in diff(directory1, [[0, None]], Sequence()): directory2.merge(**patch) @@ -872,8 +804,8 @@ class ResourceTest(tests.Test): self.assertEqual(2, doc.meta('guid')['mtime']) self.assertEqual(2, doc.meta('ctime')['mtime']) self.assertEqual(2, doc.meta('mtime')['mtime']) - self.assertEqual(2, doc.meta('blob')['mtime']) - self.assertEqual('2', file('document2/gu/guid/blob.blob').read()) + self.assertEqual(2, doc.meta('prop')['mtime']) + self.assertEqual('2', doc.meta('prop')['value']) os.utime('document1/gu/guid/mtime', (3, 3)) for patch in 
diff(directory1, [[0, None]], Sequence()): @@ -887,10 +819,10 @@ class ResourceTest(tests.Test): self.assertEqual(2, doc.meta('guid')['mtime']) self.assertEqual(2, doc.meta('ctime')['mtime']) self.assertEqual(3, doc.meta('mtime')['mtime']) - self.assertEqual(2, doc.meta('blob')['mtime']) - self.assertEqual('2', file('document2/gu/guid/blob.blob').read()) + self.assertEqual(2, doc.meta('prop')['mtime']) + self.assertEqual('2', doc.meta('prop')['value']) - os.utime('document1/gu/guid/blob', (4, 4)) + os.utime('document1/gu/guid/prop', (4, 4)) for patch in diff(directory1, [[0, None]], Sequence()): directory2.merge(**patch) @@ -902,132 +834,14 @@ class ResourceTest(tests.Test): self.assertEqual(2, doc.meta('guid')['mtime']) self.assertEqual(2, doc.meta('ctime')['mtime']) self.assertEqual(3, doc.meta('mtime')['mtime']) - self.assertEqual(4, doc.meta('blob')['mtime']) - self.assertEqual('1', file('document2/gu/guid/blob.blob').read()) - - def test_merge_SeqnoLessMode(self): - - class Document(db.Resource): - - @db.indexed_property(slot=1) - def prop(self, value): - return value - - directory1 = Directory('document1', Document, IndexWriter) - directory1.create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}) - - directory2 = Directory('document2', Document, IndexWriter) - for patch in diff(directory1, [[0, None]], Sequence()): - directory2.merge(shift_seqno=False, **patch) - self.assertEqual( - [(1, 1, '1', '1')], - [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory2.find()[0]]) - doc = directory2.get('1') - self.assertEqual(0, doc.get('seqno')) - self.assertEqual(0, doc.meta('guid')['seqno']) - self.assertEqual(0, doc.meta('prop')['seqno']) - - directory3 = Directory('document3', Document, IndexWriter) - for patch in diff(directory1, [[0, None]], Sequence()): - directory3.merge(**patch) - self.assertEqual( - [(1, 1, '1', '1')], - [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory3.find()[0]]) - doc = directory3.get('1') - self.assertEqual(1, doc.get('seqno')) - self.assertEqual(1, doc.meta('guid')['seqno']) - self.assertEqual(1, doc.meta('prop')['seqno']) - - time.sleep(1) - directory1.update('1', {'prop': '2', 'ctime': 2, 'mtime': 2}) - - for patch in diff(directory1, [[0, None]], Sequence()): - directory3.merge(shift_seqno=False, **patch) - self.assertEqual( - [(2, 2, '1', '2')], - [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory3.find()[0]]) - doc = directory3.get('1') - self.assertEqual(1, doc.get('seqno')) - self.assertEqual(1, doc.meta('guid')['seqno']) - self.assertEqual(1, doc.meta('prop')['seqno']) - - time.sleep(1) - directory1.update('1', {'prop': '3', 'ctime': 3, 'mtime': 3}) - - for patch in diff(directory1, [[0, None]], Sequence()): - directory3.merge(**patch) - self.assertEqual( - [(3, 3, '1', '3')], - [(i['ctime'], i['mtime'], i['guid'], i['prop']) for i in directory3.find()[0]]) - doc = directory3.get('1') - self.assertEqual(2, doc.get('seqno')) - self.assertEqual(1, doc.meta('guid')['seqno']) - self.assertEqual(2, doc.meta('prop')['seqno']) - - def test_merge_AvoidCalculatedBlobs(self): - - class Document(db.Resource): - - @db.blob_property() - def blob(self, value): - return {'url': 'http://foo/bar', 'mime_type': 'image/png'} - - directory1 = Directory('document1', Document, IndexWriter) - directory1.create({'guid': 'guid', 'ctime': 1, 'mtime': 1}) - for i in os.listdir('document1/gu/guid'): - os.utime('document1/gu/guid/%s' % i, (1, 1)) - - directory2 = Directory('document2', Document, IndexWriter) - for patch in 
diff(directory1, [[0, None]], Sequence()): - directory2.merge(**patch) - - doc = directory2.get('guid') - self.assertEqual(1, doc.get('seqno')) - self.assertEqual(1, doc.meta('guid')['mtime']) - assert not exists('document2/gu/guid/blob') - - def test_merge_Blobs(self): - - class Document(db.Resource): - - @db.blob_property() - def blob(self, value): - return value - - directory = Directory('document', Document, IndexWriter) - self.touch(('blob', 'blob-1')) - directory.merge('1', { - 'guid': {'mtime': 1, 'value': '1'}, - 'ctime': {'mtime': 2, 'value': 2}, - 'mtime': {'mtime': 3, 'value': 3}, - 'blob': {'mtime': 4, 'blob': 'blob'}, - }) - - self.assertEqual( - [(2, 3, '1')], - [(i['ctime'], i['mtime'], i['guid']) for i in directory.find()[0]]) - - doc = directory.get('1') - self.assertEqual(1, doc.get('seqno')) - self.assertEqual(1, doc.meta('guid')['mtime']) - self.assertEqual(2, doc.meta('ctime')['mtime']) - self.assertEqual(3, doc.meta('mtime')['mtime']) - self.assertEqual(4, doc.meta('blob')['mtime']) - self.assertEqual('blob-1', file('document/1/1/blob.blob').read()) - - self.touch(('blob', 'blob-2')) - directory.merge('1', { - 'blob': {'mtime': 5, 'blob': 'blob'}, - }) - - self.assertEqual(5, doc.meta('blob')['mtime']) - self.assertEqual('blob-2', file('document/1/1/blob.blob').read()) + self.assertEqual(4, doc.meta('prop')['mtime']) + self.assertEqual('1', doc.meta('prop')['value']) def test_merge_Aggprops(self): class Document(db.Resource): - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType()) + @db.stored_property(db.Aggregated) def prop(self, value): return value @@ -1079,6 +893,28 @@ class ResourceTest(tests.Test): }, directory.get('1')['prop']) + def test_merge_CallSetters(self): + + class Document(db.Resource): + + @db.stored_property(db.Numeric) + def prop(self, value): + return value + + @prop.setter + def prop(self, value): + return value + 1 + + directory = Directory('document', Document, IndexWriter) + + directory.merge('1', { + 'guid': {'mtime': 1, 'value': '1'}, + 'ctime': {'mtime': 1, 'value': 1}, + 'mtime': {'mtime': 1, 'value': 1}, + 'prop': {'mtime': 1, 'value': 1}, + }) + self.assertEqual(2, directory.get('1')['prop']) + def test_wipe(self): class Document(db.Resource): @@ -1088,31 +924,11 @@ class ResourceTest(tests.Test): guid = directory.create({'prop': '1'}) self.assertEqual([guid], [i.guid for i in directory.find()[0]]) directory.commit() - assert directory.mtime != 0 + assert exists('index/mtime') directory.wipe() self.assertEqual([], [i.guid for i in directory.find()[0]]) - assert directory.mtime == 0 - - def test_DeleteOldBlobOnUpdate(self): - - class Document(db.Resource): - - @db.blob_property() - def blob(self, value): - return value - - directory = Directory(tests.tmpdir, Document, IndexWriter) - - directory.create({'guid': 'guid', 'blob': 'foo'}) - assert exists('gu/guid/blob.blob') - directory.update('guid', {'blob': {'url': 'foo'}}) - assert not exists('gu/guid/blob.blob') - - directory.update('guid', {'blob': 'foo'}) - assert exists('gu/guid/blob.blob') - directory.update('guid', {'blob': {}}) - assert not exists('gu/guid/blob.blob') + assert not exists('index/mtime') def diff(directory, in_seq, out_seq, exclude_seq=None, **kwargs): diff --git a/tests/units/db/routes.py b/tests/units/db/routes.py index 5908d0f..8824ca8 100755 --- a/tests/units/db/routes.py +++ b/tests/units/db/routes.py @@ -16,10 +16,11 @@ src_root = abspath(dirname(__file__)) from __init__ import tests from sugar_network import db, toolkit -from 
sugar_network.db.routes import _typecast_prop_value -from sugar_network.db.metadata import Property -from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, Blob, ACL -from sugar_network.toolkit import coroutine, http +from sugar_network.db import files +from sugar_network.model.user import User +from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit import coroutine, http, i18n class RoutesTest(tests.Test): @@ -36,18 +37,19 @@ class RoutesTest(tests.Test): def wo_default(self, value): return value - @db.indexed_property(slot=1, default='not_stored_default') + @db.stored_property(default='not_stored_default') def not_stored_default(self, value): return value - self.volume = db.Volume(tests.tmpdir, [Document], lambda event: None) + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) - self.assertRaises(RuntimeError, self.call, 'POST', ['document'], content={}) + self.assertRaises(RuntimeError, this.call, method='POST', path=['document'], content={}) - guid = self.call('POST', ['document'], content={'wo_default': 'wo_default'}) - self.assertEqual('default', self.call('GET', ['document', guid, 'w_default'])) - self.assertEqual('wo_default', self.call('GET', ['document', guid, 'wo_default'])) - self.assertEqual('not_stored_default', self.call('GET', ['document', guid, 'not_stored_default'])) + guid = this.call(method='POST', path=['document'], content={'wo_default': 'wo_default'}) + self.assertEqual('default', this.call(method='GET', path=['document', guid, 'w_default'])) + self.assertEqual('wo_default', this.call(method='GET', path=['document', guid, 'wo_default'])) + self.assertEqual('not_stored_default', this.call(method='GET', path=['document', guid, 'not_stored_default'])) def test_Populate(self): self.touch( @@ -65,10 +67,10 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - with db.Volume(tests.tmpdir, [Document], lambda event: None) as volume: - for cls in volume.values(): - for __ in cls.populate(): - pass + with db.Volume(tests.tmpdir, [Document]) as volume: + router = Router(db.Routes(volume)) + for __ in volume['document'].populate(): + pass self.assertEqual( sorted(['1', '2']), sorted([i.guid for i in volume['document'].find()[0]])) @@ -78,10 +80,10 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - with db.Volume(tests.tmpdir, [Document], lambda event: None) as volume: - for cls in volume.values(): - for __ in cls.populate(): - pass + with db.Volume(tests.tmpdir, [Document]) as volume: + router = Router(db.Routes(volume)) + for __ in volume['document'].populate(): + pass self.assertEqual( sorted(['1', '2']), sorted([i.guid for i in volume['document'].find()[0]])) @@ -94,12 +96,14 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - self.volume['testdocument'].create({'guid': 'guid'}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + volume['testdocument'].create({'guid': 'guid'}) self.assertEqual({ 'total': 1, @@ -107,11 +111,11 @@ class RoutesTest(tests.Test): {'guid': 'guid', 'prop': ''}, ], }, - self.call('GET', path=['testdocument'], reply=['guid', 'prop'])) + 
this.call(method='GET', path=['testdocument'], reply=['guid', 'prop'])) - guid_1 = self.call('POST', path=['testdocument'], content={'prop': 'value_1'}) + guid_1 = this.call(method='POST', path=['testdocument'], content={'prop': 'value_1'}) assert guid_1 - guid_2 = self.call('POST', path=['testdocument'], content={'prop': 'value_2'}) + guid_2 = this.call(method='POST', path=['testdocument'], content={'prop': 'value_2'}) assert guid_2 self.assertEqual( @@ -120,9 +124,9 @@ class RoutesTest(tests.Test): {'guid': guid_1, 'prop': 'value_1'}, {'guid': guid_2, 'prop': 'value_2'}, ]), - sorted(self.call('GET', path=['testdocument'], reply=['guid', 'prop'])['result'])) + sorted(this.call(method='GET', path=['testdocument'], reply=['guid', 'prop'])['result'])) - self.call('PUT', path=['testdocument', guid_1], content={'prop': 'value_3'}) + this.call(method='PUT', path=['testdocument', guid_1], content={'prop': 'value_3'}) self.assertEqual( sorted([ @@ -130,240 +134,397 @@ class RoutesTest(tests.Test): {'guid': guid_1, 'prop': 'value_3'}, {'guid': guid_2, 'prop': 'value_2'}, ]), - sorted(self.call('GET', path=['testdocument'], reply=['guid', 'prop'])['result'])) + sorted(this.call(method='GET', path=['testdocument'], reply=['guid', 'prop'])['result'])) - self.call('DELETE', path=['testdocument', guid_2]) + this.call(method='DELETE', path=['testdocument', guid_2]) self.assertEqual( sorted([ {'guid': 'guid', 'prop': ''}, {'guid': guid_1, 'prop': 'value_3'}, ]), - sorted(self.call('GET', path=['testdocument'], reply=['guid', 'prop'])['result'])) + sorted(this.call(method='GET', path=['testdocument'], reply=['guid', 'prop'])['result'])) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid_2]) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid_2]) self.assertEqual( {'guid': guid_1, 'prop': 'value_3'}, - self.call('GET', path=['testdocument', guid_1], reply=['guid', 'prop'])) + this.call(method='GET', path=['testdocument', guid_1], reply=['guid', 'prop'])) self.assertEqual( 'value_3', - self.call('GET', path=['testdocument', guid_1, 'prop'])) + this.call(method='GET', path=['testdocument', guid_1, 'prop'])) def test_SetBLOBs(self): class TestDocument(db.Resource): - @db.blob_property() + @db.stored_property(db.Blob) def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - self.call('PUT', path=['testdocument', guid, 'blob'], content='blob1') - self.assertEqual('blob1', file(self.call('GET', path=['testdocument', guid, 'blob'])['blob']).read()) + guid = this.call(method='POST', path=['testdocument'], content={}) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) - self.call('PUT', path=['testdocument', guid, 'blob'], content_stream=StringIO('blob2')) - self.assertEqual('blob2', file(self.call('GET', path=['testdocument', guid, 'blob'])['blob']).read()) + this.call(method='PUT', path=['testdocument', guid, 'blob'], content='blob1') + self.assertEqual('blob1', file(this.call(method='GET', path=['testdocument', guid, 'blob']).path).read()) - self.call('PUT', path=['testdocument', guid, 'blob'], content=None) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob']) + this.call(method='PUT', path=['testdocument', guid, 'blob'], content_stream=StringIO('blob2')) + 
self.assertEqual('blob2', file(this.call(method='GET', path=['testdocument', guid, 'blob']).path).read()) - def test_SetBLOBsByMeta(self): + this.call(method='PUT', path=['testdocument', guid, 'blob'], content=None) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) + + def test_CreateBLOBsWithMeta(self): class TestDocument(db.Resource): - @db.blob_property(mime_type='default') + @db.stored_property(db.Blob) def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={}) - self.assertRaises(RuntimeError, self.call, 'PUT', path=['testdocument', guid, 'blob'], + self.assertRaises(http.BadRequest, this.call, method='PUT', path=['testdocument', guid, 'blob'], content={}, content_type='application/json') - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob']) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) + + self.assertRaises(http.BadRequest, this.call, method='PUT', path=['testdocument', guid, 'blob'], + content={'url': 'foo'}, content_type='application/json') + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) + + this.call(method='PUT', path=['testdocument', guid, 'blob'], + content={'url': 'url', 'digest': 'digest', 'foo': 'bar'}, content_type='application/json') + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'url': 'url', + 'foo': 'bar', + }, + this.call(method='GET', path=['testdocument', guid, 'blob'])) + + def test_UpdateUrlBLOBsWithMeta(self): + + class TestDocument(db.Resource): + + @db.stored_property(db.Blob) + def blob(self, value): + return value + + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + guid = this.call(method='POST', path=['testdocument'], content={'blob': {'digest': 'digest', 'url': 'url'}}) + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'url': 'url', + }, + this.call(method='GET', path=['testdocument', guid, 'blob'])) + + self.assertRaises(http.BadRequest, this.call, method='PUT', path=['testdocument', guid, 'blob'], + content={'digest': 'fake'}, content_type='application/json') + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'url': 'url', + }, + this.call(method='GET', path=['testdocument', guid, 'blob'])) + + this.call(method='PUT', path=['testdocument', guid, 'blob'], + content={'foo': 'bar'}, content_type='application/json') + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'url': 'url', + 'foo': 'bar', + }, + this.call(method='GET', path=['testdocument', guid, 'blob'])) + + def test_UpdateFileBLOBsWithMeta(self): + + class TestDocument(db.Resource): + + @db.stored_property(db.Blob) + def blob(self, value): + return value + + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + guid = this.call(method='POST', path=['testdocument'], content={'blob': 'blob'}) + blob = this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'}) + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'size': os.stat(blob.path).st_size, + 'mtime': int(os.stat(blob.path).st_mtime), + 'url': 'http://localhost/blobs/%s' % hash('blob'), + 'digest': 
str(hash('blob')), + }, + blob) + self.assertEqual('blob', file(blob.path).read()) + + self.assertRaises(http.BadRequest, this.call, method='PUT', path=['testdocument', guid, 'blob'], + content={'digest': 'fake'}, content_type='application/json') + blob = this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'}) + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'size': os.stat(blob.path).st_size, + 'mtime': int(os.stat(blob.path).st_mtime), + 'digest': str(hash('blob')), + 'url': 'http://localhost/blobs/%s' % hash('blob'), + }, + blob) + self.assertEqual('blob', file(blob.path).read()) + + this.call(method='PUT', path=['testdocument', guid, 'blob'], + content={'foo': 'bar'}, content_type='application/json') + blob = this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'}) + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'size': os.stat(blob.path).st_size, + 'mtime': int(os.stat(blob.path).st_mtime), + 'digest': str(hash('blob')), + 'url': 'http://localhost/blobs/%s' % hash('blob'), + 'foo': 'bar', + }, + blob) + self.assertEqual('blob', file(blob.path).read()) + + def test_SwitchBLOBsType(self): + + class TestDocument(db.Resource): + + @db.stored_property(db.Blob) + def blob(self, value): + return value + + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - self.call('PUT', path=['testdocument', guid, 'blob'], - content={'url': 'foo', 'bar': 'probe'}, content_type='application/json') - blob = self.call('GET', path=['testdocument', guid, 'blob']) - self.assertEqual('foo', blob['url']) + guid = this.call(method='POST', path=['testdocument'], content={'blob': 'blob'}) + this.call(method='PUT', path=['testdocument', guid, 'blob'], + content={'foo': 'bar'}, content_type='application/json') + + file_blob = this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'}) + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'size': os.stat(file_blob.path).st_size, + 'mtime': int(os.stat(file_blob.path).st_mtime), + 'digest': str(hash('blob')), + 'url': 'http://localhost/blobs/%s' % hash('blob'), + 'foo': 'bar', + }, file_blob) + self.assertEqual('blob', file(file_blob.path).read()) + + this.call(method='PUT', path=['testdocument', guid, 'blob'], + content={'url': 'url'}, content_type='application/json') + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'url': 'url', + 'foo': 'bar', + }, this.call(method='GET', path=['testdocument', guid, 'blob'])) + assert not exists(file_blob.path) + + this.call(method='PUT', path=['testdocument', guid, 'blob'], + content='blob', content_type='application/octet-stream', environ={'HTTP_HOST': 'localhost'}) + self.assertEqual({ + 'mime_type': 'application/octet-stream', + 'size': os.stat(file_blob.path).st_size, + 'mtime': int(os.stat(file_blob.path).st_mtime), + 'digest': str(hash('blob')), + 'url': 'http://localhost/blobs/%s' % hash('blob'), + }, this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'})) + self.assertEqual('blob', file(file_blob.path).read()) def test_RemoveBLOBs(self): class TestDocument(db.Resource): - @db.blob_property(mime_type='default') + @db.stored_property(db.Blob) def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={'blob': 'blob'}) + volume = db.Volume(tests.tmpdir, 
[TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={'blob': 'blob'}) - self.assertEqual('blob', file(self.call('GET', path=['testdocument', guid, 'blob'])['blob']).read()) + self.assertEqual('blob', file(this.call(method='GET', path=['testdocument', guid, 'blob']).path).read()) - self.call('PUT', path=['testdocument', guid, 'blob']) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob']) + this.call(method='PUT', path=['testdocument', guid, 'blob']) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) - def test_RemoveTempBLOBFilesOnFails(self): + def test_ReuploadBLOBs(self): class TestDocument(db.Resource): - @db.blob_property(mime_type='default') + @db.stored_property(db.Blob) + def blob(self, value): + return value + + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={'blob': 'blob1'}) + + blob1 = this.call(method='GET', path=['testdocument', guid, 'blob']) + self.assertEqual('blob1', file(blob1.path).read()) + + this.call(method='PUT', path=['testdocument', guid, 'blob'], content='blob2') + blob2 = this.call(method='GET', path=['testdocument', guid, 'blob']) + self.assertEqual('blob2', file(blob2.path).read()) + assert blob1.path != blob2.path + assert not exists(blob1.path) + + def test_RemoveBLOBsOnFailedSetter(self): + + class TestDocument(db.Resource): + + @db.stored_property(db.Blob) def blob(self, value): return value @blob.setter def blob(self, value): - raise RuntimeError() + if value: + raise RuntimeError() + return value + + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + guid = this.call(method='POST', path=['testdocument'], content={}) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) - self.assertRaises(RuntimeError, self.call, 'PUT', path=['testdocument', guid, 'blob'], content='probe') - self.assertEqual(0, len(os.listdir('tmp'))) + self.assertRaises(RuntimeError, this.call, method='PUT', path=['testdocument', guid, 'blob'], content='probe') + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) + assert not exists('blobs/%s' % hash('probe')) def test_SetBLOBsWithMimeType(self): class TestDocument(db.Resource): - @db.blob_property(mime_type='default') + @db.stored_property(db.Blob, mime_type='default') def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={}) - self.call('PUT', path=['testdocument', guid, 'blob'], content='blob1') - self.assertEqual('default', self.call('GET', path=['testdocument', guid, 'blob'])['mime_type']) - self.assertEqual('default', self.response.content_type) + response = Response() + this.call(response=response, + method='PUT', path=['testdocument', guid, 'blob'], content='blob1') + response = Response() + self.assertEqual('default', this.call(response=response, + method='GET', path=['testdocument', guid, 'blob'])['mime_type']) + self.assertEqual('default', 
response.content_type) - self.call('PUT', path=['testdocument', guid, 'blob'], content='blob1', content_type='foo') - self.assertEqual('foo', self.call('GET', path=['testdocument', guid, 'blob'])['mime_type']) - self.assertEqual('foo', self.response.content_type) + response = Response() + this.call(response=response, + method='PUT', path=['testdocument', guid, 'blob'], content='blob1', content_type='foo') + response = Response() + self.assertEqual('foo', this.call(response=response, + method='GET', path=['testdocument', guid, 'blob'])['mime_type']) + self.assertEqual('foo', response.content_type) def test_GetBLOBs(self): class TestDocument(db.Resource): - @db.blob_property() + @db.stored_property(db.Blob) def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + guid = this.call(method='POST', path=['testdocument'], content={}) blob = 'blob' - self.call('PUT', path=['testdocument', guid, 'blob'], content=blob) - - blob_path = tests.tmpdir + '/testdocument/%s/%s/blob' % (guid[:2], guid) - blob_meta = { - 'seqno': 2, - 'blob': blob_path + '.blob', - 'blob_size': len(blob), - 'digest': hashlib.sha1(blob).hexdigest(), - 'mime_type': 'application/octet-stream', - 'mtime': int(os.stat(blob_path).st_mtime), - } + this.call(method='PUT', path=['testdocument', guid, 'blob'], content=blob) + digest = str(hash(blob)) + blob_path = 'blobs/%s' % digest - self.assertEqual('blob', file(self.call('GET', path=['testdocument', guid, 'blob'])['blob']).read()) + self.assertEqual('blob', file(this.call(method='GET', path=['testdocument', guid, 'blob']).path).read()) self.assertEqual({ 'blob': { - 'url': 'http://localhost/testdocument/%s/blob' % guid, - 'blob_size': len(blob), - 'digest': hashlib.sha1(blob).hexdigest(), 'mime_type': u'application/octet-stream', + 'url': 'http://localhost/blobs/%s' % digest, + 'size': len(blob), + 'digest': digest, + 'mtime': int(os.stat(blob_path).st_mtime), }, }, - self.call('GET', path=['testdocument', guid], reply=['blob'], host='localhost')) + this.call(method='GET', path=['testdocument', guid], reply=['blob'], environ={'HTTP_HOST': 'localhost'})) self.assertEqual([{ 'blob': { - 'url': 'http://localhost/testdocument/%s/blob' % guid, - 'blob_size': len(blob), - 'digest': hashlib.sha1(blob).hexdigest(), 'mime_type': u'application/octet-stream', + 'url': 'http://localhost/blobs/%s' % digest, + 'size': len(blob), + 'digest': digest, + 'mtime': int(os.stat(blob_path).st_mtime), }, }], - self.call('GET', path=['testdocument'], reply=['blob'], host='localhost')['result']) + this.call(method='GET', path=['testdocument'], reply=['blob'], environ={'HTTP_HOST': 'localhost'})['result']) def test_GetBLOBsByUrls(self): class TestDocument(db.Resource): - @db.blob_property() + @db.stored_property(db.Blob) def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid1 = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid1 = this.call(method='POST', path=['testdocument'], content={}) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid1, 'blob']) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid1, 'blob']) self.assertEqual( - {'blob': {'url': 
'http://127.0.0.1/testdocument/%s/blob' % guid1}}, - self.call('GET', path=['testdocument', guid1], reply=['blob'], host='127.0.0.1')) + {'blob': {}}, + this.call(method='GET', path=['testdocument', guid1], reply=['blob'], environ={'HTTP_HOST': '127.0.0.1'})) blob = 'file' - guid2 = self.call('POST', path=['testdocument'], content={'blob': blob}) - self.assertEqual('file', file(self.call('GET', path=['testdocument', guid2, 'blob'])['blob']).read()) - self.assertEqual({ - 'blob': { - 'url': 'http://127.0.0.1/testdocument/%s/blob' % guid2, - 'blob_size': len(blob), - 'digest': hashlib.sha1(blob).hexdigest(), - 'mime_type': u'application/octet-stream', - }, - }, - self.call('GET', path=['testdocument', guid2], reply=['blob'], host='127.0.0.1')) + guid2 = this.call(method='POST', path=['testdocument'], content={'blob': blob}) + self.assertEqual( + 'http://127.0.0.1/blobs/%s' % hash(blob), + this.call(method='GET', path=['testdocument', guid2], reply=['blob'], environ={'HTTP_HOST': '127.0.0.1'})['blob']['url']) - guid3 = self.call('POST', path=['testdocument'], content={'blob': {'url': 'http://foo'}}, content_type='application/json') - self.assertEqual('http://foo', self.call('GET', path=['testdocument', guid3, 'blob'])['url']) - self.assertEqual({ - 'blob': { - 'url': 'http://foo', - }, - }, - self.call('GET', path=['testdocument', guid3], reply=['blob'], host='127.0.0.1')) + guid3 = this.call(method='POST', path=['testdocument'], content={'blob': {'url': 'http://foo', 'digest': 'digest'}}, content_type='application/json') + self.assertEqual( + 'http://foo', + this.call(method='GET', path=['testdocument', guid3, 'blob'])['url']) + self.assertEqual( + 'http://foo', + this.call(method='GET', path=['testdocument', guid3], reply=['blob'], environ={'HTTP_HOST': '127.0.0.1'})['blob']['url']) self.assertEqual( sorted([ - {'blob': { - 'url': 'http://127.0.0.1/testdocument/%s/blob' % guid1, - }}, - { 'blob': { - 'url': 'http://127.0.0.1/testdocument/%s/blob' % guid2, - 'blob_size': len(blob), - 'digest': hashlib.sha1(blob).hexdigest(), - 'mime_type': u'application/octet-stream', - }}, - { 'blob': { - 'url': 'http://foo', - }}, + None, + 'http://127.0.0.1/blobs/%s' % hash(blob), + 'http://foo', ]), - sorted(self.call('GET', path=['testdocument'], reply=['blob'], host='127.0.0.1')['result'])) + sorted([i['blob'].get('url') for i in this.call(method='GET', path=['testdocument'], reply=['blob'], + environ={'HTTP_HOST': '127.0.0.1'})['result']])) def test_CommandsGetAbsentBlobs(self): class TestDocument(db.Resource): - @db.indexed_property(slot=1, default='') - def prop(self, value): - return value - - @db.blob_property() + @db.stored_property(db.Blob) def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - guid = self.call('POST', path=['testdocument'], content={'prop': 'value'}) - self.assertEqual('value', self.call('GET', path=['testdocument', guid, 'prop'])) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob']) + guid = this.call(method='POST', path=['testdocument'], content={}) + self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob']) self.assertEqual( - {'blob': {'url': 'http://localhost/testdocument/%s/blob' % guid}}, - self.call('GET', path=['testdocument', guid], reply=['blob'], host='localhost')) + {'blob': {}}, + this.call(method='GET', path=['testdocument', guid], 
reply=['blob'], environ={'HTTP_HOST': 'localhost'})) def test_Command_ReplyForGET(self): @@ -373,44 +534,46 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={'prop': 'value'}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={'prop': 'value'}) self.assertEqual( ['guid', 'prop'], - self.call('GET', path=['testdocument', guid], reply=['guid', 'prop']).keys()) + this.call(method='GET', path=['testdocument', guid], reply=['guid', 'prop']).keys()) self.assertEqual( ['guid'], - self.call('GET', path=['testdocument'])['result'][0].keys()) + this.call(method='GET', path=['testdocument'])['result'][0].keys()) self.assertEqual( sorted(['guid', 'prop']), - sorted(self.call('GET', path=['testdocument'], reply=['prop', 'guid'])['result'][0].keys())) + sorted(this.call(method='GET', path=['testdocument'], reply=['prop', 'guid'])['result'][0].keys())) self.assertEqual( sorted(['prop']), - sorted(self.call('GET', path=['testdocument'], reply=['prop'])['result'][0].keys())) + sorted(this.call(method='GET', path=['testdocument'], reply=['prop'])['result'][0].keys())) def test_DecodeBeforeSetting(self): class TestDocument(db.Resource): - @db.indexed_property(slot=1, typecast=int) + @db.indexed_property(db.Numeric, slot=1) def prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - guid = self.call('POST', path=['testdocument'], content={'prop': '-1'}) - self.assertEqual(-1, self.call('GET', path=['testdocument', guid, 'prop'])) + guid = this.call(method='POST', path=['testdocument'], content={'prop': '-1'}) + self.assertEqual(-1, this.call(method='GET', path=['testdocument', guid, 'prop'])) def test_LocalizedSet(self): - toolkit._default_langs = ['en'] + i18n._default_langs = ['en'] class TestDocument(db.Resource): @@ -418,32 +581,23 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - directory = self.volume['testdocument'] - - guid = directory.create({'localized_prop': 'value_raw'}) - self.assertEqual({'en': 'value_raw'}, directory.get(guid)['localized_prop']) - self.assertEqual( - [guid], - [i.guid for i in directory.find(localized_prop='value_raw')[0]]) - - directory.update(guid, {'localized_prop': 'value_raw2'}) - self.assertEqual({'en': 'value_raw2'}, directory.get(guid)['localized_prop']) - self.assertEqual( - [guid], - [i.guid for i in directory.find(localized_prop='value_raw2')[0]]) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + directory = volume['testdocument'] + guid = this.call(method='POST', path=['testdocument'], content={'localized_prop': 'value_ru'}, + environ={'HTTP_ACCEPT_LANGUAGE': 'ru'}) - guid = self.call('POST', path=['testdocument'], accept_language=['ru'], content={'localized_prop': 'value_ru'}) self.assertEqual({'ru': 
'value_ru'}, directory.get(guid)['localized_prop']) self.assertEqual( [guid], [i.guid for i in directory.find(localized_prop='value_ru')[0]]) - self.call('PUT', path=['testdocument', guid], accept_language=['en'], content={'localized_prop': 'value_en'}) + this.call(method='PUT', path=['testdocument', guid], content={'localized_prop': 'value_en'}, + environ={'HTTP_ACCEPT_LANGUAGE': 'en'}) self.assertEqual({'ru': 'value_ru', 'en': 'value_en'}, directory.get(guid)['localized_prop']) self.assertEqual( [guid], @@ -460,14 +614,15 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - directory = self.volume['testdocument'] + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + directory = volume['testdocument'] - guid = self.call('POST', path=['testdocument'], content={ + guid = this.call(method='POST', path=['testdocument'], content={ 'localized_prop': { 'ru': 'value_ru', 'es': 'value_es', @@ -475,63 +630,78 @@ class RoutesTest(tests.Test): }, }) - toolkit._default_langs = ['en'] + i18n._default_langs = ['en'] self.assertEqual( {'localized_prop': 'value_en'}, - self.call('GET', path=['testdocument', guid], reply=['localized_prop'])) + this.call(method='GET', path=['testdocument', guid], reply=['localized_prop'])) self.assertEqual( {'localized_prop': 'value_ru'}, - self.call('GET', path=['testdocument', guid], accept_language=['ru'], reply=['localized_prop'])) + this.call(method='GET', path=['testdocument', guid], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'ru'})) self.assertEqual( 'value_ru', - self.call('GET', path=['testdocument', guid, 'localized_prop'], accept_language=['ru', 'es'])) + this.call(method='GET', path=['testdocument', guid, 'localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'ru,es'})) self.assertEqual( [{'localized_prop': 'value_ru'}], - self.call('GET', path=['testdocument'], accept_language=['foo', 'ru', 'es'], reply=['localized_prop'])['result']) + this.call(method='GET', path=['testdocument'], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'foo,ru,es'})['result']) self.assertEqual( {'localized_prop': 'value_ru'}, - self.call('GET', path=['testdocument', guid], accept_language=['ru-RU'], reply=['localized_prop'])) + this.call(method='GET', path=['testdocument', guid], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'ru-RU'})) self.assertEqual( 'value_ru', - self.call('GET', path=['testdocument', guid, 'localized_prop'], accept_language=['ru-RU', 'es'])) + this.call(method='GET', path=['testdocument', guid, 'localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'ru-RU,es'})) self.assertEqual( [{'localized_prop': 'value_ru'}], - self.call('GET', path=['testdocument'], accept_language=['foo', 'ru-RU', 'es'], reply=['localized_prop'])['result']) + this.call(method='GET', path=['testdocument'], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'ru-RU,es'})['result']) self.assertEqual( {'localized_prop': 'value_es'}, - self.call('GET', path=['testdocument', guid], accept_language=['es'], reply=['localized_prop'])) + this.call(method='GET', path=['testdocument', guid], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'es'})) self.assertEqual( 'value_es', - self.call('GET', path=['testdocument', guid, 'localized_prop'], 
accept_language=['es', 'ru'])) + this.call(method='GET', path=['testdocument', guid, 'localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'es,ru'})) self.assertEqual( [{'localized_prop': 'value_es'}], - self.call('GET', path=['testdocument'], accept_language=['foo', 'es', 'ru'], reply=['localized_prop'])['result']) + this.call(method='GET', path=['testdocument'], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'foo,es,ru'})['result']) self.assertEqual( {'localized_prop': 'value_en'}, - self.call('GET', path=['testdocument', guid], accept_language=['fr'], reply=['localized_prop'])) + this.call(method='GET', path=['testdocument', guid], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'fr'})) self.assertEqual( 'value_en', - self.call('GET', path=['testdocument', guid, 'localized_prop'], accept_language=['fr', 'za'])) + this.call(method='GET', path=['testdocument', guid, 'localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'fr,za'})) self.assertEqual( [{'localized_prop': 'value_en'}], - self.call('GET', path=['testdocument'], accept_language=['foo', 'fr', 'za'], reply=['localized_prop'])['result']) + this.call(method='GET', path=['testdocument'], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'foo,fr,za'})['result']) - toolkit._default_langs = ['foo'] + i18n._default_langs = ['foo'] fallback_lang = sorted(['ru', 'es', 'en'])[0] self.assertEqual( {'localized_prop': 'value_%s' % fallback_lang}, - self.call('GET', path=['testdocument', guid], accept_language=['fr'], reply=['localized_prop'])) + this.call(method='GET', path=['testdocument', guid], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'fr'})) self.assertEqual( 'value_%s' % fallback_lang, - self.call('GET', path=['testdocument', guid, 'localized_prop'], accept_language=['fr', 'za'])) + this.call(method='GET', path=['testdocument', guid, 'localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'fr,za'})) self.assertEqual( [{'localized_prop': 'value_%s' % fallback_lang}], - self.call('GET', path=['testdocument'], accept_language=['foo', 'fr', 'za'], reply=['localized_prop'])['result']) + this.call(method='GET', path=['testdocument'], reply=['localized_prop'], + environ={'HTTP_ACCEPT_LANGUAGE': 'foo,fr,za'})['result']) def test_OpenByModuleName(self): self.touch( @@ -543,9 +713,9 @@ class RoutesTest(tests.Test): ) sys.path.insert(0, '.') - volume = db.Volume('.', ['foo.bar'], lambda event: None) - assert exists('bar/index') + volume = db.Volume('.', ['foo.bar']) volume['bar'].find() + assert exists('bar/index') volume.close() def test_Command_GetBlobSetByUrl(self): @@ -556,21 +726,25 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.blob_property() + @db.stored_property(db.Blob) def blob(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) - self.call('PUT', path=['testdocument', guid, 'blob'], url='http://sugarlabs.org') + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={}) + this.call(method='PUT', path=['testdocument', guid, 'blob'], content={ + 'digest': 'digest', + 'url': 'http://sugarlabs.org', + }, content_type='application/json') self.assertEqual( 'http://sugarlabs.org', - 
self.call('GET', path=['testdocument', guid, 'blob'])['url']) + this.call(method='GET', path=['testdocument', guid, 'blob'])['url']) def test_on_create(self): @@ -580,24 +754,25 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) ts = int(time.time()) - guid = self.call('POST', path=['testdocument'], content={}) - assert self.volume['testdocument'].get(guid)['ctime'] in range(ts - 1, ts + 1) - assert self.volume['testdocument'].get(guid)['mtime'] in range(ts - 1, ts + 1) + guid = this.call(method='POST', path=['testdocument'], content={}) + assert volume['testdocument'].get(guid)['ctime'] in range(ts - 1, ts + 1) + assert volume['testdocument'].get(guid)['mtime'] in range(ts - 1, ts + 1) def test_on_create_Override(self): class Routes(db.Routes): - def on_create(self, request, props, event): + def on_create(self, request, props): props['prop'] = 'overriden' - db.Routes.on_create(self, request, props, event) + db.Routes.on_create(self, request, props) class TestDocument(db.Resource): @@ -605,17 +780,18 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(Routes(volume)) - guid = self.call('POST', ['testdocument'], content={'prop': 'foo'}, routes=Routes) - self.assertEqual('overriden', self.volume['testdocument'].get(guid)['prop']) + guid = this.call(method='POST', path=['testdocument'], content={'prop': 'foo'}, routes=Routes) + self.assertEqual('overriden', volume['testdocument'].get(guid)['prop']) - self.call('PUT', ['testdocument', guid], content={'prop': 'bar'}, routes=Routes) - self.assertEqual('bar', self.volume['testdocument'].get(guid)['prop']) + this.call(method='PUT', path=['testdocument', guid], content={'prop': 'bar'}, routes=Routes) + self.assertEqual('bar', volume['testdocument'].get(guid)['prop']) def test_on_update(self): @@ -625,26 +801,28 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) - prev_mtime = self.volume['testdocument'].get(guid)['mtime'] + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + guid = this.call(method='POST', path=['testdocument'], content={}) + prev_mtime = volume['testdocument'].get(guid)['mtime'] time.sleep(1) - self.call('PUT', path=['testdocument', guid], content={'prop': 'probe'}) - assert self.volume['testdocument'].get(guid)['mtime'] - prev_mtime >= 1 + this.call(method='PUT', path=['testdocument', guid], content={'prop': 'probe'}) + assert volume['testdocument'].get(guid)['mtime'] - prev_mtime >= 1 def test_on_update_Override(self): class Routes(db.Routes): - def on_update(self, request, props, event): + def 
on_update(self, request, props): props['prop'] = 'overriden' - db.Routes.on_update(self, request, props, event) + db.Routes.on_update(self, request, props) class TestDocument(db.Resource): @@ -652,17 +830,18 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(Routes(volume)) - guid = self.call('POST', ['testdocument'], content={'prop': 'foo'}, routes=Routes) - self.assertEqual('foo', self.volume['testdocument'].get(guid)['prop']) + guid = this.call(method='POST', path=['testdocument'], content={'prop': 'foo'}, routes=Routes) + self.assertEqual('foo', volume['testdocument'].get(guid)['prop']) - self.call('PUT', ['testdocument', guid], content={'prop': 'bar'}, routes=Routes) - self.assertEqual('overriden', self.volume['testdocument'].get(guid)['prop']) + this.call(method='PUT', path=['testdocument', guid], content={'prop': 'bar'}, routes=Routes) + self.assertEqual('overriden', volume['testdocument'].get(guid)['prop']) def __test_DoNotPassGuidsForCreate(self): @@ -672,13 +851,15 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.indexed_property(prefix='L', localized=True, default='') + @db.indexed_property(db.Localized, prefix='L', default={}) def localized_prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - self.assertRaises(http.Forbidden, self.call, 'POST', path=['testdocument'], content={'guid': 'foo'}) - guid = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + self.assertRaises(http.Forbidden, this.call, method='POST', path=['testdocument'], content={'guid': 'foo'}) + guid = this.call(method='POST', path=['testdocument'], content={}) assert guid def test_seqno(self): @@ -689,7 +870,8 @@ class RoutesTest(tests.Test): class Document2(db.Resource): pass - volume = db.Volume(tests.tmpdir, [Document1, Document2], lambda event: None) + volume = db.Volume(tests.tmpdir, [Document1, Document2]) + router = Router(db.Routes(volume)) assert not exists('seqno') self.assertEqual(0, volume.seqno.value) @@ -706,8 +888,8 @@ class RoutesTest(tests.Test): self.assertEqual(4, volume.seqno.value) assert not exists('seqno') volume.seqno.commit() - assert exists('seqno') - volume = db.Volume(tests.tmpdir, [Document1, Document2], lambda event: None) + assert exists('db.seqno') + volume = db.Volume(tests.tmpdir, [Document1, Document2]) self.assertEqual(4, volume.seqno.value) def test_Events(self): @@ -726,7 +908,7 @@ class RoutesTest(tests.Test): def prop(self, value): pass - @db.blob_property() + @db.stored_property(db.Blob) def blob(self, value): return value @@ -739,13 +921,15 @@ class RoutesTest(tests.Test): ) events = [] - volume = db.Volume(tests.tmpdir, [Document1, Document2], lambda event: events.append(event)) + this.broadcast = lambda x: events.append(x) + volume = db.Volume(tests.tmpdir, [Document1, Document2]) + volume['document1'] + volume['document2'] coroutine.sleep(.1) mtime = int(os.stat('document1/index/mtime').st_mtime) self.assertEqual([ {'event': 'commit', 'resource': 'document1', 'mtime': mtime}, - {'event': 'populate', 'resource': 'document1', 'mtime': mtime}, ], events) del events[:] @@ 
-794,43 +978,45 @@ class RoutesTest(tests.Test): def prop(self, value): pass - @db.blob_property(acl=ACL.READ) + @db.stored_property(db.Blob, acl=ACL.READ) def blob(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={}) - self.assertRaises(http.Forbidden, self.call, 'POST', path=['testdocument'], content={'prop': 'value'}) - self.assertRaises(http.Forbidden, self.call, 'PUT', path=['testdocument', guid], content={'prop': 'value'}) - self.assertRaises(http.Forbidden, self.call, 'PUT', path=['testdocument', guid], content={'blob': 'value'}) - self.assertRaises(http.Forbidden, self.call, 'PUT', path=['testdocument', guid, 'prop'], content='value') - self.assertRaises(http.Forbidden, self.call, 'PUT', path=['testdocument', guid, 'blob'], content='value') + self.assertRaises(http.Forbidden, this.call, method='POST', path=['testdocument'], content={'prop': 'value'}) + self.assertRaises(http.Forbidden, this.call, method='PUT', path=['testdocument', guid], content={'prop': 'value'}) + self.assertRaises(http.Forbidden, this.call, method='PUT', path=['testdocument', guid], content={'blob': 'value'}) + self.assertRaises(http.Forbidden, this.call, method='PUT', path=['testdocument', guid, 'prop'], content='value') + self.assertRaises(http.Forbidden, this.call, method='PUT', path=['testdocument', guid, 'blob'], content='value') def test_BlobsWritePermissions(self): class TestDocument(db.Resource): - @db.blob_property(acl=ACL.CREATE | ACL.WRITE) + @db.stored_property(db.Blob, acl=ACL.CREATE | ACL.WRITE) def blob1(self, value): return value - @db.blob_property(acl=ACL.CREATE) + @db.stored_property(db.Blob, acl=ACL.CREATE) def blob2(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - guid = self.call('POST', path=['testdocument'], content={}) - self.call('PUT', path=['testdocument', guid], content={'blob1': 'value1', 'blob2': 'value2'}) - self.call('PUT', path=['testdocument', guid], content={'blob1': 'value1'}) - self.assertRaises(http.Forbidden, self.call, 'PUT', path=['testdocument', guid], content={'blob2': 'value2_'}) + guid = this.call(method='POST', path=['testdocument'], content={}) + this.call(method='PUT', path=['testdocument', guid], content={'blob1': 'value1', 'blob2': 'value2'}) + this.call(method='PUT', path=['testdocument', guid], content={'blob1': 'value1'}) + self.assertRaises(http.Forbidden, this.call, method='PUT', path=['testdocument', guid], content={'blob2': 'value2_'}) - guid = self.call('POST', path=['testdocument'], content={}) - self.call('PUT', path=['testdocument', guid, 'blob1'], content='value1') - self.call('PUT', path=['testdocument', guid, 'blob2'], content='value2') - self.call('PUT', path=['testdocument', guid, 'blob1'], content='value1_') - self.assertRaises(http.Forbidden, self.call, 'PUT', path=['testdocument', guid, 'blob2'], content='value2_') + guid = this.call(method='POST', path=['testdocument'], content={}) + this.call(method='PUT', path=['testdocument', guid, 'blob1'], content='value1') + this.call(method='PUT', path=['testdocument', guid, 'blob2'], content='value2') + this.call(method='PUT', path=['testdocument', guid, 'blob1'], content='value1_') + 
self.assertRaises(http.Forbidden, this.call, method='PUT', path=['testdocument', guid, 'blob2'], content='value2_') def test_properties_OverrideGet(self): @@ -844,30 +1030,32 @@ class RoutesTest(tests.Test): def prop2(self, value): return -1 - @db.blob_property() + @db.stored_property(db.Blob) def blob(self, meta): meta['blob'] = 'new-blob' return meta - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + guid = this.call(method='POST', path=['testdocument'], content={}) self.touch(('new-blob', 'new-blob')) - self.call('PUT', path=['testdocument', guid, 'blob'], content='old-blob') + this.call(method='PUT', path=['testdocument', guid, 'blob'], content='old-blob') self.assertEqual( 'new-blob', - self.call('GET', path=['testdocument', guid, 'blob'])['blob']) + this.call(method='GET', path=['testdocument', guid, 'blob'])['blob']) self.assertEqual( '1', - self.call('GET', path=['testdocument', guid, 'prop1'])) + this.call(method='GET', path=['testdocument', guid, 'prop1'])) self.assertEqual( -1, - self.call('GET', path=['testdocument', guid, 'prop2'])) + this.call(method='GET', path=['testdocument', guid, 'prop2'])) self.assertEqual( {'prop1': '1', 'prop2': -1}, - self.call('GET', path=['testdocument', guid], reply=['prop1', 'prop2'])) + this.call(method='GET', path=['testdocument', guid], reply=['prop1', 'prop2'])) - def test_properties_OverrideSet(self): + def test_properties_OverrideSetter(self): class TestDocument(db.Resource): @@ -879,53 +1067,47 @@ class RoutesTest(tests.Test): def prop(self, value): return '_%s' % value - @db.blob_property() - def blob1(self, meta): - return meta + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={}) - @blob1.setter - def blob1(self, value): - return Blob({'url': file(value['blob']).read()}) + self.assertEqual('_1', this.call(method='GET', path=['testdocument', guid, 'prop'])) - @db.blob_property() - def blob2(self, meta): - return meta + this.call(method='PUT', path=['testdocument', guid, 'prop'], content='2') + self.assertEqual('_2', this.call(method='GET', path=['testdocument', guid, 'prop'])) - @blob2.setter - def blob2(self, value): - with toolkit.NamedTemporaryFile(delete=False) as f: - f.write(' %s ' % file(value['blob']).read()) - value['blob'] = f.name - return value + this.call(method='PUT', path=['testdocument', guid], content={'prop': 3}) + self.assertEqual('_3', this.call(method='GET', path=['testdocument', guid, 'prop'])) - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={}) + def test_properties_AccessToOldValuesInSetters(self): + + class TestDocument(db.Resource): - self.assertEqual('_1', self.call('GET', path=['testdocument', guid, 'prop'])) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob1']) + @db.stored_property(db.Numeric) + def prop(self, value): + return value - self.call('PUT', path=['testdocument', guid, 'prop'], content='2') - self.assertEqual('_2', self.call('GET', path=['testdocument', guid, 'prop'])) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob1']) + @prop.setter + def prop(self, value): + return value + (self['prop'] or 0) - self.call('PUT', path=['testdocument', guid], content={'prop': 3}) 
- self.assertEqual('_3', self.call('GET', path=['testdocument', guid, 'prop'])) - self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob1']) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - self.call('PUT', path=['testdocument', guid, 'blob1'], content='blob_url') - self.assertEqual('blob_url', self.call('GET', path=['testdocument', guid, 'blob1'])['url']) + guid = this.call(method='POST', path=['testdocument'], content={'prop': 1}) + self.assertEqual(1, this.call(method='GET', path=['testdocument', guid, 'prop'])) - guid = self.call('POST', path=['testdocument'], content={'blob2': 'foo'}) - self.assertEqual(' foo ', file(self.call('GET', path=['testdocument', guid, 'blob2'])['blob']).read()) + this.call(method='PUT', path=['testdocument', guid, 'prop'], content='2') + self.assertEqual(3, this.call(method='GET', path=['testdocument', guid, 'prop'])) - self.call('PUT', path=['testdocument', guid, 'blob2'], content='bar') - self.assertEqual(' bar ', file(self.call('GET', path=['testdocument', guid, 'blob2'])['blob']).read()) + this.call(method='PUT', path=['testdocument', guid], content={'prop': 3}) + self.assertEqual(6, this.call(method='GET', path=['testdocument', guid, 'prop'])) def test_properties_CallSettersAtTheEnd(self): class TestDocument(db.Resource): - @db.indexed_property(slot=1, typecast=int) + @db.indexed_property(db.Numeric, slot=1) def prop1(self, value): return value @@ -933,7 +1115,7 @@ class RoutesTest(tests.Test): def prop1(self, value): return self['prop3'] + value - @db.indexed_property(slot=2, typecast=int) + @db.indexed_property(db.Numeric, slot=2) def prop2(self, value): return value @@ -941,107 +1123,40 @@ class RoutesTest(tests.Test): def prop2(self, value): return self['prop3'] - value - @db.indexed_property(slot=3, typecast=int) + @db.indexed_property(db.Numeric, slot=3) def prop3(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={'prop1': 1, 'prop2': 2, 'prop3': 3}) - self.assertEqual(4, self.call('GET', path=['testdocument', guid, 'prop1'])) - self.assertEqual(1, self.call('GET', path=['testdocument', guid, 'prop2'])) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + + guid = this.call(method='POST', path=['testdocument'], content={'prop1': 1, 'prop2': 2, 'prop3': 3}) + self.assertEqual(4, this.call(method='GET', path=['testdocument', guid, 'prop1'])) + self.assertEqual(1, this.call(method='GET', path=['testdocument', guid, 'prop2'])) def test_properties_PopulateRequiredPropsInSetters(self): class TestDocument(db.Resource): - @db.indexed_property(slot=1, typecast=int) + @db.indexed_property(db.Numeric, slot=1) def prop1(self, value): return value @prop1.setter def prop1(self, value): - self['prop2'] = value + 1 + self.post('prop2', value + 1) return value - @db.indexed_property(slot=2, typecast=int) + @db.indexed_property(db.Numeric, slot=2) def prop2(self, value): return value - @db.blob_property() - def prop3(self, value): - return value - - @prop3.setter - def prop3(self, value): - self['prop1'] = -1 - self['prop2'] = -2 - return value - - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={'prop1': 1}) - self.assertEqual(1, self.call('GET', path=['testdocument', guid, 'prop1'])) - self.assertEqual(2, self.call('GET', path=['testdocument', guid, 'prop2'])) - - 
def test_properties_PopulateRequiredPropsInBlobSetter(self): - - class TestDocument(db.Resource): - - @db.blob_property() - def blob(self, value): - return value - - @blob.setter - def blob(self, value): - self['prop1'] = 1 - self['prop2'] = 2 - return value - - @db.indexed_property(slot=1, typecast=int) - def prop1(self, value): - return value - - @db.indexed_property(slot=2, typecast=int) - def prop2(self, value): - return value - - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={'blob': ''}) - self.assertEqual(1, self.call('GET', path=['testdocument', guid, 'prop1'])) - self.assertEqual(2, self.call('GET', path=['testdocument', guid, 'prop2'])) - - def __test_SubCall(self): - - class TestDocument(db.Resource): - - @db.blob_property(mime_type='application/json') - def blob(self, value): - return value - - @blob.setter - def blob(self, value): - blob = file(value['blob']).read() - if '!' not in blob: - meta = self.meta('blob') - if meta: - blob = file(meta['blob']).read() + blob - with toolkit.NamedTemporaryFile(delete=False) as f: - f.write(blob) - value['blob'] = f.name - coroutine.spawn(self.post, blob) - return value - - def post(self, value): - self.request.call('PUT', path=['testdocument', self.guid, 'blob'], content=value + '!') + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - - guid = self.call('POST', path=['testdocument'], content={'blob': '0'}) - coroutine.dispatch() - self.assertEqual('0!', file(self.call('GET', path=['testdocument', guid, 'blob'])['blob']).read()) - - self.call('PUT', path=['testdocument', guid, 'blob'], content='1') - coroutine.dispatch() - self.assertEqual('0!1!', file(self.call('GET', path=['testdocument', guid, 'blob'])['blob']).read()) + guid = this.call(method='POST', path=['testdocument'], content={'prop1': 1}) + self.assertEqual(1, this.call(method='GET', path=['testdocument', guid, 'prop1'])) + self.assertEqual(2, this.call(method='GET', path=['testdocument', guid, 'prop2'])) def test_Group(self): @@ -1051,15 +1166,16 @@ class RoutesTest(tests.Test): def prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - self.call('POST', path=['testdocument'], content={'prop': 1}) - self.call('POST', path=['testdocument'], content={'prop': 2}) - self.call('POST', path=['testdocument'], content={'prop': 1}) + this.call(method='POST', path=['testdocument'], content={'prop': 1}) + this.call(method='POST', path=['testdocument'], content={'prop': 2}) + this.call(method='POST', path=['testdocument'], content={'prop': 1}) self.assertEqual( sorted([{'prop': 1}, {'prop': 2}]), - sorted(self.call('GET', path=['testdocument'], reply='prop', group_by='prop')['result'])) + sorted(this.call(method='GET', path=['testdocument'], reply='prop', group_by='prop')['result'])) def test_CallSetterEvenIfThereIsNoCreatePermissions(self): @@ -1073,12 +1189,13 @@ class RoutesTest(tests.Test): def prop(self, value): return value + 1 - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) - self.assertRaises(http.Forbidden, self.call, 'POST', path=['testdocument'], content={'prop': 1}) + self.assertRaises(http.Forbidden, this.call, method='POST', 
path=['testdocument'], content={'prop': 1}) - guid = self.call('POST', path=['testdocument'], content={}) - self.assertEqual(1, self.call('GET', path=['testdocument', guid, 'prop'])) + guid = this.call(method='POST', path=['testdocument'], content={}) + self.assertEqual(1, this.call(method='GET', path=['testdocument', guid, 'prop'])) def test_ReturnDefualtsForMissedProps(self): @@ -1088,57 +1205,34 @@ class RoutesTest(tests.Test): def prop(self, value): return value - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', path=['testdocument'], content={'prop': 'set'}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content={'prop': 'set'}) self.assertEqual( [{'prop': 'set'}], - self.call('GET', path=['testdocument'], reply='prop')['result']) + this.call(method='GET', path=['testdocument'], reply='prop')['result']) self.assertEqual( {'prop': 'set'}, - self.call('GET', path=['testdocument', guid], reply='prop')) + this.call(method='GET', path=['testdocument', guid], reply='prop')) self.assertEqual( 'set', - self.call('GET', path=['testdocument', guid, 'prop'])) + this.call(method='GET', path=['testdocument', guid, 'prop'])) os.unlink('testdocument/%s/%s/prop' % (guid[:2], guid)) self.assertEqual( [{'prop': 'default'}], - self.call('GET', path=['testdocument'], reply='prop')['result']) + this.call(method='GET', path=['testdocument'], reply='prop')['result']) self.assertEqual( {'prop': 'default'}, - self.call('GET', path=['testdocument', guid], reply='prop')) + this.call(method='GET', path=['testdocument', guid], reply='prop')) self.assertEqual( 'default', - self.call('GET', path=['testdocument', guid, 'prop'])) - - def test_PopulateNonDefualtPropsInSetters(self): - - class TestDocument(db.Resource): - - @db.indexed_property(slot=1) - def prop1(self, value): - return value - - @db.indexed_property(slot=2, default='default') - def prop2(self, value): - return all - - @prop2.setter - def prop2(self, value): - if value != 'default': - self['prop1'] = value - return value - - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - - self.assertRaises(RuntimeError, self.call, 'POST', path=['testdocument'], content={}) - - guid = self.call('POST', path=['testdocument'], content={'prop2': 'value2'}) - self.assertEqual('value2', self.call('GET', path=['testdocument', guid, 'prop1'])) + this.call(method='GET', path=['testdocument', guid, 'prop'])) def test_prop_meta(self): + files.update('url', {'url': 'http://new', 'foo': 'bar', 'size': 100}) class TestDocument(db.Resource): @@ -1146,44 +1240,55 @@ class RoutesTest(tests.Test): def prop(self, value): return value - @db.blob_property() + @db.stored_property(db.Blob) def blob1(self, value): return value - @db.blob_property() + @db.stored_property(db.Blob) def blob2(self, value): return value @blob2.setter def blob2(self, value): - return {'url': 'http://new', 'foo': 'bar', 'blob_size': 100} + return 'url' - self.volume = db.Volume(tests.tmpdir, [TestDocument], lambda event: None) - guid = self.call('POST', ['testdocument'], content = {'prop': 'prop', 'blob1': 'blob', 'blob2': ''}) + volume = db.Volume(tests.tmpdir, [TestDocument]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['testdocument'], content = {'prop': 'prop', 'blob1': 'blob', 'blob2': ''}) - assert self.call('HEAD', ['testdocument', guid, 'prop']) is None - meta = 
self.volume['testdocument'].get(guid).meta('prop') + response = Response() + assert this.call(response=response, + method='HEAD', path=['testdocument', guid, 'prop']) is None + meta = volume['testdocument'].get(guid).meta('prop') meta.pop('value') - self.assertEqual(meta, self.response.meta) - self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), self.response.last_modified) - - assert self.call('HEAD', ['testdocument', guid, 'blob1'], host='localhost') is None - meta = self.volume['testdocument'].get(guid).meta('blob1') - meta.pop('blob') - self.assertEqual(meta, self.response.meta) - self.assertEqual(len('blob'), self.response.content_length) - self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), self.response.last_modified) - - assert self.call('HEAD', ['testdocument', guid, 'blob2']) is None - meta = self.volume['testdocument'].get(guid).meta('blob2') - self.assertEqual(meta, self.response.meta) - self.assertEqual(100, self.response.content_length) - self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), self.response.last_modified) - - assert self.call('GET', ['testdocument', guid, 'blob2']) is not None - meta = self.volume['testdocument'].get(guid).meta('blob2') - self.assertEqual(meta, self.response.meta) - self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), self.response.last_modified) + self.assertEqual(meta, response.meta) + self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), response.last_modified) + + response = Response() + assert this.call(response=response, + method='HEAD', path=['testdocument', guid, 'blob1'], environ={'HTTP_HOST': 'localhost'}) is None + meta = volume['testdocument'].get(guid).meta('blob1') + meta.pop('value') + self.assertEqual(meta, response.meta) + self.assertEqual(len('blob'), response.content_length) + self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), response.last_modified) + + response = Response() + assert this.call(response=response, + method='HEAD', path=['testdocument', guid, 'blob2']) is None + meta = volume['testdocument'].get(guid).meta('blob2') + meta.pop('value') + self.assertEqual(meta, response.meta) + self.assertEqual(100, response.content_length) + self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), response.last_modified) + + response = Response() + assert this.call(response=response, + method='GET', path=['testdocument', guid, 'blob2']) is not None + meta = volume['testdocument'].get(guid).meta('blob2') + meta.pop('value') + self.assertEqual(meta, response.meta) + self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), response.last_modified) def test_DefaultAuthor(self): @@ -1196,25 +1301,26 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - self.volume = db.Volume('db', [User, Document]) + volume = db.Volume(tests.tmpdir, [User, Document]) + router = Router(db.Routes(volume)) - guid = self.call('POST', ['document'], content={}, principal='user') + guid = this.call(method='POST', path=['document'], content={}, principal='user') self.assertEqual( [{'name': 'user', 'role': 2}], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual( {'user': {'role': 2, 'order': 0}}, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.volume['user'].create({'guid': 'user', 'pubkey': '', 'name': 'User'}) + volume['user'].create({'guid': 'user', 'pubkey': 
'', 'name': 'User'}) - guid = self.call('POST', ['document'], content={}, principal='user') + guid = this.call(method='POST', path=['document'], content={}, principal='user') self.assertEqual( [{'guid': 'user', 'name': 'User', 'role': 3}], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual( {'user': {'name': 'User', 'role': 3, 'order': 0}}, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) def test_FindByAuthor(self): @@ -1227,36 +1333,37 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - self.volume = db.Volume('db', [User, Document]) + volume = db.Volume(tests.tmpdir, [User, Document]) + router = Router(db.Routes(volume)) - self.volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'UserName1'}) - self.volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User Name2'}) - self.volume['user'].create({'guid': 'user3', 'pubkey': '', 'name': 'User Name 3'}) + volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'UserName1'}) + volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User Name2'}) + volume['user'].create({'guid': 'user3', 'pubkey': '', 'name': 'User Name 3'}) - guid1 = self.call('POST', ['document'], content={}, principal='user1') - guid2 = self.call('POST', ['document'], content={}, principal='user2') - guid3 = self.call('POST', ['document'], content={}, principal='user3') + guid1 = this.call(method='POST', path=['document'], content={}, principal='user1') + guid2 = this.call(method='POST', path=['document'], content={}, principal='user2') + guid3 = this.call(method='POST', path=['document'], content={}, principal='user3') self.assertEqual(sorted([ {'guid': guid1}, ]), - self.call('GET', ['document'], author='UserName1')['result']) + this.call(method='GET', path=['document'], author='UserName1')['result']) self.assertEqual(sorted([ {'guid': guid1}, ]), - sorted(self.call('GET', ['document'], query='author:UserName')['result'])) + sorted(this.call(method='GET', path=['document'], query='author:UserName')['result'])) self.assertEqual(sorted([ {'guid': guid1}, {'guid': guid2}, {'guid': guid3}, ]), - sorted(self.call('GET', ['document'], query='author:User')['result'])) + sorted(this.call(method='GET', path=['document'], query='author:User')['result'])) self.assertEqual(sorted([ {'guid': guid2}, {'guid': guid3}, ]), - sorted(self.call('GET', ['document'], query='author:Name')['result'])) + sorted(this.call(method='GET', path=['document'], query='author:Name')['result'])) def test_PreserveAuthorsOrder(self): @@ -1269,99 +1376,77 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - self.volume = db.Volume('db', [User, Document]) + volume = db.Volume(tests.tmpdir, [User, Document]) + router = Router(db.Routes(volume)) - self.volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) - self.volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User2'}) - self.volume['user'].create({'guid': 'user3', 'pubkey': '', 'name': 'User3'}) + volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) + volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User2'}) + volume['user'].create({'guid': 'user3', 'pubkey': '', 'name': 'User3'}) - guid = self.call('POST', ['document'], content={}, principal='user1') - self.call('PUT', ['document', guid], cmd='useradd', user='user2', role=0) - self.call('PUT', ['document', guid], cmd='useradd', user='user3', role=0) + guid = 
this.call(method='POST', path=['document'], content={}, principal='user1') + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user2', role=0) + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user3', role=0) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user2', 'name': 'User2', 'role': 1}, {'guid': 'user3', 'name': 'User3', 'role': 1}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user2': {'name': 'User2', 'role': 1, 'order': 1}, 'user3': {'name': 'User3', 'role': 1, 'order': 2}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='userdel', user='user2', principal='user1') - self.call('PUT', ['document', guid], cmd='useradd', user='user2', role=0) + this.call(method='PUT', path=['document', guid], cmd='userdel', user='user2', principal='user1') + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user2', role=0) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user3', 'name': 'User3', 'role': 1}, {'guid': 'user2', 'name': 'User2', 'role': 1}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user3': {'name': 'User3', 'role': 1, 'order': 2}, 'user2': {'name': 'User2', 'role': 1, 'order': 3}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='userdel', user='user2', principal='user1') - self.call('PUT', ['document', guid], cmd='useradd', user='user2', role=0) + this.call(method='PUT', path=['document', guid], cmd='userdel', user='user2', principal='user1') + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user2', role=0) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user3', 'name': 'User3', 'role': 1}, {'guid': 'user2', 'name': 'User2', 'role': 1}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user3': {'name': 'User3', 'role': 1, 'order': 2}, 'user2': {'name': 'User2', 'role': 1, 'order': 3}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='userdel', user='user3', principal='user1') - self.call('PUT', ['document', guid], cmd='useradd', user='user3', role=0) + this.call(method='PUT', path=['document', guid], cmd='userdel', user='user3', principal='user1') + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user3', role=0) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user2', 'name': 'User2', 'role': 1}, {'guid': 'user3', 'name': 'User3', 'role': 1}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user2': {'name': 'User2', 'role': 1, 'order': 3}, 'user3': {'name': 'User3', 'role': 1, 'order': 4}, }, - self.volume['document'].get(guid)['author']) - - def test_CopyAthors(self): - - class User(db.Resource): - - @db.indexed_property(slot=1) - def name(self, value): - return value - - 
class Document(db.Resource): - pass - - self.volume = db.Volume('db', [User, Document]) - self.volume['user'].create({'guid': 'user', 'pubkey': '', 'name': 'User'}) - - guid1 = self.call('POST', ['document'], content={}, principal='user') - self.assertEqual({'user': {'name': 'User', 'role': 3, 'order': 0}}, self.volume['document'].get(guid1)['author']) - author = self.call('GET', ['document', guid1, 'author']) - self.assertEqual([{'guid': 'user', 'role': 3, 'name': 'User'}], author) - - guid2 = self.volume['document'].create({'author': author}, setters=True) - author = self.call('GET', ['document', guid1, 'author']) - self.assertEqual({'user': {'name': 'User', 'role': 3, 'order': 0}}, self.volume['document'].get(guid2)['author']) + volume['document'].get(guid)['author']) def test_AddUser(self): @@ -1374,62 +1459,63 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - self.volume = db.Volume('db', [User, Document]) + volume = db.Volume(tests.tmpdir, [User, Document]) + router = Router(db.Routes(volume)) - self.volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) - self.volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User2'}) + volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) + volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User2'}) - guid = self.call('POST', ['document'], content={}, principal='user1') + guid = this.call(method='POST', path=['document'], content={}, principal='user1') self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='useradd', user='user2', role=2) + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user2', role=2) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user2', 'name': 'User2', 'role': 3}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user2': {'name': 'User2', 'role': 3, 'order': 1}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='useradd', user='User3', role=3) + this.call(method='PUT', path=['document', guid], cmd='useradd', user='User3', role=3) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user2', 'name': 'User2', 'role': 3}, {'name': 'User3', 'role': 2}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user2': {'name': 'User2', 'role': 3, 'order': 1}, 'User3': {'role': 2, 'order': 2}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='useradd', user='User4', role=4) + this.call(method='PUT', path=['document', guid], cmd='useradd', user='User4', role=4) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user2', 'name': 'User2', 'role': 3}, {'name': 'User3', 'role': 2}, {'name': 'User4', 'role': 0}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 
'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user2': {'name': 'User2', 'role': 3, 'order': 1}, 'User3': {'role': 2, 'order': 2}, 'User4': {'role': 0, 'order': 3}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) def test_UpdateAuthor(self): @@ -1442,46 +1528,47 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - self.volume = db.Volume('db', [User, Document]) + volume = db.Volume(tests.tmpdir, [User, Document]) + router = Router(db.Routes(volume)) - self.volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) - guid = self.call('POST', ['document'], content={}, principal='user1') + volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) + guid = this.call(method='POST', path=['document'], content={}, principal='user1') - self.call('PUT', ['document', guid], cmd='useradd', user='User2', role=0) + this.call(method='PUT', path=['document', guid], cmd='useradd', user='User2', role=0) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'name': 'User2', 'role': 0}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'User2': {'role': 0, 'order': 1}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='useradd', user='user1', role=0) + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user1', role=0) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 1}, {'name': 'User2', 'role': 0}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 1, 'order': 0}, 'User2': {'role': 0, 'order': 1}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='useradd', user='User2', role=2) + this.call(method='PUT', path=['document', guid], cmd='useradd', user='User2', role=2) self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 1}, {'name': 'User2', 'role': 2}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 1, 'order': 0}, 'User2': {'role': 2, 'order': 1}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) def test_DelUser(self): @@ -1494,150 +1581,73 @@ class RoutesTest(tests.Test): class Document(db.Resource): pass - self.volume = db.Volume('db', [User, Document]) + volume = db.Volume(tests.tmpdir, [User, Document]) + router = Router(db.Routes(volume)) - self.volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) - self.volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User2'}) - guid = self.call('POST', ['document'], content={}, principal='user1') - self.call('PUT', ['document', guid], cmd='useradd', user='user2') - self.call('PUT', ['document', guid], cmd='useradd', user='User3') + volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) + volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User2'}) + guid = this.call(method='POST', path=['document'], content={}, principal='user1') + this.call(method='PUT', path=['document', guid], cmd='useradd', user='user2') + this.call(method='PUT', path=['document', guid], 
cmd='useradd', user='User3') self.assertEqual([ {'guid': 'user1', 'name': 'User1', 'role': 3}, {'guid': 'user2', 'name': 'User2', 'role': 1}, {'name': 'User3', 'role': 0}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user1': {'name': 'User1', 'role': 3, 'order': 0}, 'user2': {'name': 'User2', 'role': 1, 'order': 1}, 'User3': {'role': 0, 'order': 2}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) # Do not remove yourself - self.assertRaises(RuntimeError, self.call, 'PUT', ['document', guid], cmd='userdel', user='user1', principal='user1') - self.assertRaises(RuntimeError, self.call, 'PUT', ['document', guid], cmd='userdel', user='user2', principal='user2') + self.assertRaises(RuntimeError, this.call, method='PUT', path=['document', guid], cmd='userdel', user='user1', principal='user1') + self.assertRaises(RuntimeError, this.call, method='PUT', path=['document', guid], cmd='userdel', user='user2', principal='user2') - self.call('PUT', ['document', guid], cmd='userdel', user='user1', principal='user2') + this.call(method='PUT', path=['document', guid], cmd='userdel', user='user1', principal='user2') self.assertEqual([ {'guid': 'user2', 'name': 'User2', 'role': 1}, {'name': 'User3', 'role': 0}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user2': {'name': 'User2', 'role': 1, 'order': 1}, 'User3': {'role': 0, 'order': 2}, }, - self.volume['document'].get(guid)['author']) + volume['document'].get(guid)['author']) - self.call('PUT', ['document', guid], cmd='userdel', user='User3', principal='user2') + this.call(method='PUT', path=['document', guid], cmd='userdel', user='User3', principal='user2') self.assertEqual([ {'guid': 'user2', 'name': 'User2', 'role': 1}, ], - self.call('GET', ['document', guid, 'author'])) + this.call(method='GET', path=['document', guid, 'author'])) self.assertEqual({ 'user2': {'name': 'User2', 'role': 1, 'order': 1}, }, - self.volume['document'].get(guid)['author']) - - def test_typecast_prop_value(self): - prop = Property('prop', typecast=int) - self.assertEqual(1, _typecast_prop_value(prop.typecast, 1)) - self.assertEqual(1, _typecast_prop_value(prop.typecast, 1.1)) - self.assertEqual(1, _typecast_prop_value(prop.typecast, '1')) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, '1.0') - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, '') - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, None) - - prop = Property('prop', typecast=float) - self.assertEqual(1.0, _typecast_prop_value(prop.typecast, 1)) - self.assertEqual(1.1, _typecast_prop_value(prop.typecast, 1.1)) - self.assertEqual(1.0, _typecast_prop_value(prop.typecast, '1')) - self.assertEqual(1.1, _typecast_prop_value(prop.typecast, '1.1')) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, '') - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, None) - - prop = Property('prop', typecast=bool) - self.assertEqual(False, _typecast_prop_value(prop.typecast, 0)) - self.assertEqual(True, _typecast_prop_value(prop.typecast, 1)) - self.assertEqual(True, _typecast_prop_value(prop.typecast, 1.1)) - self.assertEqual(True, _typecast_prop_value(prop.typecast, '1')) - self.assertEqual(True, _typecast_prop_value(prop.typecast, 'false')) - self.assertEqual(False, _typecast_prop_value(prop.typecast, '')) - 
self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, None) - - prop = Property('prop', typecast=[int]) - self.assertEqual((1,), _typecast_prop_value(prop.typecast, 1)) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, None) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, '') - self.assertEqual((), _typecast_prop_value(prop.typecast, [])) - self.assertEqual((123,), _typecast_prop_value(prop.typecast, '123')) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, 'a') - self.assertEqual((123, 4, 5), _typecast_prop_value(prop.typecast, ['123', 4, 5.6])) - - prop = Property('prop', typecast=[1, 2]) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, 0) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, None) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, '') - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, 'A') - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, '3') - self.assertEqual(1, _typecast_prop_value(prop.typecast, 1)) - self.assertEqual(2, _typecast_prop_value(prop.typecast, 2)) - self.assertEqual(1, _typecast_prop_value(prop.typecast, '1')) - - prop = Property('prop', typecast=[str]) - self.assertEqual(('',), _typecast_prop_value(prop.typecast, '')) - self.assertEqual(('',), _typecast_prop_value(prop.typecast, [''])) - self.assertEqual((), _typecast_prop_value(prop.typecast, [])) - - prop = Property('prop', typecast=[]) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, None) - self.assertEqual(('',), _typecast_prop_value(prop.typecast, '')) - self.assertEqual(('',), _typecast_prop_value(prop.typecast, [''])) - self.assertEqual((), _typecast_prop_value(prop.typecast, [])) - self.assertEqual(('0',), _typecast_prop_value(prop.typecast, 0)) - self.assertEqual(('',), _typecast_prop_value(prop.typecast, '')) - self.assertEqual(('foo',), _typecast_prop_value(prop.typecast, 'foo')) - - prop = Property('prop', typecast=[['A', 'B', 'C']]) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, '') - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, ['']) - self.assertEqual((), _typecast_prop_value(prop.typecast, [])) - self.assertEqual(('A', 'B', 'C'), _typecast_prop_value(prop.typecast, ['A', 'B', 'C'])) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, ['a']) - self.assertRaises(ValueError, _typecast_prop_value, prop.typecast, ['A', 'x']) - - prop = Property('prop', typecast=bool) - self.assertEqual(True, _typecast_prop_value(prop.typecast, True)) - self.assertEqual(False, _typecast_prop_value(prop.typecast, False)) - self.assertEqual(True, _typecast_prop_value(prop.typecast, 'False')) - self.assertEqual(True, _typecast_prop_value(prop.typecast, '0')) - - prop = Property('prop', typecast=[['A', 'B', 'C']]) - self.assertEqual(('A', 'B', 'C'), _typecast_prop_value(prop.typecast, ['A', 'B', 'C'])) - - prop = Property('prop', typecast=lambda x: x + 1) - self.assertEqual(1, _typecast_prop_value(prop.typecast, 0)) + volume['document'].get(guid)['author']) def test_DefaultOrder(self): class Document(db.Resource): pass - self.volume = db.Volume('db', [Document]) + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) - self.volume['document'].create({'guid': '3', 'ctime': 3}) - self.volume['document'].create({'guid': '2', 'ctime': 2}) - self.volume['document'].create({'guid': '1', 'ctime': 1}) + volume['document'].create({'guid': '3', 'ctime': 
3}) + volume['document'].create({'guid': '2', 'ctime': 2}) + volume['document'].create({'guid': '1', 'ctime': 1}) self.assertEqual([ {'guid': '1'}, {'guid': '2'}, {'guid': '3'}, ], - self.call('GET', ['document'])['result']) + this.call(method='GET', path=['document'])['result']) - def test_SetDefaultPropsOnNoneValues(self): + def test_DefaultsOnNonePostValues(self): class Document(db.Resource): @@ -1645,10 +1655,11 @@ class RoutesTest(tests.Test): def prop(self, value): return value - self.volume = db.Volume('db', [Document]) + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) - guid = self.call('POST', ['document'], content={'prop': None}) - self.assertEqual('default', self.volume['document'].get(guid).meta('prop')['value']) + guid = this.call(method='POST', path=['document'], content={'prop': None}) + self.assertEqual('default', this.call(method='GET', path=['document', guid, 'prop'])) def test_InsertAggprops(self): @@ -1658,117 +1669,316 @@ class RoutesTest(tests.Test): def prop1(self, value): return value - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.WRITE) - def prop2(self, value): - return value - - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.INSERT) + @db.stored_property(db.Aggregated, acl=ACL.INSERT) def prop3(self, value): return value events = [] - self.volume = db.Volume('db', [Document], lambda event: events.append(event)) - guid = self.call('POST', ['document'], content={}) + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) + this.broadcast = lambda x: events.append(x) + guid = this.call(method='POST', path=['document'], content={}) - self.assertRaises(http.NotFound, self.call, 'POST', ['document', 'foo', 'bar'], content={}) - self.assertRaises(http.NotFound, self.call, 'POST', ['document', guid, 'bar'], content={}) - self.assertRaises(http.BadRequest, self.call, 'POST', ['document', guid, 'prop1'], content={}) - self.assertRaises(http.Forbidden, self.call, 'POST', ['document', guid, 'prop2'], content={}) + self.assertRaises(http.NotFound, this.call, method='POST', path=['document', 'foo', 'bar'], content={}) + self.assertRaises(http.NotFound, this.call, method='POST', path=['document', guid, 'bar'], content={}) + self.assertRaises(http.BadRequest, this.call, method='POST', path=['document', guid, 'prop1'], content={}) del events[:] self.override(time, 'time', lambda: 0) self.override(toolkit, 'uuid', lambda: '0') - self.assertEqual('0', self.call('POST', ['document', guid, 'prop3'], content={})) + self.assertEqual('0', this.call(method='POST', path=['document', guid, 'prop3'], content=0)) self.assertEqual({ - '0': {'seqno': 2}, + '0': {'seqno': 2, 'value': 0}, }, - self.volume['document'].get(guid)['prop3']) + volume['document'].get(guid)['prop3']) self.assertEqual([ {'event': 'update', 'resource': 'document', 'guid': guid}, ], events) self.override(time, 'time', lambda: 1) - self.assertEqual('1', self.call('POST', ['document', guid, 'prop3'], content={'guid': '1', 'foo': 'bar'})) + self.override(toolkit, 'uuid', lambda: '1') + self.assertEqual('1', this.call(method='POST', path=['document', guid, 'prop3'], content={'foo': 'bar'})) self.assertEqual({ - '0': {'seqno': 2}, - '1': {'seqno': 3, 'foo': 'bar'}, + '0': {'seqno': 2, 'value': 0}, + '1': {'seqno': 3, 'value': {'foo': 'bar'}}, }, - self.volume['document'].get(guid)['prop3']) + volume['document'].get(guid)['prop3']) self.override(time, 'time', lambda: 2) 
self.override(toolkit, 'uuid', lambda: '2') - self.assertEqual('2', self.call('POST', ['document', guid, 'prop3'], content={'prop': 'more'})) + self.assertEqual('2', this.call(method='POST', path=['document', guid, 'prop3'], content=None)) self.assertEqual({ - '0': {'seqno': 2}, - '1': {'seqno': 3, 'foo': 'bar'}, - '2': {'seqno': 4, 'prop': 'more'}, + '0': {'seqno': 2, 'value': 0}, + '1': {'seqno': 3, 'value': {'foo': 'bar'}}, + '2': {'seqno': 4, 'value': None}, }, - self.volume['document'].get(guid)['prop3']) + volume['document'].get(guid)['prop3']) def test_RemoveAggprops(self): class Document(db.Resource): - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.INSERT) + @db.stored_property(db.Aggregated, acl=ACL.INSERT) def prop1(self, value): return value - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.INSERT | ACL.REMOVE) + @db.stored_property(db.Aggregated, acl=ACL.INSERT | ACL.REMOVE) def prop2(self, value): return value events = [] - self.volume = db.Volume('db', [Document], lambda event: events.append(event)) - guid = self.call('POST', ['document'], content={}) + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) + this.broadcast = lambda x: events.append(x) + guid = this.call(method='POST', path=['document'], content={}) - agg_guid = self.call('POST', ['document', guid, 'prop1'], content={'probe': 'value'}) + agg_guid = this.call(method='POST', path=['document', guid, 'prop1'], content=2) del events[:] self.assertEqual( - {agg_guid: {'seqno': 2, 'probe': 'value'}}, - self.volume['document'].get(guid)['prop1']) - self.assertRaises(http.Forbidden, self.call, 'DELETE', ['document', guid, 'prop1', agg_guid]) + {agg_guid: {'seqno': 2, 'value': 2}}, + volume['document'].get(guid)['prop1']) + self.assertRaises(http.Forbidden, this.call, method='DELETE', path=['document', guid, 'prop1', agg_guid]) self.assertEqual( - {agg_guid: {'seqno': 2, 'probe': 'value'}}, - self.volume['document'].get(guid)['prop1']) + {agg_guid: {'seqno': 2, 'value': 2}}, + volume['document'].get(guid)['prop1']) self.assertEqual([], events) - agg_guid = self.call('POST', ['document', guid, 'prop2'], content={'probe': 'value'}) + agg_guid = this.call(method='POST', path=['document', guid, 'prop2'], content=3) del events[:] self.assertEqual( - {agg_guid: {'seqno': 3, 'probe': 'value'}}, - self.volume['document'].get(guid)['prop2']) - self.call('DELETE', ['document', guid, 'prop2', agg_guid]) + {agg_guid: {'seqno': 3, 'value': 3}}, + volume['document'].get(guid)['prop2']) + this.call(method='DELETE', path=['document', guid, 'prop2', agg_guid]) self.assertEqual( {agg_guid: {'seqno': 4}}, - self.volume['document'].get(guid)['prop2']) + volume['document'].get(guid)['prop2']) + self.assertEqual([ + {'event': 'update', 'resource': 'document', 'guid': guid}, + ], + events) + + def test_FailOnAbsentAggprops(self): + + class Document(db.Resource): + + @db.stored_property(db.Aggregated, acl=ACL.INSERT | ACL.REMOVE | ACL.REPLACE) + def prop(self, value): + return value + + events = [] + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) + this.broadcast = lambda x: events.append(x) + guid = this.call(method='POST', path=['document'], content={}) + del events[:] + + self.assertRaises(http.NotFound, this.call, method='DELETE', path=['document', guid, 'prop', 'absent']) + self.assertEqual([], events) + + def test_UpdateAggprops(self): + + class Document(db.Resource): + + 
@db.stored_property(db.Aggregated) + def prop1(self, value): + return value + + @db.stored_property(db.Aggregated, acl=ACL.INSERT | ACL.REMOVE | ACL.REPLACE) + def prop2(self, value): + return value + + events = [] + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) + this.broadcast = lambda x: events.append(x) + guid = this.call(method='POST', path=['document'], content={}) + + agg_guid = this.call(method='POST', path=['document', guid, 'prop1'], content=1) + del events[:] + self.assertEqual( + {agg_guid: {'seqno': 2, 'value': 1}}, + volume['document'].get(guid)['prop1']) + self.assertRaises(http.Forbidden, this.call, method='PUT', path=['document', guid, 'prop1', agg_guid], content=2) + self.assertEqual( + {agg_guid: {'seqno': 2, 'value': 1}}, + volume['document'].get(guid)['prop1']) + self.assertEqual([], events) + + agg_guid = this.call(method='POST', path=['document', guid, 'prop2'], content=2) + del events[:] + self.assertEqual( + {agg_guid: {'seqno': 3, 'value': 2}}, + volume['document'].get(guid)['prop2']) + this.call(method='PUT', path=['document', guid, 'prop2', agg_guid], content=3) + self.assertEqual( + {agg_guid: {'seqno': 4, 'value': 3}}, + volume['document'].get(guid)['prop2']) + self.assertEqual([ + {'event': 'update', 'resource': 'document', 'guid': guid}, + ], + events) + + def test_PostAbsentAggpropsOnUpdate(self): + + class Document(db.Resource): + + @db.stored_property(db.Aggregated, acl=ACL.INSERT | ACL.REMOVE | ACL.REPLACE) + def prop(self, value): + return value + + events = [] + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) + this.broadcast = lambda x: events.append(x) + guid = this.call(method='POST', path=['document'], content={}) + del events[:] + + this.call(method='PUT', path=['document', guid, 'prop', 'absent'], content='probe') + self.assertEqual( + {'absent': {'seqno': 2, 'value': 'probe'}}, + volume['document'].get(guid)['prop']) self.assertEqual([ {'event': 'update', 'resource': 'document', 'guid': guid}, ], events) - def call(self, method=None, path=None, - accept_language=None, content=None, content_stream=None, cmd=None, - content_type=None, host=None, request=None, routes=db.Routes, principal=None, - **kwargs): - if request is None: - environ = { - 'REQUEST_METHOD': method, - 'PATH_INFO': '/'.join([''] + path), - 'HTTP_ACCEPT_LANGUAGE': ','.join(accept_language or []), - 'HTTP_HOST': host, - 'wsgi.input': content_stream, - } - if content_type: - environ['CONTENT_TYPE'] = content_type - if content_stream is not None: - environ['CONTENT_LENGTH'] = str(len(content_stream.getvalue())) - request = Request(environ, cmd=cmd, content=content) - request.update(kwargs) - request.principal = principal - router = Router(routes(self.volume)) - self.response = Response() - return router._call_route(request, self.response) + def test_OriginalAggprops(self): + + class Document(db.Resource): + + @db.stored_property(db.Aggregated, acl=ACL.INSERT | ACL.REMOVE) + def prop(self, value): + return value + + volume = db.Volume(tests.tmpdir, [User, Document]) + router = Router(db.Routes(volume)) + volume['user'].create({'guid': 'user1', 'pubkey': '', 'name': 'User1'}) + volume['user'].create({'guid': 'user2', 'pubkey': '', 'name': 'User2'}) + + guid = this.call(method='POST', path=['document'], content={}, principal=tests.UID) + assert ACL.ORIGINAL & volume['document'][guid]['author'][tests.UID]['role'] + + agg_guid1 = this.call(method='POST', path=['document', guid, 'prop'], content=1, principal=tests.UID) + 
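+        # Each aggregated item carries its own 'author' entry for the posting
+        # principal; as the assertions below suggest, the ORIGINAL role is set
+        # only when that principal is already an author of the parent document
+        # (tests.UID here), and not for a third party such as tests.UID2.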
assert tests.UID2 not in volume['document'][guid]['prop'][agg_guid1]['author'] + assert ACL.ORIGINAL & volume['document'][guid]['prop'][agg_guid1]['author'][tests.UID]['role'] + + agg_guid2 = this.call(method='POST', path=['document', guid, 'prop'], content=1, principal=tests.UID2) + assert tests.UID not in volume['document'][guid]['prop'][agg_guid2]['author'] + assert not (ACL.ORIGINAL & volume['document'][guid]['prop'][agg_guid2]['author'][tests.UID2]['role']) + + this.call(method='DELETE', path=['document', guid, 'prop', agg_guid2], principal=tests.UID2) + assert tests.UID not in volume['document'][guid]['prop'][agg_guid2]['author'] + assert not (ACL.ORIGINAL & volume['document'][guid]['prop'][agg_guid2]['author'][tests.UID2]['role']) + + def test_AggregatedBlobs(self): + + class Document(db.Resource): + + @db.stored_property(db.Aggregated, subtype=db.Blob()) + def blobs(self, value): + return value + + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) + guid = this.call(method='POST', path=['document'], content={}) + + agg1 = this.call(method='POST', path=['document', guid, 'blobs'], content='blob1') + self.assertEqual({ + agg1: {'seqno': 2, 'value': str(hash('blob1'))}, + }, + volume['document'].get(guid)['blobs']) + assert files.get(str(hash('blob1'))) + + agg2 = this.call(method='POST', path=['document', guid, 'blobs'], content='blob2') + self.assertEqual({ + agg1: {'seqno': 2, 'value': str(hash('blob1'))}, + agg2: {'seqno': 3, 'value': str(hash('blob2'))}, + }, + volume['document'].get(guid)['blobs']) + assert files.get(str(hash('blob2'))) + + this.call(method='DELETE', path=['document', guid, 'blobs', agg1]) + self.assertEqual({ + agg1: {'seqno': 4}, + agg2: {'seqno': 3, 'value': str(hash('blob2'))}, + }, + volume['document'].get(guid)['blobs']) + assert files.get(str(hash('blob1'))) is None + assert files.get(str(hash('blob2'))) + + this.call(method='DELETE', path=['document', guid, 'blobs', agg2]) + self.assertEqual({ + agg1: {'seqno': 4}, + agg2: {'seqno': 5}, + }, + volume['document'].get(guid)['blobs']) + assert files.get(str(hash('blob1'))) is None + assert files.get(str(hash('blob2'))) is None + + agg3 = this.call(method='POST', path=['document', guid, 'blobs'], content='blob3') + self.assertEqual({ + agg1: {'seqno': 4}, + agg2: {'seqno': 5}, + agg3: {'seqno': 6, 'value': str(hash('blob3'))}, + }, + volume['document'].get(guid)['blobs']) + assert files.get(str(hash('blob1'))) is None + assert files.get(str(hash('blob2'))) is None + assert files.get(str(hash('blob3'))) + + def test_AggregatedSearch(self): + + class Document(db.Resource): + + @db.stored_property(db.Aggregated, prefix='A', full_text=True) + def comments(self, value): + return value + + @db.stored_property(prefix='B', full_text=False, default='') + def prop(self, value): + return value + + volume = db.Volume(tests.tmpdir, [Document]) + router = Router(db.Routes(volume)) + + guid1 = this.call(method='POST', path=['document'], content={}) + this.call(method='POST', path=['document', guid1, 'comments'], content='a') + this.call(method='POST', path=['document', guid1, 'comments'], content='b') + this.call(method='PUT', path=['document', guid1, 'prop'], content='c') + + guid2 = this.call(method='POST', path=['document'], content={}) + this.call(method='POST', path=['document', guid2, 'comments'], content='c') + this.call(method='POST', path=['document', guid2, 'comments'], content='a') + this.call(method='PUT', path=['document', guid2, 'prop'], content='b') + + guid3 = 
this.call(method='POST', path=['document'], content={}) + this.call(method='POST', path=['document', guid3, 'comments'], content='a c') + this.call(method='POST', path=['document', guid3, 'comments'], content='b d') + this.call(method='PUT', path=['document', guid3, 'prop'], content='e') + + self.assertEqual( + sorted([guid1, guid2, guid3]), + sorted([i['guid'] for i in this.call(method='GET', path=['document'], query='a')['result']])) + self.assertEqual( + sorted([guid1, guid3]), + sorted([i['guid'] for i in this.call(method='GET', path=['document'], query='b')['result']])) + self.assertEqual( + sorted([guid2, guid3]), + sorted([i['guid'] for i in this.call(method='GET', path=['document'], query='c')['result']])) + self.assertEqual( + sorted([]), + sorted([i['guid'] for i in this.call(method='GET', path=['document'], query='absent')['result']])) + + self.assertEqual( + sorted([guid1, guid2, guid3]), + sorted([i['guid'] for i in this.call(method='GET', path=['document'], query='comments:a')['result']])) + self.assertEqual( + sorted([guid1, guid3]), + sorted([i['guid'] for i in this.call(method='GET', path=['document'], query='comments:b')['result']])) + self.assertEqual( + sorted([guid2, guid3]), + sorted([i['guid'] for i in this.call(method='GET', path=['document'], query='comments:c')['result']])) if __name__ == '__main__': diff --git a/tests/units/db/storage.py b/tests/units/db/storage.py index 6eb62e5..bb61f8a 100755 --- a/tests/units/db/storage.py +++ b/tests/units/db/storage.py @@ -11,8 +11,7 @@ from os.path import exists from __init__ import tests -from sugar_network.db.metadata import Metadata, StoredProperty -from sugar_network.db.metadata import BlobProperty +from sugar_network.db.metadata import Metadata, Property from sugar_network.db.storage import Storage from sugar_network.toolkit import BUFFER_SIZE @@ -30,7 +29,7 @@ class StorageTest(tests.Test): return Storage(tests.tmpdir, metadata) def test_Record_get(self): - storage = self.storage([StoredProperty('prop')]) + storage = self.storage([Property('prop')]) self.assertEqual(None, storage.get('guid').get('prop')) self.touch(('gu/guid/prop', json.dumps({ @@ -45,7 +44,7 @@ class StorageTest(tests.Test): storage.get('guid').get('prop')) def test_Record_set(self): - storage = self.storage([StoredProperty('prop')]) + storage = self.storage([Property('prop')]) storage.get('guid').set('prop', value='value', foo='bar') self.assertEqual({ @@ -56,7 +55,7 @@ class StorageTest(tests.Test): storage.get('guid').get('prop')) def test_delete(self): - storage = self.storage([StoredProperty('prop')]) + storage = self.storage([Property('prop')]) assert not exists('ab/absent') storage.delete('absent') @@ -69,8 +68,8 @@ class StorageTest(tests.Test): def test_Record_consistent(self): storage = self.storage([ - StoredProperty('guid'), - StoredProperty('prop'), + Property('guid'), + Property('prop'), ]) record = storage.get('guid') @@ -83,7 +82,7 @@ class StorageTest(tests.Test): self.assertEqual(True, record.consistent) def test_walk(self): - storage = self.storage([StoredProperty('guid')]) + storage = self.storage([Property('guid')]) storage.get('guid1').set('guid', value=1, mtime=1) storage.get('guid2').set('guid', value=2, mtime=2) @@ -107,8 +106,8 @@ class StorageTest(tests.Test): def test_walk_SkipGuidLess(self): storage = self.storage([ - StoredProperty('guid'), - StoredProperty('prop'), + Property('guid'), + Property('prop'), ]) record = storage.get('guid1') diff --git a/tests/units/model/__main__.py b/tests/units/model/__main__.py index 
2766988..3814380 100644 --- a/tests/units/model/__main__.py +++ b/tests/units/model/__main__.py @@ -2,9 +2,9 @@ from __init__ import tests +from model import * from post import * from context import * -from release import * from routes import * if __name__ == '__main__': diff --git a/tests/units/model/context.py b/tests/units/model/context.py index 3389f41..71357f2 100755 --- a/tests/units/model/context.py +++ b/tests/units/model/context.py @@ -1,82 +1,331 @@ #!/usr/bin/env python # sugar-lint: disable +from cStringIO import StringIO from os.path import exists from __init__ import tests from sugar_network import db -from sugar_network.node import obs +from sugar_network.db import files +from sugar_network.client import IPCConnection, Connection, keyfile from sugar_network.model.context import Context -from sugar_network.client import IPCConnection -from sugar_network.toolkit import coroutine, enforce +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit.router import Request +from sugar_network.toolkit import i18n, http, coroutine, enforce class ContextTest(tests.Test): def test_SetCommonLayerForPackages(self): - self.start_online_client() - ipc = IPCConnection() + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) - guid = ipc.post(['context'], { + guid = conn.post(['context'], { 'type': 'package', 'title': 'title', 'summary': 'summary', 'description': 'description', }) - self.assertEqual(['common'], ipc.get(['context', guid, 'layer'])) + self.assertEqual(['common'], conn.get(['context', guid, 'layer'])) - guid = ipc.post(['context'], { + guid = conn.post(['context'], { 'type': 'package', 'title': 'title', 'summary': 'summary', 'description': 'description', 'layer': 'foo', }) - self.assertEqual(['foo', 'common'], ipc.get(['context', guid, 'layer'])) + self.assertEqual(['foo', 'common'], conn.get(['context', guid, 'layer'])) - guid = ipc.post(['context'], { + guid = conn.post(['context'], { 'type': 'package', 'title': 'title', 'summary': 'summary', 'description': 'description', 'layer': ['common', 'bar'], }) - self.assertEqual(['common', 'bar'], ipc.get(['context', guid, 'layer'])) + self.assertEqual(['common', 'bar'], conn.get(['context', guid, 'layer'])) - def test_DefaultImages(self): - self.start_online_client() - ipc = IPCConnection() + def test_Releases(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) - guid = ipc.post(['context'], { - 'guid': 'guid', + context = conn.post(['context'], { 'type': 'activity', - 'title': 'title', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + + activity_info1 = '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = %s' % context, + 'exec = true', + 'icon = icon', + 'activity_version = 1', + 'license = Public Domain', + ]) + bundle1 = self.zips(('topdir/activity/activity.info', activity_info1)) + release1 = conn.upload(['context', context, 'releases'], StringIO(bundle1)) + assert release1 == str(hash(bundle1)) + self.assertEqual({ + release1: { + 'seqno': 5, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': { + 'license': ['Public Domain'], + 'announce': next(volume['post'].find(query='title:1')[0]).guid, + 'release': [[1], 0], + 'requires': [], + 'spec': {'*-*': {'bundle': str(hash(bundle1)), 'commands': {'activity': {'exec': 'true'}}, 'requires': {}}}, + 'stability': 'stable', + 'unpack_size': len(activity_info1), + 'version': '1', + }, + }, + }, conn.get(['context', 
context, 'releases'])) + assert files.get(str(hash(bundle1))) + + activity_info2 = '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = %s' % context, + 'exec = true', + 'icon = icon', + 'activity_version = 2', + 'license = Public Domain', + ]) + bundle2 = self.zips(('topdir/activity/activity.info', activity_info2)) + release2 = conn.upload(['context', context, 'releases'], StringIO(bundle2)) + assert release2 == str(hash(bundle2)) + self.assertEqual({ + release1: { + 'seqno': 5, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': { + 'license': ['Public Domain'], + 'announce': next(volume['post'].find(query='title:1')[0]).guid, + 'release': [[1], 0], + 'requires': [], + 'spec': {'*-*': {'bundle': str(hash(bundle1)), 'commands': {'activity': {'exec': 'true'}}, 'requires': {}}}, + 'stability': 'stable', + 'unpack_size': len(activity_info1), + 'version': '1', + }, + }, + release2: { + 'seqno': 7, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': { + 'license': ['Public Domain'], + 'announce': next(volume['post'].find(query='title:2')[0]).guid, + 'release': [[2], 0], + 'requires': [], + 'spec': {'*-*': {'bundle': str(hash(bundle2)), 'commands': {'activity': {'exec': 'true'}}, 'requires': {}}}, + 'stability': 'stable', + 'unpack_size': len(activity_info2), + 'version': '2', + }, + }, + }, conn.get(['context', context, 'releases'])) + assert files.get(str(hash(bundle1))) + assert files.get(str(hash(bundle2))) + + conn.delete(['context', context, 'releases', release1]) + self.assertEqual({ + release1: { + 'seqno': 8, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + }, + release2: { + 'seqno': 7, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': { + 'license': ['Public Domain'], + 'announce': next(volume['post'].find(query='title:2')[0]).guid, + 'release': [[2], 0], + 'requires': [], + 'spec': {'*-*': {'bundle': str(hash(bundle2)), 'commands': {'activity': {'exec': 'true'}}, 'requires': {}}}, + 'stability': 'stable', + 'unpack_size': len(activity_info2), + 'version': '2', + }, + }, + }, conn.get(['context', context, 'releases'])) + assert files.get(str(hash(bundle1))) is None + assert files.get(str(hash(bundle2))) + + conn.delete(['context', context, 'releases', release2]) + self.assertEqual({ + release1: { + 'seqno': 8, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + }, + release2: { + 'seqno': 9, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + }, + }, conn.get(['context', context, 'releases'])) + assert files.get(str(hash(bundle1))) is None + assert files.get(str(hash(bundle2))) is None + + def test_IncrementReleasesSeqnoOnNewReleases(self): + events = [] + volume = self.start_master() + this.broadcast = lambda x: events.append(x) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + context = conn.post(['context'], { + 'type': 'activity', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + self.assertEqual([ + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(0, volume.releases_seqno.value) + + conn.put(['context', context], { + 'summary': 'summary2', + }) + self.assertEqual([ + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(0, volume.releases_seqno.value) + + bundle = self.zips(('topdir/activity/activity.info', '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = %s' % context, + 'exec = true', + 'icon = icon', + 'activity_version = 1', 
+ 'license = Public Domain', + ]))) + release = conn.upload(['context', context, 'releases'], StringIO(bundle)) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(1, volume.releases_seqno.value) + + bundle = self.zips(('topdir/activity/activity.info', '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = %s' % context, + 'exec = true', + 'icon = icon', + 'activity_version = 2', + 'license = Public Domain', + ]))) + release = conn.upload(['context', context, 'releases'], StringIO(bundle)) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + {'event': 'release', 'seqno': 2}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(2, volume.releases_seqno.value) + + bundle = self.zips(('topdir/activity/activity.info', '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = %s' % context, + 'exec = true', + 'icon = icon', + 'activity_version = 2', + 'license = Public Domain', + ]))) + release = conn.upload(['context', context, 'releases'], StringIO(bundle)) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + {'event': 'release', 'seqno': 2}, + {'event': 'release', 'seqno': 3}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(3, volume.releases_seqno.value) + + conn.delete(['context', context, 'releases', release]) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + {'event': 'release', 'seqno': 2}, + {'event': 'release', 'seqno': 3}, + {'event': 'release', 'seqno': 4}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(4, volume.releases_seqno.value) + + def test_IncrementReleasesSeqnoOnDependenciesChange(self): + events = [] + volume = self.start_master() + this.broadcast = lambda x: events.append(x) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + context = conn.post(['context'], { + 'type': 'activity', + 'title': 'Activity', 'summary': 'summary', 'description': 'description', }) - assert exists('master/context/gu/guid/artifact_icon.blob') - assert exists('master/context/gu/guid/icon.blob') - assert exists('master/context/gu/guid/logo.blob') - - def test_RatingSort(self): - directory = db.Volume('db', [Context])['context'] - - directory.create({'guid': '1', 'type': 'activity', 'title': '', 'summary': '', 'description': '', 'rating': [0, 0]}) - directory.create({'guid': '2', 'type': 'activity', 'title': '', 'summary': '', 'description': '', 'rating': [1, 2]}) - directory.create({'guid': '3', 'type': 'activity', 'title': '', 'summary': '', 'description': '', 'rating': [1, 4]}) - directory.create({'guid': '4', 'type': 'activity', 'title': '', 'summary': '', 'description': '', 'rating': [10, 10]}) - directory.create({'guid': '5', 'type': 'activity', 'title': '', 'summary': '', 'description': '', 'rating': [30, 90]}) - - self.assertEqual( - ['1', '2', '3', '4', '5'], - [i.guid for i in directory.find()[0]]) - self.assertEqual( - ['1', '4', '2', '5', '3'], - [i.guid for i in directory.find(order_by='rating')[0]]) - self.assertEqual( - ['3', '5', '2', '4', '1'], - [i.guid for i in directory.find(order_by='-rating')[0]]) + self.assertEqual([ + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(0, volume.releases_seqno.value) + + conn.put(['context', context], { + 'dependencies': 'dep', + }) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(1, volume.releases_seqno.value) + + def 
test_IncrementReleasesSeqnoOnDeletes(self): + events = [] + volume = self.start_master() + this.broadcast = lambda x: events.append(x) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + context = conn.post(['context'], { + 'type': 'activity', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + self.assertEqual([ + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(0, volume.releases_seqno.value) + + conn.put(['context', context], { + 'layer': ['deleted'], + }) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(1, volume.releases_seqno.value) + + conn.put(['context', context], { + 'layer': [], + }) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + {'event': 'release', 'seqno': 2}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(2, volume.releases_seqno.value) + + def test_RestoreReleasesSeqno(self): + events = [] + volume = self.start_master() + this.broadcast = lambda x: events.append(x) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + context = conn.post(['context'], { + 'type': 'activity', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + 'dependencies': 'dep', + }) + self.assertEqual(1, volume.releases_seqno.value) + + volume.close() + volume = db.Volume('master', []) + self.assertEqual(1, volume.releases_seqno.value) if __name__ == '__main__': diff --git a/tests/units/model/model.py b/tests/units/model/model.py new file mode 100755 index 0000000..f8b3866 --- /dev/null +++ b/tests/units/model/model.py @@ -0,0 +1,519 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# sugar-lint: disable + +import base64 + +from __init__ import tests + +from sugar_network import db +from sugar_network.db import files +from sugar_network.model import load_bundle +from sugar_network.model.post import Post +from sugar_network.client import IPCConnection, Connection, keyfile +from sugar_network.toolkit.router import Request +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit import i18n, http, coroutine, enforce + + +class ModelTest(tests.Test): + + def test_RatingSort(self): + directory = db.Volume('db', [Post])['post'] + + directory.create({'guid': '1', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'rating': [0, 0]}) + directory.create({'guid': '2', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'rating': [1, 2]}) + directory.create({'guid': '3', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'rating': [1, 4]}) + directory.create({'guid': '4', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'rating': [10, 10]}) + directory.create({'guid': '5', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'rating': [30, 90]}) + + self.assertEqual( + ['1', '2', '3', '4', '5'], + [i.guid for i in directory.find()[0]]) + self.assertEqual( + ['1', '4', '2', '5', '3'], + [i.guid for i in directory.find(order_by='rating')[0]]) + self.assertEqual( + ['3', '5', '2', '4', '1'], + [i.guid for i in directory.find(order_by='-rating')[0]]) + + def test_load_bundle_Activity(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'bundle_id', + 'type': 'activity', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + activity_info = '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = bundle_id', + 'exec = true', + 'icon 
= icon', + 'activity_version = 1', + 'license = Public Domain', + 'stability = developer', + 'requires = sugar>=0.88; dep' + ]) + changelog = "LOG" + bundle = self.zips( + ('topdir/activity/activity.info', activity_info), + ('topdir/CHANGELOG', changelog), + ) + blob = files.post(bundle) + + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID) + context, release = load_bundle(blob, 'bundle_id') + + self.assertEqual({ + 'mime_type': 'application/vnd.olpc-sugar', + 'name': 'Activity-1', + }, files.get(blob.digest)) + self.assertEqual('bundle_id', context) + self.assertEqual('1', release['version']) + self.assertEqual('developer', release['stability']) + self.assertEqual(['Public Domain'], release['license']) + self.assertEqual('developer', release['stability']) + self.assertEqual(sorted(['dep', 'sugar-0.88']), sorted(release['requires'])) + self.assertEqual({ + '*-*': { + 'bundle': blob.digest, + 'commands': {'activity': {'exec': 'true'}}, + 'requires': {'dep': {}, 'sugar': {'restrictions': [('0.88', None)]}}, + }, + }, + release['spec']) + self.assertEqual(len(activity_info) + len(changelog), release['unpack_size']) + + post = volume['post'][release['announce']] + assert tests.UID in post['author'] + self.assertEqual('notification', post['type']) + self.assertEqual({ + 'en': 'Activity 1 release', + 'es': 'Activity 1 release', + 'fr': 'Activity 1 release', + }, post['title']) + self.assertEqual({ + 'en-us': 'LOG', + }, post['message']) + + def test_load_bundle_NonActivity(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'bundle_id', + 'type': 'book', + 'title': 'NonActivity', + 'summary': 'summary', + 'description': 'description', + }) + bundle = 'non-activity' + blob = files.post(bundle) + + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID, + content_type = 'content/type', version='2', license='GPL') + context, release = load_bundle(blob, 'bundle_id') + + self.assertEqual({ + 'mime_type': 'content/type', + 'name': 'NonActivity-2', + }, files.get(blob.digest)) + self.assertEqual('bundle_id', context) + self.assertEqual('2', release['version']) + self.assertEqual(['GPL'], release['license']) + + post = volume['post'][release['announce']] + assert tests.UID in post['author'] + self.assertEqual('notification', post['type']) + self.assertEqual({ + 'en': 'NonActivity 2 release', + 'es': 'NonActivity 2 release', + 'fr': 'NonActivity 2 release', + }, post['title']) + self.assertEqual({ + 'en-us': '', + }, post['message']) + + def test_load_bundle_ReuseActivityLicense(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'bundle_id', + 'type': 'activity', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + + activity_info_wo_license = '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = bundle_id', + 'exec = true', + 'icon = icon', + 'activity_version = 1', + ]) + bundle = self.zips(('topdir/activity/activity.info', activity_info_wo_license)) + blob_wo_license = files.post(bundle) + self.assertRaises(http.BadRequest, load_bundle, blob_wo_license, 'bundle_id') + + volume['context'].update('bundle_id', {'releases': { + 'new': {'release': 2, 'license': ['New']}, + }}) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID) + context, release = load_bundle(blob_wo_license, 'bundle_id') + 
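+        # When activity.info carries no license, load_bundle in this test
+        # reuses the license attached to the context's most recent release
+        # (the highest 'release' number), hence ['New'] here and ['Newest']
+        # once release 3 is registered below; with no prior release at all it
+        # raises http.BadRequest, as asserted above.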
self.assertEqual(['New'], release['license']) + + volume['context'].update('bundle_id', {'releases': { + 'new': {'release': 2, 'license': ['New']}, + 'old': {'release': 1, 'license': ['Old']}, + }}) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID) + context, release = load_bundle(blob_wo_license, 'bundle_id') + self.assertEqual(['New'], release['license']) + + volume['context'].update('bundle_id', {'releases': { + 'new': {'release': 2, 'license': ['New']}, + 'old': {'release': 1, 'license': ['Old']}, + 'newest': {'release': 3, 'license': ['Newest']}, + }}) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID) + context, release = load_bundle(blob_wo_license, 'bundle_id') + self.assertEqual(['Newest'], release['license']) + + def test_load_bundle_ReuseNonActivityLicense(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'bundle_id', + 'type': 'book', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + + blob = files.post('non-activity') + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID, version='1') + self.assertRaises(http.BadRequest, load_bundle, blob, 'bundle_id') + + volume['context'].update('bundle_id', {'releases': { + 'new': {'release': 2, 'license': ['New']}, + }}) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID, version='1') + context, release = load_bundle(blob, 'bundle_id') + self.assertEqual(['New'], release['license']) + + volume['context'].update('bundle_id', {'releases': { + 'new': {'release': 2, 'license': ['New']}, + 'old': {'release': 1, 'license': ['Old']}, + }}) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID, version='1') + context, release = load_bundle(blob, 'bundle_id') + self.assertEqual(['New'], release['license']) + + volume['context'].update('bundle_id', {'releases': { + 'new': {'release': 2, 'license': ['New']}, + 'old': {'release': 1, 'license': ['Old']}, + 'newest': {'release': 3, 'license': ['Newest']}, + }}) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID, version='1') + context, release = load_bundle(blob, 'bundle_id') + self.assertEqual(['Newest'], release['license']) + + def test_load_bundle_WrontContextType(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'bundle_id', + 'type': 'group', + 'title': 'NonActivity', + 'summary': 'summary', + 'description': 'description', + }) + + blob = files.post('non-activity') + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID, version='2', license='GPL') + self.assertRaises(http.BadRequest, load_bundle, blob, 'bundle_id') + + activity_info = '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = bundle_id', + 'exec = true', + 'icon = icon', + 'activity_version = 1', + 'license = Public Domain', + 'stability = developer', + 'requires = sugar>=0.88; dep' + ]) + changelog = "LOG" + bundle = self.zips( + ('topdir/activity/activity.info', activity_info), + ('topdir/CHANGELOG', changelog), + ) + blob = files.post(bundle) + self.assertRaises(http.BadRequest, load_bundle, blob, 'bundle_id') + + def test_load_bundle_MissedContext(self): + volume = self.start_master() + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': 
tests.PUBKEY}) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + bundle = self.zips(('topdir/activity/activity.info', '\n'.join([ + '[Activity]', + 'name = Activity', + 'bundle_id = bundle_id', + 'exec = true', + 'icon = icon', + 'activity_version = 1', + 'license = Public Domain', + 'stability = developer', + 'requires = sugar>=0.88; dep' + ]))) + blob = files.post(bundle) + + this.request = Request(principal=tests.UID) + self.assertRaises(http.NotFound, load_bundle, blob, initial=False) + + def test_load_bundle_CreateContext(self): + volume = self.start_master() + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + bundle = self.zips( + ('ImageViewer.activity/activity/activity.info', '\n'.join([ + '[Activity]', + 'bundle_id = org.laptop.ImageViewerActivity', + 'name = Image Viewer', + 'summary = The Image Viewer activity is a simple and fast image viewer tool', + 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.', + 'homepage = http://wiki.sugarlabs.org/go/Activities/Image_Viewer', + 'activity_version = 1', + 'license = GPLv2+', + 'icon = activity-imageviewer', + 'exec = true', + 'mime_types = image/bmp;image/gif', + ])), + ('ImageViewer.activity/activity/activity-imageviewer.svg', ''), + ) + blob = files.post(bundle) + + this.request = Request(principal=tests.UID) + context, release = load_bundle(blob, initial=True) + self.assertEqual('org.laptop.ImageViewerActivity', context) + + context = volume['context'].get('org.laptop.ImageViewerActivity') + self.assertEqual({'en': 'Image Viewer'}, context['title']) + self.assertEqual({'en': 'The Image Viewer activity is a simple and fast image viewer tool'}, context['summary']) + self.assertEqual({'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.'}, context['description']) + self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage']) + self.assertEqual(['image/bmp', 'image/gif'], context['mime_types']) + assert context['ctime'] > 0 + assert context['mtime'] > 0 + self.assertEqual({tests.UID: {'role': 3, 'name': 'user', 'order': 0}}, context['author']) + + post = volume['post'][release['announce']] + assert tests.UID in post['author'] + self.assertEqual('notification', post['type']) + self.assertEqual({ + 'en': 'Image Viewer 1 release', + 'es': 'Image Viewer 1 release', + 'fr': 'Image Viewer 1 release', + }, post['title']) + + def test_load_bundle_UpdateContext(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'org.laptop.ImageViewerActivity', + 'type': 'activity', + 'title': {'en': ''}, + 'summary': {'en': ''}, + 'description': {'en': ''}, + }) + svg = '\n'.join([ + '', + '', + ' ', + ']>', + '', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + ' ', + '', + ]) + bundle = self.zips( + ('ImageViewer.activity/activity/activity.info', '\n'.join([ + '[Activity]', + 'bundle_id = org.laptop.ImageViewerActivity', + 'name = Image Viewer', + 'summary = The Image Viewer activity is a simple and fast image viewer tool', + 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.', + 'homepage = http://wiki.sugarlabs.org/go/Activities/Image_Viewer', + 'activity_version = 22', + 'license = GPLv2+', + 'icon = activity-imageviewer', + 'exec = true', + 'mime_types = image/bmp;image/gif', + ])), + 
('ImageViewer.activity/locale/ru/LC_MESSAGES/org.laptop.ImageViewerActivity.mo', + base64.b64decode('3hIElQAAAAAMAAAAHAAAAHwAAAARAAAA3AAAAAAAAAAgAQAADwAAACEBAAAOAAAAMQEAAA0AAABAAQAACgAAAE4BAAAMAAAAWQEAAA0AAABmAQAAJwAAAHQBAAAUAAAAnAEAABAAAACxAQAABwAAAMIBAAAIAAAAygEAANEBAADTAQAAIQAAAKUDAAATAAAAxwMAABwAAADbAwAAFwAAAPgDAAAhAAAAEAQAAB0AAAAyBAAAQAAAAFAEAAA9AAAAkQQAADUAAADPBAAAFAAAAAUFAAAQAAAAGgUAAAEAAAACAAAABwAAAAAAAAADAAAAAAAAAAwAAAAJAAAAAAAAAAoAAAAEAAAAAAAAAAAAAAALAAAABgAAAAgAAAAFAAAAAENob29zZSBkb2N1bWVudABEb3dubG9hZGluZy4uLgBGaXQgdG8gd2luZG93AEZ1bGxzY3JlZW4ASW1hZ2UgVmlld2VyAE9yaWdpbmFsIHNpemUAUmV0cmlldmluZyBzaGFyZWQgaW1hZ2UsIHBsZWFzZSB3YWl0Li4uAFJvdGF0ZSBhbnRpY2xvY2t3aXNlAFJvdGF0ZSBjbG9ja3dpc2UAWm9vbSBpbgBab29tIG91dABQcm9qZWN0LUlkLVZlcnNpb246IFBBQ0tBR0UgVkVSU0lPTgpSZXBvcnQtTXNnaWQtQnVncy1UbzogClBPVC1DcmVhdGlvbi1EYXRlOiAyMDEyLTA5LTI3IDE0OjU3LTA0MDAKUE8tUmV2aXNpb24tRGF0ZTogMjAxMC0wOS0yMiAxMzo1MCswMjAwCkxhc3QtVHJhbnNsYXRvcjoga3JvbTlyYSA8a3JvbTlyYUBnbWFpbC5jb20+Ckxhbmd1YWdlLVRlYW06IExBTkdVQUdFIDxMTEBsaS5vcmc+Ckxhbmd1YWdlOiAKTUlNRS1WZXJzaW9uOiAxLjAKQ29udGVudC1UeXBlOiB0ZXh0L3BsYWluOyBjaGFyc2V0PVVURi04CkNvbnRlbnQtVHJhbnNmZXItRW5jb2Rpbmc6IDhiaXQKUGx1cmFsLUZvcm1zOiBucGx1cmFscz0zOyBwbHVyYWw9KG4lMTA9PTEgJiYgbiUxMDAhPTExID8gMCA6IG4lMTA+PTIgJiYgbiUxMDw9NCAmJiAobiUxMDA8MTAgfHwgbiUxMDA+PTIwKSA/IDEgOiAyKTsKWC1HZW5lcmF0b3I6IFBvb3RsZSAyLjAuMwoA0JLRi9Cx0LXRgNC40YLQtSDQtNC+0LrRg9C80LXQvdGCANCX0LDQs9GA0YPQt9C60LAuLi4A0KPQvNC10YHRgtC40YLRjCDQsiDQvtC60L3QtQDQn9C+0LvQvdGL0Lkg0Y3QutGA0LDQvQDQn9GA0L7RgdC80L7RgtGAINC60LDRgNGC0LjQvdC+0LoA0JjRgdGC0LjQvdC90YvQuSDRgNCw0LfQvNC10YAA0J/QvtC70YPRh9C10L3QuNC1INC40LfQvtCx0YDQsNC20LXQvdC40LksINC/0L7QtNC+0LbQtNC40YLQtS4uLgDQn9C+0LLQtdGA0L3Rg9GC0Ywg0L/RgNC+0YLQuNCyINGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC4ANCf0L7QstC10YDQvdGD0YLRjCDQv9C+INGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC1ANCf0YDQuNCx0LvQuNC30LjRgtGMANCe0YLQtNCw0LvQuNGC0YwA')), + ('ImageViewer.activity/activity/activity-imageviewer.svg', svg), + ) + + blob = files.post(bundle) + this.request = Request(method='POST', path=['context', 'org.laptop.ImageViewerActivity'], principal=tests.UID) + context, release = load_bundle(blob, initial=True) + + context = volume['context'].get('org.laptop.ImageViewerActivity') + self.assertEqual({ + 'en': 'Image Viewer', + 'ru': u'Просмотр картинок', + }, + context['title']) + self.assertEqual({ + 'en': 'The Image Viewer activity is a simple and fast image viewer tool', + }, + context['summary']) + self.assertEqual({ + 'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.', + }, + context['description']) + self.assertEqual(svg, file(files.get(context['artifact_icon']).path).read()) + assert context['icon'] != 'missing.png' + assert context['logo'] != 'missing-logo.png' + self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage']) + self.assertEqual(['image/bmp', 'image/gif'], context['mime_types']) + + def test_load_bundle_3rdPartyRelease(self): + i18n._default_langs = ['en'] + volume = self.start_master() + volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2}) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'bundle_id', + 'type': 'activity', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + + bundle = self.zips(('topdir/activity/activity.info', '\n'.join([ + '[Activity]', + 'name = Activity2', + 'bundle_id = bundle_id', + 'exec = true', + 'icon = icon', + 'activity_version = 1', + 'license = Public Domain', + 
'stability = developer', + ]))) + blob = files.post(bundle) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID2) + context, release = load_bundle(blob, 'bundle_id') + + assert tests.UID in volume['context']['bundle_id']['author'] + assert tests.UID2 not in volume['context']['bundle_id']['author'] + self.assertEqual({'en': 'Activity'}, volume['context']['bundle_id']['title']) + + post = volume['post'][release['announce']] + assert tests.UID not in post['author'] + assert tests.UID2 in post['author'] + self.assertEqual('notification', post['type']) + self.assertEqual({ + 'en': 'Activity 1 third-party release', + 'es': 'Activity 1 third-party release', + 'fr': 'Activity 1 third-party release', + }, post['title']) + + files.delete(blob.digest) + blob = files.post(bundle) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID) + context, release = load_bundle(blob, 'bundle_id') + + assert tests.UID in volume['context']['bundle_id']['author'] + assert tests.UID2 not in volume['context']['bundle_id']['author'] + self.assertEqual({'en': 'Activity2'}, volume['context']['bundle_id']['title']) + + post = volume['post'][release['announce']] + assert tests.UID in post['author'] + assert tests.UID2 not in post['author'] + self.assertEqual('notification', post['type']) + self.assertEqual({ + 'en': 'Activity2 1 release', + 'es': 'Activity2 1 release', + 'fr': 'Activity2 1 release', + }, post['title']) + + def test_load_bundle_PopulateRequires(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + conn.post(['context'], { + 'guid': 'bundle_id', + 'type': 'activity', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + bundle = self.zips( + ('ImageViewer.activity/activity/activity.info', '\n'.join([ + '[Activity]', + 'bundle_id = bundle_id', + 'name = Image Viewer', + 'activity_version = 22', + 'license = GPLv2+', + 'icon = activity-imageviewer', + 'exec = true', + 'requires = dep1, dep2<10, dep3<=20, dep4>30, dep5>=40, dep6>5<7, dep7>=1<=3', + ])), + ('ImageViewer.activity/activity/activity-imageviewer.svg', ''), + ) + blob = files.post(bundle) + this.request = Request(method='POST', path=['context', 'bundle_id'], principal=tests.UID) + context, release = load_bundle(blob, 'bundle_id') + + self.assertEqual( + sorted([ + 'dep1', 'dep2', 'dep3', 'dep4-31', 'dep5-40', + 'dep6-6', + 'dep7-1', 'dep7-2', 'dep7-3', + ]), + sorted(release['requires'])) + + def test_load_bundle_IgnoreNotSupportedContextTypes(self): + volume = self.start_master() + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + context = conn.post(['context'], { + 'type': 'package', + 'title': '', + 'summary': '', + 'description': '', + }) + this.request = Request(method='POST', path=['context', context]) + aggid = conn.post(['context', context, 'releases'], -1) + self.assertEqual({ + aggid: {'seqno': 3, 'value': -1, 'author': {tests.UID: {'role': 3, 'name': tests.UID, 'order': 0}}}, + }, volume['context'][context]['releases']) + + +if __name__ == '__main__': + tests.main() diff --git a/tests/units/model/post.py b/tests/units/model/post.py index dc6f6f4..45b85e1 100755 --- a/tests/units/model/post.py +++ b/tests/units/model/post.py @@ -8,74 +8,27 @@ from sugar_network.client import Connection, keyfile from sugar_network.model.user import User from sugar_network.model.context import Context from sugar_network.model.post import Post -from sugar_network.model.release import Release +from 
sugar_network.toolkit.coroutine import this from sugar_network.toolkit import http class PostTest(tests.Test): - def test_SetContext(self): - volume = self.start_master([User, Context, Release, Post]) - client = Connection(auth=http.SugarAuth(keyfile.value)) - - self.assertRaises(http.NotFound, client.post, ['post'], {'type': 'comment', 'title': '', 'message': '', 'topic': 'absent'}) - - context = client.post(['context'], { - 'type': 'package', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - }) - topic = client.post(['post'], { - 'context': context, - 'title': 'title', - 'message': 'message', - 'type': 'update', - }) - comment = client.post(['post'], { - 'topic': topic, - 'title': 'title', - 'message': 'message', - 'type': 'comment', - }) - self.assertEqual( - context, - client.get(['post', comment, 'context'])) - - def test_RatingSort(self): - directory = db.Volume('db', [Post])['post'] - - directory.create({'guid': '1', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'rating': [0, 0]}) - directory.create({'guid': '2', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'rating': [1, 2]}) - directory.create({'guid': '3', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'rating': [1, 4]}) - directory.create({'guid': '4', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'rating': [10, 10]}) - directory.create({'guid': '5', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'rating': [30, 90]}) - - self.assertEqual( - ['1', '2', '3', '4', '5'], - [i.guid for i in directory.find()[0]]) - self.assertEqual( - ['1', '4', '2', '5', '3'], - [i.guid for i in directory.find(order_by='rating')[0]]) - self.assertEqual( - ['3', '5', '2', '4', '1'], - [i.guid for i in directory.find(order_by='-rating')[0]]) - def test_FindComments(self): directory = db.Volume('db', [Post])['post'] - directory.create({'guid': '1', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'comments': { - '1': {'message': 'foo'}, + directory.create({'guid': '1', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'comments': { + '1': {'value': {'en': 'foo'}}, }}) - directory.create({'guid': '2', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'comments': { - '1': {'message': 'bar'}, + directory.create({'guid': '2', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'comments': { + '1': {'value': {'en': 'bar'}}, }}) - directory.create({'guid': '3', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'comments': { - '1': {'message': 'bar'}, - '2': {'message': 'foo'}, + directory.create({'guid': '3', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'comments': { + '1': {'value': {'en': 'bar'}}, + '2': {'value': {'en': 'foo'}}, }}) - directory.create({'guid': '4', 'context': '', 'type': 'comment', 'title': '', 'message': '', 'comments': { - '1': {'message': 'foo bar'}, + directory.create({'guid': '4', 'context': '', 'type': 'post', 'title': {}, 'message': {}, 'comments': { + '1': {'value': {'en': 'foo bar'}}, }}) self.assertEqual( @@ -92,6 +45,115 @@ class PostTest(tests.Test): ['1', '3', '4'], [i.guid for i in directory.find(query='comments:foo')[0]]) + def test_ShiftContextRating(self): + volume = db.Volume('db', [Context, Post]) + this.volume = volume + + context = volume['context'].create({ + 'type': 'activity', + 'title': {}, + 'summary': {}, + 'description': {}, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + + volume['post'].create({ + 'context': context, 
+ 'type': 'post', + 'title': {}, + 'message': {}, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + + volume['post'].create({ + 'context': context, + 'type': 'post', + 'title': {}, + 'message': {}, + 'vote': 0, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + + volume['post'].create({ + 'context': context, + 'type': 'post', + 'title': {}, + 'message': {}, + 'vote': 1, + }) + self.assertEqual([1, 1], volume['context'][context]['rating']) + + volume['post'].create({ + 'context': context, + 'type': 'post', + 'title': {}, + 'message': {}, + 'vote': 2, + }) + self.assertEqual([2, 3], volume['context'][context]['rating']) + + def test_ShiftTopicRating(self): + volume = db.Volume('db2', [Context, Post]) + this.volume = volume + + context = volume['context'].create({ + 'type': 'activity', + 'title': {}, + 'summary': {}, + 'description': {}, + }) + topic = volume['post'].create({ + 'context': context, + 'type': 'post', + 'title': {}, + 'message': {}, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + self.assertEqual([0, 0], volume['post'][topic]['rating']) + + volume['post'].create({ + 'context': context, + 'topic': topic, + 'type': 'post', + 'title': {}, + 'message': {}, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + self.assertEqual([0, 0], volume['post'][topic]['rating']) + + volume['post'].create({ + 'context': context, + 'topic': topic, + 'type': 'post', + 'title': {}, + 'message': {}, + 'vote': 0, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + self.assertEqual([0, 0], volume['post'][topic]['rating']) + + volume['post'].create({ + 'context': context, + 'topic': topic, + 'type': 'post', + 'title': {}, + 'message': {}, + 'vote': 1, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + self.assertEqual([1, 1], volume['post'][topic]['rating']) + + volume['post'].create({ + 'context': context, + 'topic': topic, + 'type': 'post', + 'title': {}, + 'message': {}, + 'vote': 2, + }) + self.assertEqual([0, 0], volume['context'][context]['rating']) + self.assertEqual([2, 3], volume['post'][topic]['rating']) + if __name__ == '__main__': tests.main() diff --git a/tests/units/model/release.py b/tests/units/model/release.py deleted file mode 100755 index 2f4bfb1..0000000 --- a/tests/units/model/release.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env python -# sugar-lint: disable - -import os - -import xapian - -from __init__ import tests - -from sugar_network import db -from sugar_network.model import release -from sugar_network.model.release import _fmt_version, Release -from sugar_network.client import IPCConnection -from sugar_network.toolkit import http, coroutine - - -class ReleaseTest(tests.Test): - - def test_fmt_version(self): - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''5''000')), - _fmt_version('1')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0002''0000''5''000')), - _fmt_version('1.2')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0020''0300''5''000')), - _fmt_version('1.20.300')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0020''0300''5''000')), - _fmt_version('1.20.300.4444')) - - self.assertEqual( - xapian.sortable_serialise(eval('1''9999''0000''5''000')), - _fmt_version('10001.99999.10000')) - - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''3''000')), - _fmt_version('1-pre')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''4''000')), - 
_fmt_version('1-rc')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''5''000')), - _fmt_version('1-')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''6''000')), - _fmt_version('1-r')) - - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''3''001')), - _fmt_version('1-pre1')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''4''002')), - _fmt_version('1-rc2')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''6''003')), - _fmt_version('1-r3')) - - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''6''000')), - _fmt_version('1-r-2-3')) - self.assertEqual( - xapian.sortable_serialise(eval('1''0000''0000''6''001')), - _fmt_version('1-r1.2-3')) - - def test_OriginalAuthor(self): - self.start_online_client() - client = IPCConnection() - - self.node_volume['context'].create({ - 'guid': 'context', - 'type': 'book', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - 'author': {'fake': None}, - }) - - guid = client.post(['release'], { - 'context': 'context', - 'license': 'GPLv3+', - 'version': '1', - 'stability': 'stable', - 'notes': '', - }) - self.assertEqual([], self.node_volume['release'].get(guid)['layer']) - - guid = client.post(['release'], { - 'context': 'context', - 'license': 'GPLv3+', - 'version': '1', - 'stability': 'stable', - 'notes': '', - 'layer': ['foo'], - }) - self.assertEqual(['foo'], self.node_volume['release'].get(guid)['layer']) - - self.node_volume['context'].update('context', {'author': {tests.UID: None}}) - - guid = client.post(['release'], { - 'context': 'context', - 'license': 'GPLv3+', - 'version': '1', - 'stability': 'stable', - 'notes': '', - }) - self.assertEqual(['origin'], self.node_volume['release'].get(guid)['layer']) - - guid = client.post(['release'], { - 'context': 'context', - 'license': 'GPLv3+', - 'version': '1', - 'stability': 'stable', - 'notes': '', - 'layer': ['foo'], - }) - self.assertEqual( - sorted(['foo', 'origin']), - sorted(self.node_volume['release'].get(guid)['layer'])) - - -if __name__ == '__main__': - tests.main() diff --git a/tests/units/model/routes.py b/tests/units/model/routes.py index 989dfc1..3c21483 100755 --- a/tests/units/model/routes.py +++ b/tests/units/model/routes.py @@ -10,36 +10,15 @@ from os.path import exists from __init__ import tests, src_root from sugar_network import db, model +from sugar_network.db import files from sugar_network.model.user import User from sugar_network.toolkit.router import Router, Request +from sugar_network.toolkit.coroutine import this from sugar_network.toolkit import coroutine class RoutesTest(tests.Test): - def test_StaticFiles(self): - router = Router(model.FrontRoutes()) - local_path = src_root + '/sugar_network/static/httpdocs/images/missing.png' - - response = [] - reply = router({ - 'PATH_INFO': '/static/images/missing.png', - 'REQUEST_METHOD': 'GET', - }, - lambda status, headers: response.extend([status, dict(headers)])) - result = file(local_path).read() - self.assertEqual(result, ''.join([i for i in reply])) - self.assertEqual([ - '200 OK', - { - 'last-modified': formatdate(os.stat(local_path).st_mtime, localtime=False, usegmt=True), - 'content-length': str(len(result)), - 'content-type': 'image/png', - 'content-disposition': 'attachment; filename="missing.png"', - } - ], - response) - def test_Subscribe(self): class Document(db.Resource): @@ -49,7 +28,7 @@ class RoutesTest(tests.Test): return value routes = model.FrontRoutes() - volume = 
db.Volume('db', [Document], routes.broadcast) + volume = db.Volume('db', [Document]) events = [] def read_events(): @@ -69,6 +48,7 @@ class RoutesTest(tests.Test): job.kill() self.assertEqual([ + {'event': 'pong'}, {'guid': 'guid', 'resource': 'document', 'event': 'create'}, {'guid': 'guid', 'resource': 'document', 'event': 'update'}, {'guid': 'guid', 'event': 'delete', 'resource': u'document'}, @@ -77,66 +57,10 @@ class RoutesTest(tests.Test): def test_SubscribeWithPong(self): routes = model.FrontRoutes() - for event in routes.subscribe(ping=True): + for event in routes.subscribe(): break self.assertEqual({'event': 'pong'}, event) - def test_feed(self): - volume = db.Volume('db', model.RESOURCES) - routes = model.VolumeRoutes(volume) - - volume['context'].create({ - 'guid': 'context', - 'type': 'activity', - 'title': '', - 'summary': '', - 'description': '', - 'dependencies': ['foo', 'bar'], - }) - volume['release'].create({ - 'guid': 'release', - 'context': 'context', - 'license': 'GPLv3', - 'version': '1', - 'date': 0, - 'stability': 'stable', - 'notes': '', - 'data': { - 'spec': { - '*-*': { - 'commands': {'activity': {'exec': 'true'}}, - 'requires': {'dep': {}, 'sugar': {'restrictions': [['0.88', None]]}}, - }, - }, - }, - }) - - self.assertEqual({ - 'releases': [ - { - 'guid': 'release', - 'author': {}, - 'ctime': 0, - 'data': { - 'spec': { - '*-*': { - 'commands': {'activity': {'exec': 'true'}}, - 'requires': {'dep': {}, 'sugar': {'restrictions': [['0.88', None]]}}, - }, - }, - }, - 'layer': [], - 'license': 'GPLv3', - 'notes': {'en-us': ''}, - 'stability': 'stable', - 'tags': [], - 'version': '1', - 'requires': {'bar': {}, 'foo': {}}, - }, - ], - }, - routes.feed(Request(method='GET', path=['context', 'context']), 'foo')) - if __name__ == '__main__': tests.main() diff --git a/tests/units/node/__main__.py b/tests/units/node/__main__.py index ac37315..dfadaf3 100644 --- a/tests/units/node/__main__.py +++ b/tests/units/node/__main__.py @@ -4,16 +4,14 @@ from __init__ import tests from downloads import * from files import * -from master import * from node import * from obs import * -from stats_node import * from stats_user import * from sync import * from sync_master import * from sync_offline import * from sync_online import * -from volume import * +from model import * if __name__ == '__main__': tests.main() diff --git a/tests/units/node/master.py b/tests/units/node/master.py deleted file mode 100755 index b3eaa75..0000000 --- a/tests/units/node/master.py +++ /dev/null @@ -1,214 +0,0 @@ -#!/usr/bin/env python -# sugar-lint: disable - -import os - -from __init__ import tests - -from sugar_network.node import obs -from sugar_network.client import IPCConnection -from sugar_network.toolkit import coroutine, enforce - - -class MasterTest(tests.Test): - - def test_Aliases(self): - self.override(obs, 'get_repos', lambda: [ - {'distributor_id': 'Gentoo', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']}, - {'distributor_id': 'Debian', 'name': 'Debian-6.0', 'arches': ['x86']}, - {'distributor_id': 'Debian', 'name': 'Debian-7.0', 'arches': ['x86_64']}, - ]) - self.override(obs, 'resolve', lambda repo, arch, names: ['fake']) - - self.start_online_client() - ipc = IPCConnection() - - guid = ipc.post(['context'], { - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - }) - - ipc.put(['context', guid, 'aliases'], { - 'Gentoo': { - 'binary': [['pkg1.bin', 'pkg2.bin']], - 'devel': [['pkg3.devel']], - }, - 'Debian': { - 'binary': [['pkg4.bin']], - 
'devel': [['pkg5.devel', 'pkg6.devel']], - }, - }) - coroutine.dispatch() - self.assertEqual({ - 'Gentoo-2.1': {'status': 'success', 'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']}, - 'Debian-6.0': {'status': 'success', 'binary': ['pkg4.bin'], 'devel': ['pkg5.devel', 'pkg6.devel']}, - 'Debian-7.0': {'status': 'success', 'binary': ['pkg4.bin'], 'devel': ['pkg5.devel', 'pkg6.devel']}, - }, - ipc.get(['context', guid, 'packages'])) - - def test_WrongAliases(self): - self.override(obs, 'get_repos', lambda: [ - {'distributor_id': 'Gentoo', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']}, - {'distributor_id': 'Debian', 'name': 'Debian-6.0', 'arches': ['x86']}, - {'distributor_id': 'Debian', 'name': 'Debian-7.0', 'arches': ['x86_64']}, - ]) - self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, 'resolve failed')) - - self.start_online_client() - ipc = IPCConnection() - - guid = ipc.post(['context'], { - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - }) - - ipc.put(['context', guid, 'aliases'], { - 'Gentoo': { - 'binary': [['pkg1.bin', 'pkg2.bin']], - 'devel': [['pkg3.devel']], - }, - 'Debian': { - 'binary': [['pkg4.bin']], - 'devel': [['pkg5.devel', 'pkg6.devel']], - }, - }) - coroutine.dispatch() - self.assertEqual({ - 'Gentoo-2.1': {'status': 'resolve failed'}, - 'Debian-6.0': {'status': 'resolve failed'}, - 'Debian-7.0': {'status': 'resolve failed'}, - }, - ipc.get(['context', guid, 'packages'])) - - def test_MultipleAliases(self): - - def resolve(repo, arch, names): - enforce(not [i for i in names if 'fake' in i], 'resolve failed') - return ['fake'] - - self.override(obs, 'get_repos', lambda: [ - {'distributor_id': 'Gentoo', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']}, - ]) - self.override(obs, 'resolve', resolve) - - self.start_online_client() - ipc = IPCConnection() - - guid = ipc.post(['context'], { - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - }) - - ipc.put(['context', guid, 'aliases'], { - 'Gentoo': { - 'binary': [['fake.bin'], ['proper.bin'], ['not-reach.bin']], - 'devel': [['fake.devel'], ['proper.devel'], ['not-reach.devel']], - }, - }) - coroutine.dispatch() - self.assertEqual({ - 'Gentoo-2.1': {'status': 'success', 'binary': ['proper.bin'], 'devel': ['proper.devel']}, - }, - ipc.get(['context', guid, 'packages'])) - - ipc.put(['context', guid, 'aliases'], { - 'Gentoo': { - 'binary': [['proper.bin']], - 'devel': [['fake.devel']], - }, - }) - coroutine.dispatch() - self.assertEqual({ - 'Gentoo-2.1': {'status': 'resolve failed', 'binary': ['proper.bin']}, - }, - ipc.get(['context', guid, 'packages'])) - - ipc.put(['context', guid, 'aliases'], { - 'Gentoo': { - 'binary': [['fake.bin']], - 'devel': [['proper.devel']], - }, - }) - coroutine.dispatch() - self.assertEqual({ - 'Gentoo-2.1': {'status': 'resolve failed'}, - }, - ipc.get(['context', guid, 'packages'])) - - def test_InvalidateSolutions(self): - self.override(obs, 'get_repos', lambda: [ - {'distributor_id': 'Gentoo', 'name': 'Gentoo-2.1', 'arches': ['x86_64']}, - ]) - self.override(obs, 'resolve', lambda repo, arch, names: ['fake']) - - self.start_online_client() - ipc = IPCConnection() - - events = [] - def read_events(): - for event in ipc.subscribe(): - if event.get('resource') == 'release': - events.append(event) - job = coroutine.spawn(read_events) - - guid = ipc.post(['context'], { - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - 
}) - coroutine.sleep(.5) - self.assertEqual([], events) - - ipc.put(['context', guid, 'aliases'], { - 'Gentoo': { - 'binary': [['bin']], - 'devel': [['devel']], - }, - }) - coroutine.sleep(.5) - self.assertEqual([ - {'event': 'populate', 'resource': 'release', 'mtime': int(os.stat('master/release/index/mtime').st_mtime)}, - ], - events) - self.assertEqual({ - 'Gentoo-2.1': {'status': 'success', 'binary': ['bin'], 'devel': ['devel']}, - }, - ipc.get(['context', guid, 'packages'])) - - def test_InvalidateSolutionsOnDependenciesChanges(self): - self.start_online_client() - ipc = IPCConnection() - - events = [] - def read_events(): - for event in ipc.subscribe(): - if event.get('resource') == 'release': - events.append(event) - job = coroutine.spawn(read_events) - - guid = ipc.post(['context'], { - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - 'dependencies': [], - }) - self.assertEqual(0, len(events)) - - ipc.put(['context', guid, 'dependencies'], ['foo']) - coroutine.sleep(.1) - self.assertEqual([ - {'event': 'populate', 'resource': 'release', 'mtime': int(os.stat('master/release/index/mtime').st_mtime)}, - ], - events) - - -if __name__ == '__main__': - tests.main() diff --git a/tests/units/node/model.py b/tests/units/node/model.py new file mode 100755 index 0000000..68215c1 --- /dev/null +++ b/tests/units/node/model.py @@ -0,0 +1,660 @@ +#!/usr/bin/env python +# sugar-lint: disable + +import os +import time + +from __init__ import tests + +from sugar_network import db, toolkit +from sugar_network.db import files +from sugar_network.client import Connection, keyfile, api_url +from sugar_network.model.user import User +from sugar_network.model.post import Post +from sugar_network.node import model, obs +from sugar_network.node.routes import NodeRoutes +from sugar_network.toolkit.coroutine import this +from sugar_network.toolkit.router import Request, Router +from sugar_network.toolkit import i18n, http, coroutine, enforce + + +class ModelTest(tests.Test): + + def test_IncrementReleasesSeqno(self): + events = [] + volume = self.start_master([User, model.Context, Post]) + this.broadcast = lambda x: events.append(x) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + context = conn.post(['context'], { + 'type': 'group', + 'title': 'Activity', + 'summary': 'summary', + 'description': 'description', + }) + self.assertEqual([ + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(0, volume.releases_seqno.value) + + aggid = conn.post(['context', context, 'releases'], -1) + self.assertEqual([ + {'event': 'release', 'seqno': 1}, + ], [i for i in events if i['event'] == 'release']) + self.assertEqual(1, volume.releases_seqno.value) + + def test_diff(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Document(db.Resource): + + @db.indexed_property(slot=1) + def prop(self, value): + return value + + volume = self.start_master([User, Document]) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + guid1 = conn.post(['document'], {'prop': 'a'}) + self.utime('master/document/%s/%s' % (guid1[:2], guid1), 1) + guid2 = conn.post(['document'], {'prop': 'b'}) + self.utime('master/document/%s/%s' % (guid2[:2], guid2), 2) + + in_seq = toolkit.Sequence([[1, None]]) + self.assertEqual([ + {'resource': 'document'}, + {'guid': guid1, + 'diff': { + 'guid': {'value': guid1, 'mtime': 1}, + 'mtime': {'value': 0, 'mtime': 1}, + 'ctime': {'value': 0, 'mtime': 1}, + 
'prop': {'value': 'a', 'mtime': 1}, + 'author': {'mtime': 1, 'value': {}}, + 'layer': {'mtime': 1, 'value': []}, + 'tags': {'mtime': 1, 'value': []}, + }, + }, + {'guid': guid2, + 'diff': { + 'guid': {'value': guid2, 'mtime': 2}, + 'mtime': {'value': 0, 'mtime': 2}, + 'ctime': {'value': 0, 'mtime': 2}, + 'prop': {'value': 'b', 'mtime': 2}, + 'author': {'mtime': 2, 'value': {}}, + 'layer': {'mtime': 2, 'value': []}, + 'tags': {'mtime': 2, 'value': []}, + }, + }, + {'commit': [[1, 2]]}, + ], + [i for i in model.diff(volume, in_seq)]) + self.assertEqual([[1, None]], in_seq) + + def test_diff_Partial(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Document(db.Resource): + + @db.indexed_property(slot=1) + def prop(self, value): + return value + + volume = self.start_master([User, Document]) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + guid1 = conn.post(['document'], {'prop': 'a'}) + self.utime('master/document/%s/%s' % (guid1[:2], guid1), 1) + guid2 = conn.post(['document'], {'prop': 'b'}) + self.utime('master/document/%s/%s' % (guid2[:2], guid2), 2) + + in_seq = toolkit.Sequence([[1, None]]) + patch = model.diff(volume, in_seq) + self.assertEqual({'resource': 'document'}, next(patch)) + self.assertEqual(guid1, next(patch)['guid']) + self.assertEqual({'commit': []}, patch.throw(StopIteration())) + try: + next(patch) + assert False + except StopIteration: + pass + + patch = model.diff(volume, in_seq) + self.assertEqual({'resource': 'document'}, next(patch)) + self.assertEqual(guid1, next(patch)['guid']) + self.assertEqual(guid2, next(patch)['guid']) + self.assertEqual({'commit': [[1, 1]]}, patch.throw(StopIteration())) + try: + next(patch) + assert False + except StopIteration: + pass + + def test_diff_Stretch(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Document(db.Resource): + + @db.indexed_property(slot=1) + def prop(self, value): + return value + + volume = self.start_master([User, Document]) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + guid1 = conn.post(['document'], {'prop': 'a'}) + self.utime('master/document/%s/%s' % (guid1[:2], guid1), 1) + guid2 = conn.post(['document'], {'prop': 'b'}) + volume['document'].delete(guid2) + guid3 = conn.post(['document'], {'prop': 'c'}) + self.utime('master/document/%s/%s' % (guid3[:2], guid3), 2) + guid4 = conn.post(['document'], {'prop': 'd'}) + volume['document'].delete(guid4) + guid5 = conn.post(['document'], {'prop': 'f'}) + self.utime('master/document/%s/%s' % (guid5[:2], guid5), 2) + + in_seq = toolkit.Sequence([[1, None]]) + patch = model.diff(volume, in_seq) + self.assertEqual({'resource': 'document'}, patch.send(None)) + self.assertEqual(guid1, patch.send(None)['guid']) + self.assertEqual(guid3, patch.send(None)['guid']) + self.assertEqual(guid5, patch.send(None)['guid']) + self.assertEqual({'commit': [[1, 1], [3, 3]]}, patch.throw(StopIteration())) + try: + patch.send(None) + assert False + except StopIteration: + pass + + patch = model.diff(volume, in_seq) + self.assertEqual({'resource': 'document'}, patch.send(None)) + self.assertEqual(guid1, patch.send(None)['guid']) + self.assertEqual(guid3, patch.send(None)['guid']) + self.assertEqual(guid5, patch.send(None)['guid']) + self.assertEqual({'commit': [[1, 5]]}, patch.send(None)) + try: + patch.send(None) + assert False + except StopIteration: + pass + + def 
test_diff_DoNotStretchContinuesPacket(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Document(db.Resource): + + @db.indexed_property(slot=1) + def prop(self, value): + return value + + volume = self.start_master([User, Document]) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + guid1 = conn.post(['document'], {'prop': 'a'}) + volume['document'].delete(guid1) + guid2 = conn.post(['document'], {'prop': 'b'}) + volume['document'].delete(guid2) + guid3 = conn.post(['document'], {'prop': 'c'}) + self.utime('master/document/%s/%s' % (guid3[:2], guid3), 2) + guid4 = conn.post(['document'], {'prop': 'd'}) + volume['document'].delete(guid4) + guid5 = conn.post(['document'], {'prop': 'f'}) + self.utime('master/document/%s/%s' % (guid5[:2], guid5), 2) + + in_seq = toolkit.Sequence([[1, None]]) + patch = model.diff(volume, in_seq, toolkit.Sequence([[1, 1]])) + self.assertEqual({'resource': 'document'}, patch.send(None)) + self.assertEqual(guid3, patch.send(None)['guid']) + self.assertEqual(guid5, patch.send(None)['guid']) + self.assertEqual({'commit': [[1, 1], [3, 3], [5, 5]]}, patch.send(None)) + try: + patch.send(None) + assert False + except StopIteration: + pass + + def test_diff_TheSameInSeqForAllDocuments(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Document1(db.Resource): + pass + + class Document2(db.Resource): + pass + + class Document3(db.Resource): + pass + + volume = self.start_master([User, Document1, Document2, Document3]) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + guid3 = conn.post(['document1'], {}) + self.utime('master/document/%s/%s' % (guid3[:2], guid3), 3) + guid2 = conn.post(['document2'], {}) + self.utime('master/document/%s/%s' % (guid2[:2], guid2), 2) + guid1 = conn.post(['document3'], {}) + self.utime('master/document/%s/%s' % (guid1[:2], guid1), 1) + + in_seq = toolkit.Sequence([[1, None]]) + patch = model.diff(volume, in_seq) + self.assertEqual({'resource': 'document1'}, patch.send(None)) + self.assertEqual(guid3, patch.send(None)['guid']) + self.assertEqual({'resource': 'document2'}, patch.send(None)) + self.assertEqual(guid2, patch.send(None)['guid']) + self.assertEqual({'resource': 'document3'}, patch.send(None)) + self.assertEqual(guid1, patch.send(None)['guid']) + self.assertEqual({'commit': [[1, 3]]}, patch.send(None)) + try: + patch.send(None) + assert False + except StopIteration: + pass + + def test_merge_Create(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Document1(db.Resource): + + @db.indexed_property(slot=1) + def prop(self, value): + return value + + class Document2(db.Resource): + pass + + self.touch(('master/db.seqno', '100')) + volume = self.start_master([Document1, Document2]) + + records = [ + {'resource': 'document1'}, + {'guid': '1', 'diff': { + 'guid': {'value': '1', 'mtime': 1.0}, + 'ctime': {'value': 2, 'mtime': 2.0}, + 'mtime': {'value': 3, 'mtime': 3.0}, + 'prop': {'value': '4', 'mtime': 4.0}, + }}, + {'resource': 'document2'}, + {'guid': '5', 'diff': { + 'guid': {'value': '5', 'mtime': 5.0}, + 'ctime': {'value': 6, 'mtime': 6.0}, + 'mtime': {'value': 7, 'mtime': 7.0}, + }}, + {'commit': [[1, 2]]}, + ] + self.assertEqual(([[1, 2]], [[101, 102]]), model.merge(volume, records)) + + self.assertEqual( + {'guid': '1', 'prop': '4', 'ctime': 2, 'mtime': 3}, + 
volume['document1'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) + self.assertEqual(1, os.stat('master/document1/1/1/guid').st_mtime) + self.assertEqual(2, os.stat('master/document1/1/1/ctime').st_mtime) + self.assertEqual(3, os.stat('master/document1/1/1/mtime').st_mtime) + self.assertEqual(4, os.stat('master/document1/1/1/prop').st_mtime) + + self.assertEqual( + {'guid': '5', 'ctime': 6, 'mtime': 7}, + volume['document2'].get('5').properties(['guid', 'ctime', 'mtime'])) + self.assertEqual(5, os.stat('master/document2/5/5/guid').st_mtime) + self.assertEqual(6, os.stat('master/document2/5/5/ctime').st_mtime) + self.assertEqual(7, os.stat('master/document2/5/5/mtime').st_mtime) + + def test_merge_Update(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Document(db.Resource): + + @db.indexed_property(slot=1) + def prop(self, value): + return value + + self.touch(('master/db.seqno', '100')) + volume = db.Volume('master', [Document]) + volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}) + for i in os.listdir('master/document/1/1'): + os.utime('master/document/1/1/%s' % i, (2, 2)) + + records = [ + {'resource': 'document'}, + {'guid': '1', 'diff': {'prop': {'value': '2', 'mtime': 1.0}}}, + {'commit': [[1, 1]]}, + ] + self.assertEqual(([[1, 1]], []), model.merge(volume, records)) + self.assertEqual( + {'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}, + volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) + self.assertEqual(2, os.stat('master/document/1/1/prop').st_mtime) + + records = [ + {'resource': 'document'}, + {'guid': '1', 'diff': {'prop': {'value': '3', 'mtime': 2.0}}}, + {'commit': [[2, 2]]}, + ] + self.assertEqual(([[2, 2]], []), model.merge(volume, records)) + self.assertEqual( + {'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}, + volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) + self.assertEqual(2, os.stat('master/document/1/1/prop').st_mtime) + + records = [ + {'resource': 'document'}, + {'guid': '1', 'diff': {'prop': {'value': '4', 'mtime': 3.0}}}, + {'commit': [[3, 3]]}, + ] + self.assertEqual(([[3, 3]], [[102, 102]]), model.merge(volume, records)) + self.assertEqual( + {'guid': '1', 'prop': '4', 'ctime': 1, 'mtime': 1}, + volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) + self.assertEqual(3, os.stat('master/document/1/1/prop').st_mtime) + + def test_merge_MultipleCommits(self): + self.override(time, 'time', lambda: 0) + + class Document(db.Resource): + + @db.stored_property() + def prop(self, value): + return value + + self.touch(('master/db.seqno', '100')) + volume = db.Volume('master', [Document]) + + def generator(): + for i in [ + {'resource': 'document'}, + {'commit': [[1, 1]]}, + {'guid': '1', 'diff': { + 'guid': {'value': '1', 'mtime': 1.0}, + 'ctime': {'value': 2, 'mtime': 2.0}, + 'mtime': {'value': 3, 'mtime': 3.0}, + 'prop': {'value': '4', 'mtime': 4.0}, + }}, + {'commit': [[2, 3]]}, + ]: + yield i + + records = generator() + self.assertEqual(([[1, 3]], [[101, 101]]), model.merge(volume, records)) + assert volume['document'].exists('1') + + def test_diff_ByLayers(self): + self.override(time, 'time', lambda: 0) + self.override(NodeRoutes, 'authorize', lambda self, user, role: True) + + class Context(db.Resource): + pass + + class Post(db.Resource): + pass + + this.request = Request() + volume = db.Volume('db', [Context, Post]) + volume['context'].create({'guid': '0', 
'ctime': 1, 'mtime': 1, 'layer': ['layer0', 'common']}) + volume['context'].create({'guid': '1', 'ctime': 1, 'mtime': 1, 'layer': ['layer1']}) + volume['post'].create({'guid': '3', 'ctime': 3, 'mtime': 3, 'layer': 'layer3'}) + + volume['context'].update('0', {'tags': '0'}) + volume['context'].update('1', {'tags': '1'}) + volume['post'].update('3', {'tags': '3'}) + self.utime('db', 0) + + self.assertEqual(sorted([ + {'resource': 'context'}, + {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}}, + {'guid': '1', 'diff': {'tags': {'value': '1', 'mtime': 0}}}, + {'resource': 'post'}, + {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}}, + {'commit': [[4, 6]]}, + ]), + sorted([i for i in model.diff(volume, toolkit.Sequence([[4, None]]))])) + + self.assertEqual(sorted([ + {'resource': 'context'}, + {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}}, + {'guid': '1', 'diff': {'tags': {'value': '1', 'mtime': 0}}}, + {'resource': 'post'}, + {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}}, + {'commit': [[4, 6]]}, + ]), + sorted([i for i in model.diff(volume, toolkit.Sequence([[4, None]]), layer='layer1')])) + + self.assertEqual(sorted([ + {'resource': 'context'}, + {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}}, + {'resource': 'post'}, + {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}}, + {'commit': [[4, 6]]}, + ]), + sorted([i for i in model.diff(volume, toolkit.Sequence([[4, None]]), layer='layer2')])) + + self.assertEqual(sorted([ + {'resource': 'context'}, + {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}}, + {'resource': 'post'}, + {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}}, + {'commit': [[4, 6]]}, + ]), + sorted([i for i in model.diff(volume, toolkit.Sequence([[4, None]]), layer='foo')])) + + def test_Packages(self): + self.override(obs, 'get_repos', lambda: [ + {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']}, + {'lsb_id': 'Debian', 'lsb_release': '6.0', 'name': 'Debian-6.0', 'arches': ['x86']}, + {'lsb_id': 'Debian', 'lsb_release': '7.0', 'name': 'Debian-7.0', 'arches': ['x86_64']}, + ]) + self.override(obs, 'resolve', lambda repo, arch, names: ['fake']) + + volume = self.start_master([User, model.Context]) + conn = http.Connection(api_url.value, http.SugarAuth(keyfile.value)) + + guid = conn.post(['context'], { + 'type': 'package', + 'title': 'title', + 'summary': 'summary', + 'description': 'description', + }) + conn.put(['context', guid, 'releases', '*'], { + 'binary': ['pkg1.bin', 'pkg2.bin'], + 'devel': 'pkg3.devel', + }) + self.assertEqual({ + '*': { + 'seqno': 3, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']}, + }, + 'status': { + 'Gentoo-2.1': 'success', + 'Debian-6.0': 'success', + 'Debian-7.0': 'success', + }, + }, + volume['context'][guid]['releases']) + + guid = conn.post(['context'], { + 'type': 'package', + 'title': 'title', + 'summary': 'summary', + 'description': 'description', + }) + conn.put(['context', guid, 'releases', 'Gentoo'], { + 'binary': ['pkg1.bin', 'pkg2.bin'], + 'devel': 'pkg3.devel', + }) + self.assertEqual({ + 'Gentoo': { + 'seqno': 5, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']}, + }, + 'status': { + 'Gentoo-2.1': 'success', + }, + }, + volume['context'][guid]['releases']) + + guid = conn.post(['context'], { + 'type': 'package', + 'title': 
'title', + 'summary': 'summary', + 'description': 'description', + }) + conn.put(['context', guid, 'releases', 'Debian-6.0'], { + 'binary': ['pkg1.bin', 'pkg2.bin'], + 'devel': 'pkg3.devel', + }) + self.assertEqual({ + 'Debian-6.0': { + 'seqno': 7, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']}, + }, + 'status': { + 'Debian-6.0': 'success', + }, + }, + volume['context'][guid]['releases']) + + def test_UnresolvedPackages(self): + self.override(obs, 'get_repos', lambda: [ + {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']}, + ]) + self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, 'resolve failed')) + + volume = self.start_master([User, model.Context]) + conn = http.Connection(api_url.value, http.SugarAuth(keyfile.value)) + + guid = conn.post(['context'], { + 'type': 'package', + 'title': 'title', + 'summary': 'summary', + 'description': 'description', + }) + conn.put(['context', guid, 'releases', '*'], { + 'binary': ['pkg1.bin', 'pkg2.bin'], + 'devel': 'pkg3.devel', + }) + self.assertEqual({ + '*': { + 'seqno': 3, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']}, + }, + 'status': { + 'Gentoo-2.1': 'resolve failed', + }, + }, + volume['context'][guid]['releases']) + + def test_PackageOverrides(self): + self.override(obs, 'get_repos', lambda: [ + {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']}, + {'lsb_id': 'Debian', 'lsb_release': '6.0', 'name': 'Debian-6.0', 'arches': ['x86']}, + {'lsb_id': 'Debian', 'lsb_release': '7.0', 'name': 'Debian-7.0', 'arches': ['x86_64']}, + ]) + + volume = self.start_master([User, model.Context]) + conn = http.Connection(api_url.value, http.SugarAuth(keyfile.value)) + guid = conn.post(['context'], { + 'type': 'package', + 'title': 'title', + 'summary': 'summary', + 'description': 'description', + }) + + self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '1')) + conn.put(['context', guid, 'releases', '*'], {'binary': '1'}) + self.assertEqual({ + '*': { + 'seqno': 3, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['1']}, + }, + 'status': { + 'Gentoo-2.1': '1', + 'Debian-6.0': '1', + 'Debian-7.0': '1', + }, + }, + volume['context'][guid]['releases']) + + self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '2')) + conn.put(['context', guid, 'releases', 'Debian'], {'binary': '2'}) + self.assertEqual({ + '*': { + 'seqno': 3, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['1']}, + }, + 'Debian': { + 'seqno': 4, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['2']}, + }, + 'status': { + 'Gentoo-2.1': '1', + 'Debian-6.0': '2', + 'Debian-7.0': '2', + }, + }, + volume['context'][guid]['releases']) + + self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '3')) + conn.put(['context', guid, 'releases', 'Debian-6.0'], {'binary': '3'}) + self.assertEqual({ + '*': { + 'seqno': 3, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['1']}, + }, + 'Debian': { + 'seqno': 4, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['2']}, + }, + 'Debian-6.0': { + 'seqno': 5, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 
'role': 3}}, + 'value': {'binary': ['3']}, + }, + 'status': { + 'Gentoo-2.1': '1', + 'Debian-6.0': '3', + 'Debian-7.0': '2', + }, + }, + volume['context'][guid]['releases']) + + self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '4')) + conn.put(['context', guid, 'releases', 'Debian'], {'binary': '4'}) + self.assertEqual({ + '*': { + 'seqno': 3, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['1']}, + }, + 'Debian': { + 'seqno': 6, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['4']}, + }, + 'Debian-6.0': { + 'seqno': 5, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': {'binary': ['3']}, + }, + 'status': { + 'Gentoo-2.1': '1', + 'Debian-6.0': '3', + 'Debian-7.0': '4', + }, + }, + volume['context'][guid]['releases']) + + +if __name__ == '__main__': + tests.main() diff --git a/tests/units/node/node.py b/tests/units/node/node.py index d8f00ec..0058918 100755 --- a/tests/units/node/node.py +++ b/tests/units/node/node.py @@ -16,17 +16,17 @@ from os.path import exists, join from __init__ import tests from sugar_network import db, node, model, client -from sugar_network.client import Connection, keyfile +from sugar_network.db import files +from sugar_network.client import Connection, keyfile, api_url from sugar_network.toolkit import http, coroutine from sugar_network.toolkit.rrd import Rrd -from sugar_network.node import stats_user, stats_node, obs -from sugar_network.node.routes import NodeRoutes, generate_node_stats +from sugar_network.node import stats_user +from sugar_network.node.routes import NodeRoutes from sugar_network.node.master import MasterRoutes from sugar_network.model.user import User from sugar_network.model.context import Context -from sugar_network.model.release import Release from sugar_network.model.user import User -from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, Blob, ACL, route +from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL, route from sugar_network.toolkit import http @@ -40,7 +40,7 @@ class NodeTest(tests.Test): def test_UserStats(self): volume = db.Volume('db', model.RESOURCES) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) call(cp, method='POST', document='user', principal=tests.UID, content={ 'name': 'user', @@ -100,69 +100,10 @@ class NodeTest(tests.Test): }, call(cp, method='GET', cmd='stats-info', document='user', guid=tests.UID, principal=tests.UID)) - def test_NodeStats(self): - stats_node.stats_node.value = True - stats_node.stats_node_rras.value = ['RRA:AVERAGE:0.5:1:60', 'RRA:AVERAGE:0.5:3:60'] - rrd = Rrd('stats/node', stats_node.stats_node_step.value, stats_node.stats_node_rras.value) - - ts = int(time.time()) / 3 * 3 - for i in range(10): - rrd['user'].put({'total': i}, ts + i) - - volume = db.Volume('db', model.RESOURCES) - cp = NodeRoutes('guid', volume) - - self.assertEqual({ - 'user': [ - (ts + 0, {'total': 0.0}), - (ts + 1, {'total': 1.0}), - (ts + 2, {'total': 2.0}), - (ts + 3, {'total': 3.0}), - ], - }, - call(cp, method='GET', cmd='stats', source='user.total', start=ts, end=ts + 3, records=4)) - - self.assertEqual({ - 'user': [ - (ts + 0, {'total': 0.0}), - (ts + 3, {'total': 2.0}), - (ts + 6, {'total': 5.0}), - (ts + 9, {'total': 8.0}), - ], - }, - call(cp, method='GET', cmd='stats', source='user.total', start=ts, end=ts + 9, records=3)) - - def test_NodeStatsDefaults(self): - stats_node.stats_node.value = 
True - rrd = Rrd('stats/node', stats_node.stats_node_step.value, stats_node.stats_node_rras.value) - - ts = int(time.time()) - for i in range(10): - rrd['user'].put({'total': i}, ts + i) - - volume = db.Volume('db', model.RESOURCES) - cp = NodeRoutes('guid', volume) - - self.assertEqual({ - 'user': [ - (ts + 0, {'total': 0.0}), - (ts + 1, {'total': 1.0}), - (ts + 2, {'total': 2.0}), - (ts + 3, {'total': 3.0}), - (ts + 4, {'total': 4.0}), - (ts + 5, {'total': 5.0}), - (ts + 6, {'total': 6.0}), - (ts + 7, {'total': 7.0}), - (ts + 8, {'total': 8.0}), - (ts + 9, {'total': 9.0}), - ], - }, - call(cp, method='GET', cmd='stats', source='user.total')) - def test_HandleDeletes(self): volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) guid = call(cp, method='POST', document='context', principal=tests.UID, content={ 'type': 'activity', @@ -192,33 +133,41 @@ class NodeTest(tests.Test): coroutine.dispatch() self.assertRaises(http.NotFound, call, cp, method='GET', document='context', guid=guid, reply=['guid', 'title']) self.assertEqual(['deleted'], volume['context'].get(guid)['layer']) - self.assertEqual({'event': 'delete', 'resource': 'context', 'guid': guid}, events[0]) - def test_SimulateDeleteEvents(self): - volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = NodeRoutes('guid', volume) + def test_DeletedRestoredHandlers(self): + trigger = [] - guid = call(cp, method='POST', document='context', principal=tests.UID, content={ - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - }) + class TestDocument(db.Resource): - def subscribe(): - for event in cp.subscribe(): - events.append(event) - events = [] - coroutine.spawn(subscribe) - coroutine.dispatch() + def deleted(self): + trigger.append(False) - call(cp, method='PUT', document='context', guid=guid, principal=tests.UID, content={'layer': ['deleted']}) - coroutine.dispatch() - self.assertEqual({'event': 'delete', 'resource': 'context', 'guid': guid}, events[0]) + def restored(self): + trigger.append(True) + + volume = self.start_master([TestDocument, User]) + conn = Connection(auth=http.SugarAuth(keyfile.value)) + + guid = conn.post(['testdocument'], {}) + self.assertEqual([], trigger) + + conn.put(['testdocument', guid, 'layer'], ['deleted']) + self.assertEqual([False], trigger) + + conn.put(['testdocument', guid, 'layer'], []) + self.assertEqual([False, True], trigger) + + conn.put(['testdocument', guid, 'layer'], ['bar']) + self.assertEqual([False, True], trigger) + + conn.put(['testdocument', guid, 'layer'], ['deleted']) + self.assertEqual([False, True, False], trigger) + + conn.put(['testdocument', guid, 'layer'], ['deleted', 'foo']) + self.assertEqual([False, True, False], trigger) def test_RegisterUser(self): - cp = NodeRoutes('guid', db.Volume('db', [User])) + cp = NodeRoutes('guid', volume=db.Volume('db', [User])) guid = call(cp, method='POST', document='user', principal=tests.UID2, content={ 'name': 'user', @@ -229,7 +178,7 @@ class NodeTest(tests.Test): def test_UnauthorizedCommands(self): volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) + 
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) class Routes(NodeRoutes): @@ -244,7 +193,7 @@ class NodeTest(tests.Test): class Document(db.Resource): pass - cp = Routes('guid', db.Volume('db', [User, Document])) + cp = Routes('guid', volume=db.Volume('db', [User, Document])) guid = call(cp, method='POST', document='document', principal=tests.UID, content={}) self.assertRaises(http.Unauthorized, call, cp, method='GET', cmd='probe1', document='document', guid=guid) @@ -267,8 +216,8 @@ class NodeTest(tests.Test): pass volume = db.Volume('db', [User, Document]) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = Routes('guid', volume) + cp = Routes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) guid = call(cp, method='POST', document='document', principal=tests.UID, content={}) @@ -278,7 +227,7 @@ class NodeTest(tests.Test): call(cp, method='GET', cmd='probe2', document='document', guid=guid) def test_ForbiddenCommandsForUserResource(self): - cp = NodeRoutes('guid', db.Volume('db', [User])) + cp = NodeRoutes('guid', volume=db.Volume('db', [User])) call(cp, method='POST', document='user', principal=tests.UID2, content={ 'name': 'user1', @@ -304,9 +253,9 @@ class NodeTest(tests.Test): return 'ok' volume = db.Volume('db', [User]) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - volume['user'].create({'guid': tests.UID2, 'name': 'test', 'pubkey': {'blob': StringIO(tests.PUBKEY2)}}) - cp = Routes('guid', volume) + cp = Routes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) + volume['user'].create({'guid': tests.UID2, 'name': 'test', 'pubkey': tests.PUBKEY2}) self.assertRaises(http.Forbidden, call, cp, method='PROBE') self.assertRaises(http.Forbidden, call, cp, method='PROBE', principal=tests.UID2) @@ -321,9 +270,9 @@ class NodeTest(tests.Test): return value volume = db.Volume('db', [User, Document]) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - volume['user'].create({'guid': tests.UID2, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY2)}}) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) + volume['user'].create({'guid': tests.UID2, 'name': 'user', 'pubkey': tests.PUBKEY2}) guid = call(cp, method='POST', document='document', principal=tests.UID, content={'prop': '1'}) self.assertRaises(http.Forbidden, call, cp, 'PUT', document='document', guid=guid, content={'prop': '2'}, principal=tests.UID2) @@ -342,9 +291,9 @@ class NodeTest(tests.Test): return value volume = db.Volume('db', [User, Document]) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - volume['user'].create({'guid': tests.UID2, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY2)}}) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) + volume['user'].create({'guid': tests.UID2, 'name': 'user', 'pubkey': tests.PUBKEY2}) guid = call(cp, method='POST', document='document', principal=tests.UID, content={'prop': '1'}) @@ -363,8 +312,8 @@ class NodeTest(tests.Test): pass volume = db.Volume('db', [User]) - 
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = Routes('guid', volume) + cp = Routes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) self.assertRaises(http.Forbidden, call, cp, 'PROBE', principal=tests.UID) self.touch(('authorization.conf', [ @@ -386,7 +335,7 @@ class NodeTest(tests.Test): pass volume = db.Volume('db', [User]) - cp = Routes('guid', volume) + cp = Routes('guid', volume=volume) self.assertRaises(http.Unauthorized, call, cp, 'PROBE1') self.assertRaises(http.Forbidden, call, cp, 'PROBE2') @@ -401,8 +350,8 @@ class NodeTest(tests.Test): def test_SetUser(self): volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) guid = call(cp, method='POST', document='context', principal=tests.UID, content={ 'type': 'activity', @@ -416,8 +365,8 @@ class NodeTest(tests.Test): def test_find_MaxLimit(self): volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) call(cp, method='POST', document='context', principal=tests.UID, content={ 'type': 'activity', @@ -438,23 +387,26 @@ class NodeTest(tests.Test): 'description': 'description', }) - node.find_limit.value = 3 + cp._find_limit = 3 self.assertEqual(3, len(call(cp, method='GET', document='context', limit=1024)['result'])) - node.find_limit.value = 2 + cp._find_limit = 2 self.assertEqual(2, len(call(cp, method='GET', document='context', limit=1024)['result'])) - node.find_limit.value = 1 + cp._find_limit = 1 self.assertEqual(1, len(call(cp, method='GET', document='context', limit=1024)['result'])) def test_DeletedDocuments(self): volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) guid = call(cp, method='POST', document='context', principal=tests.UID, content={ 'type': 'activity', 'title': 'title1', 'summary': 'summary', 'description': 'description', + 'artifact_icon': '', + 'icon': '', + 'logo': '', }) call(cp, method='GET', document='context', guid=guid) @@ -468,8 +420,8 @@ class NodeTest(tests.Test): def test_CreateGUID(self): # TODO Temporal security hole, see TODO volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - cp = NodeRoutes('guid', volume) + cp = NodeRoutes('guid', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) call(cp, method='POST', document='context', principal=tests.UID, content={ 'guid': 'foo', 'type': 'activity', @@ -483,10 +435,10 @@ class NodeTest(tests.Test): def test_CreateMalformedGUID(self): volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) cp = MasterRoutes('guid', volume) + volume['user'].create({'guid': 
tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) - self.assertRaises(RuntimeError, call, cp, method='POST', document='context', principal=tests.UID, content={ + self.assertRaises(http.BadRequest, call, cp, method='POST', document='context', principal=tests.UID, content={ 'guid': '!?', 'type': 'activity', 'title': 'title', @@ -496,8 +448,8 @@ class NodeTest(tests.Test): def test_FailOnExistedGUID(self): volume = db.Volume('db', model.RESOURCES) - volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) cp = MasterRoutes('guid', volume) + volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY}) guid = call(cp, method='POST', document='context', principal=tests.UID, content={ 'type': 'activity', @@ -506,7 +458,7 @@ class NodeTest(tests.Test): 'description': 'description', }) - self.assertRaises(RuntimeError, call, cp, method='POST', document='context', principal=tests.UID, content={ + self.assertRaises(http.BadRequest, call, cp, method='POST', document='context', principal=tests.UID, content={ 'guid': guid, 'type': 'activity', 'title': 'title', @@ -566,139 +518,9 @@ class NodeTest(tests.Test): def test_Clone(self): volume = self.start_master() - client = Connection(auth=http.SugarAuth(keyfile.value)) + client = http.Connection(api_url.value, http.SugarAuth(keyfile.value)) - context = client.post(['context'], { - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - }) - impl1 = client.post(['release'], { - 'context': context, - 'license': 'GPLv3+', - 'version': '1', - 'stability': 'stable', - 'notes': '', - }) - blob1 = self.zips(('topdir/probe', 'probe1')) - volume['release'].update(impl1, {'data': { - 'blob': StringIO(blob1), - 'spec': { - '*-*': { - 'requires': { - 'dep1': {}, - }, - }, - }, - }}) - impl2 = client.post(['release'], { - 'context': context, - 'license': 'GPLv3+', - 'version': '2', - 'stability': 'stable', - 'notes': '', - }) - blob2 = self.zips(('topdir/probe', 'probe2')) - volume['release'].update(impl2, {'data': { - 'blob': StringIO(blob2), - 'spec': { - '*-*': { - 'requires': { - 'dep2': {'restrictions': [[None, '2']]}, - 'dep3': {}, - }, - }, - }, - }}) - impl3 = client.post(['release'], { - 'context': context, - 'license': 'GPLv3+', - 'version': '3', - 'stability': 'stable', - 'notes': '', - }) - blob3 = self.zips(('topdir/probe', 'probe3')) - volume['release'].update(impl3, {'data': { - 'blob': StringIO(blob3), - 'spec': { - '*-*': { - 'requires': { - 'dep2': {'restrictions': [['2', None]]}, - }, - }, - }, - }}) - impl4 = client.post(['release'], { - 'context': context, - 'license': 'GPLv3+', - 'version': '4', - 'stability': 'developer', - 'notes': '', - }) - blob4 = self.zips(('topdir/probe', 'probe4')) - volume['release'].update(impl4, {'data': { - 'blob': StringIO(blob4), - 'spec': { - '*-*': { - 'requires': {}, - }, - }, - }}) - - self.assertEqual(blob3, client.get(['context', context], cmd='clone')) - self.assertEqual(blob4, client.get(['context', context], cmd='clone', stability='developer')) - self.assertEqual(blob1, client.get(['context', context], cmd='clone', version='1')) - - self.assertEqual(blob1, client.get(['context', context], cmd='clone', requires='dep1')) - self.assertEqual(blob3, client.get(['context', context], cmd='clone', requires='dep2')) - self.assertEqual(blob2, client.get(['context', context], cmd='clone', requires='dep2=1')) - self.assertEqual(blob3, client.get(['context', context], cmd='clone', requires='dep2=2')) - 
self.assertEqual(blob2, client.get(['context', context], cmd='clone', requires='dep3')) - - self.assertRaises(http.NotFound, client.get, ['context', context], cmd='clone', requires='dep4') - self.assertRaises(http.NotFound, client.get, ['context', context], cmd='clone', stability='foo') - - response = Response() - client.call(Request(method='GET', path=['context', context], cmd='clone'), response) - self.assertEqual({ - 'context': context, - 'stability': 'stable', - 'guid': impl3, - 'version': '3', - 'license': ['GPLv3+'], - 'layer': ['origin'], - 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, - 'ctime': self.node_volume['release'].get(impl3).ctime, - 'notes': {'en-us': ''}, - 'tags': [], - 'data': { - 'blob_size': len(blob3), - 'spec': { - '*-*': { - 'requires': { - 'dep2': { - 'restrictions': [['2', None]], - }, - }, - }, - }, - }, - }, - response.meta) - - def test_release(self): - volume = self.start_master() - conn = Connection(auth=http.SugarAuth(keyfile.value)) - - conn.post(['context'], { - 'guid': 'bundle_id', - 'type': 'activity', - 'title': 'title', - 'summary': 'summary', - 'description': 'description', - }) - activity_info = '\n'.join([ + blob1 = self.zips(('topdir/activity/activity.info', '\n'.join([ '[Activity]', 'name = TestActivitry', 'bundle_id = bundle_id', @@ -706,42 +528,12 @@ class NodeTest(tests.Test): 'icon = icon', 'activity_version = 1', 'license = Public Domain', - 'stability = developer', - 'requires = sugar>=0.88; dep' - ]) - changelog = "LOG" - bundle1 = self.zips( - ('topdir/activity/activity.info', activity_info), - ('topdir/CHANGELOG', changelog), - ) - guid1 = json.load(conn.request('POST', ['release'], bundle1, params={'cmd': 'submit'}).raw) - - impl = volume['release'].get(guid1) - self.assertEqual('bundle_id', impl['context']) - self.assertEqual('1', impl['version']) - self.assertEqual('developer', impl['stability']) - self.assertEqual(['Public Domain'], impl['license']) - self.assertEqual('developer', impl['stability']) - self.assertEqual({'en-us': changelog}, impl['notes']) - assert impl['ctime'] > 0 - assert impl['mtime'] > 0 - self.assertEqual({tests.UID: {'role': 3, 'name': 'f470db873b6a35903aca1f2492188e1c4b9ffc42', 'order': 0}}, impl['author']) - - data = impl.meta('data') - self.assertEqual({ - '*-*': { - 'commands': {'activity': {'exec': 'true'}}, - 'requires': {'dep': {}, 'sugar': {'restrictions': [['0.88', None]]}}, - }, - }, - data['spec']) - - self.assertEqual('application/vnd.olpc-sugar', data['mime_type']) - self.assertEqual(len(bundle1), data['blob_size']) - self.assertEqual(len(activity_info) + len(changelog), data.get('unpack_size')) - self.assertEqual(bundle1, conn.get(['context', 'bundle_id'], cmd='clone', stability='developer')) + 'requires = dep1', + 'stability = stable', + ]))) + release1 = json.load(client.request('POST', ['context'], blob1, params={'cmd': 'submit', 'initial': True}).raw) - activity_info = '\n'.join([ + blob2 = self.zips(('topdir/activity/activity.info', '\n'.join([ '[Activity]', 'name = TestActivitry', 'bundle_id = bundle_id', @@ -749,606 +541,152 @@ class NodeTest(tests.Test): 'icon = icon', 'activity_version = 2', 'license = Public Domain', + 'requires = dep2 < 3; dep3', 'stability = stable', - ]) - bundle2 = self.zips(('topdir/activity/activity.info', activity_info)) - guid2 = json.load(conn.request('POST', ['release'], bundle2, params={'cmd': 'submit'}).raw) - - self.assertEqual('1', volume['release'].get(guid1)['version']) - self.assertEqual(['origin'], 
volume['release'].get(guid1)['layer']) - self.assertEqual('2', volume['release'].get(guid2)['version']) - self.assertEqual(['origin'], volume['release'].get(guid2)['layer']) - self.assertEqual(bundle2, conn.get(['context', 'bundle_id'], cmd='clone')) + ]))) + release2 = json.load(client.request('POST', ['context'], blob2, params={'cmd': 'submit'}).raw) - activity_info = '\n'.join([ + blob3 = self.zips(('topdir/activity/activity.info', '\n'.join([ '[Activity]', 'name = TestActivitry', 'bundle_id = bundle_id', 'exec = true', 'icon = icon', - 'activity_version = 1', + 'activity_version = 3', 'license = Public Domain', + 'requires = dep2 >= 2', 'stability = stable', - ]) - bundle3 = self.zips(('topdir/activity/activity.info', activity_info)) - guid3 = json.load(conn.request('POST', ['release'], bundle3, params={'cmd': 'submit'}).raw) - - self.assertEqual('1', volume['release'].get(guid1)['version']) - self.assertEqual(sorted(['origin', 'deleted']), sorted(volume['release'].get(guid1)['layer'])) - self.assertEqual('2', volume['release'].get(guid2)['version']) - self.assertEqual(['origin'], volume['release'].get(guid2)['layer']) - self.assertEqual('1', volume['release'].get(guid3)['version']) - self.assertEqual(['origin'], volume['release'].get(guid3)['layer']) - self.assertEqual(bundle2, conn.get(['context', 'bundle_id'], cmd='clone')) + ]))) + release3 = json.load(client.request('POST', ['context'], blob3, params={'cmd': 'submit'}).raw) - activity_info = '\n'.join([ + blob4 = self.zips(('topdir/activity/activity.info', '\n'.join([ '[Activity]', 'name = TestActivitry', 'bundle_id = bundle_id', 'exec = true', 'icon = icon', - 'activity_version = 2', + 'activity_version = 4', 'license = Public Domain', - 'stability = buggy', - ]) - bundle4 = self.zips(('topdir/activity/activity.info', activity_info)) - guid4 = json.load(conn.request('POST', ['release'], bundle4, params={'cmd': 'submit'}).raw) - - self.assertEqual('1', volume['release'].get(guid1)['version']) - self.assertEqual(sorted(['origin', 'deleted']), sorted(volume['release'].get(guid1)['layer'])) - self.assertEqual('2', volume['release'].get(guid2)['version']) - self.assertEqual(sorted(['origin', 'deleted']), sorted(volume['release'].get(guid2)['layer'])) - self.assertEqual('1', volume['release'].get(guid3)['version']) - self.assertEqual(['origin'], volume['release'].get(guid3)['layer']) - self.assertEqual('2', volume['release'].get(guid4)['version']) - self.assertEqual(['origin'], volume['release'].get(guid4)['layer']) - self.assertEqual(bundle3, conn.get(['context', 'bundle_id'], cmd='clone')) - - def test_release_UpdateContext(self): - volume = self.start_master() - conn = Connection(auth=http.SugarAuth(keyfile.value)) + 'stability = developer', + ]))) + release4 = json.load(client.request('POST', ['context'], blob4, params={'cmd': 'submit'}).raw) - conn.post(['context'], { - 'guid': 'org.laptop.ImageViewerActivity', - 'type': 'activity', - 'title': {'en': ''}, - 'summary': {'en': ''}, - 'description': {'en': ''}, - }) - svg = '\n'.join([ - '', - '', - ' ', - ']>', - '', - ' ', - ' ', - ' ', - ' ', - ' ', - ' ', - ' ', - ' ', - ' ', - '', - ]) - bundle = self.zips( - ('ImageViewer.activity/activity/activity.info', '\n'.join([ - '[Activity]', - 'bundle_id = org.laptop.ImageViewerActivity', - 'name = Image Viewer', - 'summary = The Image Viewer activity is a simple and fast image viewer tool', - 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.', - 'homepage = 
http://wiki.sugarlabs.org/go/Activities/Image_Viewer', - 'activity_version = 22', - 'license = GPLv2+', - 'icon = activity-imageviewer', - 'exec = true', - 'mime_types = image/bmp;image/gif', - ])), - ('ImageViewer.activity/locale/ru/LC_MESSAGES/org.laptop.ImageViewerActivity.mo', - base64.b64decode('3hIElQAAAAAMAAAAHAAAAHwAAAARAAAA3AAAAAAAAAAgAQAADwAAACEBAAAOAAAAMQEAAA0AAABAAQAACgAAAE4BAAAMAAAAWQEAAA0AAABmAQAAJwAAAHQBAAAUAAAAnAEAABAAAACxAQAABwAAAMIBAAAIAAAAygEAANEBAADTAQAAIQAAAKUDAAATAAAAxwMAABwAAADbAwAAFwAAAPgDAAAhAAAAEAQAAB0AAAAyBAAAQAAAAFAEAAA9AAAAkQQAADUAAADPBAAAFAAAAAUFAAAQAAAAGgUAAAEAAAACAAAABwAAAAAAAAADAAAAAAAAAAwAAAAJAAAAAAAAAAoAAAAEAAAAAAAAAAAAAAALAAAABgAAAAgAAAAFAAAAAENob29zZSBkb2N1bWVudABEb3dubG9hZGluZy4uLgBGaXQgdG8gd2luZG93AEZ1bGxzY3JlZW4ASW1hZ2UgVmlld2VyAE9yaWdpbmFsIHNpemUAUmV0cmlldmluZyBzaGFyZWQgaW1hZ2UsIHBsZWFzZSB3YWl0Li4uAFJvdGF0ZSBhbnRpY2xvY2t3aXNlAFJvdGF0ZSBjbG9ja3dpc2UAWm9vbSBpbgBab29tIG91dABQcm9qZWN0LUlkLVZlcnNpb246IFBBQ0tBR0UgVkVSU0lPTgpSZXBvcnQtTXNnaWQtQnVncy1UbzogClBPVC1DcmVhdGlvbi1EYXRlOiAyMDEyLTA5LTI3IDE0OjU3LTA0MDAKUE8tUmV2aXNpb24tRGF0ZTogMjAxMC0wOS0yMiAxMzo1MCswMjAwCkxhc3QtVHJhbnNsYXRvcjoga3JvbTlyYSA8a3JvbTlyYUBnbWFpbC5jb20+Ckxhbmd1YWdlLVRlYW06IExBTkdVQUdFIDxMTEBsaS5vcmc+Ckxhbmd1YWdlOiAKTUlNRS1WZXJzaW9uOiAxLjAKQ29udGVudC1UeXBlOiB0ZXh0L3BsYWluOyBjaGFyc2V0PVVURi04CkNvbnRlbnQtVHJhbnNmZXItRW5jb2Rpbmc6IDhiaXQKUGx1cmFsLUZvcm1zOiBucGx1cmFscz0zOyBwbHVyYWw9KG4lMTA9PTEgJiYgbiUxMDAhPTExID8gMCA6IG4lMTA+PTIgJiYgbiUxMDw9NCAmJiAobiUxMDA8MTAgfHwgbiUxMDA+PTIwKSA/IDEgOiAyKTsKWC1HZW5lcmF0b3I6IFBvb3RsZSAyLjAuMwoA0JLRi9Cx0LXRgNC40YLQtSDQtNC+0LrRg9C80LXQvdGCANCX0LDQs9GA0YPQt9C60LAuLi4A0KPQvNC10YHRgtC40YLRjCDQsiDQvtC60L3QtQDQn9C+0LvQvdGL0Lkg0Y3QutGA0LDQvQDQn9GA0L7RgdC80L7RgtGAINC60LDRgNGC0LjQvdC+0LoA0JjRgdGC0LjQvdC90YvQuSDRgNCw0LfQvNC10YAA0J/QvtC70YPRh9C10L3QuNC1INC40LfQvtCx0YDQsNC20LXQvdC40LksINC/0L7QtNC+0LbQtNC40YLQtS4uLgDQn9C+0LLQtdGA0L3Rg9GC0Ywg0L/RgNC+0YLQuNCyINGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC4ANCf0L7QstC10YDQvdGD0YLRjCDQv9C+INGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC1ANCf0YDQuNCx0LvQuNC30LjRgtGMANCe0YLQtNCw0LvQuNGC0YwA')), - ('ImageViewer.activity/activity/activity-imageviewer.svg', svg), - ) - impl = json.load(conn.request('POST', ['release'], bundle, params={'cmd': 'submit'}).raw) + assert blob3 == client.get(['context', 'bundle_id'], cmd='clone') + assert blob4 == client.get(['context', 'bundle_id'], cmd='clone', stability='developer') + assert blob1 == client.get(['context', 'bundle_id'], cmd='clone', version='1') - context = volume['context'].get('org.laptop.ImageViewerActivity') - self.assertEqual({ - 'en': 'Image Viewer', - 'ru': u'Просмотр картинок', - }, - context['title']) - self.assertEqual({ - 'en': 'The Image Viewer activity is a simple and fast image viewer tool', - }, - context['summary']) - self.assertEqual({ - 'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.', - }, - context['description']) - self.assertEqual(svg, file(context['artifact_icon']['blob']).read()) - assert 'blob' in context['icon'] - assert 'blob' in context['logo'] - self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage']) - self.assertEqual(['image/bmp', 'image/gif'], context['mime_types']) - - def test_release_CreateContext(self): - volume = self.start_master() - conn = Connection(auth=http.SugarAuth(keyfile.value)) - - bundle = self.zips( - ('ImageViewer.activity/activity/activity.info', '\n'.join([ - '[Activity]', - 'bundle_id = org.laptop.ImageViewerActivity', - 'name = Image Viewer', - 'summary = The Image Viewer 
activity is a simple and fast image viewer tool', - 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.', - 'homepage = http://wiki.sugarlabs.org/go/Activities/Image_Viewer', - 'activity_version = 22', - 'license = GPLv2+', - 'icon = activity-imageviewer', - 'exec = true', - 'mime_types = image/bmp;image/gif', - ])), - ('ImageViewer.activity/activity/activity-imageviewer.svg', ''), - ) - self.assertRaises(http.NotFound, conn.request, 'POST', ['release'], bundle, params={'cmd': 'submit'}) - impl = json.load(conn.request('POST', ['release'], bundle, params={'cmd': 'submit', 'initial': 1}).raw) - - context = volume['context'].get('org.laptop.ImageViewerActivity') - self.assertEqual({'en': 'Image Viewer'}, context['title']) - self.assertEqual({'en': 'The Image Viewer activity is a simple and fast image viewer tool'}, context['summary']) - self.assertEqual({'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.'}, context['description']) - self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage']) - self.assertEqual(['image/bmp', 'image/gif'], context['mime_types']) - assert context['ctime'] > 0 - assert context['mtime'] > 0 - self.assertEqual({tests.UID: {'role': 3, 'name': 'f470db873b6a35903aca1f2492188e1c4b9ffc42', 'order': 0}}, context['author']) - - def test_release_ByNonAuthors(self): - volume = self.start_master() - bundle = self.zips( - ('ImageViewer.activity/activity/activity.info', '\n'.join([ - '[Activity]', - 'bundle_id = org.laptop.ImageViewerActivity', - 'name = Image Viewer', - 'activity_version = 1', - 'license = GPLv2+', - 'icon = activity-imageviewer', - 'exec = true', - ])), - ('ImageViewer.activity/activity/activity-imageviewer.svg', ''), - ) + assert blob1 == client.get(['context', 'bundle_id'], cmd='clone', requires='dep1') + assert blob3 == client.get(['context', 'bundle_id'], cmd='clone', requires='dep2') + assert blob2 == client.get(['context', 'bundle_id'], cmd='clone', requires='dep2=1') + assert blob3 == client.get(['context', 'bundle_id'], cmd='clone', requires='dep2=2') + assert blob2 == client.get(['context', 'bundle_id'], cmd='clone', requires='dep3') - conn = Connection(auth=http.SugarAuth(join(tests.root, 'data', tests.UID))) - impl1 = json.load(conn.request('POST', ['release'], bundle, params={'cmd': 'submit', 'initial': 1}).raw) - impl2 = json.load(conn.request('POST', ['release'], bundle, params={'cmd': 'submit'}).raw) - self.assertEqual(sorted(['origin', 'deleted']), sorted(volume['release'].get(impl1)['layer'])) - self.assertEqual(['origin'], volume['release'].get(impl2)['layer']) + self.assertRaises(http.NotFound, client.get, ['context', 'bundle_id'], cmd='clone', requires='dep4') + self.assertRaises(http.NotFound, client.get, ['context', 'bundle_id'], cmd='clone', stability='foo') - conn = Connection(auth=http.SugarAuth(join(tests.root, 'data', tests.UID2))) - conn.get(cmd='whoami') - impl3 = json.load(conn.request('POST', ['release'], bundle, params={'cmd': 'submit'}).raw) - self.assertEqual(sorted(['origin', 'deleted']), sorted(volume['release'].get(impl1)['layer'])) - self.assertEqual(sorted(['origin', 'deleted']), sorted(volume['release'].get(impl2)['layer'])) - self.assertEqual([], volume['release'].get(impl3)['layer']) + response = Response() + client.call(Request(method='GET', path=['context', 'bundle_id'], cmd='clone'), response) + announce = next(volume['post'].find(query='3', limit=1)[0]).guid + self.assertEqual({ + 'license': 
['Public Domain'], + 'unpack_size': 162, + 'stability': 'stable', + 'version': '3', + 'release': [[3], 0], + 'announce': announce, + 'requires': ['dep2-2'], + 'spec': { + '*-*': { + 'commands': {'activity': {'exec': u'true'}}, + 'requires': {'dep2': {'restrictions': [['2', None]]}}, + 'bundle': str(hash(blob3)), + }, + }, + }, response.meta) - def test_release_PopulateRequires(self): + def test_release(self): volume = self.start_master() conn = Connection(auth=http.SugarAuth(keyfile.value)) + activity_info = '\n'.join([ + '[Activity]', + 'name = TestActivitry', + 'bundle_id = bundle_id', + 'exec = true', + 'icon = icon', + 'activity_version = 1', + 'license = Public Domain', + 'stability = developer', + ]) + changelog = "LOG" bundle = self.zips( - ('ImageViewer.activity/activity/activity.info', '\n'.join([ - '[Activity]', - 'bundle_id = org.laptop.ImageViewerActivity', - 'name = Image Viewer', - 'activity_version = 22', - 'license = GPLv2+', - 'icon = activity-imageviewer', - 'exec = true', - 'requires = dep1, dep2<10, dep3<=20, dep4>30, dep5>=40, dep6>5<7, dep7>=1<=3', - ])), - ('ImageViewer.activity/activity/activity-imageviewer.svg', ''), + ('topdir/activity/activity.info', activity_info), + ('topdir/CHANGELOG', changelog), ) - self.assertRaises(http.NotFound, conn.request, 'POST', ['release'], bundle, params={'cmd': 'submit'}) - impl = json.load(conn.request('POST', ['release'], bundle, params={'cmd': 'submit', 'initial': 1}).raw) - - self.assertEqual( - sorted([ - 'dep1', 'dep2', 'dep3', 'dep4-31', 'dep5-40', - 'dep6-6', - 'dep7-1', 'dep7-2', 'dep7-3', - ]), - sorted(volume['release'].get(impl)['requires'])) - - def test_generate_node_stats_Posts(self): - node.stats_root.value = 'stats' - stats_node.stats_node.value = True - stats_node.stats_node_rras.value = ['RRA:AVERAGE:0.5:1:10', 'RRA:AVERAGE:0.5:10:10'] - volume = db.Volume('db', model.RESOURCES) - - ts = 1000000000 - - volume['user'].create({ - 'guid': 'user_1', - 'ctime': ts + 1, - 'mtime': ts + 1, - 'layer': [], - 'name': '', - }) - volume['context'].create({ - 'guid': 'context_1', - 'ctime': ts + 1, - 'mtime': ts + 1, - 'layer': [], - 'type': 'activity', - 'title': '', - 'summary': '', - 'description': '', - }) - volume['release'].create({ - 'guid': 'impl_1', - 'ctime': ts + 2, - 'mtime': ts + 2, - 'layer': [], - 'context': 'context_1', - 'license': ['GPL-3'], - 'version': '1', - }) - volume['post'].create({ - 'guid': 'topic_1', - 'ctime': ts + 3, - 'mtime': ts + 3, - 'layer': [], - 'context': 'context_1', - 'type': 'object', - 'title': '', - 'message': '', - }) - volume['post'].create({ - 'guid': 'solution_1', - 'ctime': ts + 5, - 'mtime': ts + 5, - 'layer': [], - 'context': 'context_1', - 'topic': 'topic_1', - 'title': '', - 'message': '', - 'type': 'answer', - }) - volume['post'].create({ - 'guid': 'review_1', - 'ctime': ts + 6, - 'mtime': ts + 6, - 'layer': [], - 'context': 'context_1', - 'vote': 1, - 'title': '', - 'message': '', - 'type': 'review', - }) - volume['post'].create({ - 'guid': 'review_2', - 'ctime': ts + 6, - 'mtime': ts + 6, - 'layer': [], - 'context': 'context_1', - 'topic': 'topic_1', - 'vote': 2, - 'title': '', - 'message': '', - 'type': 'feedback', - }) - volume['report'].create({ - 'guid': 'report_1', - 'ctime': ts + 8, - 'mtime': ts + 8, - 'layer': [], - 'context': 'context_1', - 'release': 'impl_1', - 'error': '', - }) - volume['user'].create({ - 'guid': 'user_2', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'name': '', - }) - volume['context'].create({ - 'guid': 'context_2', - 'ctime': ts 
+ 4, - 'mtime': ts + 4, - 'layer': [], - 'type': 'activity', - 'title': '', - 'summary': '', - 'description': '', - }) - volume['release'].create({ - 'guid': 'impl_2', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'license': ['GPL-3'], - 'version': '1', - }) - volume['release'].create({ - 'guid': 'impl_3', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'license': ['GPL-3'], - 'version': '1', - }) - volume['post'].create({ - 'guid': 'review_3', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'vote': 3, - 'title': '', - 'message': '', - 'type': 'review', - }) - volume['post'].create({ - 'guid': 'review_4', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'vote': 4, - 'title': '', - 'message': '', - 'type': 'review', - }) - volume['report'].create({ - 'guid': 'report_2', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'release': 'impl_1', - 'error': '', - }) - volume['report'].create({ - 'guid': 'report_3', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'release': 'impl_1', - 'error': '', - }) - volume['post'].create({ - 'guid': 'topic_2', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'type': 'object', - 'title': '', - 'message': '', - }) - volume['post'].create({ - 'guid': 'solution_2', - 'ctime': ts + 4, - 'mtime': ts + 4, - 'layer': [], - 'context': 'context_2', - 'topic': 'topic_2', - 'title': '', - 'message': '', - 'type': 'answer', - }) - - self.override(time, 'time', lambda: ts + 9) - old_stats = stats_node.Sniffer(volume, 'stats/node') - old_stats.log(Request(method='GET', path=['release', 'impl_1', 'data'])) - old_stats.log(Request(method='GET', path=['post', 'topic_1', 'data'])) - old_stats.commit(ts + 1) - old_stats.commit_objects() - old_stats.commit(ts + 2) - old_stats.commit(ts + 3) - old_stats.log(Request(method='GET', path=['release', 'impl_1', 'data'])) - old_stats.log(Request(method='GET', path=['release', 'impl_2', 'data'])) - old_stats.commit(ts + 4) - old_stats.commit_objects() - old_stats.commit(ts + 5) - old_stats.commit(ts + 6) - old_stats.log(Request(method='GET', path=['post', 'topic_1', 'data'])) - old_stats.log(Request(method='GET', path=['post', 'topic_2', 'data'])) - old_stats.commit(ts + 7) - old_stats.commit_objects() - old_stats.commit(ts + 8) - old_stats.commit_objects() - - generate_node_stats(volume, 'stats/node') - cp = NodeRoutes('guid', volume) + release = json.load(conn.request('POST', ['context'], bundle, params={'cmd': 'submit', 'initial': True}).raw) + announce = next(volume['post'].find(query='1', limit=1)[0]).guid self.assertEqual({ - 'user': [ - (ts + 1, {'total': 1.0}), - (ts + 2, {'total': 1.0}), - (ts + 3, {'total': 1.0}), - (ts + 4, {'total': 2.0}), - (ts + 5, {'total': 2.0}), - (ts + 6, {'total': 2.0}), - (ts + 7, {'total': 2.0}), - (ts + 8, {'total': 2.0}), - (ts + 9, {'total': 2.0}), - ], - 'context': [ - (ts + 1, {'total': 1.0, 'released': 0.0, 'failed': 0.0, 'downloaded': 1.0}), - (ts + 2, {'total': 1.0, 'released': 1.0, 'failed': 0.0, 'downloaded': 1.0}), - (ts + 3, {'total': 1.0, 'released': 1.0, 'failed': 0.0, 'downloaded': 1.0}), - (ts + 4, {'total': 2.0, 'released': 3.0, 'failed': 2.0, 'downloaded': 3.0}), - (ts + 5, {'total': 2.0, 'released': 3.0, 'failed': 2.0, 'downloaded': 3.0}), - (ts + 6, {'total': 2.0, 'released': 3.0, 'failed': 2.0, 'downloaded': 3.0}), - (ts + 7, {'total': 2.0, 'released': 
3.0, 'failed': 2.0, 'downloaded': 3.0}), - (ts + 8, {'total': 2.0, 'released': 3.0, 'failed': 3.0, 'downloaded': 3.0}), - (ts + 9, {'total': 2.0, 'released': 3.0, 'failed': 3.0, 'downloaded': 3.0}), - ], - 'post': [ - (ts + 1, {'total': 0.0, 'downloaded': 1.0}), - (ts + 2, {'total': 0.0, 'downloaded': 1.0}), - (ts + 3, {'total': 1.0, 'downloaded': 1.0}), - (ts + 4, {'total': 5.0, 'downloaded': 1.0}), - (ts + 5, {'total': 6.0, 'downloaded': 1.0}), - (ts + 6, {'total': 8.0, 'downloaded': 1.0}), - (ts + 7, {'total': 8.0, 'downloaded': 3.0}), - (ts + 8, {'total': 8.0, 'downloaded': 3.0}), - (ts + 9, {'total': 8.0, 'downloaded': 3.0}), - ], - }, - call(cp, method='GET', cmd='stats', source=[ - 'user.total', - 'context.total', - 'context.released', - 'context.failed', - 'context.downloaded', - 'post.total', - 'post.downloaded', - ], start=ts + 1, end=ts + 10)) + release: { + 'seqno': 4, + 'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}}, + 'value': { + 'license': ['Public Domain'], + 'announce': announce, + 'release': [[1], 0], + 'requires': [], + 'spec': {'*-*': {'bundle': str(hash(bundle)), 'commands': {'activity': {'exec': 'true'}}, 'requires': {}}}, + 'stability': 'developer', + 'unpack_size': len(activity_info) + len(changelog), + 'version': '1', + }, + }, + }, conn.get(['context', 'bundle_id', 'releases'])) + post = volume['post'][announce] + assert tests.UID in post['author'] + self.assertEqual('notification', post['type']) self.assertEqual({ - 'downloads': 2, - 'rating': [1, 1], - }, - volume['context'].get('context_1').properties(['downloads', 'rating'])) - self.assertEqual({ - 'downloads': 1, - 'rating': [2, 7], - }, - volume['context'].get('context_2').properties(['downloads', 'rating'])) - self.assertEqual({ - 'downloads': 2, - 'rating': [1, 2], - }, - volume['post'].get('topic_1').properties(['downloads', 'rating'])) - self.assertEqual({ - 'downloads': 1, - 'rating': [0, 0], - }, - volume['post'].get('topic_2').properties(['downloads', 'rating'])) - - def test_generate_node_stats_Deletes(self): - node.stats_root.value = 'stats' - stats_node.stats_node.value = True - stats_node.stats_node_rras.value = ['RRA:AVERAGE:0.5:1:10', 'RRA:AVERAGE:0.5:10:10'] - volume = db.Volume('db', model.RESOURCES) - - ts = 1000000000 - - volume['user'].create({ - 'guid': 'user_1', - 'ctime': ts + 1, - 'mtime': ts + 2, - 'layer': ['deleted'], - 'name': '', - }) - volume['context'].create({ - 'guid': 'context_1', - 'ctime': ts + 1, - 'mtime': ts + 2, - 'layer': ['deleted'], - 'type': 'activity', - 'title': '', - 'summary': '', - 'description': '', - }) - volume['release'].create({ - 'guid': 'impl_1', - 'ctime': ts + 1, - 'mtime': ts + 2, - 'layer': ['deleted'], - 'context': 'context_1', - 'license': ['GPL-3'], - 'version': '1', - }) - volume['post'].create({ - 'guid': 'post_1', - 'ctime': ts + 1, - 'mtime': ts + 2, - 'layer': ['deleted'], - 'context': 'context_1', - 'type': 'object', - 'title': '', - 'message': '', - }) - volume['report'].create({ - 'guid': 'report_1', - 'ctime': ts + 1, - 'mtime': ts + 2, - 'layer': ['deleted'], - 'context': 'context_1', - 'release': 'impl_1', - 'error': '', - }) - - self.override(time, 'time', lambda: ts + 9) - generate_node_stats(volume, 'stats/node') - cp = NodeRoutes('guid', volume) - + 'en': 'TestActivitry 1 release', + 'es': 'TestActivitry 1 release', + 'fr': 'TestActivitry 1 release', + }, post['title']) self.assertEqual({ - 'user': [ - (ts + 1, {'total': 1.0}), - (ts + 2, {'total': 0.0}), - (ts + 3, {'total': 0.0}), - ], - 'context': [ - (ts + 1, 
{'total': 1.0}), - (ts + 2, {'total': 0.0}), - (ts + 3, {'total': 0.0}), - ], - 'post': [ - (ts + 1, {'total': 1.0}), - (ts + 2, {'total': 0.0}), - (ts + 3, {'total': 0.0}), - ], - }, - call(cp, method='GET', cmd='stats', source=[ - 'user.total', - 'context.total', - 'post.total', - ], start=ts + 1, end=ts + 3)) + 'en-us': 'LOG', + }, post['message']) def test_AggpropInsertAccess(self): class Document(db.Resource): - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.READ | ACL.INSERT) + @db.stored_property(db.Aggregated, acl=ACL.READ | ACL.INSERT) def prop1(self, value): return value - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.READ | ACL.INSERT | ACL.AUTHOR) + @db.stored_property(db.Aggregated, acl=ACL.READ | ACL.INSERT | ACL.AUTHOR) def prop2(self, value): return value volume = db.Volume('db', [Document, User]) - volume['user'].create({'guid': tests.UID, 'name': 'user1', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': {'blob': StringIO(tests.PUBKEY2)}}) + cp = NodeRoutes('node', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user1', 'pubkey': tests.PUBKEY}) + volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2}) - cp = NodeRoutes('node', volume) guid = call(cp, method='POST', document='document', principal=tests.UID, content={}) self.override(time, 'time', lambda: 0) - call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID, content={'guid': '1'}) - call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID2, content={'guid': '2'}) + agg1 = call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID) + agg2 = call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID2) self.assertEqual({ - '1': {'seqno': 4, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, - '2': {'seqno': 5, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 3}}}, + agg1: {'seqno': 4, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}, 'value': None}, + agg2: {'seqno': 5, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 1}}, 'value': None}, }, call(cp, method='GET', path=['document', guid, 'prop1'])) - call(cp, method='POST', path=['document', guid, 'prop2'], principal=tests.UID, content={'guid': '1'}) - self.assertRaises(http. Forbidden, call, cp, method='POST', path=['document', guid, 'prop2'], principal=tests.UID2, content={'guid': '2'}) + agg3 = call(cp, method='POST', path=['document', guid, 'prop2'], principal=tests.UID) + self.assertRaises(http. 
Forbidden, call, cp, method='POST', path=['document', guid, 'prop2'], principal=tests.UID2) self.assertEqual({ - '1': {'seqno': 6, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, + agg3: {'seqno': 6, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}, 'value': None}, }, call(cp, method='GET', path=['document', guid, 'prop2'])) @@ -1356,64 +694,64 @@ class NodeTest(tests.Test): class Document(db.Resource): - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.READ | ACL.INSERT | ACL.REMOVE) + @db.stored_property(db.Aggregated, acl=ACL.READ | ACL.INSERT | ACL.REMOVE) def prop1(self, value): return value - @db.stored_property(typecast=db.AggregatedType, default=db.AggregatedType(), acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.AUTHOR) + @db.stored_property(db.Aggregated, acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.AUTHOR) def prop2(self, value): return value volume = db.Volume('db', [Document, User]) - volume['user'].create({'guid': tests.UID, 'name': 'user1', 'pubkey': {'blob': StringIO(tests.PUBKEY)}}) - volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': {'blob': StringIO(tests.PUBKEY2)}}) + cp = NodeRoutes('node', volume=volume) + volume['user'].create({'guid': tests.UID, 'name': 'user1', 'pubkey': tests.PUBKEY}) + volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2}) - cp = NodeRoutes('node', volume) guid = call(cp, method='POST', document='document', principal=tests.UID, content={}) self.override(time, 'time', lambda: 0) - call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID, content={'guid': '1', 'probe': True}) - call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID2, content={'guid': '2', 'probe': True}) + agg1 = call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID, content=True) + agg2 = call(cp, method='POST', path=['document', guid, 'prop1'], principal=tests.UID2, content=True) self.assertEqual({ - '1': {'seqno': 4, 'probe': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, - '2': {'seqno': 5, 'probe': True, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 3}}}, + agg1: {'seqno': 4, 'value': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, + agg2: {'seqno': 5, 'value': True, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 1}}}, }, call(cp, method='GET', path=['document', guid, 'prop1'])) - self.assertRaises(http.Forbidden, call, cp, method='DELETE', path=['document', guid, 'prop1', '1'], principal=tests.UID2) - self.assertRaises(http.Forbidden, call, cp, method='DELETE', path=['document', guid, 'prop1', '2'], principal=tests.UID) + self.assertRaises(http.Forbidden, call, cp, method='DELETE', path=['document', guid, 'prop1', agg1], principal=tests.UID2) + self.assertRaises(http.Forbidden, call, cp, method='DELETE', path=['document', guid, 'prop1', agg2], principal=tests.UID) self.assertEqual({ - '1': {'seqno': 4, 'probe': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, - '2': {'seqno': 5, 'probe': True, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 3}}}, + agg1: {'seqno': 4, 'value': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, + agg2: {'seqno': 5, 'value': True, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 1}}}, }, call(cp, method='GET', path=['document', guid, 'prop1'])) - call(cp, method='DELETE', path=['document', guid, 'prop1', '1'], 
principal=tests.UID) + call(cp, method='DELETE', path=['document', guid, 'prop1', agg1], principal=tests.UID) self.assertEqual({ - '1': {'seqno': 6}, - '2': {'seqno': 5, 'probe': True, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 3}}}, + agg1: {'seqno': 6, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, + agg2: {'seqno': 5, 'value': True, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 1}}}, }, call(cp, method='GET', path=['document', guid, 'prop1'])) - call(cp, method='DELETE', path=['document', guid, 'prop1', '2'], principal=tests.UID2) + call(cp, method='DELETE', path=['document', guid, 'prop1', agg2], principal=tests.UID2) self.assertEqual({ - '1': {'seqno': 6}, - '2': {'seqno': 7}, + agg1: {'seqno': 6, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, + agg2: {'seqno': 7, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 1}}}, }, call(cp, method='GET', path=['document', guid, 'prop1'])) - call(cp, method='POST', path=['document', guid, 'prop2'], principal=tests.UID, content={'guid': '1', 'probe': True}) + agg3 = call(cp, method='POST', path=['document', guid, 'prop2'], principal=tests.UID, content=True) self.assertEqual({ - '1': {'seqno': 8, 'probe': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, + agg3: {'seqno': 8, 'value': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, }, call(cp, method='GET', path=['document', guid, 'prop2'])) - self.assertRaises(http.Forbidden, call, cp, method='DELETE', path=['document', guid, 'prop2', '1'], principal=tests.UID2) + self.assertRaises(http.Forbidden, call, cp, method='DELETE', path=['document', guid, 'prop2', agg3], principal=tests.UID2) self.assertEqual({ - '1': {'seqno': 8, 'probe': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, + agg3: {'seqno': 8, 'value': True, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, }, call(cp, method='GET', path=['document', guid, 'prop2'])) - call(cp, method='DELETE', path=['document', guid, 'prop2', '1'], principal=tests.UID) + call(cp, method='DELETE', path=['document', guid, 'prop2', agg3], principal=tests.UID) self.assertEqual({ - '1': {'seqno': 9}, + agg3: {'seqno': 9, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}}, }, call(cp, method='GET', path=['document', guid, 'prop2'])) diff --git a/tests/units/node/obs.py b/tests/units/node/obs.py index bf43ed6..21b53a0 100755 --- a/tests/units/node/obs.py +++ b/tests/units/node/obs.py @@ -39,8 +39,8 @@ class ObsTest(tests.Test): ])) self.assertEqual([ - {'distributor_id': 'Debian', 'name': 'Debian-6.0', 'arches': ['i586', 'x86_64']}, - {'distributor_id': 'Fedora', 'name': 'Fedora-11', 'arches': ['i586']}, + {'lsb_id': 'Debian', 'lsb_release': '6.0', 'name': 'Debian-6.0', 'arches': ['i586', 'x86_64']}, + {'lsb_id': 'Fedora', 'lsb_release': '11', 'name': 'Fedora-11', 'arches': ['i586']}, ], obs.get_repos()) @@ -51,21 +51,10 @@ class ObsTest(tests.Test): 'project': 'base', 'repository': 'repo', 'arch': 'arch', - 'package': 'pkg1', + 'package': ['pkg1', 'pkg2'], }}, [ '', ' ', - '', - ], - ), - (('GET', ['resolve']), - {'allowed': (400, 404), 'params': { - 'project': 'base', - 'repository': 'repo', - 'arch': 'arch', - 'package': 'pkg2', - }}, - [ '', ' ', '', ], @@ -122,10 +111,7 @@ class ObsTest(tests.Test): ('http://pkg2-2.prm', ['4']), ])) - obs.presolve({ - 'Debian': {'binary': [['deb']]}, - 'Fedora': {'binary': [['pkg1', 'pkg2']], 'devel': [['pkg3']]}, - }, '.') + obs.presolve(None, 
['pkg1', 'pkg2'], '.') self.assertEqual({ 'arch': [ diff --git a/tests/units/node/stats_node.py b/tests/units/node/stats_node.py deleted file mode 100755 index eab7fb8..0000000 --- a/tests/units/node/stats_node.py +++ /dev/null @@ -1,338 +0,0 @@ -#!/usr/bin/env python -# sugar-lint: disable - -import time - -from __init__ import tests - -from sugar_network import db, model -from sugar_network.node.stats_node import Sniffer, stats_node_step -from sugar_network.toolkit.rrd import Rrd -from sugar_network.toolkit.router import Request - - -class StatsTest(tests.Test): - - def test_InitializeTotals(self): - volume = db.Volume('local', model.RESOURCES) - - stats = Sniffer(volume, 'stats/node') - self.assertEqual(0, stats._stats['user']['total']) - self.assertEqual(0, stats._stats['context']['total']) - self.assertEqual(0, stats._stats['post']['total']) - - volume['user'].create({'guid': 'user', 'name': 'user', 'pubkey': ''}) - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - volume['post'].create({'guid': 'post', 'context': 'context', 'title': '', 'message': '', 'type': 'update'}) - - stats = Sniffer(volume, 'stats/node') - self.assertEqual(1, stats._stats['user']['total']) - self.assertEqual(1, stats._stats['context']['total']) - self.assertEqual(1, stats._stats['post']['total']) - - def test_POSTs(self): - volume = db.Volume('local', model.RESOURCES) - stats = Sniffer(volume, 'stats/node') - - request = Request(method='POST', path=['context']) - request.principal = 'user' - stats.log(request) - stats.log(request) - stats.log(request) - self.assertEqual(3, stats._stats['context']['total']) - - def test_DELETEs(self): - volume = db.Volume('local', model.RESOURCES) - stats = Sniffer(volume, 'stats/node') - - request = Request(method='DELETE', path=['context']) - request.principal = 'user' - stats.log(request) - stats.log(request) - stats.log(request) - self.assertEqual(-3, stats._stats['context']['total']) - - def test_Posts(self): - volume = db.Volume('local', model.RESOURCES) - stats = Sniffer(volume, 'stats/node') - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - volume['post'].create({'guid': 'topic', 'type': 'update', 'context': 'context', 'title': '', 'message': ''}) - - request = Request(method='POST', path=['post']) - request.principal = 'user' - request.content = {'context': 'context', 'vote': 1, 'type': 'review', 'title': '', 'message': ''} - stats.log(request) - self.assertEqual(1, stats._stats['post']['total']) - - request = Request(method='POST', path=['post']) - request.principal = 'user' - request.content = {'context': 'context', 'vote': 2, 'type': 'review', 'title': '', 'message': ''} - stats.log(request) - self.assertEqual(2, stats._stats['post']['total']) - - request = Request(method='POST', path=['post']) - request.principal = 'user' - request.content = {'topic': 'topic', 'vote': 3, 'type': 'feedback', 'title': '', 'message': ''} - stats.log(request) - self.assertEqual(3, stats._stats['post']['total']) - - stats.commit_objects() - self.assertEqual([2, 3], volume['context'].get('context')['rating']) - self.assertEqual([1, 3], volume['post'].get('topic')['rating']) - - def test_ContextDownloaded(self): - volume = db.Volume('local', model.RESOURCES) - stats = Sniffer(volume, 'stats/node') - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - volume['release'].create({'guid': 'release', 
'context': 'context', 'license': 'GPLv3', 'version': '1', 'date': 0, 'stability': 'stable', 'notes': ''}) - - request = Request(method='GET', path=['release', 'release', 'fake']) - request.principal = 'user' - stats.log(request) - self.assertEqual(0, stats._stats['context']['downloaded']) - - request = Request(method='GET', path=['release', 'release', 'data']) - request.principal = 'user' - stats.log(request) - self.assertEqual(1, stats._stats['context']['downloaded']) - - def test_ContextReleased(self): - volume = db.Volume('local', model.RESOURCES) - stats = Sniffer(volume, 'stats/node') - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - - request = Request(method='POST', path=['release']) - request.principal = 'user' - request.content = {'context': 'context'} - stats.log(request) - self.assertEqual(1, stats._stats['context']['released']) - - def test_ContextFailed(self): - volume = db.Volume('local', model.RESOURCES) - stats = Sniffer(volume, 'stats/node') - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - - request = Request(method='POST', path=['report']) - request.principal = 'user' - request.content = {'context': 'context'} - stats.log(request) - self.assertEqual(1, stats._stats['context']['failed']) - - def test_PostDownloaded(self): - volume = db.Volume('local', model.RESOURCES) - stats = Sniffer(volume, 'stats/node') - volume['post'].create({'guid': 'topic', 'type': 'object', 'context': 'context', 'title': '', 'message': ''}) - - request = Request(method='GET', path=['post', 'topic', 'fake']) - request.principal = 'user' - stats.log(request) - self.assertEqual(0, stats._stats['post']['downloaded']) - - request = Request(method='GET', path=['post', 'topic', 'data']) - request.principal = 'user' - stats.log(request) - self.assertEqual(1, stats._stats['post']['downloaded']) - - def test_Commit(self): - volume = db.Volume('local', model.RESOURCES) - volume['user'].create({'guid': 'user', 'name': 'user', 'pubkey': ''}) - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - volume['post'].create({'guid': 'review', 'context': 'context', 'type': 'review', 'title': '', 'message': '', 'vote': 5}) - - stats = Sniffer(volume, 'stats/node') - request = Request(method='GET', path=['user', 'user']) - request.principal = 'user' - stats.log(request) - request = Request(method='GET', path=['context', 'context']) - request.principal = 'user' - stats.log(request) - request = Request(method='GET', path=['post', 'review']) - request.principal = 'user' - stats.log(request) - - self.assertEqual(1, stats._stats['user']['total']) - self.assertEqual(1, stats._stats['context']['total']) - self.assertEqual(1, stats._stats['post']['total']) - - ts = int(time.time()) - stats.commit(ts) - stats.commit_objects() - - self.assertEqual(1, stats._stats['user']['total']) - self.assertEqual(1, stats._stats['context']['total']) - self.assertEqual(1, stats._stats['post']['total']) - - self.assertEqual([ - [('post', ts, { - 'downloaded': 0.0, - 'total': 1.0, - })], - [('user', ts, { - 'total': 1.0, - })], - [('context', ts, { - 'failed': 0.0, - 'downloaded': 0.0, - 'total': 1.0, - 'released': 0.0, - })], - ], - [[(j.name,) + i for i in j.get(j.last, j.last)] for j in Rrd('stats/node', 1)]) - - def test_CommitContextStats(self): - volume = db.Volume('local', model.RESOURCES) - - volume['context'].create({'guid': 'context', 'type': 
'activity', 'title': '', 'summary': '', 'description': ''}) - volume['release'].create({'guid': 'release', 'context': 'context', 'license': 'GPLv3', 'version': '1', 'date': 0, 'stability': 'stable', 'notes': ''}) - - self.assertEqual(0, volume['context'].get('context')['downloads']) - self.assertEqual([0, 0], volume['context'].get('context')['rating']) - - stats = Sniffer(volume, 'stats/node') - request = Request(method='GET', path=['release', 'release', 'data']) - request.principal = 'user' - stats.log(request) - request = Request(method='POST', path=['post']) - request.principal = 'user' - request.content = {'context': 'context', 'vote': 5, 'type': 'review', 'title': '', 'message': ''} - stats.log(request) - - stats.commit() - stats.commit_objects() - - self.assertEqual(1, volume['context'].get('context')['downloads']) - self.assertEqual([1, 5], volume['context'].get('context')['rating']) - - stats.commit() - stats.commit_objects() - - self.assertEqual(1, volume['context'].get('context')['downloads']) - self.assertEqual([1, 5], volume['context'].get('context')['rating']) - - stats = Sniffer(volume, 'stats/node') - request = Request(method='GET', path=['release', 'release', 'data']) - request.principal = 'user' - stats.log(request) - request = Request(method='POST', path=['post']) - request.principal = 'user' - request.content = {'context': 'context', 'vote': 1, 'type': 'review', 'title': '', 'message': ''} - stats.log(request) - stats.commit() - stats.commit_objects() - - self.assertEqual(2, volume['context'].get('context')['downloads']) - self.assertEqual([2, 6], volume['context'].get('context')['rating']) - - def test_CommitTopicStats(self): - volume = db.Volume('local', model.RESOURCES) - - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - volume['post'].create({'guid': 'topic', 'type': 'object', 'context': 'context', 'title': '', 'message': ''}) - - self.assertEqual(0, volume['post'].get('topic')['downloads']) - self.assertEqual([0, 0], volume['post'].get('topic')['rating']) - - stats = Sniffer(volume, 'stats/node') - request = Request(method='GET', path=['post', 'topic', 'data']) - request.principal = 'user' - stats.log(request) - request = Request(method='POST', path=['post']) - request.principal = 'user' - request.content = {'topic': 'topic', 'vote': 5, 'type': 'feedback'} - stats.log(request) - stats.commit() - stats.commit_objects() - - self.assertEqual(1, volume['post'].get('topic')['downloads']) - self.assertEqual([1, 5], volume['post'].get('topic')['rating']) - - stats.commit() - stats.commit_objects() - - self.assertEqual(1, volume['post'].get('topic')['downloads']) - self.assertEqual([1, 5], volume['post'].get('topic')['rating']) - - request = Request(method='GET', path=['post', 'topic', 'data']) - request.principal = 'user' - stats.log(request) - request = Request(method='POST', path=['post']) - request.principal = 'user' - request.content = {'topic': 'topic', 'vote': 1, 'type': 'feedback'} - stats.log(request) - stats.commit() - stats.commit_objects() - - self.assertEqual(2, volume['post'].get('topic')['downloads']) - self.assertEqual([2, 6], volume['post'].get('topic')['rating']) - - def test_Suspend(self): - stats_node_step.value = 5 - volume = db.Volume('local', model.RESOURCES) - volume['context'].create({'guid': 'context', 'type': 'activity', 'title': '', 'summary': '', 'description': ''}) - volume['release'].create({'guid': 'impl', 'context': 'context', 'license': 'GPLv3', 'version': '1', 'date': 0, 
'stability': 'stable', 'notes': ''}) - - ts = self.ts = 1000000000 - self.override(time, 'time', lambda: self.ts) - - stats = Sniffer(volume, 'stats') - request = Request(method='POST', path=['context']) - stats.log(request) - request = Request(method='GET', path=['release', 'impl', 'data'], context='context') - stats.log(request) - stats.suspend() - - rdb = Rrd('stats', 1)['context'] - self.assertEqual([ - ], - [i for i in rdb.get(ts, ts + 10)]) - - stats = Sniffer(volume, 'stats') - stats.suspend() - - rdb = Rrd('stats', 1)['context'] - self.assertEqual([ - ], - [i for i in rdb.get(ts, ts + 10)]) - - self.ts += 6 - stats = Sniffer(volume, 'stats') - - rdb = Rrd('stats', 1)['context'] - self.assertEqual([ - (ts + 0, {'failed': 0.0, 'downloaded': 0.0, 'total': 0.0, 'released': 0.0}), - (ts + 5, {'failed': 0.0, 'downloaded': 1.0, 'total': 2.0, 'released': 0.0}), - ], - [i for i in rdb.get(ts, ts + 20)]) - - request = Request(method='POST', path=['context']) - stats.log(request) - request = Request(method='GET', path=['release', 'impl', 'data'], context='context') - stats.log(request) - request = Request(method='GET', path=['release', 'impl', 'data'], context='context') - stats.log(request) - stats.suspend() - - stats = Sniffer(volume, 'stats') - stats.suspend() - - rdb = Rrd('stats', 1)['context'] - self.assertEqual([ - (ts + 0, {'failed': 0.0, 'downloaded': 0.0, 'total': 0.0, 'released': 0.0}), - (ts + 5, {'failed': 0.0, 'downloaded': 1.0, 'total': 2.0, 'released': 0.0}), - ], - [i for i in rdb.get(ts, ts + 10)]) - - self.ts += 6 - stats = Sniffer(volume, 'stats') - - rdb = Rrd('stats', 1)['context'] - self.assertEqual([ - (ts + 0, {'failed': 0.0, 'downloaded': 0.0, 'total': 0.0, 'released': 0.0}), - (ts + 5, {'failed': 0.0, 'downloaded': 1.0, 'total': 2.0, 'released': 0.0}), - (ts + 10, {'failed': 0.0, 'downloaded': 3.0, 'total': 3.0, 'released': 0.0}), - ], - [i for i in rdb.get(ts, ts + 20)]) - - -if __name__ == '__main__': - tests.main() diff --git a/tests/units/node/sync_online.py b/tests/units/node/sync_online.py index 7ee6dcb..e2c864a 100755 --- a/tests/units/node/sync_online.py +++ b/tests/units/node/sync_online.py @@ -44,11 +44,11 @@ class SyncOnlineTest(tests.Test): def type(self, value): return value - @db.indexed_property(slot=1, prefix='N', full_text=True, localized=True) + @db.indexed_property(db.Localized, slot=1, prefix='N', full_text=True) def title(self, value): return value - @db.indexed_property(prefix='D', full_text=True, localized=True) + @db.indexed_property(db.Localized, prefix='D', full_text=True) def message(self, value): return value @@ -80,8 +80,8 @@ class SyncOnlineTest(tests.Test): self.assertEqual([[4, None]], json.load(file('slave/pull.sequence'))) self.assertEqual([[2, None]], json.load(file('slave/push.sequence'))) - guid1 = client.post(['document'], {'context': '', 'message': '1', 'title': '', 'type': 'comment'}) - guid2 = client.post(['document'], {'context': '', 'message': '2', 'title': '', 'type': 'comment'}) + guid1 = client.post(['document'], {'context': '', 'message': '1', 'title': '', 'type': 'post'}) + guid2 = client.post(['document'], {'context': '', 'message': '2', 'title': '', 'type': 'post'}) client.post(cmd='online-sync') self.assertEqual([ @@ -92,7 +92,7 @@ class SyncOnlineTest(tests.Test): self.assertEqual([[6, None]], json.load(file('slave/pull.sequence'))) self.assertEqual([[4, None]], json.load(file('slave/push.sequence'))) - guid3 = client.post(['document'], {'context': '', 'message': '3', 'title': '', 'type': 'comment'}) + guid3 = 
client.post(['document'], {'context': '', 'message': '3', 'title': '', 'type': 'post'}) client.post(cmd='online-sync') self.assertEqual([ {'guid': guid1, 'message': {'en-us': '1'}}, @@ -128,7 +128,7 @@ class SyncOnlineTest(tests.Test): client.put(['document', guid1], {'message': 'a'}) client.put(['document', guid2], {'message': 'b'}) client.put(['document', guid3], {'message': 'c'}) - guid4 = client.post(['document'], {'context': '', 'message': 'd', 'title': '', 'type': 'comment'}) + guid4 = client.post(['document'], {'context': '', 'message': 'd', 'title': '', 'type': 'post'}) client.delete(['document', guid2]) client.post(cmd='online-sync') self.assertEqual([ @@ -158,8 +158,8 @@ class SyncOnlineTest(tests.Test): self.assertEqual([[4, None]], json.load(file('slave/pull.sequence'))) self.assertEqual([[2, None]], json.load(file('slave/push.sequence'))) - guid1 = client.post(['document'], {'context': '', 'message': '1', 'title': '', 'type': 'comment'}) - guid2 = client.post(['document'], {'context': '', 'message': '2', 'title': '', 'type': 'comment'}) + guid1 = client.post(['document'], {'context': '', 'message': '1', 'title': '', 'type': 'post'}) + guid2 = client.post(['document'], {'context': '', 'message': '2', 'title': '', 'type': 'post'}) slave_client.post(cmd='online-sync') self.assertEqual([ @@ -170,7 +170,7 @@ class SyncOnlineTest(tests.Test): self.assertEqual([[6, None]], json.load(file('slave/pull.sequence'))) self.assertEqual([[2, None]], json.load(file('slave/push.sequence'))) - guid3 = client.post(['document'], {'context': '', 'message': '3', 'title': '', 'type': 'comment'}) + guid3 = client.post(['document'], {'context': '', 'message': '3', 'title': '', 'type': 'post'}) slave_client.post(cmd='online-sync') self.assertEqual([ {'guid': guid1, 'message': {'en-us': '1'}}, @@ -206,7 +206,7 @@ class SyncOnlineTest(tests.Test): client.put(['document', guid1], {'message': 'a'}) client.put(['document', guid2], {'message': 'b'}) client.put(['document', guid3], {'message': 'c'}) - guid4 = client.post(['document'], {'context': '', 'message': 'd', 'title': '', 'type': 'comment'}) + guid4 = client.post(['document'], {'context': '', 'message': 'd', 'title': '', 'type': 'post'}) client.delete(['document', guid2]) slave_client.post(cmd='online-sync') self.assertEqual([ @@ -252,7 +252,7 @@ class SyncOnlineTest(tests.Test): self.assertEqual([[4, None]], json.load(file('slave/pull.sequence'))) self.assertEqual([[2, None]], json.load(file('slave/push.sequence'))) - guid = slave.post(['document'], {'context': '', 'message': '1', 'title': '1', 'type': 'comment'}) + guid = slave.post(['document'], {'context': '', 'message': '1', 'title': '1', 'type': 'post'}) slave.post(cmd='online-sync') coroutine.sleep(1) diff --git a/tests/units/node/volume.py b/tests/units/node/volume.py deleted file mode 100755 index 01e71a7..0000000 --- a/tests/units/node/volume.py +++ /dev/null @@ -1,826 +0,0 @@ -#!/usr/bin/env python -# sugar-lint: disable - -import os -import time -import urllib2 -import hashlib -from cStringIO import StringIO - -from __init__ import tests - -from sugar_network import db, toolkit, model -from sugar_network.node.volume import diff, merge -from sugar_network.node.stats_node import Sniffer -from sugar_network.node.routes import NodeRoutes -from sugar_network.toolkit.rrd import Rrd -from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, Blob, ACL, route - - -current_time = time.time - - -class VolumeTest(tests.Test): - - def setUp(self): - tests.Test.setUp(self) - 
self.override(time, 'time', lambda: 0) - self.override(NodeRoutes, 'authorize', lambda self, user, role: True) - - def test_diff(self): - - class Document(db.Resource): - - @db.indexed_property(slot=1) - def prop(self, value): - return value - - volume = db.Volume('db', [Document]) - cp = NodeRoutes('guid', volume) - - guid1 = call(cp, method='POST', document='document', content={'prop': 'a'}) - self.utime('db/document/%s/%s' % (guid1[:2], guid1), 1) - guid2 = call(cp, method='POST', document='document', content={'prop': 'b'}) - self.utime('db/document/%s/%s' % (guid2[:2], guid2), 2) - - in_seq = toolkit.Sequence([[1, None]]) - self.assertEqual([ - {'resource': 'document'}, - {'guid': guid1, - 'diff': { - 'guid': {'value': guid1, 'mtime': 1}, - 'mtime': {'value': 0, 'mtime': 1}, - 'ctime': {'value': 0, 'mtime': 1}, - 'prop': {'value': 'a', 'mtime': 1}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - }, - }, - {'guid': guid2, - 'diff': { - 'guid': {'value': guid2, 'mtime': 2}, - 'mtime': {'value': 0, 'mtime': 2}, - 'ctime': {'value': 0, 'mtime': 2}, - 'prop': {'value': 'b', 'mtime': 2}, - 'author': {'mtime': 2, 'value': {}}, - 'layer': {'mtime': 2, 'value': []}, - 'tags': {'mtime': 2, 'value': []}, - }, - }, - {'commit': [[1, 2]]}, - ], - [i for i in diff(volume, in_seq)]) - self.assertEqual([[1, None]], in_seq) - - def test_diff_Partial(self): - - class Document(db.Resource): - - @db.indexed_property(slot=1) - def prop(self, value): - return value - - volume = db.Volume('db', [Document]) - cp = NodeRoutes('guid', volume) - - guid1 = call(cp, method='POST', document='document', content={'prop': 'a'}) - self.utime('db/document/%s/%s' % (guid1[:2], guid1), 1) - guid2 = call(cp, method='POST', document='document', content={'prop': 'b'}) - self.utime('db/document/%s/%s' % (guid2[:2], guid2), 2) - - in_seq = toolkit.Sequence([[1, None]]) - patch = diff(volume, in_seq) - self.assertEqual({'resource': 'document'}, next(patch)) - self.assertEqual(guid1, next(patch)['guid']) - self.assertEqual({'commit': []}, patch.throw(StopIteration())) - try: - next(patch) - assert False - except StopIteration: - pass - - patch = diff(volume, in_seq) - self.assertEqual({'resource': 'document'}, next(patch)) - self.assertEqual(guid1, next(patch)['guid']) - self.assertEqual(guid2, next(patch)['guid']) - self.assertEqual({'commit': [[1, 1]]}, patch.throw(StopIteration())) - try: - next(patch) - assert False - except StopIteration: - pass - - def test_diff_Stretch(self): - - class Document(db.Resource): - - @db.indexed_property(slot=1) - def prop(self, value): - return value - - volume = db.Volume('db', [Document]) - cp = NodeRoutes('guid', volume) - - guid1 = call(cp, method='POST', document='document', content={'prop': 'a'}) - self.utime('db/document/%s/%s' % (guid1[:2], guid1), 1) - guid2 = call(cp, method='POST', document='document', content={'prop': 'b'}) - volume['document'].delete(guid2) - guid3 = call(cp, method='POST', document='document', content={'prop': 'c'}) - self.utime('db/document/%s/%s' % (guid3[:2], guid3), 2) - guid4 = call(cp, method='POST', document='document', content={'prop': 'd'}) - volume['document'].delete(guid4) - guid5 = call(cp, method='POST', document='document', content={'prop': 'f'}) - self.utime('db/document/%s/%s' % (guid5[:2], guid5), 2) - - in_seq = toolkit.Sequence([[1, None]]) - patch = diff(volume, in_seq) - self.assertEqual({'resource': 'document'}, patch.send(None)) - self.assertEqual(guid1, 
patch.send(None)['guid']) - self.assertEqual(guid3, patch.send(None)['guid']) - self.assertEqual(guid5, patch.send(None)['guid']) - self.assertEqual({'commit': [[1, 1], [3, 3]]}, patch.throw(StopIteration())) - try: - patch.send(None) - assert False - except StopIteration: - pass - - patch = diff(volume, in_seq) - self.assertEqual({'resource': 'document'}, patch.send(None)) - self.assertEqual(guid1, patch.send(None)['guid']) - self.assertEqual(guid3, patch.send(None)['guid']) - self.assertEqual(guid5, patch.send(None)['guid']) - self.assertEqual({'commit': [[1, 5]]}, patch.send(None)) - try: - patch.send(None) - assert False - except StopIteration: - pass - - def test_diff_DoNotStretchContinuesPacket(self): - - class Document(db.Resource): - - @db.indexed_property(slot=1) - def prop(self, value): - return value - - volume = db.Volume('db', [Document]) - cp = NodeRoutes('guid', volume) - - guid1 = call(cp, method='POST', document='document', content={'prop': 'a'}) - volume['document'].delete(guid1) - guid2 = call(cp, method='POST', document='document', content={'prop': 'b'}) - volume['document'].delete(guid2) - guid3 = call(cp, method='POST', document='document', content={'prop': 'c'}) - self.utime('db/document/%s/%s' % (guid3[:2], guid3), 2) - guid4 = call(cp, method='POST', document='document', content={'prop': 'd'}) - volume['document'].delete(guid4) - guid5 = call(cp, method='POST', document='document', content={'prop': 'f'}) - self.utime('db/document/%s/%s' % (guid5[:2], guid5), 2) - - in_seq = toolkit.Sequence([[1, None]]) - patch = diff(volume, in_seq, toolkit.Sequence([[1, 1]])) - self.assertEqual({'resource': 'document'}, patch.send(None)) - self.assertEqual(guid3, patch.send(None)['guid']) - self.assertEqual(guid5, patch.send(None)['guid']) - self.assertEqual({'commit': [[1, 1], [3, 3], [5, 5]]}, patch.send(None)) - try: - patch.send(None) - assert False - except StopIteration: - pass - - def test_diff_TheSameInSeqForAllDocuments(self): - - class Document1(db.Resource): - pass - - class Document2(db.Resource): - pass - - class Document3(db.Resource): - pass - - volume = db.Volume('db', [Document1, Document2, Document3]) - cp = NodeRoutes('guid', volume) - - guid3 = call(cp, method='POST', document='document1', content={}) - self.utime('db/document/%s/%s' % (guid3[:2], guid3), 3) - guid2 = call(cp, method='POST', document='document2', content={}) - self.utime('db/document/%s/%s' % (guid2[:2], guid2), 2) - guid1 = call(cp, method='POST', document='document3', content={}) - self.utime('db/document/%s/%s' % (guid1[:2], guid1), 1) - - in_seq = toolkit.Sequence([[1, None]]) - patch = diff(volume, in_seq) - self.assertEqual({'resource': 'document1'}, patch.send(None)) - self.assertEqual(guid3, patch.send(None)['guid']) - self.assertEqual({'resource': 'document2'}, patch.send(None)) - self.assertEqual(guid2, patch.send(None)['guid']) - self.assertEqual({'resource': 'document3'}, patch.send(None)) - self.assertEqual(guid1, patch.send(None)['guid']) - self.assertEqual({'commit': [[1, 3]]}, patch.send(None)) - try: - patch.send(None) - assert False - except StopIteration: - pass - - def test_merge_Create(self): - - class Document1(db.Resource): - - @db.indexed_property(slot=1) - def prop(self, value): - return value - - class Document2(db.Resource): - pass - - self.touch(('db/seqno', '100')) - volume = db.Volume('db', [Document1, Document2]) - - records = [ - {'resource': 'document1'}, - {'guid': '1', 'diff': { - 'guid': {'value': '1', 'mtime': 1.0}, - 'ctime': {'value': 2, 'mtime': 2.0}, - 
'mtime': {'value': 3, 'mtime': 3.0}, - 'prop': {'value': '4', 'mtime': 4.0}, - }}, - {'resource': 'document2'}, - {'guid': '5', 'diff': { - 'guid': {'value': '5', 'mtime': 5.0}, - 'ctime': {'value': 6, 'mtime': 6.0}, - 'mtime': {'value': 7, 'mtime': 7.0}, - }}, - {'commit': [[1, 2]]}, - ] - self.assertEqual(([[1, 2]], [[101, 102]]), merge(volume, records)) - - self.assertEqual( - {'guid': '1', 'prop': '4', 'ctime': 2, 'mtime': 3}, - volume['document1'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) - self.assertEqual(1, os.stat('db/document1/1/1/guid').st_mtime) - self.assertEqual(2, os.stat('db/document1/1/1/ctime').st_mtime) - self.assertEqual(3, os.stat('db/document1/1/1/mtime').st_mtime) - self.assertEqual(4, os.stat('db/document1/1/1/prop').st_mtime) - - self.assertEqual( - {'guid': '5', 'ctime': 6, 'mtime': 7}, - volume['document2'].get('5').properties(['guid', 'ctime', 'mtime'])) - self.assertEqual(5, os.stat('db/document2/5/5/guid').st_mtime) - self.assertEqual(6, os.stat('db/document2/5/5/ctime').st_mtime) - self.assertEqual(7, os.stat('db/document2/5/5/mtime').st_mtime) - - def test_merge_Update(self): - - class Document(db.Resource): - - @db.indexed_property(slot=1) - def prop(self, value): - return value - - self.touch(('db/seqno', '100')) - volume = db.Volume('db', [Document]) - volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}) - for i in os.listdir('db/document/1/1'): - os.utime('db/document/1/1/%s' % i, (2, 2)) - - records = [ - {'resource': 'document'}, - {'guid': '1', 'diff': {'prop': {'value': '2', 'mtime': 1.0}}}, - {'commit': [[1, 1]]}, - ] - self.assertEqual(([[1, 1]], []), merge(volume, records)) - self.assertEqual( - {'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}, - volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) - self.assertEqual(2, os.stat('db/document/1/1/prop').st_mtime) - - records = [ - {'resource': 'document'}, - {'guid': '1', 'diff': {'prop': {'value': '3', 'mtime': 2.0}}}, - {'commit': [[2, 2]]}, - ] - self.assertEqual(([[2, 2]], []), merge(volume, records)) - self.assertEqual( - {'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1}, - volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) - self.assertEqual(2, os.stat('db/document/1/1/prop').st_mtime) - - records = [ - {'resource': 'document'}, - {'guid': '1', 'diff': {'prop': {'value': '4', 'mtime': 3.0}}}, - {'commit': [[3, 3]]}, - ] - self.assertEqual(([[3, 3]], [[102, 102]]), merge(volume, records)) - self.assertEqual( - {'guid': '1', 'prop': '4', 'ctime': 1, 'mtime': 1}, - volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop'])) - self.assertEqual(3, os.stat('db/document/1/1/prop').st_mtime) - - def test_merge_MultipleCommits(self): - - class Document(db.Resource): - - @db.stored_property() - def prop(self, value): - return value - - self.touch(('db/seqno', '100')) - volume = db.Volume('db', [Document]) - - def generator(): - for i in [ - {'resource': 'document'}, - {'commit': [[1, 1]]}, - {'guid': '1', 'diff': { - 'guid': {'value': '1', 'mtime': 1.0}, - 'ctime': {'value': 2, 'mtime': 2.0}, - 'mtime': {'value': 3, 'mtime': 3.0}, - 'prop': {'value': '4', 'mtime': 4.0}, - }}, - {'commit': [[2, 3]]}, - ]: - yield i - - records = generator() - self.assertEqual(([[1, 3]], [[101, 101]]), merge(volume, records)) - assert volume['document'].exists('1') - - def test_merge_UpdateStats(self): - volume = db.Volume('db', model.RESOURCES) - cp = NodeRoutes('guid', volume) - stats = Sniffer(volume, 
'stats/node') - - records = [ - {'resource': 'context'}, - {'guid': 'context', 'diff': { - 'guid': {'value': 'context', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'type': {'value': ['package'], 'mtime': 1.0}, - 'title': {'value': {}, 'mtime': 1.0}, - 'summary': {'value': {}, 'mtime': 1.0}, - 'description': {'value': {}, 'mtime': 1.0}, - }}, - {'resource': 'post'}, - {'guid': 'topic_1', 'diff': { - 'guid': {'value': 'topic_1', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'type': {'value': 'object', 'mtime': 1.0}, - 'context': {'value': 'context', 'mtime': 1.0}, - 'title': {'value': {}, 'mtime': 1.0}, - 'message': {'value': {}, 'mtime': 1.0}, - 'solution': {'value': 'solution_1', 'mtime': 1.0}, - }}, - {'guid': 'topic_2', 'diff': { - 'guid': {'value': 'topic_2', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'type': {'value': 'object', 'mtime': 1.0}, - 'context': {'value': 'context', 'mtime': 1.0}, - 'title': {'value': {}, 'mtime': 1.0}, - 'message': {'value': {}, 'mtime': 1.0}, - 'solution': {'value': 'solution_2', 'mtime': 1.0}, - }}, - {'guid': 'context_review', 'diff': { - 'guid': {'value': 'context_review', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'context': {'value': 'context', 'mtime': 1.0}, - 'vote': {'value': 1, 'mtime': 1.0}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'type': {'value': 'review', 'mtime': 1.0}, - }}, - {'guid': 'topic_review', 'diff': { - 'guid': {'value': 'topic_review', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'context': {'value': 'context', 'mtime': 1.0}, - 'topic': {'value': 'topic_1', 'mtime': 1.0}, - 'vote': {'value': 1, 'mtime': 1.0}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'type': {'value': 'feedback', 'mtime': 1.0}, - }}, - {'guid': 'solution_1', 'diff': { - 'guid': {'value': 'solution_1', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'context': {'value': 'context', 'mtime': 1.0}, - 'topic': {'value': 'topic_1', 'mtime': 1.0}, - 'type': {'value': 'answer', 'mtime': 1.0}, - 'title': {'value': {}, 'mtime': 1.0}, - 'message': {'value': {}, 'mtime': 1.0}, - }}, - {'guid': 'solution_2', 'diff': { - 'guid': {'value': 'solution_2', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'context': {'value': 'context', 'mtime': 1.0}, - 'topic': {'value': 'topic_2', 'mtime': 1.0}, - 'type': {'value': 'answer', 'mtime': 1.0}, - 'title': {'value': {}, 'mtime': 1.0}, - 'message': {'value': {}, 'mtime': 1.0}, - }}, - {'resource': 'release'}, - {'guid': 'release', 'diff': { - 'guid': {'value': 'release', 'mtime': 1.0}, - 'ctime': {'value': 1, 'mtime': 1.0}, - 'mtime': {'value': 1, 'mtime': 1.0}, - 'context': {'value': 'context', 'mtime': 1.0}, - 'license': {'value': ['GPL-3.0'], 'mtime': 1.0}, - 'version': {'value': '1', 'mtime': 1.0}, - 'stability': {'value': 'stable', 'mtime': 1.0}, - 'notes': {'value': {}, 'mtime': 1.0}, - }}, - {'commit': [[1, 1]]}, - ] - merge(volume, records, stats=stats) - ts = int(current_time()) - stats.commit(ts) - stats.commit_objects() - - self.assertEqual([ - [('post', ts, { - 'downloaded': 0.0, - 'total': 6.0, - })], - [('user', ts, { - 'total': 0.0, - 
})], - [('context', ts, { - 'failed': 0.0, - 'downloaded': 0.0, - 'total': 1.0, - 'released': 1.0, - })], - ], - [[(j.name,) + i for i in j.get(j.last, j.last)] for j in Rrd('stats/node', 1)]) - self.assertEqual([1, 1], volume['context'].get('context')['rating']) - self.assertEqual([1, 1], volume['post'].get('topic_1')['rating']) - - records = [ - {'resource': 'post'}, - {'guid': 'topic_2', 'diff': {'solution': {'value': '', 'mtime': 2.0}}}, - {'commit': [[2, 2]]}, - ] - merge(volume, records, stats=stats) - ts += 1 - stats.commit(ts) - stats.commit_objects() - - self.assertEqual([ - [('post', ts, { - 'downloaded': 0.0, - 'total': 6.0, - })], - [('user', ts, { - 'total': 0.0, - })], - [('context', ts, { - 'failed': 0.0, - 'downloaded': 0.0, - 'total': 1.0, - 'released': 1.0, - })], - ], - [[(j.name,) + i for i in j.get(j.last, j.last)] for j in Rrd('stats/node', 1)]) - - records = [ - {'resource': 'context'}, - {'guid': 'context', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'resource': 'post'}, - {'guid': 'topic_1', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'guid': 'topic_2', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'guid': 'context_review', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'guid': 'topic_review', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'guid': 'solution_1', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'guid': 'solution_2', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'resource': 'release'}, - {'guid': 'release', 'diff': {'layer': {'value': ['deleted'], 'mtime': 3.0}}}, - {'commit': [[3, 3]]}, - ] - merge(volume, records, stats=stats) - ts += 1 - stats.commit(ts) - stats.commit_objects() - - self.assertEqual([ - [('post', ts, { - 'downloaded': 0.0, - 'total': 0.0, - })], - [('user', ts, { - 'total': 0.0, - })], - [('context', ts, { - 'failed': 0.0, - 'downloaded': 0.0, - 'total': 0.0, - 'released': 1.0, - })], - ], - [[(j.name,) + i for i in j.get(j.last, j.last)] for j in Rrd('stats/node', 1)]) - - def test_diff_Blobs(self): - - class Document(db.Resource): - - @db.blob_property() - def prop(self, value): - return value - - volume = db.Volume('db', [Document]) - cp = NodeRoutes('guid', volume) - - guid = call(cp, method='POST', document='document', content={}) - call(cp, method='PUT', document='document', guid=guid, content={'prop': 'payload'}) - self.utime('db', 0) - - patch = diff(volume, toolkit.Sequence([[1, None]])) - self.assertEqual( - {'resource': 'document'}, - next(patch)) - record = next(patch) - self.assertEqual('payload', ''.join([i for i in record.pop('blob')])) - self.assertEqual( - {'guid': guid, 'blob_size': len('payload'), 'diff': { - 'prop': { - 'digest': hashlib.sha1('payload').hexdigest(), - 'blob_size': len('payload'), - 'mime_type': 'application/octet-stream', - 'mtime': 0, - }, - }}, - record) - self.assertEqual( - {'guid': guid, 'diff': { - 'guid': {'value': guid, 'mtime': 0}, - 'author': {'mtime': 0, 'value': {}}, - 'layer': {'mtime': 0, 'value': []}, - 'tags': {'mtime': 0, 'value': []}, - 'mtime': {'value': 0, 'mtime': 0}, - 'ctime': {'value': 0, 'mtime': 0}, - }}, - next(patch)) - self.assertEqual( - {'commit': [[1, 2]]}, - next(patch)) - self.assertRaises(StopIteration, next, patch) - - def test_diff_BlobUrls(self): - url = 'http://src.sugarlabs.org/robots.txt' - blob = urllib2.urlopen(url).read() - - class Document(db.Resource): - - @db.blob_property() - def prop(self, value): - return value - - volume = db.Volume('db', 
[Document]) - cp = NodeRoutes('guid', volume) - - guid = call(cp, method='POST', document='document', content={}) - call(cp, method='PUT', document='document', guid=guid, content={'prop': {'url': url}}) - self.utime('db', 1) - - self.assertEqual([ - {'resource': 'document'}, - {'guid': guid, - 'diff': { - 'guid': {'value': guid, 'mtime': 1}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'mtime': {'value': 0, 'mtime': 1}, - 'ctime': {'value': 0, 'mtime': 1}, - 'prop': {'url': url, 'mtime': 1}, - }, - }, - {'commit': [[1, 2]]}, - ], - [i for i in diff(volume, toolkit.Sequence([[1, None]]))]) - - patch = diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True) - self.assertEqual( - {'resource': 'document'}, - next(patch)) - record = next(patch) - self.assertEqual(blob, ''.join([i for i in record.pop('blob')])) - self.assertEqual( - {'guid': guid, 'blob_size': len(blob), 'diff': {'prop': {'mtime': 1}}}, - record) - self.assertEqual( - {'guid': guid, 'diff': { - 'guid': {'value': guid, 'mtime': 1}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'mtime': {'value': 0, 'mtime': 1}, - 'ctime': {'value': 0, 'mtime': 1}, - }}, - next(patch)) - self.assertEqual( - {'commit': [[1, 2]]}, - next(patch)) - self.assertRaises(StopIteration, next, patch) - - def test_diff_SkipBrokenBlobUrls(self): - - class Document(db.Resource): - - @db.blob_property() - def prop(self, value): - return value - - volume = db.Volume('db', [Document]) - cp = NodeRoutes('guid', volume) - - guid1 = call(cp, method='POST', document='document', content={}) - call(cp, method='PUT', document='document', guid=guid1, content={'prop': {'url': 'http://foo/bar'}}) - guid2 = call(cp, method='POST', document='document', content={}) - self.utime('db', 1) - - self.assertEqual([ - {'resource': 'document'}, - {'guid': guid1, - 'diff': { - 'guid': {'value': guid1, 'mtime': 1}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'mtime': {'value': 0, 'mtime': 1}, - 'ctime': {'value': 0, 'mtime': 1}, - 'prop': {'url': 'http://foo/bar', 'mtime': 1}, - }, - }, - {'guid': guid2, - 'diff': { - 'guid': {'value': guid2, 'mtime': 1}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'mtime': {'value': 0, 'mtime': 1}, - 'ctime': {'value': 0, 'mtime': 1}, - }, - }, - {'commit': [[1, 3]]}, - ], - [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=False)]) - - self.assertEqual([ - {'resource': 'document'}, - {'guid': guid1, - 'diff': { - 'guid': {'value': guid1, 'mtime': 1}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'mtime': {'value': 0, 'mtime': 1}, - 'ctime': {'value': 0, 'mtime': 1}, - }, - }, - {'guid': guid2, - 'diff': { - 'guid': {'value': guid2, 'mtime': 1}, - 'author': {'mtime': 1, 'value': {}}, - 'layer': {'mtime': 1, 'value': []}, - 'tags': {'mtime': 1, 'value': []}, - 'mtime': {'value': 0, 'mtime': 1}, - 'ctime': {'value': 0, 'mtime': 1}, - }, - }, - {'commit': [[1, 3]]}, - ], - [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)]) - - def test_merge_Blobs(self): - - class Document(db.Resource): - - @db.blob_property() - def prop(self, value): - return value - - volume = db.Volume('db', [Document]) - - merge(volume, [ - {'resource': 'document'}, - {'guid': '1', 
'diff': {
-                        'guid': {'value': '1', 'mtime': 1.0},
-                        'ctime': {'value': 2, 'mtime': 2.0},
-                        'mtime': {'value': 3, 'mtime': 3.0},
-                        'prop': {
-                            'blob': StringIO('payload'),
-                            'blob_size': len('payload'),
-                            'digest': hashlib.sha1('payload').hexdigest(),
-                            'mime_type': 'foo/bar',
-                            'mtime': 1,
-                            },
-                        }},
-                    {'commit': [[1, 1]]},
-                    ])
-
-        assert volume['document'].exists('1')
-        blob = volume['document'].get('1')['prop']
-        self.assertEqual(1, blob['mtime'])
-        self.assertEqual('foo/bar', blob['mime_type'])
-        self.assertEqual(hashlib.sha1('payload').hexdigest(), blob['digest'])
-        self.assertEqual(tests.tmpdir + '/db/document/1/1/prop.blob', blob['blob'])
-        self.assertEqual('payload', file(blob['blob']).read())
-
-    def test_diff_ByLayers(self):
-
-        class Context(db.Resource):
-            pass
-
-        class release(db.Resource):
-            pass
-
-        class Review(db.Resource):
-            pass
-
-        volume = db.Volume('db', [Context, release, Review])
-        volume['context'].create({'guid': '0', 'ctime': 1, 'mtime': 1, 'layer': ['layer0', 'common']})
-        volume['context'].create({'guid': '1', 'ctime': 1, 'mtime': 1, 'layer': 'layer1'})
-        volume['release'].create({'guid': '2', 'ctime': 2, 'mtime': 2, 'layer': 'layer2'})
-        volume['review'].create({'guid': '3', 'ctime': 3, 'mtime': 3, 'layer': 'layer3'})
-
-        volume['context'].update('0', {'tags': '0'})
-        volume['context'].update('1', {'tags': '1'})
-        volume['release'].update('2', {'tags': '2'})
-        volume['review'].update('3', {'tags': '3'})
-        self.utime('db', 0)
-
-        self.assertEqual(sorted([
-            {'resource': 'context'},
-            {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}},
-            {'guid': '1', 'diff': {'tags': {'value': '1', 'mtime': 0}}},
-            {'resource': 'release'},
-            {'guid': '2', 'diff': {'tags': {'value': '2', 'mtime': 0}}},
-            {'resource': 'review'},
-            {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}},
-            {'commit': [[5, 8]]},
-            ]),
-            sorted([i for i in diff(volume, toolkit.Sequence([[5, None]]))]))
-
-        self.assertEqual(sorted([
-            {'resource': 'context'},
-            {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}},
-            {'guid': '1', 'diff': {'tags': {'value': '1', 'mtime': 0}}},
-            {'resource': 'release'},
-            {'resource': 'review'},
-            {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}},
-            {'commit': [[5, 8]]},
-            ]),
-            sorted([i for i in diff(volume, toolkit.Sequence([[5, None]]), layer='layer1')]))
-
-        self.assertEqual(sorted([
-            {'resource': 'context'},
-            {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}},
-            {'resource': 'release'},
-            {'guid': '2', 'diff': {'tags': {'value': '2', 'mtime': 0}}},
-            {'resource': 'review'},
-            {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}},
-            {'commit': [[5, 8]]},
-            ]),
-            sorted([i for i in diff(volume, toolkit.Sequence([[5, None]]), layer='layer2')]))
-
-        self.assertEqual(sorted([
-            {'resource': 'context'},
-            {'guid': '0', 'diff': {'tags': {'value': '0', 'mtime': 0}}},
-            {'resource': 'release'},
-            {'resource': 'review'},
-            {'guid': '3', 'diff': {'tags': {'value': '3', 'mtime': 0}}},
-            {'commit': [[5, 8]]},
-            ]),
-            sorted([i for i in diff(volume, toolkit.Sequence([[5, None]]), layer='foo')]))
-
-
-def call(routes, method, document=None, guid=None, prop=None, cmd=None, content=None, **kwargs):
-    path = []
-    if document:
-        path.append(document)
-    if guid:
-        path.append(guid)
-    if prop:
-        path.append(prop)
-    request = Request(method=method, path=path, cmd=cmd, content=content)
-    request.update(kwargs)
-    request.environ['HTTP_HOST'] = '127.0.0.1'
-    router = Router(routes)
-    return router.call(request, Response())
-
-
-if __name__ == '__main__':
-    tests.main()
diff --git a/tests/units/toolkit/__main__.py b/tests/units/toolkit/__main__.py
index 79b0e5b..68cb254 100644
--- a/tests/units/toolkit/__main__.py
+++ b/tests/units/toolkit/__main__.py
@@ -2,6 +2,7 @@
 
 from __init__ import tests
 
+from coroutine import *
 from http import *
 from lsb_release import *
 from mountpoints import *
@@ -11,6 +12,7 @@ from options import *
 from spec import *
 from router import *
 from gbus import *
+from i18n import *
 
 if __name__ == '__main__':
     tests.main()
diff --git a/tests/units/toolkit/coroutine.py b/tests/units/toolkit/coroutine.py
new file mode 100755
index 0000000..95738d0
--- /dev/null
+++ b/tests/units/toolkit/coroutine.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+from __init__ import tests
+
+from sugar_network.toolkit.coroutine import Spooler, spawn, sleep
+
+
+class CoroutineTest(tests.Test):
+
+    def test_Spooler_ContinuousFeeding(self):
+        spooler = Spooler()
+        events = []
+
+        def consumer(num):
+            while True:
+                events[num].append(spooler.wait())
+
+        for i in range(10):
+            events.append([])
+            spawn(consumer, i)
+        sleep(.1)
+
+        for i in range(10):
+            spooler.notify_all(i)
+            sleep(.1)
+        self.assertEqual([range(10)] * 10, events)
+
+
+if __name__ == '__main__':
+    tests.main()
diff --git a/tests/units/toolkit/i18n.py b/tests/units/toolkit/i18n.py
new file mode 100755
index 0000000..6c4c0ca
--- /dev/null
+++ b/tests/units/toolkit/i18n.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import gettext
+
+from __init__ import tests
+
+from sugar_network.toolkit import i18n
+
+
+class I18nTest(tests.Test):
+
+    def test_decode(self):
+        # Fallback to default lang
+        i18n._default_langs = ['default']
+        self.assertEqual('foo', i18n.decode({'lang': 'foo', 'default': 'bar'}, 'lang'))
+        self.assertEqual('bar', i18n.decode({'lang': 'foo', 'default': 'bar'}, 'fake'))
+
+        # Exact accept_language
+        self.assertEqual('', i18n.decode(None, 'lang'))
+        self.assertEqual('foo', i18n.decode('foo', 'lang'))
+        self.assertEqual('foo', i18n.decode({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, 'lang'))
+        self.assertEqual('foo', i18n.decode({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, ['lang', 'fake']))
+        self.assertEqual('bar', i18n.decode({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, ['fake', 'lang']))
+
+        # Last resort
+        self.assertEqual('foo', i18n.decode({'1': 'foo', '2': 'bar'}, 'fake'))
+
+        # Primed accept_language
+        self.assertEqual('foo', i18n.decode({'1': 'foo', '2': 'bar', 'default': 'default'}, '1-a'))
+
+        # Primed i18n value
+        self.assertEqual('bar', i18n.decode({'1-a': 'foo', '1': 'bar', 'default': 'default'}, '1-b'))
+        self.assertEqual('foo', i18n.decode({'1-a': 'foo', '2': 'bar', 'default': 'default'}, '1-b'))
+
+    def test_decode_EnAsTheLastResort(self):
+        i18n._default_langs = ['en-us']
+        self.assertEqual('right', i18n.decode({'a': 'wrong', 'en': 'right'}, 'probe'))
+        self.assertEqual('exact', i18n.decode({'a': 'wrong', 'en': 'right', 'probe': 'exact'}, 'probe'))
+
+    def test_encode(self):
+        self.assertEqual({
+            'en': 'Delete Log File',
+            'es': 'Borrar el archivo de registro',
+            'fr': 'Supprimer le fichier log',
+            }, i18n.encode('Delete Log File'))
+
+        self.assertEqual({
+            'en': "Error: Can't open file 'probe'\n",
+            'es': "Error: No se puede abrir el archivo 'probe'\n",
+            'fr': "Erreur : Ouverture du fichier 'probe' impossible\n",
+            }, i18n.encode("Error: Can't open file '%s'\n", 'probe'))
+
+        self.assertEqual({
+            'en': "Error: Can't open file '1'\n",
+            'es': "Error: No se puede abrir el archivo '2'\n",
+            'fr': "Erreur : Ouverture du fichier '3' impossible\n",
+            }, i18n.encode("Error: Can't open file '%s'\n", {'en': 1, 'es': 2, 'fr': 3}))
+
+        self.assertEqual({
+            'en': '1 when deleting 5',
+            'es': '2 borrando 6',
+            'fr': '3 lors de la suppression de 7',
+            }, i18n.encode('%(error)s when deleting %(file)s', error={'en': 1, 'es': 2, 'fr': 3}, file={'en': 5, 'es': 6, 'fr': 7}))
+
+
+if __name__ == '__main__':
+    tests.main()
diff --git a/tests/units/toolkit/router.py b/tests/units/toolkit/router.py
index a9b17f2..3dd1306 100755
--- a/tests/units/toolkit/router.py
+++ b/tests/units/toolkit/router.py
@@ -9,9 +9,10 @@ from cStringIO import StringIO
 
 from __init__ import tests, src_root
 
-from sugar_network import db, client
-from sugar_network.toolkit.router import Blob, Router, Request, _parse_accept_language, route, fallbackroute, preroute, postroute, _filename
-from sugar_network.toolkit import default_lang, http, coroutine
+from sugar_network import db, client, toolkit
+from sugar_network.toolkit.router import Router, Request, _parse_accept_language, route, fallbackroute, preroute, postroute
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import http, coroutine
 
 
 class RouterTest(tests.Test):
@@ -499,26 +500,44 @@ class RouterTest(tests.Test):
 
     def test_routes_Pre(self):
 
-        class Routes(object):
+        class A(object):
 
             @route('PROBE')
             def ok(self, request, response):
                 return request['probe']
 
             @preroute
-            def preroute(self, op, request, response):
-                request['probe'] = 'request'
+            def _(self, op, request, response):
+                request['probe'] = '_'
 
-        router = Router(Routes())
+        class B1(A):
+
+            @preroute
+            def z(self, op, request, response):
+                request['probe'] += 'z'
+
+        class B2(object):
+
+            @preroute
+            def f(self, op, request, response):
+                request['probe'] += 'f'
+
+        class C(B1, B2):
+
+            @preroute
+            def a(self, op, request, response):
+                request['probe'] += 'a'
+
+        router = Router(C())
         self.assertEqual(
-            ['request'],
+            ['_afz'],
             [i for i in router({'REQUEST_METHOD': 'PROBE', 'PATH_INFO': '/'}, lambda *args: None)])
 
     def test_routes_Post(self):
 
         postroutes = []
 
-        class Routes(object):
+        class A(object):
 
             @route('OK')
             def ok(self):
@@ -529,20 +548,51 @@ class RouterTest(tests.Test):
                 raise Exception('fail')
 
             @postroute
-            def postroute(self, request, response, result, exception):
-                postroutes.append((result, str(exception)))
+            def _(self, request, response, result, exception):
+                postroutes.append(('_', result, str(exception)))
 
-        router = Router(Routes())
+        class B1(A):
+
+            @postroute
+            def z(self, request, response, result, exception):
+                postroutes.append(('z', result, str(exception)))
+
+        class B2(object):
+
+            @postroute
+            def f(self, request, response, result, exception):
+                postroutes.append(('f', result, str(exception)))
+
+        class C(B1, B2):
+
+            @postroute
+            def a(self, request, response, result, exception):
+                postroutes.append(('a', result, str(exception)))
+
+        router = Router(C())
         self.assertEqual(
             ['ok'],
             [i for i in router({'REQUEST_METHOD': 'OK', 'PATH_INFO': '/'}, lambda *args: None)])
-        self.assertEqual(('ok', 'None'), postroutes[-1])
+        self.assertEqual([
+            ('_', 'ok', 'None'),
+            ('a', 'ok', 'None'),
+            ('f', 'ok', 'None'),
+            ('z', 'ok', 'None'),
+            ],
+            postroutes)
+        del postroutes[:]
 
         self.assertEqual(
             ['{"request": "/", "error": "fail"}'],
             [i for i in router({'REQUEST_METHOD': 'FAIL', 'PATH_INFO': '/'}, lambda *args: None)])
-        self.assertEqual((None, 'fail'), postroutes[-1])
+        self.assertEqual([
+            ('_', None, 'fail'),
+            ('a', None, 'fail'),
+            ('f', None, 'fail'),
+            ('z', None, 'fail'),
+            ],
+            postroutes)
 
     def test_routes_WildcardsAsLastResort(self):
 
@@ -968,14 +1018,14 @@ class RouterTest(tests.Test):
             ],
             response)
 
-    def test_BlobsRedirects(self):
+    def test_FilesRedirects(self):
         URL = 'http://sugarlabs.org'
 
         class CommandsProcessor(object):
 
             @route('GET')
             def get(self, response):
-                return Blob(url=URL)
+                return toolkit.File(meta={'url': URL})
 
         router = Router(CommandsProcessor())
 
@@ -1171,33 +1221,18 @@ class RouterTest(tests.Test):
             ],
             response)
 
-    def test_filename(self):
-        self.assertEqual('Foo', _filename('foo', None))
-        self.assertEqual('Foo-Bar', _filename(['foo', 'bar'], None))
-        self.assertEqual('FOO-BaR', _filename([' f o o', ' ba r '], None))
-
-        self.assertEqual('12-3', _filename(['/1/2/', '/3/'], None))
-
-        self.assertEqual('Foo.png', _filename('foo', 'image/png'))
-        self.assertEqual('Foo-Bar.gif', _filename(['foo', 'bar'], 'image/gif'))
-        self.assertEqual('Fake', _filename('fake', 'foo/bar'))
-
-        self.assertEqual('Eng', _filename({default_lang(): 'eng'}, None))
-        self.assertEqual('Eng', _filename([{default_lang(): 'eng'}], None))
-        self.assertEqual('Bar-1', _filename([{'lang': 'foo', default_lang(): 'bar'}, '1'], None))
-
-    def test_BlobsDisposition(self):
+    def test_FilesDisposition(self):
         self.touch(('blob.data', 'value'))
 
         class CommandsProcessor(object):
 
             @route('GET', [], '1')
             def cmd1(self, request):
-                return Blob(name='foo', blob='blob.data')
+                return toolkit.File('blob.data', {'name': 'foo', 'mime_type': 'application/octet-stream'})
 
             @route('GET', [], cmd='2')
             def cmd2(self, request):
-                return Blob(filename='foo.bar', blob='blob.data')
+                return toolkit.File('blob.data', {'filename': 'foo.bar'})
 
         router = Router(CommandsProcessor())
 
@@ -1216,7 +1251,7 @@ class RouterTest(tests.Test):
             'last-modified': formatdate(os.stat('blob.data').st_mtime, localtime=False, usegmt=True),
             'content-length': str(len(result)),
             'content-type': 'application/octet-stream',
-            'content-disposition': 'attachment; filename="Foo.obj"',
+            'content-disposition': 'attachment; filename="foo.obj"',
             }
             ],
             response)
@@ -1292,7 +1327,6 @@ class RouterTest(tests.Test):
             [i for i in reply])
 
     def test_SpawnEventStream(self):
-        events = []
 
         class Routes(object):
 
@@ -1301,8 +1335,10 @@ class RouterTest(tests.Test):
                 yield {}
                 yield {'foo': 'bar'}
 
-            def broadcast(self, event):
-                events.append(event.copy())
+        events = []
+        def localcast(event):
+            events.append(event.copy())
+        this.localcast = localcast
 
         reply = Router(Routes(), allow_spawn=True)({
             'PATH_INFO': '/resource/guid/prop',
@@ -1321,7 +1357,6 @@ class RouterTest(tests.Test):
         del events[:]
 
     def test_SpawnEventStreamFailure(self):
-        events = []
 
        class Routes(object):
 
@@ -1332,8 +1367,10 @@ class RouterTest(tests.Test):
                 yield {'foo': 'bar'}, {'add': 'on'}
                 raise RuntimeError('error')
 
-            def broadcast(self, event):
-                events.append(event.copy())
+        events = []
+        def localcast(event):
+            events.append(event.copy())
+        this.localcast = localcast
 
         reply = Router(Routes(), allow_spawn=True)({
             'PATH_INFO': '/',
@@ -1353,7 +1390,6 @@ class RouterTest(tests.Test):
         del events[:]
 
     def test_ReadRequestOnEventStreamSpawn(self):
-        events = []
 
         class Routes(object):
 
@@ -1362,8 +1398,10 @@ class RouterTest(tests.Test):
                 yield {}
                 yield {'request': request.content}
 
-            def broadcast(self, event):
-                events.append(event.copy())
+        events = []
+        def localcast(event):
+            events.append(event.copy())
+        this.localcast = localcast
 
         reply = Router(Routes(), allow_spawn=True)({
             'PATH_INFO': '/',
diff --git a/tests/units/toolkit/toolkit.py b/tests/units/toolkit/toolkit.py
index 8c13b84..07ed9c6 100755
--- a/tests/units/toolkit/toolkit.py
+++ b/tests/units/toolkit/toolkit.py
@@ -8,7 +8,7 @@ from cStringIO import StringIO
 from __init__ import tests
 
 from sugar_network import toolkit
-from sugar_network.toolkit import Seqno, Sequence
+from sugar_network.toolkit import Seqno, Sequence, File
 
 
 class UtilTest(tests.Test):
@@ -421,33 +421,14 @@ class UtilTest(tests.Test):
             ['d', 'a', 'b', 'c'],
             [i for i in stack])
 
-    def test_gettext(self):
-        # Fallback to default lang
-        toolkit._default_langs = ['default']
-        self.assertEqual('foo', toolkit.gettext({'lang': 'foo', 'default': 'bar'}, 'lang'))
-        self.assertEqual('bar', toolkit.gettext({'lang': 'foo', 'default': 'bar'}, 'fake'))
-
-        # Exact accept_language
-        self.assertEqual('', toolkit.gettext(None, 'lang'))
-        self.assertEqual('foo', toolkit.gettext('foo', 'lang'))
-        self.assertEqual('foo', toolkit.gettext({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, 'lang'))
-        self.assertEqual('foo', toolkit.gettext({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, ['lang', 'fake']))
-        self.assertEqual('bar', toolkit.gettext({'lang': 'foo', 'fake': 'bar', 'default': 'default'}, ['fake', 'lang']))
-
-        # Last resort
-        self.assertEqual('foo', toolkit.gettext({'1': 'foo', '2': 'bar'}, 'fake'))
-
-        # Primed accept_language
-        self.assertEqual('foo', toolkit.gettext({'1': 'foo', '2': 'bar', 'default': 'default'}, '1-a'))
-
-        # Primed i18n value
-        self.assertEqual('bar', toolkit.gettext({'1-a': 'foo', '1': 'bar', 'default': 'default'}, '1-b'))
-        self.assertEqual('foo', toolkit.gettext({'1-a': 'foo', '2': 'bar', 'default': 'default'}, '1-b'))
-
-    def test_gettext_EnAsTheLastResort(self):
-        toolkit._default_langs = ['en-us']
-        self.assertEqual('right', toolkit.gettext({'a': 'wrong', 'en': 'right'}, 'probe'))
-        self.assertEqual('exact', toolkit.gettext({'a': 'wrong', 'en': 'right', 'probe': 'exact'}, 'probe'))
+    def test_FileName(self):
+        self.assertEqual('blob', File().name)
+        self.assertEqual('blob', File('foo/bar').name)
+        self.assertEqual('digest', File(digest='digest').name)
+        self.assertEqual('foo', File(meta={'filename': 'foo'}).name)
+        self.assertEqual('foo', File(meta={'name': 'foo'}).name)
+        self.assertEqual('foo', File(meta={'filename': 'foo', 'mime_type': 'image/png'}).name)
+        self.assertEqual('digest.png', File(digest='digest', meta={'mime_type': 'image/png'}).name)
 
 
 if __name__ == '__main__':
--
cgit v0.9.1