author    Aleksey Lim <alsroot@sugarlabs.org>  2014-04-27 14:52:25 (GMT)
committer Aleksey Lim <alsroot@sugarlabs.org>  2014-04-27 14:52:25 (GMT)
commit    2dbc9b554f322ea23b224d923d9a6475e33ad6e9 (patch)
tree      55df42ddf7a0ec8d4ca6ef007218b1056409dc0b
parent    046073b04229021ec53833a353ffd069d0a5b561 (diff)
Implementation polishing

* http.request does not load the posting stream before sending
* one-segment packets
* move node-related code to the node module
* Principal capabilities
* batch posting while pushing client offline updates
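The "Principal capabilities" item replaces the boolean admin/editor/translator flags on Principal (see sugar_network/node/auth.py below) with a capability bitmask plus an __enter__/__exit__ backup for temporary elevation. A minimal standalone sketch of that pattern, assuming one distinct bit per capability; the bit values and the has/grant/revoke helpers are illustrative, not the project's exact API:

    class Principal(str):
        """User identity string carrying capability bits (sketch only)."""

        CAP_AUTHOR_OVERRIDE = 1 << 0    # may edit resources it does not author
        CAP_CREATE_WITH_GUID = 1 << 1   # may POST a resource with an explicit GUID

        def __new__(cls, value, caps=0):
            self = str.__new__(cls, value)
            self._caps = caps
            self._backup = 0
            return self

        def has(self, cap):
            return bool(self._caps & cap)

        def grant(self, cap):
            self._caps |= cap

        def revoke(self, cap):
            self._caps &= ~cap

        # Temporary elevation: back the caps up on enter, restore on exit,
        # mirroring the __enter__/__exit__ pair in the patched class.
        def __enter__(self):
            self._backup = self._caps
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            self._caps = self._backup

    user = Principal('user@example.org')
    with user as elevated:
        elevated.grant(Principal.CAP_CREATE_WITH_GUID)
        assert elevated.has(Principal.CAP_CREATE_WITH_GUID)
    assert not user.has(Principal.CAP_CREATE_WITH_GUID)   # restored on exit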
-rw-r--r--  TODO | 14
-rwxr-xr-x  sugar-network-node | 1
-rw-r--r--  sugar_network/client/__init__.py | 10
-rw-r--r--  sugar_network/client/auth.py | 4
-rw-r--r--  sugar_network/client/journal.py | 4
-rw-r--r--  sugar_network/client/model.py | 62
-rw-r--r--  sugar_network/client/routes.py | 113
-rw-r--r--  sugar_network/db/__init__.py | 2
-rw-r--r--  sugar_network/db/blobs.py | 156
-rw-r--r--  sugar_network/db/directory.py | 57
-rw-r--r--  sugar_network/db/index.py | 20
-rw-r--r--  sugar_network/db/metadata.py | 6
-rw-r--r--  sugar_network/db/resource.py | 2
-rw-r--r--  sugar_network/db/routes.py | 119
-rw-r--r--  sugar_network/db/storage.py | 5
-rw-r--r--  sugar_network/db/volume.py | 86
-rw-r--r--  sugar_network/model/__init__.py | 244
-rw-r--r--  sugar_network/model/context.py | 8
-rw-r--r--  sugar_network/model/post.py | 11
-rw-r--r--  sugar_network/model/report.py | 2
-rw-r--r--  sugar_network/model/routes.py | 2
-rw-r--r--  sugar_network/node/auth.py | 49
-rw-r--r--  sugar_network/node/master.py | 21
-rw-r--r--  sugar_network/node/model.py | 521
-rw-r--r--  sugar_network/node/routes.py | 129
-rw-r--r--  sugar_network/node/slave.py | 19
-rw-r--r--  sugar_network/toolkit/__init__.py | 6
-rw-r--r--  sugar_network/toolkit/coroutine.py | 24
-rw-r--r--  sugar_network/toolkit/http.py | 74
-rw-r--r--  sugar_network/toolkit/packets.py (renamed from sugar_network/toolkit/parcel.py) | 298
-rw-r--r--  sugar_network/toolkit/router.py | 93
-rw-r--r--  sugar_network/toolkit/spec.py | 2
-rw-r--r--  tests/__init__.py | 9
-rw-r--r--  tests/units/client/__main__.py | 3
-rwxr-xr-x  tests/units/client/client_model.py | 213
-rwxr-xr-x  tests/units/client/client_routes.py (renamed from tests/units/client/routes.py) | 206
-rwxr-xr-x  tests/units/client/injector.py | 35
-rw-r--r--  tests/units/db/__main__.py | 2
-rwxr-xr-x  tests/units/db/blobs.py | 23
-rwxr-xr-x  tests/units/db/db_routes.py (renamed from tests/units/db/routes.py) | 187
-rwxr-xr-x  tests/units/db/index.py | 22
-rwxr-xr-x  tests/units/db/resource.py | 98
-rwxr-xr-x  tests/units/db/volume.py | 938
-rwxr-xr-x  tests/units/model/context.py | 127
-rwxr-xr-x  tests/units/model/model.py | 521
-rwxr-xr-x  tests/units/model/routes.py | 4
-rw-r--r--  tests/units/node/__main__.py | 4
-rwxr-xr-x  tests/units/node/master.py | 117
-rwxr-xr-x  tests/units/node/model.py | 978
-rwxr-xr-x  tests/units/node/node_model.py | 2780
-rwxr-xr-x  tests/units/node/node_routes.py (renamed from tests/units/node/node.py) | 487
-rwxr-xr-x  tests/units/node/slave.py | 117
-rw-r--r--  tests/units/toolkit/__main__.py | 2
-rwxr-xr-x  tests/units/toolkit/http.py | 149
-rwxr-xr-x  tests/units/toolkit/packets.py (renamed from tests/units/toolkit/parcel.py) | 464
-rwxr-xr-x  tests/units/toolkit/router.py | 18
-rwxr-xr-x  tests/units/toolkit/spec.py | 48
57 files changed, 5428 insertions, 4288 deletions
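The "batch posting" item shows up below in ClientRoutes._push (sugar_network/client/routes.py): offline edits are dumped by client.model.dump_volume() into per-record operations and sent to the node in a single POST ?cmd=apply request. A rough standalone sketch of that dump-then-batch shape, assuming a simplified record layout; the helper names and the JSON-lines encoding are illustrative stand-ins for the project's toolkit.packets.encode() wire format:

    import json

    def dump_records(resources):
        # Yield one {'op': ..., 'content': ...} dict per offline change,
        # mimicking the shape produced by client.model.dump_volume().
        for resource, docs in resources.items():
            for guid, props in docs.items():
                props = dict(props)
                created = props.pop('_created', False)
                if created:
                    op = {'method': 'POST', 'path': [resource]}
                    props['guid'] = guid
                else:
                    op = {'method': 'PUT', 'path': [resource, guid]}
                yield {'op': op, 'content': props}

    def encode_batch(records):
        # Pack every record into one request body (JSON lines here,
        # standing in for the packets format used by the commit).
        return '\n'.join(json.dumps(record, sort_keys=True) for record in records)

    offline = {
        'context': {
            'ctx1': {'_created': True, 'title': 'My activity'},
            'ctx2': {'title': 'Updated title'},
        },
    }
    body = encode_batch(dump_records(offline))
    # The client would now push everything at once, e.g.
    # POST /?cmd=apply with `body` as the content, then wipe the local queue.
    print(body)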
diff --git a/TODO b/TODO
index b841d22..6cef066 100644
--- a/TODO
+++ b/TODO
@@ -1,15 +1,17 @@
-- proxying as a tool to sort out downstream content
-- push local offline changes to the node on getting online
+- auth while offline posts
+- do not return Context.releases while non-slave sync
- deliver spawn events only to local subscribers
- test/run presolve
- if node relocates api calls, do it only once in toolkit.http
-- changed pulls should take into account accept_length
-- secure node-to-node sync
-- cache init sync pull
- switch auth from WWW-AUTHENTICATE to mutual authentication over the HTTPS
- restrict ACL.LOCAL routes only to localhost clients
-- prevent calling diff api cmd from clients to avoid disclosuring private props
- pull node changes periodically for checked-in contexts
+- refcount blobs on node and client sides to delete orphaned ones
+- secure node-to-node sync
+
+v1.0
+====
+- move API docs to sources to share them from `GET /` route
v2.0
====
diff --git a/sugar-network-node b/sugar-network-node
index 64249c7..ecb4ecb 100755
--- a/sugar-network-node
+++ b/sugar-network-node
@@ -69,6 +69,7 @@ class Application(application.Daemon):
auth=SugarAuth(node.data_root.value),
find_limit=node.find_limit.value)
self.jobs.spawn(volume.populate)
+ self.jobs.spawn(node_routes.populate)
logging.info('Listening for requests on %s:%s',
node.host.value, node.port.value)
diff --git a/sugar_network/client/__init__.py b/sugar_network/client/__init__.py
index 648d418..b985f90 100644
--- a/sugar_network/client/__init__.py
+++ b/sugar_network/client/__init__.py
@@ -15,7 +15,6 @@
import os
import logging
-from base64 import b64encode
from os.path import join, expanduser, exists
from sugar_network.toolkit import http, Option
@@ -166,10 +165,15 @@ def stability(context):
return value.split()
-def Connection(url=None, **args):
+def Connection(url=None, creds=None, **kwargs):
if url is None:
url = api.value
- return http.Connection(url, verify=not no_check_certificate.value, **args)
+ if creds is None and keyfile.value:
+ from sugar_network.client.auth import SugarCreds
+ creds = SugarCreds(keyfile.value)
+ return http.Connection(url,
+ auth_request={'method': 'GET', 'params': {'cmd': 'logon'}},
+ creds=creds, verify=not no_check_certificate.value, **kwargs)
def IPCConnection():
diff --git a/sugar_network/client/auth.py b/sugar_network/client/auth.py
index db95aa5..c1c86ed 100644
--- a/sugar_network/client/auth.py
+++ b/sugar_network/client/auth.py
@@ -15,11 +15,15 @@
import os
import hashlib
+import logging
from base64 import b64encode
from urllib2 import parse_http_list, parse_keqv_list
from os.path import abspath, expanduser, dirname, exists
+_logger = logging.getLogger('client.auth')
+
+
class BasicCreds(object):
def __init__(self, login, password):
diff --git a/sugar_network/client/journal.py b/sugar_network/client/journal.py
index 5a6f894..de0fbf8 100644
--- a/sugar_network/client/journal.py
+++ b/sugar_network/client/journal.py
@@ -141,13 +141,13 @@ class Routes(object):
subrequest = Request(method='PUT', document='artifact',
guid=subguid, prop='preview')
subrequest.content_type = 'image/png'
- with file(preview_path, 'rb') as subrequest.content_stream:
+ with file(preview_path, 'rb') as subrequest.content:
self.fallback(subrequest)
subrequest = Request(method='PUT', document='artifact',
guid=subguid, prop='data')
subrequest.content_type = get(guid, 'mime_type') or 'application/octet'
- with file(data_path, 'rb') as subrequest.content_stream:
+ with file(data_path, 'rb') as subrequest.content:
self.fallback(subrequest)
def journal_update(self, guid, data=None, **kwargs):
diff --git a/sugar_network/client/model.py b/sugar_network/client/model.py
index fd85a4d..0c5991f 100644
--- a/sugar_network/client/model.py
+++ b/sugar_network/client/model.py
@@ -20,8 +20,8 @@ from sugar_network.model.user import User
from sugar_network.model.post import Post
from sugar_network.model.report import Report
from sugar_network.model.context import Context as _Context
+from sugar_network.toolkit.router import ACL, File
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit.router import ACL
_logger = logging.getLogger('client.model')
@@ -43,3 +43,63 @@ class Volume(db.Volume):
db.Volume.__init__(self, root, resources)
for directory in self.values():
directory.metadata['author'].acl |= ACL.LOCAL
+
+
+def dump_volume(volume):
+ for resource, directory in volume.items():
+ if not directory.has_seqno:
+ continue
+
+ for doc in directory:
+ if not doc['seqno'] or doc['state'] != 'active':
+ continue
+
+ dump = {}
+ op = dump['op'] = {}
+ props = dump['content'] = {}
+ keys = []
+ postfix = []
+
+ for name, prop in doc.metadata.items():
+ meta = doc.meta(name)
+ if meta is None or 'seqno' not in meta:
+ continue
+ if isinstance(prop, db.Aggregated):
+ for aggid, value in doc.repr(name):
+ aggop = {
+ 'method': 'POST',
+ 'path': [resource, doc.guid, name, aggid],
+ }
+ if isinstance(value, File):
+ value.meta['op'] = aggop
+ postfix.append(value)
+ else:
+ postfix.append({'op': aggop, 'content': value})
+ elif prop.acl & (ACL.WRITE | ACL.CREATE):
+ if isinstance(prop, db.Blob):
+ blob = volume.blobs.get(doc[name])
+ blob.meta['op'] = {
+ 'method': 'PUT',
+ 'path': [resource, doc.guid, name],
+ }
+ postfix.append(blob)
+ else:
+ if isinstance(prop, db.Reference):
+ keys.append(name)
+ props[name] = doc[name]
+
+ if 'seqno' in doc.meta('guid'):
+ keys.append('guid')
+ props['guid'] = doc.guid
+ op['method'] = 'POST'
+ op['path'] = [resource]
+ else:
+ op['method'] = 'PUT'
+ op['path'] = [resource, doc.guid]
+
+ if keys:
+ dump['keys'] = keys
+
+ yield dump
+ for dump in postfix:
+ yield dump
diff --git a/sugar_network/client/routes.py b/sugar_network/client/routes.py
index f618df3..8a037ee 100644
--- a/sugar_network/client/routes.py
+++ b/sugar_network/client/routes.py
@@ -20,11 +20,12 @@ from os.path import join
from sugar_network import db, client, node, toolkit
from sugar_network.model import FrontRoutes
+from sugar_network.client import model
from sugar_network.client.journal import Routes as JournalRoutes
from sugar_network.toolkit.router import Request, Router, Response
from sugar_network.toolkit.router import route, fallbackroute
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import netlink, zeroconf, coroutine, http, parcel
+from sugar_network.toolkit import netlink, zeroconf, coroutine, http, packets
from sugar_network.toolkit import ranges, lsb_release, enforce
@@ -54,10 +55,6 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
self._no_subscription = no_subscription
self._pull_r = toolkit.Bin(
join(home_volume.root, 'var', 'pull'), [[1, None]])
- self._push_r = toolkit.Bin(
- join(home_volume.root, 'var', 'push'), [[1, None]])
- self._push_guids_map = toolkit.Bin(
- join(home_volume.root, 'var', 'push-guids'), {})
def connect(self, api=None):
if self._connect_jobs:
@@ -123,7 +120,7 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
result = self.fallback()
result['route'] = 'proxy'
else:
- result = {'roles': [], 'route': 'offline'}
+ result = {'route': 'offline'}
result['guid'] = self._creds.login
return result
@@ -141,7 +138,7 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
for logfile in logs:
with file(logfile) as f:
self.fallback(method='POST', path=['report', guid, 'logs'],
- content_stream=f, content_type='text/plain')
+ content=f, content_type='text/plain')
yield {'event': 'done', 'guid': guid}
@route('GET', ['context', None], cmd='launch', arguments={'args': list},
@@ -196,7 +193,7 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
if pins and item['mtime'] > checkin['mtime']:
pull = Request(method='GET',
path=[checkin.metadata.name, checkin.guid], cmd='diff')
- self._sync_jobs.spawn(self._pull_checkin, pull, None, 'range')
+ self._sync_jobs.spawn(self._pull_checkin, pull, None, 'ranges')
return result
@route('GET', [None, None], mime_type='application/json')
@@ -353,7 +350,7 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
_logger.debug('Checkin %r context', local_context.guid)
pull = Request(method='GET',
path=['context', local_context.guid], cmd='diff')
- self._pull_checkin(pull, None, 'range')
+ self._pull_checkin(pull, None, 'ranges')
pins = local_context['pins']
if pin and pin not in pins:
contexts.update(local_context.guid, {'pins': pins + [pin]})
@@ -374,79 +371,69 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
def _pull_checkin(self, request, response, header_key):
request.headers[header_key] = self._pull_r.value
- patch = self.fallback(request, response)
- __, committed = self._local.volume.patch(next(parcel.decode(patch)),
- shift_seqno=False)
- ranges.exclude(self._pull_r.value, committed)
+ packet = packets.decode(self.fallback(request, response))
- def _sync(self):
- _logger.info('Start pulling updates')
+ volume = self._local.volume
+ volume[request.resource].patch(request.guid, packet['patch'])
+ for blob in packet:
+ volume.blobs.patch(blob)
+ ranges.exclude(self._pull_r.value, packet['ranges'])
+
+ def _pull(self):
+ _logger.debug('Start pulling checkin updates')
+ response = Response()
for directory in self._local.volume.values():
if directory.empty:
continue
request = Request(method='GET',
path=[directory.metadata.name], cmd='diff')
- response = Response()
while True:
- request.headers['range'] = self._pull_r.value
- r, guids = self.fallback(request, response)
- if not r:
+ request.headers['ranges'] = self._pull_r.value
+ diff = self.fallback(request, response)
+ if not diff:
break
- for guid in guids:
+ for guid, r in diff.items():
checkin = Request(method='GET',
path=[request.resource, guid], cmd='diff')
- self._pull_checkin(checkin, response, 'range')
- ranges.exclude(self._pull_r.value, r)
- self._pull_r.commit()
- this.localcast({'event': 'sync', 'state': 'pull'})
-
- """
- resource = None
- metadata = None
-
- for diff in self._local.volume.diff(self._push_r.value, blobs=False):
- if 'resource' in diff:
- resource = diff['resource']
- metadata = self._local.volume[resource]
- elif 'commit' in diff:
- ranges.exclude(self._push_r.value, diff['commit'])
- self._push_r.commit()
- # No reasons to keep failure reports after pushing
- self._local.volume['report'].wipe()
- else:
- props = {}
- blobs = []
- for prop, meta in diff['patch'].items():
- if isinstance(metadata[prop], db.Blob):
- blobs.application
-
-
-
- props[prop] = meta['value']
-
-
-
- if isinstance(diff, File):
- with file(diff.path, 'rb') as f:
- self.fallback(method='POST')
+ self._pull_checkin(checkin, response, 'ranges')
+ ranges.exclude(self._pull_r.value, r)
+ def _push(self):
+ volume = self._local.volume
+ _logger.debug('Start pushing offline updates')
+ dump = packets.encode(model.dump_volume(volume))
+ request = Request(method='POST', cmd='apply', content=dump)
+ self.fallback(request, Response())
+ _logger.debug('Wipeout offline updates')
+ for directory in volume.values():
+ if directory.empty:
+ continue
+ if directory.has_noseqno:
+ directory.dilute()
+ else:
+ directory.wipe()
- pass
+ _logger.debug('Wipeout offline blobs')
+ for blob in volume.blobs.walk():
+ if int(blob.meta['x-seqno']):
+ volume.blobs.wipe(blob)
- if 'guid' in props:
- request = Request(method='POST', path=[resource])
- else:
- request = Request(method='PUT', path=[resource, guid])
- request.content_type = 'application/json'
- request.content = props
- self.fallback(request)
- """
+ def _sync(self):
+ try:
+ self._pull()
+ if self._local.volume.has_seqno:
+ self._push()
+ except:
+ this.localcast({'event': 'sync', 'state': 'failed'})
+ raise
+ else:
+ this.localcast({'event': 'sync', 'state': 'done'})
class _LocalRoutes(db.Routes, Router):
diff --git a/sugar_network/db/__init__.py b/sugar_network/db/__init__.py
index d6b12c5..6a6b27c 100644
--- a/sugar_network/db/__init__.py
+++ b/sugar_network/db/__init__.py
@@ -351,7 +351,7 @@ Volume
from sugar_network.db.metadata import \
stored_property, indexed_property, Property, Numeric, Boolean, Dict, \
- Enum, List, Aggregated, Blob, Localized
+ Enum, List, Aggregated, Blob, Localized, Reference
from sugar_network.db.index import index_flush_timeout, \
index_flush_threshold, index_write_queue
from sugar_network.db.resource import Resource
diff --git a/sugar_network/db/blobs.py b/sugar_network/db/blobs.py
index ce5bb1b..94e914c 100644
--- a/sugar_network/db/blobs.py
+++ b/sugar_network/db/blobs.py
@@ -43,6 +43,8 @@ class Blobs(object):
def path(self, path=None):
if path is None:
return join(self._root, 'files')
+ if isinstance(path, File):
+ return self._blob_path(path.digest)
if isinstance(path, basestring):
path = path.split(os.sep)
if len(path) == 1 and len(path[0]) == 40 and '.' not in path[0]:
@@ -51,7 +53,47 @@ class Blobs(object):
return join(assets.PATH, *path[1:])
return join(self._root, 'files', *path)
+ def walk(self, path=None, include=None, recursive=True, all_files=False):
+ if path is None:
+ is_files = False
+ root = self._blob_path()
+ else:
+ path = path.strip('/').split('/')
+ enforce(not [i for i in path if i == '..'],
+ http.BadRequest, 'Relative paths are not allowed')
+ is_files = True
+ root = self.path(path)
+
+ for root, __, files in os.walk(root):
+ if include is not None and \
+ not ranges.contains(include, int(os.stat(root).st_mtime)):
+ continue
+ api_path = root[len(self._root) + 7:] if is_files else None
+ for filename in files:
+ if filename.endswith(_META_SUFFIX):
+ if not all_files:
+ digest = filename[:-len(_META_SUFFIX)]
+ path = join(root, digest)
+ yield File(path, digest, _read_meta(path))
+ continue
+ elif not all_files:
+ continue
+ yield root, api_path, filename
+ if not recursive:
+ break
+
def post(self, content, mime_type=None, digest_to_assert=None, meta=None):
+ if isinstance(content, File):
+ seqno = self._seqno.next()
+ meta = content.meta.copy()
+ meta['x-seqno'] = str(seqno)
+ path = self._blob_path(content.digest)
+ if not exists(dirname(path)):
+ os.makedirs(dirname(path))
+ os.link(content.path, path)
+ _write_meta(path, meta, seqno)
+ return File(path, content.digest, meta)
+
if meta is None:
meta = []
meta.append(('content-type',
@@ -94,9 +136,8 @@ class Blobs(object):
seqno = self._seqno.next()
meta.append(('content-length', str(blob.tell())))
meta.append(('x-seqno', str(seqno)))
- _write_meta(path, meta, seqno)
blob.name = path
- os.utime(path, (seqno, seqno))
+ _write_meta(path, meta, seqno)
_logger.debug('Post %r file', path)
@@ -121,75 +162,67 @@ class Blobs(object):
def delete(self, path):
self._delete(self.path(path), None)
+ def wipe(self, path):
+ path = self.path(path)
+ if exists(path + _META_SUFFIX):
+ os.unlink(path + _META_SUFFIX)
+ if exists(path):
+ _logger.debug('Wipe %r file', path)
+ os.unlink(path)
+
def populate(self, path=None, recursive=True):
for __ in self.diff([[1, None]], path or '', recursive):
pass
def diff(self, r, path=None, recursive=True):
- if path is None:
- is_files = False
- root = self._blob_path()
- else:
- path = path.strip('/').split('/')
- enforce(not [i for i in path if i == '..'],
- http.BadRequest, 'Relative paths are not allowed')
- is_files = True
- root = self.path(path)
+ is_files = path is not None
checkin_seqno = None
- for root, __, files in os.walk(root):
- if not ranges.contains(r, int(os.stat(root).st_mtime)):
- continue
- rel_root = root[len(self._root) + 7:] if is_files else None
- for filename in files:
- path = join(root, filename)
- if filename.endswith(_META_SUFFIX):
- seqno = int(os.stat(path).st_mtime)
- path = path[:-len(_META_SUFFIX)]
- meta = None
- if exists(path):
- stat = os.stat(path)
- if seqno != int(stat.st_mtime):
- _logger.debug('Found updated %r blob', path)
- seqno = self._seqno.next()
- meta = _read_meta(path)
- meta['x-seqno'] = str(seqno)
- meta['content-length'] = str(stat.st_size)
- _write_meta(path, meta, seqno)
- os.utime(path, (seqno, seqno))
- if not ranges.contains(r, seqno):
- continue
- if meta is None:
+ for root, rel_root, filename in self.walk(path, r, recursive, True):
+ path = join(root, filename)
+ if filename.endswith(_META_SUFFIX):
+ seqno = int(os.stat(path).st_mtime)
+ path = path[:-len(_META_SUFFIX)]
+ meta = None
+ if exists(path):
+ stat = os.stat(path)
+ if seqno != int(stat.st_mtime):
+ _logger.debug('Found updated %r blob', path)
+ seqno = self._seqno.next()
meta = _read_meta(path)
- if is_files:
- digest = join(rel_root, filename[:-len(_META_SUFFIX)])
- meta['path'] = digest
- else:
- digest = filename[:-len(_META_SUFFIX)]
- elif not is_files or exists(path + _META_SUFFIX):
+ meta['x-seqno'] = str(seqno)
+ meta['content-length'] = str(stat.st_size)
+ _write_meta(path, meta, seqno)
+ if not ranges.contains(r, seqno):
continue
+ if meta is None:
+ meta = _read_meta(path)
+ if is_files:
+ digest = join(rel_root, filename[:-len(_META_SUFFIX)])
+ meta['path'] = digest
else:
- _logger.debug('Found new %r blob', path)
- mime_type = mimetypes.guess_type(filename)[0] or \
- 'application/octet-stream'
- if checkin_seqno is None:
- checkin_seqno = self._seqno.next()
- seqno = checkin_seqno
- meta = [('content-type', mime_type),
- ('content-length', str(os.stat(path).st_size)),
- ('x-seqno', str(seqno)),
- ]
- _write_meta(path, meta, seqno)
- os.utime(path, (seqno, seqno))
- if not ranges.contains(r, seqno):
- continue
- digest = join(rel_root, filename)
- meta.append(('path', digest))
- yield File(path, digest, meta)
- if not recursive:
- break
+ digest = filename[:-len(_META_SUFFIX)]
+ elif not is_files or exists(path + _META_SUFFIX):
+ continue
+ else:
+ _logger.debug('Found new %r blob', path)
+ mime_type = mimetypes.guess_type(filename)[0] or \
+ 'application/octet-stream'
+ if checkin_seqno is None:
+ checkin_seqno = self._seqno.next()
+ seqno = checkin_seqno
+ meta = [('content-type', mime_type),
+ ('content-length', str(os.stat(path).st_size)),
+ ('x-seqno', str(seqno)),
+ ]
+ _write_meta(path, meta, seqno)
+ if not ranges.contains(r, seqno):
+ continue
+ digest = join(rel_root, filename)
+ meta.append(('path', digest))
+ yield File(path, digest, meta)
- def patch(self, patch, seqno):
+ def patch(self, patch, seqno=0):
if 'path' in patch.meta:
path = self.path(patch.meta.pop('path'))
else:
@@ -207,7 +240,6 @@ class Blobs(object):
meta = patch.meta
meta['x-seqno'] = str(seqno)
_write_meta(path, meta, seqno)
- os.utime(path, (seqno, seqno))
def _delete(self, path, seqno):
if exists(path + _META_SUFFIX):
@@ -228,6 +260,8 @@ class Blobs(object):
def _write_meta(path, meta, seqno):
+ if seqno:
+ os.utime(path, (seqno, seqno))
path += _META_SUFFIX
with toolkit.new_file(path) as f:
for key, value in meta.items() if isinstance(meta, dict) else meta:
diff --git a/sugar_network/db/directory.py b/sugar_network/db/directory.py
index 17ff27d..79e7332 100644
--- a/sugar_network/db/directory.py
+++ b/sugar_network/db/directory.py
@@ -27,6 +27,10 @@ from sugar_network.toolkit import enforce
# To invalidate existed index on stcuture changes
_LAYOUT_VERSION = 4
+_STATE_HAS_SEQNO = 1
+_STATE_HAS_NOSEQNO = 2
+
+
_logger = logging.getLogger('db.directory')
@@ -53,12 +57,26 @@ class Directory(object):
self._storage = None
self._index = None
self._broadcast = broadcast
+ self._state = toolkit.Bin(
+ join(root, 'index', self.metadata.name, 'state'), 0)
self._open()
@property
def empty(self):
- return True if self._index is None else (self._index.mtime == 0)
+ return not self._state.value & (_STATE_HAS_SEQNO | _STATE_HAS_NOSEQNO)
+
+ @property
+ def has_seqno(self):
+ return self._state.value & _STATE_HAS_SEQNO
+
+ @property
+ def has_noseqno(self):
+ return self._state.value & _STATE_HAS_NOSEQNO
+
+ def __iter__(self):
+ for guid in self._storage.walk(0):
+ yield self.get(guid)
def wipe(self):
self.close()
@@ -67,8 +85,23 @@ class Directory(object):
ignore_errors=True)
shutil.rmtree(join(self._root, 'db', self.metadata.name),
ignore_errors=True)
+ self._state.value = 0
self._open()
+ def dilute(self):
+ for doc in self:
+ if 'seqno' in doc.record.get('guid'):
+ self._index.delete(doc.guid, self._postdelete, doc.guid, None)
+ continue
+ doc.record.unset('seqno')
+ for prop in self.metadata.keys():
+ meta = doc.record.get(prop)
+ if meta is None or 'seqno' not in meta:
+ continue
+ meta.pop('seqno')
+ doc.record.set(prop, **meta)
+ self._state.value ^= _STATE_HAS_SEQNO
+
def close(self):
"""Flush index write pending queue and close the index."""
if self._index is None:
@@ -158,9 +191,9 @@ class Directory(object):
"""
found = False
- migrate = (self._index.mtime == 0)
+ migrate = self.empty
- for guid in self._storage.walk(self._index.mtime):
+ for guid in self._storage.walk(self._state.mtime):
if not found:
_logger.info('Start populating %r index', self.metadata.name)
found = True
@@ -175,7 +208,7 @@ class Directory(object):
meta = record.get(name)
if meta is not None:
props[name] = meta['value']
- self._index.store(guid, props)
+ self._index.store(guid, props, self._preindex)
yield
except Exception:
_logger.exception('Cannot populate %r in %r, invalidate it',
@@ -195,7 +228,7 @@ class Directory(object):
for doc in docs:
yield doc
- def patch(self, guid, patch, seqno=None):
+ def patch(self, guid, patch, seqno=False):
"""Apply changes for documents."""
doc = self.resource(guid, self._storage.get(guid))
merged = False
@@ -239,6 +272,10 @@ class Directory(object):
doc = self.resource(guid, self._storage.get(guid), changes)
for prop in self.metadata:
enforce(doc[prop] is not None, 'Empty %r property', prop)
+ if changes.get('seqno'):
+ self._state.value |= _STATE_HAS_SEQNO
+ else:
+ self._state.value |= _STATE_HAS_NOSEQNO
return doc
def _prestore(self, guid, changes, event):
@@ -253,15 +290,21 @@ class Directory(object):
return None
for prop in self.metadata.keys():
enforce(doc[prop] is not None, 'Empty %r property', prop)
+ if changes.get('seqno'):
+ self._state.value |= _STATE_HAS_SEQNO
+ else:
+ self._state.value |= _STATE_HAS_NOSEQNO
return doc
def _postdelete(self, guid, event):
self._storage.delete(guid)
- self.broadcast(event)
+ if event:
+ self.broadcast(event)
def _postcommit(self):
self._seqno.commit()
- self.broadcast({'event': 'commit', 'mtime': self._index.mtime})
+ self._state.commit()
+ self.broadcast({'event': 'commit', 'mtime': self._state.mtime})
def _save_layout(self):
path = join(self._root, 'index', self.metadata.name, 'layout')
diff --git a/sugar_network/db/index.py b/sugar_network/db/index.py
index 0270dd4..b46fe1b 100644
--- a/sugar_network/db/index.py
+++ b/sugar_network/db/index.py
@@ -13,12 +13,10 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import os
import re
import time
import shutil
import logging
-from os.path import exists, join
import xapian
@@ -60,21 +58,12 @@ class IndexReader(object):
self._db = None
self._props = {}
self._path = root
- self._mtime_path = join(self._path, 'mtime')
self._commit_cb = commit_cb
for name, prop in self.metadata.items():
if prop.indexed:
self._props[name] = prop
- @property
- def mtime(self):
- """UNIX seconds of the last `commit()` call."""
- if exists(self._mtime_path):
- return int(os.stat(self._mtime_path).st_mtime)
- else:
- return 0
-
def ensure_open(self):
pass
@@ -418,17 +407,10 @@ class IndexWriter(IndexReader):
self._db.commit()
else:
self._db.flush()
-
- checkpoint = time.time()
- if exists(self._mtime_path):
- os.utime(self._mtime_path, (checkpoint, checkpoint))
- else:
- with file(self._mtime_path, 'w'):
- pass
self._pending_updates = 0
_logger.debug('Commit to %r took %s seconds',
- self.metadata.name, checkpoint - ts)
+ self.metadata.name, time.time() - ts)
if self._commit_cb is not None:
self._commit_cb()
diff --git a/sugar_network/db/metadata.py b/sugar_network/db/metadata.py
index e820fc9..d7d9065 100644
--- a/sugar_network/db/metadata.py
+++ b/sugar_network/db/metadata.py
@@ -149,6 +149,8 @@ class Property(object):
enforce(name == 'guid' or prefix != GUID_PREFIX,
'Prefix %r is reserved for internal needs in %r',
GUID_PREFIX, name)
+ enforce(acl ^ ACL.AUTHOR or acl & ACL.AUTH,
+ 'ACL.AUTHOR without ACL.AUTH')
self.setter = None
self.on_get = lambda self, x: x
@@ -202,6 +204,10 @@ class Property(object):
ACL.NAMES[mode], self.name)
+class Reference(Property):
+ pass
+
+
class Boolean(Property):
def typecast(self, value):
diff --git a/sugar_network/db/resource.py b/sugar_network/db/resource.py
index 2c2e46b..7560024 100644
--- a/sugar_network/db/resource.py
+++ b/sugar_network/db/resource.py
@@ -168,7 +168,7 @@ class Resource(object):
def diff(self, r, out_r=None):
patch = {}
for name, prop in self.metadata.items():
- if name == 'seqno' or prop.acl & (ACL.CALC | ACL.LOCAL):
+ if name == 'seqno' or prop.acl & ACL.LOCAL:
continue
meta = self.meta(name)
if meta is None:
diff --git a/sugar_network/db/routes.py b/sugar_network/db/routes.py
index a1bb75e..0ea1305 100644
--- a/sugar_network/db/routes.py
+++ b/sugar_network/db/routes.py
@@ -15,21 +15,17 @@
# pylint: disable-msg=W0611
-import re
import logging
from contextlib import contextmanager
from sugar_network import toolkit
from sugar_network.db.metadata import Aggregated
from sugar_network.toolkit.router import ACL, File
-from sugar_network.toolkit.router import route, postroute, fallbackroute
+from sugar_network.toolkit.router import route, fallbackroute, preroute
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import http, parcel, ranges, enforce
+from sugar_network.toolkit import http, ranges, enforce
-_GUID_RE = re.compile('[a-zA-Z0-9_+-.]+$')
-_GROUPED_DIFF_LIMIT = 1024
-
_logger = logging.getLogger('db.routes')
@@ -37,18 +33,12 @@ class Routes(object):
def __init__(self, volume, find_limit=None):
this.volume = self.volume = volume
+ this.add_property('resource', _get_resource)
self._find_limit = find_limit
- @postroute
- def postroute(self, result, exception):
- request = this.request
- if not request.guid:
- return result
- pull = request.headers['pull']
- if pull is None:
- return result
- this.response.content_type = 'application/octet-stream'
- return self._object_diff(pull)
+ @preroute
+ def __preroute__(self, op):
+ this.reset_property('resource')
@route('POST', [None], acl=ACL.AUTH, mime_type='application/json')
def create(self):
@@ -71,11 +61,7 @@ class Routes(object):
@route('PUT', [None, None, None], acl=ACL.AUTH | ACL.AUTHOR)
def update_prop(self):
request = this.request
- if request.content is None:
- value = request.content_stream
- else:
- value = request.content
- request.content = {request.prop: value}
+ request.content = {request.prop: request.content}
self.update()
@route('DELETE', [None, None], acl=ACL.AUTH | ACL.AUTHOR)
@@ -133,17 +119,17 @@ class Routes(object):
return self.get_prop()
@route('POST', [None, None, None],
- acl=ACL.AUTH, mime_type='application/json')
+ acl=ACL.AUTH | ACL.AGG_AUTHOR, mime_type='application/json')
def insert_to_aggprop(self):
return self._aggpost(ACL.INSERT)
@route('PUT', [None, None, None, None],
- acl=ACL.AUTH, mime_type='application/json')
+ acl=ACL.AUTH | ACL.AGG_AUTHOR, mime_type='application/json')
def update_aggprop(self):
self._aggpost(ACL.REPLACE)
@route('DELETE', [None, None, None, None],
- acl=ACL.AUTH, mime_type='application/json')
+ acl=ACL.AUTH | ACL.AGG_AUTHOR, mime_type='application/json')
def remove_from_aggprop(self):
self._aggpost(ACL.REMOVE)
@@ -180,80 +166,17 @@ class Routes(object):
del authors[user]
directory.update(request.guid, {'author': authors})
- @route('GET', [None], cmd='diff', mime_type='application/json')
- def grouped_diff(self, key):
- if not key:
- key = 'guid'
- in_r = this.request.headers['range'] or [[1, None]]
- out_r = []
- diff = set()
-
- for doc in self.volume[this.request.resource].diff(in_r):
- diff.add(doc.guid)
- if len(diff) > _GROUPED_DIFF_LIMIT:
- break
- ranges.include(out_r, doc['seqno'], doc['seqno'])
- doc.diff(in_r, out_r)
-
- return out_r, list(diff)
-
- @route('GET', [None, None], cmd='diff')
- def object_diff(self):
- return self._object_diff(this.request.headers['range'])
-
@fallbackroute('GET', ['blobs'])
def blobs(self):
return self.volume.blobs.get(this.request.guid)
- def _object_diff(self, in_r):
- request = this.request
- doc = self.volume[request.resource][request.guid]
- enforce(doc.exists, http.NotFound, 'Resource not found')
-
- out_r = []
- if in_r is None:
- in_r = [[1, None]]
- patch = doc.diff(in_r, out_r)
- if not patch:
- return parcel.encode([(None, None, [])], compresslevel=0)
-
- diff = [{'resource': request.resource},
- {'guid': request.guid, 'patch': patch},
- ]
-
- def add_blob(blob):
- if not isinstance(blob, File):
- return
- seqno = int(blob.meta['x-seqno'])
- ranges.include(out_r, seqno, seqno)
- diff.append(blob)
-
- for prop, meta in patch.items():
- prop = doc.metadata[prop]
- value = prop.reprcast(meta['value'])
- if isinstance(prop, Aggregated):
- for __, aggvalue in value:
- add_blob(aggvalue)
- else:
- add_blob(value)
- diff.append({'commit': out_r})
-
- return parcel.encode([(None, None, diff)], compresslevel=0)
-
@contextmanager
def _post(self, access):
content = this.request.content
enforce(isinstance(content, dict), http.BadRequest, 'Invalid value')
if access == ACL.CREATE:
- guid = content.get('guid')
- if guid:
- enforce(this.principal and this.principal.admin,
- http.BadRequest, 'GUID should not be specified')
- enforce(_GUID_RE.match(guid) is not None,
- http.BadRequest, 'Malformed GUID')
- else:
- guid = toolkit.uuid()
+ guid = content.get('guid') or toolkit.uuid()
doc = self.volume[this.request.resource][guid]
enforce(not doc.exists, 'Resource already exists')
doc.posts['guid'] = guid
@@ -261,6 +184,8 @@ class Routes(object):
if name not in content and prop.default is not None:
doc.posts[name] = prop.default
else:
+ enforce('guid' not in content, http.BadRequest,
+ 'GUID cannot be changed')
doc = self.volume[this.request.resource][this.request.guid]
enforce(doc.available, 'Resource not found')
this.resource = doc
@@ -334,27 +259,16 @@ class Routes(object):
'Property is not aggregated')
prop.assert_access(acl)
- def enforce_authority(author):
- if prop.acl & ACL.AUTHOR:
- author = doc['author']
- enforce(not author or this.principal in author or
- this.principal and this.principal.admin,
- http.Forbidden, 'Authors only')
-
aggid = request.key
if aggid and aggid in doc[request.prop]:
aggvalue = doc[request.prop][aggid]
- enforce_authority(aggvalue.get('author'))
prop.subteardown(aggvalue['value'])
else:
enforce(acl != ACL.REMOVE, http.NotFound, 'No aggregated item')
- enforce_authority(None)
aggvalue = {}
if acl != ACL.REMOVE:
- value = prop.subtypecast(
- request.content_stream if request.content is None
- else request.content)
+ value = prop.subtypecast(request.content)
if type(value) is tuple:
aggid_, value = value
enforce(not aggid or aggid == aggid_, http.BadRequest,
@@ -373,3 +287,8 @@ class Routes(object):
self.volume[request.resource].update(request.guid, doc.posts)
return aggid
+
+
+def _get_resource():
+ request = this.request
+ return this.volume[request.resource][request.guid]
diff --git a/sugar_network/db/storage.py b/sugar_network/db/storage.py
index bbb50db..87d08b3 100644
--- a/sugar_network/db/storage.py
+++ b/sugar_network/db/storage.py
@@ -132,3 +132,8 @@ class Record(object):
# Touch directory to let it possible to crawl it on startup
# when index was not previously closed properly
os.utime(join(self._root, '..'), (mtime, mtime))
+
+ def unset(self, prop):
+ meta_path = join(self._root, prop)
+ if exists(meta_path):
+ os.unlink(meta_path)
diff --git a/sugar_network/db/volume.py b/sugar_network/db/volume.py
index 382176c..25ae1bb 100644
--- a/sugar_network/db/volume.py
+++ b/sugar_network/db/volume.py
@@ -15,16 +15,14 @@
import os
import logging
-from copy import deepcopy
from os.path import exists, join, abspath
from sugar_network import toolkit
from sugar_network.db.directory import Directory
from sugar_network.db.index import IndexWriter
from sugar_network.db.blobs import Blobs
-from sugar_network.toolkit.router import File
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import http, coroutine, ranges, enforce
+from sugar_network.toolkit import http, coroutine, enforce
_logger = logging.getLogger('db.volume')
@@ -70,6 +68,20 @@ class Volume(dict):
return False
return True
+ @property
+ def has_seqno(self):
+ for directory in self.values():
+ if directory.has_seqno:
+ return True
+ return False
+
+ @property
+ def has_noseqno(self):
+ for directory in self.values():
+ if directory.has_noseqno:
+ return True
+ return False
+
def close(self):
"""Close operations with the server."""
_logger.info('Closing documents in %r', self._root)
@@ -83,74 +95,6 @@ class Volume(dict):
for __ in cls.populate():
coroutine.dispatch()
- def diff(self, r, exclude=None, files=None, blobs=True, one_way=False):
- if exclude:
- include = deepcopy(r)
- ranges.exclude(include, exclude)
- else:
- include = r
- last_seqno = None
- found = False
-
- try:
- for resource, directory in self.items():
- if one_way and directory.resource.one_way:
- continue
- yield {'resource': resource}
- for doc in directory.diff(r):
- patch = doc.diff(include)
- if patch:
- yield {'guid': doc.guid, 'patch': patch}
- found = True
- last_seqno = max(last_seqno, doc['seqno'])
- if blobs:
- for blob in self.blobs.diff(include):
- seqno = int(blob.meta.pop('x-seqno'))
- yield blob
- found = True
- last_seqno = max(last_seqno, seqno)
- for dirpath in files or []:
- for blob in self.blobs.diff(include, dirpath):
- seqno = int(blob.meta.pop('x-seqno'))
- yield blob
- found = True
- last_seqno = max(last_seqno, seqno)
- except StopIteration:
- pass
-
- if found:
- commit_r = include if exclude else deepcopy(r)
- ranges.exclude(commit_r, last_seqno + 1, None)
- ranges.exclude(r, None, last_seqno)
- yield {'commit': commit_r}
-
- def patch(self, records, shift_seqno=True):
- directory = None
- committed = []
- seqno = None if shift_seqno else False
-
- for record in records:
- if isinstance(record, File):
- if seqno is None:
- seqno = self.seqno.next()
- self.blobs.patch(record, seqno or 0)
- continue
- resource = record.get('resource')
- if resource:
- directory = self[resource]
- continue
- guid = record.get('guid')
- if guid is not None:
- seqno = directory.patch(guid, record['patch'], seqno)
- continue
- commit = record.get('commit')
- if commit is not None:
- ranges.include(committed, commit)
- continue
- raise http.BadRequest('Malformed patch')
-
- return seqno, committed
-
def broadcast(self, event):
if not self.mute:
if event['event'] == 'commit':
diff --git a/sugar_network/model/__init__.py b/sugar_network/model/__init__.py
index 3f6aef1..6a314f5 100644
--- a/sugar_network/model/__init__.py
+++ b/sugar_network/model/__init__.py
@@ -15,22 +15,17 @@
import os
import gettext
-import logging
-import mimetypes
from os.path import join
import xapian
-from sugar_network import toolkit, db
+from sugar_network import db
from sugar_network.model.routes import FrontRoutes
-from sugar_network.toolkit.spec import parse_version, parse_requires
-from sugar_network.toolkit.spec import EMPTY_LICENSE
-from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit.bundle import Bundle
-from sugar_network.toolkit.router import ACL
-from sugar_network.toolkit import i18n, http, svg_to_png, enforce
+ICON_SIZE = 55
+LOGO_SIZE = 140
+
CONTEXT_TYPES = [
'activity', 'group', 'package', 'book',
]
@@ -58,11 +53,6 @@ RESOURCES = (
'sugar_network.model.user',
)
-ICON_SIZE = 55
-LOGO_SIZE = 140
-
-_logger = logging.getLogger('model')
-
class Rating(db.List):
@@ -72,229 +62,3 @@ class Rating(db.List):
def slotting(self, value):
rating = float(value[1]) / value[0] if value[0] else 0
return xapian.sortable_serialise(rating)
-
-
-class Release(object):
-
- def typecast(self, release):
- if this.resource.exists and \
- 'activity' not in this.resource['type'] and \
- 'book' not in this.resource['type']:
- return release
- if not isinstance(release, dict):
- __, release = load_bundle(
- this.volume.blobs.post(release, this.request.content_type),
- context=this.request.guid)
- return release['bundles']['*-*']['blob'], release
-
- def reprcast(self, release):
- return this.volume.blobs.get(release['bundles']['*-*']['blob'])
-
- def teardown(self, release):
- if this.resource.exists and \
- 'activity' not in this.resource['type'] and \
- 'book' not in this.resource['type']:
- return
- for bundle in release['bundles'].values():
- this.volume.blobs.delete(bundle['blob'])
-
- def encode(self, value):
- return []
-
-
-def generate_node_stats(volume):
-
- def calc_rating(**kwargs):
- rating = [0, 0]
- alldocs, __ = volume['post'].find(**kwargs)
- for post in alldocs:
- if post['vote']:
- rating[0] += 1
- rating[1] += post['vote']
- return rating
-
- alldocs, __ = volume['context'].find()
- for context in alldocs:
- rating = calc_rating(type='review', context=context.guid)
- volume['context'].update(context.guid, {'rating': rating})
-
- alldocs, __ = volume['post'].find(topic='')
- for topic in alldocs:
- rating = calc_rating(type='feedback', topic=topic.guid)
- volume['post'].update(topic.guid, {'rating': rating})
-
-
-def load_bundle(blob, context=None, initial=False, extra_deps=None):
- context_type = None
- context_meta = None
- release_notes = None
- release = {}
- version = None
-
- try:
- bundle = Bundle(blob.path, mime_type='application/zip')
- except Exception:
- context_type = 'book'
- if not context:
- context = this.request['context']
- version = this.request['version']
- if 'license' in this.request:
- release['license'] = this.request['license']
- if isinstance(release['license'], basestring):
- release['license'] = [release['license']]
- release['stability'] = 'stable'
- release['bundles'] = {
- '*-*': {
- 'blob': blob.digest,
- },
- }
- else:
- context_type = 'activity'
- unpack_size = 0
-
- with bundle:
- changelog = join(bundle.rootdir, 'CHANGELOG')
- for arcname in bundle.get_names():
- if changelog and arcname == changelog:
- with bundle.extractfile(changelog) as f:
- release_notes = f.read()
- changelog = None
- unpack_size += bundle.getmember(arcname).size
- spec = bundle.get_spec()
- context_meta = _load_context_metadata(bundle, spec)
-
- if not context:
- context = spec['context']
- else:
- enforce(context == spec['context'],
- http.BadRequest, 'Wrong context')
- if extra_deps:
- spec.requires.update(parse_requires(extra_deps))
-
- version = spec['version']
- release['stability'] = spec['stability']
- if spec['license'] is not EMPTY_LICENSE:
- release['license'] = spec['license']
- release['commands'] = spec.commands
- release['requires'] = spec.requires
- release['bundles'] = {
- '*-*': {
- 'blob': blob.digest,
- 'unpack_size': unpack_size,
- },
- }
- blob.meta['content-type'] = 'application/vnd.olpc-sugar'
-
- enforce(context, http.BadRequest, 'Context is not specified')
- enforce(version, http.BadRequest, 'Version is not specified')
- release['version'] = parse_version(version)
-
- doc = this.volume['context'][context]
- if initial and not doc.exists:
- enforce(context_meta, http.BadRequest, 'No way to initate context')
- context_meta['guid'] = context
- context_meta['type'] = [context_type]
- with this.principal as principal:
- principal.admin = True
- this.call(method='POST', path=['context'], content=context_meta,
- principal=principal)
- else:
- enforce(doc.available, http.NotFound, 'No context')
- enforce(context_type in doc['type'],
- http.BadRequest, 'Inappropriate bundle type')
-
- if 'license' not in release:
- releases = doc['releases'].values()
- enforce(releases, http.BadRequest, 'License is not specified')
- recent = max(releases, key=lambda x: x.get('value', {}).get('release'))
- enforce(recent, http.BadRequest, 'License is not specified')
- release['license'] = recent['value']['license']
-
- _logger.debug('Load %r release: %r', context, release)
-
- if this.principal in doc['author']:
- patch = doc.format_patch(context_meta)
- if patch:
- this.call(method='PUT', path=['context', context], content=patch,
- principal=this.principal)
- doc.posts.update(patch)
- # TRANS: Release notes title
- title = i18n._('%(name)s %(version)s release')
- else:
- # TRANS: 3rd party release notes title
- title = i18n._('%(name)s %(version)s third-party release')
- release['announce'] = this.call(method='POST', path=['post'],
- content={
- 'context': context,
- 'type': 'notification',
- 'title': i18n.encode(title,
- name=doc['title'],
- version=version,
- ),
- 'message': release_notes or '',
- },
- content_type='application/json', principal=this.principal)
-
- blob.meta['content-disposition'] = 'attachment; filename="%s-%s%s"' % (
- ''.join(i18n.decode(doc['title']).split()), version,
- mimetypes.guess_extension(blob.meta.get('content-type')) or '',
- )
- this.volume.blobs.update(blob.digest, blob.meta)
-
- return context, release
-
-
-def _load_context_metadata(bundle, spec):
- result = {}
- for prop in ('homepage', 'mime_types'):
- if spec[prop]:
- result[prop] = spec[prop]
- result['guid'] = spec['context']
-
- try:
- from sugar_network.toolkit.sugar import color_svg
-
- icon_file = bundle.extractfile(join(bundle.rootdir, spec['icon']))
- svg = color_svg(icon_file.read(), result['guid'])
- blobs = this.volume.blobs
-
- result['artefact_icon'] = \
- blobs.post(svg, 'image/svg+xml').digest
- result['icon'] = \
- blobs.post(svg_to_png(svg, ICON_SIZE), 'image/png').digest
- result['logo'] = \
- blobs.post(svg_to_png(svg, LOGO_SIZE), 'image/png').digest
-
- icon_file.close()
- except Exception:
- _logger.exception('Failed to load icon')
-
- msgids = {}
- for prop, confname in [
- ('title', 'name'),
- ('summary', 'summary'),
- ('description', 'description'),
- ]:
- if spec[confname]:
- msgids[prop] = spec[confname]
- result[prop] = {'en': spec[confname]}
- with toolkit.mkdtemp() as tmpdir:
- for path in bundle.get_names():
- if not path.endswith('.mo'):
- continue
- mo_path = path.strip(os.sep).split(os.sep)
- if len(mo_path) != 5 or mo_path[1] != 'locale':
- continue
- lang = mo_path[2]
- bundle.extract(path, tmpdir)
- try:
- translation = gettext.translation(spec['context'],
- join(tmpdir, *mo_path[:2]), [lang])
- for prop, value in msgids.items():
- msgstr = translation.gettext(value).decode('utf8')
- if lang == 'en' or msgstr != value:
- result[prop][lang] = msgstr
- except Exception:
- _logger.exception('Gettext failed to read %r', mo_path[-1])
-
- return result
diff --git a/sugar_network/model/context.py b/sugar_network/model/context.py
index 9153552..cf24650 100644
--- a/sugar_network/model/context.py
+++ b/sugar_network/model/context.py
@@ -95,17 +95,17 @@ class Context(db.Resource):
def previews(self, value):
return value
- @db.stored_property(db.Aggregated, subtype=model.Release(),
- acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE)
+ @db.stored_property(db.Aggregated, subtype=db.Dict(),
+ acl=ACL.READ | ACL.LOCAL)
def releases(self, value):
return value
@db.indexed_property(db.Numeric, slot=2, default=0,
- acl=ACL.READ | ACL.CALC)
+ acl=ACL.READ | ACL.LOCAL)
def downloads(self, value):
return value
- @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.CALC)
+ @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.LOCAL)
def rating(self, value):
return value
diff --git a/sugar_network/model/post.py b/sugar_network/model/post.py
index d924617..e0b3b25 100644
--- a/sugar_network/model/post.py
+++ b/sugar_network/model/post.py
@@ -20,11 +20,12 @@ from sugar_network.toolkit.coroutine import this
class Post(db.Resource):
- @db.indexed_property(prefix='C', acl=ACL.CREATE | ACL.READ)
+ @db.indexed_property(db.Reference, prefix='C', acl=ACL.CREATE | ACL.READ)
def context(self, value):
return value
- @db.indexed_property(prefix='A', default='', acl=ACL.CREATE | ACL.READ)
+ @db.indexed_property(db.Reference, prefix='A', default='',
+ acl=ACL.CREATE | ACL.READ)
def topic(self, value):
return value
@@ -42,7 +43,7 @@ class Post(db.Resource):
def message(self, value):
return value
- @db.indexed_property(prefix='R', default='')
+ @db.indexed_property(db.Reference, prefix='R', default='')
def solution(self, value):
return value
@@ -82,10 +83,10 @@ class Post(db.Resource):
return value
@db.indexed_property(db.Numeric, slot=2, default=0,
- acl=ACL.READ | ACL.CALC)
+ acl=ACL.READ | ACL.LOCAL)
def downloads(self, value):
return value
- @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.CALC)
+ @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.LOCAL)
def rating(self, value):
return value
diff --git a/sugar_network/model/report.py b/sugar_network/model/report.py
index a434a6d..4f201d5 100644
--- a/sugar_network/model/report.py
+++ b/sugar_network/model/report.py
@@ -34,7 +34,7 @@ class Report(db.Resource):
one_way = True
- @db.indexed_property(prefix='C', acl=ACL.CREATE | ACL.READ)
+ @db.indexed_property(db.Reference, prefix='C', acl=ACL.CREATE | ACL.READ)
def context(self, value):
return value
diff --git a/sugar_network/model/routes.py b/sugar_network/model/routes.py
index 8012853..63c98b1 100644
--- a/sugar_network/model/routes.py
+++ b/sugar_network/model/routes.py
@@ -58,7 +58,7 @@ class FrontRoutes(object):
# initiate a subscription and do not stuck in waiting for the 1st event
yield {'event': 'pong'}
- subscription = this.request.content_stream
+ subscription = this.request.content
if subscription is not None:
coroutine.spawn(self._wait_for_closing, subscription)
diff --git a/sugar_network/node/auth.py b/sugar_network/node/auth.py
index 00054f5..d14bde6 100644
--- a/sugar_network/node/auth.py
+++ b/sugar_network/node/auth.py
@@ -40,19 +40,46 @@ class Unauthorized(http.Unauthorized):
class Principal(str):
- admin = False
- editor = False
- translator = False
+ def __new__(cls, value, caps=0):
+ if not isinstance(value, basestring):
+ value, caps = value
+ self = str.__new__(cls, value)
+ # pylint: disable-msg=W0212
+ self._caps = caps
+ self._backup = 0
+ return self
+
+ @property
+ def cap_author_override(self):
+ return self._caps & 1
+
+ @cap_author_override.setter
+ def cap_author_override(self, value):
+ if value:
+ self._caps |= 1
+ else:
+ self._caps ^= 1
- _backup = None
+ @property
+ def cap_create_with_guid(self):
+ return self._caps & 1
+
+ @cap_create_with_guid.setter
+ def cap_create_with_guid(self, value):
+ if value:
+ self._caps |= 1
+ else:
+ self._caps ^= 1
def __enter__(self):
- self._backup = (self.admin, self.editor, self.translator)
+ self._backup = self._caps
return self
def __exit__(self, exc_type, exc_value, traceback):
- self.admin, self.editor, self.translator = self._backup
- self._backup = None
+ self._caps = self._backup
+
+ def dump(self):
+ return self, self._caps
class SugarAuth(object):
@@ -109,10 +136,8 @@ class SugarAuth(object):
for role in self._config.get('permissions', user).split():
role = role.lower()
if role == 'admin':
- principal.admin = True
- elif role == 'editor':
- principal.editor = True
- elif role == 'translator':
- principal.translator = True
+ principal.cap_author_override = True
+ principal.cap_create_with_guid = True
+ # TODO
return principal
diff --git a/sugar_network/node/master.py b/sugar_network/node/master.py
index c5b15e6..c94d047 100644
--- a/sugar_network/node/master.py
+++ b/sugar_network/node/master.py
@@ -19,15 +19,14 @@ from urlparse import urlsplit
from sugar_network import toolkit
from sugar_network.model.post import Post
from sugar_network.model.report import Report
-from sugar_network.node.model import User, Context
-from sugar_network.node import obs, master_api
+from sugar_network.node import obs, master_api, model
from sugar_network.node.routes import NodeRoutes
from sugar_network.toolkit.router import route, ACL
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import http, parcel, pylru, ranges, enforce
+from sugar_network.toolkit import http, packets, pylru, ranges, enforce
-RESOURCES = (User, Context, Post, Report)
+RESOURCES = (model.User, model.Context, Post, Report)
_logger = logging.getLogger('node.master')
@@ -40,20 +39,20 @@ class MasterRoutes(NodeRoutes):
@route('POST', cmd='sync', arguments={'accept_length': int})
def sync(self, accept_length):
- return parcel.encode(self._push() + (self._pull() or []),
+ return packets.encode(self._push() + (self._pull() or []),
limit=accept_length, header={'from': self.guid},
on_complete=this.cookie.clear)
@route('POST', cmd='push')
def push(self):
- return parcel.encode(self._push(), header={'from': self.guid})
+ return packets.encode(self._push(), header={'from': self.guid})
@route('GET', cmd='pull', arguments={'accept_length': int})
def pull(self, accept_length):
reply = self._pull()
if reply is None:
return None
- return parcel.encode(reply, limit=accept_length,
+ return packets.encode(reply, limit=accept_length,
header={'from': self.guid}, on_complete=this.cookie.clear)
@route('PUT', ['context', None], cmd='presolve',
@@ -72,13 +71,13 @@ class MasterRoutes(NodeRoutes):
cookie = this.cookie
reply = []
- for packet in parcel.decode(
- this.request.content_stream, this.request.content_length):
+ for packet in packets.decode(
+ this.request.content, this.request.content_length):
sender = packet['from']
enforce(packet['to'] == self.guid, http.BadRequest,
'Misaddressed packet')
if packet.name == 'push':
- seqno, push_r = this.volume.patch(packet)
+ seqno, push_r = model.patch_volume(packet)
ack_r = [] if seqno is None else [[seqno, seqno]]
ack = {'ack': ack_r, 'ranges': push_r, 'to': sender}
reply.append(('ack', ack, None))
@@ -129,7 +128,7 @@ class MasterRoutes(NodeRoutes):
r = reduce(lambda x, y: ranges.intersect(x, y), acked.values())
ranges.include(exclude, r)
- push = this.volume.diff(pull_r, exclude, one_way=True, files=[''])
+ push = model.diff_volume(pull_r, exclude, one_way=True, files=[''])
reply.append(('push', None, push))
return reply
diff --git a/sugar_network/node/model.py b/sugar_network/node/model.py
index 144dab0..f178913 100644
--- a/sugar_network/node/model.py
+++ b/sugar_network/node/model.py
@@ -13,19 +13,32 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import os
+import json
import bisect
import hashlib
import logging
+import gettext
+import mimetypes
+from copy import deepcopy
from os.path import join
from sugar_network import db, toolkit
-from sugar_network.model import Release, context as _context, user as _user
+from sugar_network.model import context as _context, user as _user
+from sugar_network.model import ICON_SIZE, LOGO_SIZE
from sugar_network.node import obs
-from sugar_network.toolkit.router import ACL
-from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import spec, sat, http, coroutine, i18n, enforce
+from sugar_network.node.auth import Principal
+from sugar_network.toolkit.router import ACL, File, Request, Response
+from sugar_network.toolkit.coroutine import Queue, this
+from sugar_network.toolkit.spec import EMPTY_LICENSE, ensure_version
+from sugar_network.toolkit.spec import parse_requires, parse_version
+from sugar_network.toolkit.bundle import Bundle
+from sugar_network.toolkit import sat, http, i18n, ranges, packets
+from sugar_network.toolkit import svg_to_png, enforce
+BATCH_SUFFIX = '.meta'
+
_logger = logging.getLogger('node.model')
_presolve_queue = None
@@ -36,73 +49,42 @@ class User(_user.User):
self.posts['guid'] = str(hashlib.sha1(self['pubkey']).hexdigest())
-class _Release(Release):
+class _ReleaseValue(dict):
- _package_cast = db.Dict(db.List())
+ guid = None
- def typecast(self, value):
- if not this.resource.exists or 'package' not in this.resource['type']:
- return Release.typecast(self, value)
-
- value = self._package_cast.typecast(value)
- enforce(value.get('binary'), http.BadRequest, 'No binary aliases')
-
- distro = this.request.key
- if distro == '*':
- lsb_id = None
- lsb_release = None
- elif '-' in this.request.key:
- lsb_id, lsb_release = distro.split('-', 1)
- else:
- lsb_id = distro
- lsb_release = None
- releases = this.resource.record.get('releases')
- resolves = releases['value'].setdefault('resolves', {})
- to_presolve = []
-
- for repo in obs.get_repos():
- if lsb_id and lsb_id != repo['lsb_id'] or \
- lsb_release and lsb_release != repo['lsb_release']:
- continue
- # Make sure there are no alias overrides
- if not lsb_id and repo['lsb_id'] in releases['value'] or \
- not lsb_release and repo['name'] in releases['value']:
- continue
- pkgs = sum([value.get(i, []) for i in ('binary', 'devel')], [])
- version = None
- try:
- for arch in repo['arches']:
- version = obs.resolve(repo['name'], arch, pkgs)['version']
- except Exception, error:
- _logger.warning('Failed to resolve %r on %s',
- pkgs, repo['name'])
- resolve = {'status': str(error)}
- else:
- to_presolve.append((repo['name'], pkgs))
- resolve = {
- 'version': spec.parse_version(version),
- 'packages': pkgs,
- 'status': 'success',
- }
- resolves.setdefault(repo['name'], {}).update(resolve)
- if to_presolve and _presolve_queue is not None:
- _presolve_queue.put(to_presolve)
- if resolves:
- this.resource.record.set('releases', **releases)
+class _Release(object):
- return value
+ _package_subcast = db.Dict(db.List())
+
+ def typecast(self, value):
+ if isinstance(value, _ReleaseValue):
+ return value.guid, value
+ doc = this.volume['context'][this.request.guid]
+ if 'package' in doc['type']:
+ value = _ReleaseValue(self._package_subcast.typecast(value))
+ value.guid = this.request.key
+ _resolve_package_alias(doc, value)
+ return value
+ bundle = this.volume.blobs.post(value, this.request.content_type)
+ __, value = load_bundle(bundle, context=this.request.guid)
+ return value.guid, value
+
+ def encode(self, value):
+ return []
def teardown(self, value):
- if 'package' not in this.resource['type']:
- return Release.teardown(self, value)
+ if 'bundles' in value:
+ for bundle in value['bundles'].values():
+ this.volume.blobs.delete(bundle['blob'])
# TODO Delete presolved files
class Context(_context.Context):
@db.stored_property(db.Aggregated, subtype=_Release(),
- acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE)
+ acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE | ACL.LOCAL)
def releases(self, value):
return value
@@ -135,6 +117,168 @@ class Volume(db.Volume):
self.release_seqno.commit()
+def diff_volume(r, exclude=None, files=None, blobs=True, one_way=False):
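+    # Stream a volume-wide patch for the seqno ranges in `r`: per-resource
+    # document diffs first, then blob diffs (including any extra `files`
+    # directories), finishing with a 'commit' record listing the ranges
+    # that were actually covered.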
+ volume = this.volume
+ if exclude:
+ include = deepcopy(r)
+ ranges.exclude(include, exclude)
+ else:
+ include = r
+ last_seqno = None
+ found = False
+
+ try:
+ for resource, directory in volume.items():
+ if one_way and directory.resource.one_way:
+ continue
+ yield {'resource': resource}
+ for doc in directory.diff(r):
+ patch = doc.diff(include)
+ if patch:
+ yield {'guid': doc.guid, 'patch': patch}
+ found = True
+ last_seqno = max(last_seqno, doc['seqno'])
+ if blobs:
+ for blob in volume.blobs.diff(include):
+ seqno = int(blob.meta.pop('x-seqno'))
+ yield blob
+ found = True
+ last_seqno = max(last_seqno, seqno)
+ for dirpath in files or []:
+ for blob in volume.blobs.diff(include, dirpath):
+ seqno = int(blob.meta.pop('x-seqno'))
+ yield blob
+ found = True
+ last_seqno = max(last_seqno, seqno)
+ except StopIteration:
+ pass
+
+ if found:
+ commit_r = include if exclude else deepcopy(r)
+ ranges.exclude(commit_r, last_seqno + 1, None)
+ ranges.exclude(r, None, last_seqno)
+ yield {'commit': commit_r}
+
+
+def patch_volume(records, shift_seqno=True):
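+    # Apply records produced by diff_volume(): File records go to the blob
+    # store, dict records patch documents, and 'commit' records accumulate
+    # the committed ranges returned to the caller.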
+ volume = this.volume
+ directory = None
+ committed = []
+ seqno = None if shift_seqno else False
+
+ for record in records:
+ if isinstance(record, File):
+ if seqno is None:
+ seqno = volume.seqno.next()
+ volume.blobs.patch(record, seqno or 0)
+ continue
+ resource = record.get('resource')
+ if resource:
+ directory = volume[resource]
+ continue
+ guid = record.get('guid')
+ if guid is not None:
+ seqno = directory.patch(guid, record['patch'], seqno)
+ continue
+ commit = record.get('commit')
+ if commit is not None:
+ ranges.include(committed, commit)
+ continue
+ raise http.BadRequest('Malformed patch')
+
+ return seqno, committed
+
+
+def diff_resource(in_r):
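+    # Encode a patch for a single resource, plus any blobs it references,
+    # as an uncompressed packet stream; User resources are never diffed.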
+ request = this.request
+ enforce(request.resource != 'user', http.BadRequest,
+ 'Not allowed for User resource')
+ doc = this.volume[request.resource][request.guid]
+ enforce(doc.exists, http.NotFound, 'Resource not found')
+
+ out_r = []
+ if in_r is None:
+ in_r = [[1, None]]
+ patch = doc.diff(in_r, out_r)
+ if not patch:
+ return packets.encode([], compresslevel=0)
+ blobs = []
+
+ def add_blob(blob):
+ if not isinstance(blob, File):
+ return
+ seqno = int(blob.meta['x-seqno'])
+ ranges.include(out_r, seqno, seqno)
+ blobs.append(blob)
+
+ for prop, meta in patch.items():
+ prop = doc.metadata[prop]
+ value = prop.reprcast(meta['value'])
+ if isinstance(prop, db.Aggregated):
+ for __, aggvalue in value:
+ add_blob(aggvalue)
+ else:
+ add_blob(value)
+
+ return packets.encode(blobs, patch=patch, ranges=out_r, compresslevel=0)
+
+
+def apply_batch(path):
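+    # Replay a previously uploaded batch packet on behalf of the stored
+    # principal, mapping unknown remote GUIDs to local ones; numbers of
+    # failed records are kept in the .meta sidecar so the batch can be
+    # retried, and both files are removed once everything succeeds.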
+ with file(path + BATCH_SUFFIX) as f:
+ meta = json.load(f)
+ principal = Principal(meta['principal'])
+ principal.cap_create_with_guid = True
+ only_nums = meta.get('failed')
+ guid_map = meta.setdefault('guid_map', {})
+ failed = meta['failed'] = []
+ volume = this.volume
+
+ def map_guid(remote_guid):
+ local_guid = guid_map.get(remote_guid)
+ if not local_guid:
+ if volume[request.resource][remote_guid].exists:
+ return remote_guid
+ local_guid = guid_map[remote_guid] = toolkit.uuid()
+ return local_guid
+
+ with file(path, 'rb') as batch:
+ num = 0
+ for record in packets.decode(batch):
+ num += 1
+ if only_nums and not ranges.contains(only_nums, num):
+ continue
+ if isinstance(record, File):
+ request = Request(**record.meta.pop('op'))
+ request.content = record
+ else:
+ request = Request(**record['op'])
+ props = record['content']
+ keys = record.get('keys') or []
+ enforce('guid' not in props or 'guid' in keys,
+                        http.BadRequest, 'Guid value is not mapped')
+ for key in keys:
+ enforce(key in props, http.BadRequest,
+ 'No mapped property value')
+ props[key] = map_guid(props[key])
+ request.content = props
+ if request.guid and \
+ not volume[request.resource][request.guid].exists:
+ request.guid = map_guid(request.guid)
+ request.principal = principal
+ try:
+ this.call(request, Response())
+ except Exception:
+ _logger.exception('Failed to apply %r', request)
+ ranges.include(failed, num, num)
+
+ if failed:
+ with toolkit.new_file(path + BATCH_SUFFIX) as f:
+ json.dump(meta, f)
+ else:
+ os.unlink(path + BATCH_SUFFIX)
+ os.unlink(path)
+
+
def solve(volume, top_context, command=None, lsb_id=None, lsb_release=None,
stability=None, requires=None):
top_context = volume['context'][top_context]
@@ -145,12 +289,12 @@ def solve(volume, top_context, command=None, lsb_id=None, lsb_release=None,
top_cond = []
top_requires = {}
if isinstance(requires, basestring):
- top_requires.update(spec.parse_requires(requires))
+ top_requires.update(parse_requires(requires))
elif requires:
for i in requires:
- top_requires.update(spec.parse_requires(i))
+ top_requires.update(parse_requires(i))
if top_context['dependencies']:
- top_requires.update(spec.parse_requires(top_context['dependencies']))
+ top_requires.update(parse_requires(top_context['dependencies']))
if top_context.guid in top_requires:
top_cond = top_requires.pop(top_context.guid)
@@ -173,7 +317,7 @@ def solve(volume, top_context, command=None, lsb_id=None, lsb_release=None,
for dep, cond in deps.items():
dep_clause = [-v_usage]
for v_release in add_context(dep):
- if spec.ensure(varset[v_release][1]['version'], cond):
+ if ensure_version(varset[v_release][1]['version'], cond):
dep_clause.append(v_release)
clauses.append(dep_clause)
@@ -211,7 +355,7 @@ def solve(volume, top_context, command=None, lsb_id=None, lsb_release=None,
release = release['value']
if release['stability'] not in stability or \
context.guid == top_context.guid and \
- not spec.ensure(release['version'], top_cond):
+ not ensure_version(release['version'], top_cond):
continue
bisect.insort(candidates, rate_release(digest, release))
for release in reversed(candidates):
@@ -272,12 +416,259 @@ def solve(volume, top_context, command=None, lsb_id=None, lsb_release=None,
def presolve(presolve_path):
global _presolve_queue
- _presolve_queue = coroutine.Queue()
+ _presolve_queue = Queue()
for repo_name, pkgs in _presolve_queue:
obs.presolve(repo_name, pkgs, presolve_path)
+def load_bundle(blob, context=None, initial=False, extra_deps=None):
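+    # Turn a posted blob into a release value: zip bundles are parsed as
+    # activity releases using their spec, anything else becomes a 'book'
+    # release described by request parameters; returns (context, release).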
+ context_type = None
+ context_meta = None
+ release_notes = None
+ version = None
+ release = _ReleaseValue()
+ release.guid = blob.digest
+
+ try:
+ bundle = Bundle(blob.path, mime_type='application/zip')
+ except Exception:
+ context_type = 'book'
+ if not context:
+ context = this.request['context']
+ version = this.request['version']
+ if 'license' in this.request:
+ release['license'] = this.request['license']
+ if isinstance(release['license'], basestring):
+ release['license'] = [release['license']]
+ release['stability'] = 'stable'
+ release['bundles'] = {
+ '*-*': {
+ 'blob': blob.digest,
+ },
+ }
+ else:
+ context_type = 'activity'
+ unpack_size = 0
+
+ with bundle:
+ changelog = join(bundle.rootdir, 'CHANGELOG')
+ for arcname in bundle.get_names():
+ if changelog and arcname == changelog:
+ with bundle.extractfile(changelog) as f:
+ release_notes = f.read()
+ changelog = None
+ unpack_size += bundle.getmember(arcname).size
+ spec = bundle.get_spec()
+ context_meta = _load_context_metadata(bundle, spec)
+
+ if not context:
+ context = spec['context']
+ else:
+ enforce(context == spec['context'],
+ http.BadRequest, 'Wrong context')
+ if extra_deps:
+ spec.requires.update(parse_requires(extra_deps))
+
+ version = spec['version']
+ release['stability'] = spec['stability']
+ if spec['license'] is not EMPTY_LICENSE:
+ release['license'] = spec['license']
+ release['commands'] = spec.commands
+ release['requires'] = spec.requires
+ release['bundles'] = {
+ '*-*': {
+ 'blob': blob.digest,
+ 'unpack_size': unpack_size,
+ },
+ }
+ blob.meta['content-type'] = 'application/vnd.olpc-sugar'
+
+ enforce(context, http.BadRequest, 'Context is not specified')
+ enforce(version, http.BadRequest, 'Version is not specified')
+ release['version'] = parse_version(version)
+
+ doc = this.volume['context'][context]
+ if initial and not doc.exists:
+        enforce(context_meta, http.BadRequest, 'No way to initiate context')
+ context_meta['guid'] = context
+ context_meta['type'] = [context_type]
+ with this.principal as principal:
+ principal.cap_create_with_guid = True
+ this.call(method='POST', path=['context'], content=context_meta,
+ principal=principal)
+ else:
+ enforce(doc.available, http.NotFound, 'No context')
+ enforce(context_type in doc['type'],
+ http.BadRequest, 'Inappropriate bundle type')
+
+ if 'license' not in release:
+ releases = doc['releases'].values()
+ enforce(releases, http.BadRequest, 'License is not specified')
+ recent = max(releases, key=lambda x: x.get('value', {}).get('release'))
+ enforce(recent, http.BadRequest, 'License is not specified')
+ release['license'] = recent['value']['license']
+
+ _logger.debug('Load %r release: %r', context, release)
+
+ if this.principal in doc['author']:
+ patch = doc.format_patch(context_meta)
+ if patch:
+ this.call(method='PUT', path=['context', context], content=patch,
+ principal=this.principal)
+ doc.posts.update(patch)
+ # TRANS: Release notes title
+ title = i18n._('%(name)s %(version)s release')
+ else:
+ # TRANS: 3rd party release notes title
+ title = i18n._('%(name)s %(version)s third-party release')
+ release['announce'] = this.call(method='POST', path=['post'],
+ content={
+ 'context': context,
+ 'type': 'notification',
+ 'title': i18n.encode(title,
+ name=doc['title'],
+ version=version,
+ ),
+ 'message': release_notes or '',
+ },
+ content_type='application/json', principal=this.principal)
+
+ blob.meta['content-disposition'] = 'attachment; filename="%s-%s%s"' % (
+ ''.join(i18n.decode(doc['title']).split()), version,
+ mimetypes.guess_extension(blob.meta.get('content-type')) or '',
+ )
+ this.volume.blobs.update(blob.digest, blob.meta)
+
+ return context, release
+
+
+def generate_node_stats(volume):
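+    # Recalculate ratings: contexts aggregate votes from their review posts,
+    # topic posts aggregate votes from their feedback replies.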
+
+ def calc_rating(**kwargs):
+ rating = [0, 0]
+ alldocs, __ = volume['post'].find(**kwargs)
+ for post in alldocs:
+ if post['vote']:
+ rating[0] += 1
+ rating[1] += post['vote']
+ return rating
+
+ alldocs, __ = volume['context'].find()
+ for context in alldocs:
+ rating = calc_rating(type='review', context=context.guid)
+ volume['context'].update(context.guid, {'rating': rating})
+
+ alldocs, __ = volume['post'].find(topic='')
+ for topic in alldocs:
+ rating = calc_rating(type='feedback', topic=topic.guid)
+ volume['post'].update(topic.guid, {'rating': rating})
+
+
+def _load_context_metadata(bundle, spec):
+ result = {}
+ for prop in ('homepage', 'mime_types'):
+ if spec[prop]:
+ result[prop] = spec[prop]
+ result['guid'] = spec['context']
+
+ try:
+ from sugar_network.toolkit.sugar import color_svg
+
+ icon_file = bundle.extractfile(join(bundle.rootdir, spec['icon']))
+ svg = color_svg(icon_file.read(), result['guid'])
+ blobs = this.volume.blobs
+
+ result['artefact_icon'] = \
+ blobs.post(svg, 'image/svg+xml').digest
+ result['icon'] = \
+ blobs.post(svg_to_png(svg, ICON_SIZE), 'image/png').digest
+ result['logo'] = \
+ blobs.post(svg_to_png(svg, LOGO_SIZE), 'image/png').digest
+
+ icon_file.close()
+ except Exception:
+ _logger.exception('Failed to load icon')
+
+ msgids = {}
+ for prop, confname in [
+ ('title', 'name'),
+ ('summary', 'summary'),
+ ('description', 'description'),
+ ]:
+ if spec[confname]:
+ msgids[prop] = spec[confname]
+ result[prop] = {'en': spec[confname]}
+ with toolkit.mkdtemp() as tmpdir:
+ for path in bundle.get_names():
+ if not path.endswith('.mo'):
+ continue
+ mo_path = path.strip(os.sep).split(os.sep)
+ if len(mo_path) != 5 or mo_path[1] != 'locale':
+ continue
+ lang = mo_path[2]
+ bundle.extract(path, tmpdir)
+ try:
+ translation = gettext.translation(spec['context'],
+ join(tmpdir, *mo_path[:2]), [lang])
+ for prop, value in msgids.items():
+ msgstr = translation.gettext(value).decode('utf8')
+ if lang == 'en' or msgstr != value:
+ result[prop][lang] = msgstr
+ except Exception:
+ _logger.exception('Gettext failed to read %r', mo_path[-1])
+
+ return result
+
+
+def _resolve_package_alias(doc, value):
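+    # Resolve package aliases for the distro taken from the request path
+    # against known OBS repositories and record the outcome in the
+    # context's 'releases.resolves' mapping.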
+ enforce(value.get('binary'), http.BadRequest, 'No binary aliases')
+
+ distro = this.request.key
+ enforce(distro, http.BadRequest, 'No distro in path')
+ if distro == '*':
+ lsb_id = None
+ lsb_release = None
+ elif '-' in this.request.key:
+ lsb_id, lsb_release = distro.split('-', 1)
+ else:
+ lsb_id = distro
+ lsb_release = None
+ releases = doc['releases']
+ resolves = releases.get('resolves') or {}
+ to_presolve = []
+
+ for repo in obs.get_repos():
+ if lsb_id and lsb_id != repo['lsb_id'] or \
+ lsb_release and lsb_release != repo['lsb_release']:
+ continue
+ # Make sure there are no alias overrides
+ if not lsb_id and repo['lsb_id'] in releases or \
+ not lsb_release and repo['name'] in releases:
+ continue
+ pkgs = sum([value.get(i, []) for i in ('binary', 'devel')], [])
+ version = None
+ try:
+ for arch in repo['arches']:
+ version = obs.resolve(repo['name'], arch, pkgs)['version']
+ except Exception, error:
+ _logger.warning('Failed to resolve %r on %s',
+ pkgs, repo['name'])
+ resolve = {'status': str(error)}
+ else:
+ to_presolve.append((repo['name'], pkgs))
+ resolve = {
+ 'version': parse_version(version),
+ 'packages': pkgs,
+ 'status': 'success',
+ }
+ resolves.setdefault(repo['name'], {}).update(resolve)
+
+ if to_presolve and _presolve_queue is not None:
+ _presolve_queue.put(to_presolve)
+ doc.post('releases', {'resolves': resolves})
+
+
_STABILITY_RATES = {
'insecure': 0,
'buggy': 1,
diff --git a/sugar_network/node/routes.py b/sugar_network/node/routes.py
index ac8a840..ee28e89 100644
--- a/sugar_network/node/routes.py
+++ b/sugar_network/node/routes.py
@@ -15,20 +15,28 @@
# pylint: disable-msg=W0611
+import os
+import re
+import json
+import time
+import shutil
import logging
-from os.path import join
+from os.path import join, exists
-from sugar_network import db
-from sugar_network.model import FrontRoutes, load_bundle
+from sugar_network import db, toolkit
+from sugar_network.model import FrontRoutes
from sugar_network.node import model
-from sugar_network.toolkit.router import ACL, File
-from sugar_network.toolkit.router import route, fallbackroute, preroute
+from sugar_network.toolkit.router import ACL, File, Request, Response, route
+from sugar_network.toolkit.router import fallbackroute, preroute, postroute
from sugar_network.toolkit.spec import parse_requires, parse_version
from sugar_network.toolkit.bundle import Bundle
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import http, coroutine, enforce
+from sugar_network.toolkit import http, coroutine, ranges, packets, enforce
+_GROUPED_DIFF_LIMIT = 1024
+_GUID_RE = re.compile('[a-zA-Z0-9_+-.]+$')
+
_logger = logging.getLogger('node.routes')
@@ -39,6 +47,10 @@ class NodeRoutes(db.Routes, FrontRoutes):
FrontRoutes.__init__(self)
self._guid = guid
self._auth = auth
+ self._batch_dir = join(self.volume.root, 'batch')
+
+ if not exists(self._batch_dir):
+ os.makedirs(self._batch_dir)
@property
def guid(self):
@@ -47,35 +59,49 @@ class NodeRoutes(db.Routes, FrontRoutes):
@preroute
def preroute(self, op):
request = this.request
+
if request.principal:
this.principal = request.principal
elif op.acl & ACL.AUTH:
this.principal = self._auth.logon(request)
else:
this.principal = None
- if op.acl & ACL.AUTHOR and request.guid:
- if not this.principal:
- this.principal = self._auth.logon(request)
- allowed = this.principal.admin
- if not allowed:
- if request.resource == 'user':
- allowed = (this.principal == request.guid)
- else:
- doc = self.volume[request.resource].get(request.guid)
- allowed = this.principal in doc['author']
+
+ if op.acl & ACL.AUTHOR and not this.principal.cap_author_override:
+ if request.resource == 'user':
+ allowed = (this.principal == request.guid)
+ else:
+ allowed = this.principal in this.resource['author']
enforce(allowed, http.Forbidden, 'Authors only')
- if op.acl & ACL.SUPERUSER:
- if not this.principal:
- this.principal = self._auth.logon(request)
- enforce(this.principal.admin, http.Forbidden, 'Superusers only')
+
+ if op.acl & ACL.AGG_AUTHOR and not this.principal.cap_author_override:
+ if this.resource.metadata[request.prop].acl & ACL.AUTHOR:
+ allowed = this.principal in this.resource['author']
+ elif request.key:
+ value = this.resource[request.prop].get(request.key)
+ allowed = value is None or this.principal in value['author']
+ else:
+ allowed = True
+ enforce(allowed, http.Forbidden, 'Authors only')
+
+ @postroute
+ def postroute(self, result, exception):
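+        # If the request carries a 'pull' header, replace the regular reply
+        # with a binary diff of the affected resource (model.diff_resource).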
+ request = this.request
+ if not request.guid:
+ return result
+ pull = request.headers['pull']
+ if pull is None:
+ return result
+ this.response.content_type = 'application/octet-stream'
+ return model.diff_resource(pull)
+
+ @route('GET', cmd='logon', acl=ACL.AUTH)
+ def logon(self):
+ pass
@route('GET', cmd='whoami', mime_type='application/json')
def whoami(self):
- roles = []
- if this.principal and this.principal.admin:
- roles.append('root')
- return {'roles': roles,
- 'guid': this.principal,
+ return {'guid': this.principal,
'route': 'direct',
}
@@ -123,9 +149,9 @@ class NodeRoutes(db.Routes, FrontRoutes):
mime_type='application/json', acl=ACL.AUTH)
def submit_release(self, initial):
blob = self.volume.blobs.post(
- this.request.content_stream, this.request.content_type)
+ this.request.content, this.request.content_type)
try:
- context, release = load_bundle(blob, initial=initial)
+ context, release = model.load_bundle(blob, initial=initial)
except Exception:
self.volume.blobs.delete(blob.digest)
raise
@@ -147,5 +173,54 @@ class NodeRoutes(db.Routes, FrontRoutes):
solution = self.solve()
return self.volume.blobs.get(solution[this.request.guid]['blob'])
+ @route('GET', [None, None], cmd='diff')
+ def diff_resource(self):
+ return model.diff_resource(this.request.headers['ranges'])
+
+ @route('GET', [None], cmd='diff', mime_type='application/json')
+ def grouped_diff(self, key):
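+        # Group document diffs by the given property (guid by default),
+        # returning at most _GROUPED_DIFF_LIMIT groups of seqno ranges.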
+ request = this.request
+ enforce(request.resource != 'user', http.BadRequest,
+ 'Not allowed for User resource')
+
+ if not key:
+ key = 'guid'
+ in_r = request.headers['ranges'] or [[1, None]]
+ diff = {}
+
+ for doc in self.volume[request.resource].diff(in_r):
+ out_r = diff.get(doc[key])
+ if out_r is None:
+ if len(diff) >= _GROUPED_DIFF_LIMIT:
+ break
+ out_r = diff[doc[key]] = []
+ ranges.include(out_r, doc['seqno'], doc['seqno'])
+ doc.diff(in_r, out_r)
+
+ return diff
+
+ @route('POST', cmd='apply', acl=ACL.AUTH)
+ def batched_post(self):
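+        # Spool the uploaded batch into a file together with a .meta sidecar
+        # naming the principal, then apply it in the background.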
+ with toolkit.NamedTemporaryFile(dir=self._batch_dir,
+ prefix=this.principal, delete=False) as batch:
+ try:
+ shutil.copyfileobj(this.request.content, batch)
+ except Exception:
+ os.unlink(batch.name)
+ raise
+ with file(batch.name + '.meta', 'w') as f:
+ json.dump({'principal': this.principal.dump()}, f)
+ coroutine.spawn(model.apply_batch, batch.name)
+
+ def create(self):
+ if this.principal and this.principal.cap_create_with_guid:
+ guid = this.request.content.get('guid')
+ enforce(not guid or _GUID_RE.match(guid), http.BadRequest,
+ 'Malformed GUID')
+ else:
+ enforce('guid' not in this.request.content, http.BadRequest,
+ 'GUID should not be specified')
+ return db.Routes.create(self)
+
this.principal = None
diff --git a/sugar_network/node/slave.py b/sugar_network/node/slave.py
index 074ae79..176defd 100644
--- a/sugar_network/node/slave.py
+++ b/sugar_network/node/slave.py
@@ -25,15 +25,14 @@ from sugar_network import toolkit
from sugar_network.model.context import Context
from sugar_network.model.post import Post
from sugar_network.model.report import Report
-from sugar_network.node.model import User
-from sugar_network.node import master_api
+from sugar_network.node import master_api, model
from sugar_network.node.routes import NodeRoutes
from sugar_network.toolkit.router import route, ACL
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import http, parcel, ranges, enforce
+from sugar_network.toolkit import http, packets, ranges, enforce
-RESOURCES = (User, Context, Post, Report)
+RESOURCES = (model.User, Context, Post, Report)
_logger = logging.getLogger('node.slave')
@@ -62,13 +61,13 @@ class SlaveRoutes(NodeRoutes):
def online_sync(self, no_pull=False):
conn = http.Connection(master_api.value)
response = conn.request('POST',
- data=parcel.encode(self._export(not no_pull), header={
+ data=packets.encode(self._export(not no_pull), header={
'from': self.guid,
'to': self._master_guid,
}),
params={'cmd': 'sync'},
headers={'Transfer-Encoding': 'chunked'})
- self._import(parcel.decode(response.raw))
+ self._import(packets.decode(response.raw))
@route('POST', cmd='offline_sync', acl=ACL.LOCAL)
def offline_sync(self, path):
@@ -82,7 +81,7 @@ class SlaveRoutes(NodeRoutes):
'event': 'sync_progress',
'progress': _('Reading sneakernet packages'),
})
- requests = self._import(parcel.decode_dir(path))
+ requests = self._import(packets.decode_dir(path))
this.broadcast({
'event': 'sync_progress',
@@ -91,7 +90,7 @@ class SlaveRoutes(NodeRoutes):
offline_script = join(dirname(sys.argv[0]), 'sugar-network-sync')
if exists(offline_script):
shutil.copy(offline_script, path)
- parcel.encode_dir(requests + self._export(True), root=path, header={
+ packets.encode_dir(requests + self._export(True), root=path, header={
'from': self.guid,
'to': self._master_guid,
})
@@ -110,7 +109,7 @@ class SlaveRoutes(NodeRoutes):
sender = packet['from']
from_master = (sender == self._master_guid)
if packet.name == 'push':
- seqno, committed = this.volume.patch(packet)
+ seqno, committed = model.patch_volume(packet)
if seqno is not None:
if from_master:
with self._pull_r as r:
@@ -136,5 +135,5 @@ class SlaveRoutes(NodeRoutes):
export = []
if pull:
export.append(('pull', {'ranges': self._pull_r.value}, None))
- export.append(('push', None, self.volume.diff(self._push_r.value)))
+ export.append(('push', None, model.diff_volume(self._push_r.value)))
return export
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index 7585e29..bf80271 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -506,7 +506,7 @@ class Bin(object):
@property
def mtime(self):
if exists(self._path):
- return os.stat(self._path).st_mtime
+ return int(os.stat(self._path).st_mtime)
else:
return 0
@@ -650,7 +650,7 @@ class _NewFile(object):
dst_path = None
def __init__(self, **kwargs):
- self._file = tempfile.NamedTemporaryFile(delete=False, **kwargs)
+ self._file = NamedTemporaryFile(delete=False, **kwargs)
@property
def name(self):
@@ -666,6 +666,8 @@ class _NewFile(object):
def close(self):
self._file.close()
if exists(self.name):
+ if not exists(dirname(self.dst_path)):
+ os.makedirs(dirname(self.dst_path))
os.rename(self.name, self.dst_path)
def __enter__(self):
diff --git a/sugar_network/toolkit/coroutine.py b/sugar_network/toolkit/coroutine.py
index 4a54975..e3a6173 100644
--- a/sugar_network/toolkit/coroutine.py
+++ b/sugar_network/toolkit/coroutine.py
@@ -303,25 +303,45 @@ class Spooler(object):
class _Local(object):
+ PROPERTY_NOT_SET = object()
+
def __init__(self):
self.attrs = set()
+ self.properties = {}
if hasattr(gevent.getcurrent(), 'local'):
current = gevent.getcurrent().local
for attr in current.attrs:
self.attrs.add(attr)
setattr(self, attr, getattr(current, attr))
+ self.properties = current.properties
class _LocalAccess(object):
def __getattr__(self, name):
- return getattr(gevent.getcurrent().local, name)
+ local = gevent.getcurrent().local
+ value = getattr(local, name)
+ if value is _Local.PROPERTY_NOT_SET:
+ value = local.properties[name]()
+ setattr(local, name, value)
+ return value
def __setattr__(self, name, value):
local = gevent.getcurrent().local
local.attrs.add(name)
- return setattr(local, name, value)
+ if value is None and name in local.properties:
+ value = _Local.PROPERTY_NOT_SET
+ setattr(local, name, value)
+
+ def add_property(self, name, getter):
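+        # Register a lazily computed coroutine-local value: `getter` runs on
+        # first access and the result is cached until reset_property().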
+ local = gevent.getcurrent().local
+ local.properties[name] = getter
+ setattr(local, name, _Local.PROPERTY_NOT_SET)
+
+ def reset_property(self, name):
+ local = gevent.getcurrent().local
+ setattr(local, name, _Local.PROPERTY_NOT_SET)
class _Child(object):
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index 0cbd535..4096b7c 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -15,7 +15,6 @@
import sys
import json
-import types
import logging
from os.path import join, dirname
@@ -110,12 +109,14 @@ class Connection(object):
_Session = None
- def __init__(self, url='', creds=None, max_retries=0, **session_args):
+ def __init__(self, url='', creds=None, max_retries=0, auth_request=None,
+ **session_args):
self.url = url
self.creds = creds
self._max_retries = max_retries
self._session_args = session_args
self._session = None
+ self._auth_request = auth_request
def __repr__(self):
return '<Connection url=%s>' % self.url
@@ -146,13 +147,17 @@ class Connection(object):
return self._decode_reply(reply)
def post(self, path_=None, data_=None, query_=None, **kwargs):
- reply = self.request('POST', path_, json.dumps(data_),
+ if data_ is not None:
+ data_ = json.dumps(data_)
+ reply = self.request('POST', path_, data_,
headers={'Content-Type': 'application/json'},
params=query_ or kwargs)
return self._decode_reply(reply)
def put(self, path_=None, data_=None, query_=None, **kwargs):
- reply = self.request('PUT', path_, json.dumps(data_),
+ if data_ is not None:
+ data_ = json.dumps(data_)
+ reply = self.request('PUT', path_, data_,
headers={'Content-Type': 'application/json'},
params=query_ or kwargs)
return self._decode_reply(reply)
@@ -182,8 +187,8 @@ class Connection(object):
f.close()
return reply
- def upload(self, path_=None, data_=None, **kwargs):
- reply = self.request('POST', path_, data_, params=kwargs)
+ def upload(self, path_=None, data=None, **kwargs):
+ reply = self.request('POST', path_, data, params=kwargs)
if reply.headers.get('Content-Type') == 'application/json':
return json.loads(reply.content)
else:
@@ -191,6 +196,11 @@ class Connection(object):
def request(self, method, path=None, data=None, headers=None, allowed=None,
params=None, **kwargs):
+ if data is not None and self._auth_request:
+ auth_request = self._auth_request
+ self._auth_request = None
+ self.request(**auth_request)
+
if self._session is None:
self._init()
@@ -209,6 +219,9 @@ class Connection(object):
reply = self._session.request(method, path, data=data,
headers=headers, params=params, **kwargs)
if reply.status_code == Unauthorized.status_code:
+ enforce(data is None,
+                    'Authorization is required '
+                    'but there is no way to resend posted data')
enforce(self.creds is not None, Unauthorized, 'No credentials')
challenge_ = reply.headers.get('www-authenticate')
if challenge and challenge == challenge_:
@@ -218,6 +231,7 @@ class Connection(object):
self.post(['user'], profile)
challenge = challenge_
self._session.headers.update(self.creds.logon(challenge))
+ self._auth_request = None
try_ = 0
elif reply.status_code == 200 or \
allowed and reply.status_code in allowed:
@@ -228,12 +242,12 @@ class Connection(object):
error = json.loads(content)['error']
except Exception:
# On non-JSONified fail response, assume that the error
- # was not sent by the application level server code, i.e.,
+ # was not sent by the application-level server code, i.e.,
# something happened at a low level, like a connection abort.
# If so, try to resend the request.
- if try_ <= self._max_retries and method in ('GET', 'HEAD'):
+ if try_ <= self._max_retries and data is None:
continue
- error = content or reply.headers.get('x-sn-error') or \
+ error = content or reply.headers.get('x-error') or \
'No error message provided'
cls = _FORWARD_STATUSES.get(reply.status_code, RuntimeError) \
or ConnectionError
@@ -242,24 +256,11 @@ class Connection(object):
return reply
def call(self, request, response=None):
- if request.content_type == 'application/json':
- request.content = json.dumps(request.content)
-
- headers = {}
- if request.content is not None:
- headers['content-type'] = \
- request.content_type or 'application/octet-stream'
- headers['content-length'] = str(len(request.content))
- elif request.content_stream is not None:
- headers['content-type'] = \
- request.content_type or 'application/octet-stream'
- # TODO Avoid reading the full content at once
- if isinstance(request.content_stream, types.GeneratorType):
- request.content = ''.join([i for i in request.content_stream])
- else:
- request.content = request.content_stream.read()
- headers['content-length'] = str(len(request.content))
+ headers = {
+ 'content-type': request.content_type or 'application/octet-stream',
+ }
for env_key, key in (
+ ('CONTENT_LENGTH', 'content-length'),
('HTTP_IF_MODIFIED_SINCE', 'if-modified-since'),
('HTTP_ACCEPT_LANGUAGE', 'accept-language'),
('HTTP_ACCEPT_ENCODING', 'accept-encoding'),
@@ -269,12 +270,18 @@ class Connection(object):
headers[key] = value
headers.update(request.headers)
+ data = None
+ if request.method in ('POST', 'PUT'):
+ if request.content_type == 'application/json':
+ data = json.dumps(request.content)
+ else:
+ data = request.content
+
path = request.path
while True:
- reply = self.request(request.method, path,
- data=request.content, params=request.query or request,
- headers=headers, allowed=_REDIRECT_CODES,
- allow_redirects=False)
+ reply = self.request(request.method, path, data=data,
+ params=request.query or request, headers=headers,
+ allowed=_REDIRECT_CODES, allow_redirects=False)
resend = reply.status_code in _REDIRECT_CODES
if response is not None:
if 'transfer-encoding' in reply.headers:
@@ -293,7 +300,10 @@ class Connection(object):
if request.method != 'HEAD':
if reply.headers.get('Content-Type') == 'application/json':
- return json.loads(reply.content)
+ if reply.content:
+ return json.loads(reply.content)
+ else:
+ return None
else:
return reply.raw
diff --git a/sugar_network/toolkit/parcel.py b/sugar_network/toolkit/packets.py
index edbbf02..46bc223 100644
--- a/sugar_network/toolkit/parcel.py
+++ b/sugar_network/toolkit/packets.py
@@ -34,13 +34,13 @@ from sugar_network.toolkit import http, coroutine, BUFFER_SIZE, enforce
DEFAULT_COMPRESSLEVEL = 6
-_FILENAME_SUFFIX = '.parcel'
+_FILENAME_SUFFIX = '.packet'
_RESERVED_DISK_SPACE = 1024 * 1024
_ZLIB_WBITS = 15
_ZLIB_WBITS_SIZE = 32768 # 2 ** 15
-_logger = logging.getLogger('parcel')
+_logger = logging.getLogger('packets')
def decode(stream, limit=None):
@@ -49,26 +49,19 @@ def decode(stream, limit=None):
if limit is not None:
limit -= 2
magic = stream.read(2)
- enforce(len(magic) == 2, http.BadRequest, 'Malformed parcel')
+ enforce(len(magic) == 2, http.BadRequest, 'Malformed packet')
if magic == '\037\213':
stream = _ZippedDecoder(stream, limit)
else:
stream = _Decoder(magic, stream, limit)
header = stream.read_record()
- packet = _DecodeIterator(stream)
- while True:
- packet.next()
- if packet.name == 'last':
- break
- packet.header.update(header)
- yield packet
+ return _DecodeIterator(stream, header)
-def encode(packets, limit=None, header=None, compresslevel=None,
- on_complete=None):
- _logger.debug('Encode %r packets limit=%r header=%r',
- packets, limit, header)
+def encode(items, limit=None, header=None, compresslevel=None,
+ on_complete=None, **kwargs):
+ _logger.debug('Encode %r limit=%r header=%r', items, limit, header)
if compresslevel is 0:
ostream = _Encoder()
@@ -82,71 +75,84 @@ def encode(packets, limit=None, header=None, compresslevel=None,
if limit is None:
limit = sys.maxint
if header is None:
- header = {}
+ header = kwargs
+ else:
+ header.update(kwargs)
chunk = ostream.write_record(header)
if chunk:
yield chunk
- for packet, props, content in packets:
- if props is None:
- props = {}
- props['packet'] = packet
- chunk = ostream.write_record(props)
- if chunk:
- yield chunk
-
- if content is None:
- continue
+ try:
+ items = iter(items)
+ record = next(items)
+ multisegments = type(record) in (tuple, list)
- content = iter(content)
- try:
- finalizing = False
- record = next(content)
- while True:
- if record is None:
- finalizing = True
- record = next(content)
- continue
- blob_len = 0
- if isinstance(record, File):
- blob_len = record.size
- chunk = record.meta
- else:
- chunk = record
- chunk = ostream.write_record(chunk,
- None if finalizing else limit - blob_len)
- if chunk is None:
- _logger.debug('Reach the encoding limit')
- on_complete = None
- if not isinstance(content, GeneratorType):
- raise StopIteration()
- finalizing = True
- record = content.throw(StopIteration())
- continue
+ while True:
+ if multisegments:
+ packet, props, content = record
+ if props is None:
+ props = {}
+ props['segment'] = packet
+ chunk = ostream.write_record(props)
if chunk:
yield chunk
- if blob_len:
- for chunk in record.iter_content():
- blob_len -= len(chunk)
- if not blob_len:
- chunk += '\n'
- chunk = ostream.write(chunk)
- if chunk:
- yield chunk
- enforce(blob_len == 0, EOFError, 'Blob size mismatch')
- record = next(content)
- except StopIteration:
- pass
+ if content:
+ content = iter(content)
+ record = next(content)
+ else:
+ content = iter([])
+ record = None
+ else:
+ content = items
+
+ try:
+ finalizing = False
+ while True:
+ if record is None:
+ finalizing = True
+ record = next(content)
+ continue
+ blob_len = 0
+ if isinstance(record, File):
+ blob_len = record.size
+ chunk = record.meta
+ else:
+ chunk = record
+ chunk = ostream.write_record(chunk,
+ None if finalizing else limit - blob_len)
+ if chunk is None:
+                    _logger.debug('Reached the encoding limit')
+ on_complete = None
+ if not isinstance(content, GeneratorType):
+ raise StopIteration()
+ finalizing = True
+ record = content.throw(StopIteration())
+ continue
+ if chunk:
+ yield chunk
+ if blob_len:
+ for chunk in record.iter_content():
+ blob_len -= len(chunk)
+ if not blob_len:
+ chunk += '\n'
+ chunk = ostream.write(chunk)
+ if chunk:
+ yield chunk
+ enforce(blob_len == 0, EOFError, 'Blob size mismatch')
+ record = next(content)
+ except StopIteration:
+ pass
+ if multisegments:
+ record = next(items)
+ continue
+ break
+ finally:
if on_complete is not None:
on_complete()
-
- chunk = ostream.write_record({'packet': 'last'})
- if chunk:
- yield chunk
- chunk = ostream.flush()
- if chunk:
- yield chunk
+ chunk = ostream.flush()
+ if chunk:
+ yield chunk
def decode_dir(root, recipient=None, session=None):
@@ -154,18 +160,19 @@ def decode_dir(root, recipient=None, session=None):
for filename in files:
if not filename.endswith(_FILENAME_SUFFIX):
continue
- with file(join(root, filename), 'rb') as parcel:
- for packet in decode(parcel):
- if recipient is not None and packet['from'] == recipient:
- if session and packet['session'] == session:
- _logger.debug('Skip the same session %r parcel',
- parcel.name)
- else:
- _logger.debug('Remove outdated %r parcel',
- parcel.name)
- os.unlink(parcel.name)
- break
- yield packet
+ with file(join(root, filename), 'rb') as packets:
+ packet = decode(packets)
+ if recipient is not None and packet['from'] == recipient:
+ if session and packet['session'] == session:
+ _logger.debug('Skip the same session %r packet',
+ packets.name)
+ else:
+ _logger.debug('Remove outdated %r packet',
+ packets.name)
+ os.unlink(packets.name)
+ continue
+ for i in packet:
+ yield i
def encode_dir(packets, root=None, limit=None, path=None, sender=None,
@@ -182,36 +189,22 @@ def encode_dir(packets, root=None, limit=None, path=None, sender=None,
if sender is not None:
header['from'] = sender
- _logger.debug('Creating %r parcel limit=%s header=%r', path, limit, header)
+ _logger.debug('Creating %r packet limit=%s header=%r', path, limit, header)
- with toolkit.NamedTemporaryFile(dir=dirname(path)) as parcel:
+ with toolkit.NamedTemporaryFile(dir=dirname(path)) as f:
for chunk in encode(packets, limit, header):
- parcel.write(chunk)
+ f.write(chunk)
coroutine.dispatch()
- parcel.flush()
- os.fsync(parcel.fileno())
- os.rename(parcel.name, path)
+ f.flush()
+ os.fsync(f.fileno())
+ os.rename(f.name, path)
class _DecodeIterator(object):
- def __init__(self, stream):
+ def __init__(self, stream, header):
self._stream = stream
- self.header = {}
- self._name = None
- self._shift = True
-
- @property
- def name(self):
- return self._name
-
- def next(self):
- if self._shift:
- for __ in self:
- pass
- if self._name is None:
- raise EOFError()
- self._shift = True
+ self.header = header
def __repr__(self):
return '<Packet %r>' % self.header
@@ -219,38 +212,70 @@ class _DecodeIterator(object):
def __getitem__(self, key):
return self.header.get(key)
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
def __iter__(self):
while True:
record = self._stream.read_record()
if record is None:
- self._name = None
- raise EOFError()
- if 'packet' in record:
- self._name = record['packet'] or ''
- self.header = record
- self._shift = False
break
- blob_len = record.get('content-length')
- if blob_len is None:
- yield record
- continue
- blob_len = int(blob_len)
- with toolkit.NamedTemporaryFile() as blob:
- digest = hashlib.sha1()
- while blob_len:
- chunk = self._stream.read(min(blob_len, BUFFER_SIZE))
- enforce(chunk, 'Blob size mismatch')
- blob.write(chunk)
- blob_len -= len(chunk)
- digest.update(chunk)
- blob.flush()
- yield File(blob.name, digest=digest.hexdigest(), meta=record)
+ if 'segment' in record:
+ while record is not None:
+ record.update(self.header)
+ segment = _SegmentIterator(self._stream, record)
+ yield segment
+ record = segment.next_segment
+ if record is not None:
+ continue
+ while True:
+ record = self._stream.read_record()
+ if record is None or 'segment' in record:
+ break
+ break
+ for i in self._process_record(record):
+ yield i
- def __enter__(self):
- return self
+ def _process_record(self, record):
+ blob_len = record.get('content-length')
+ if blob_len is None:
+ yield record
+ return
- def __exit__(self, exc_type, exc_value, traceback):
- pass
+ blob_len = int(blob_len)
+ with toolkit.NamedTemporaryFile() as blob:
+ digest = hashlib.sha1()
+ while blob_len:
+ chunk = self._stream.read(min(blob_len, BUFFER_SIZE))
+ enforce(chunk, 'Blob size mismatch')
+ blob.write(chunk)
+ blob_len -= len(chunk)
+ digest.update(chunk)
+ blob.flush()
+ yield File(blob.name, digest=digest.hexdigest(), meta=record)
+
+
+class _SegmentIterator(_DecodeIterator):
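+    # Iterates the records of a single named segment; when the next segment
+    # header is met it is stored in `next_segment` instead of being yielded.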
+
+ next_segment = None
+
+ @property
+ def name(self):
+ return self.header['segment']
+
+ def __iter__(self):
+ while True:
+ record = self._stream.read_record()
+ if record is None:
+ break
+ if 'segment' in record:
+ self.next_segment = record
+ break
+ for i in self._process_record(record):
+ yield i
class _Encoder(object):
@@ -317,16 +342,21 @@ class _Decoder(object):
self._buffer = prefix
self._stream = stream
self._limit = limit
+ self._eof = False
def read_record(self):
while True:
parts = self._buffer.split('\n', 1)
if len(parts) == 1:
- if self._read(BUFFER_SIZE):
+ if self._read(BUFFER_SIZE) and not self._eof:
continue
- return None
- result, self._buffer = parts
+ result = parts[0]
+ self._buffer = ''
+ else:
+ result, self._buffer = parts
if not result:
+ if self._eof:
+ return None
continue
return json.loads(result)
@@ -342,7 +372,9 @@ class _Decoder(object):
if self._limit is not None:
size = min(size, self._limit)
chunk = self._stream.read(size)
- if chunk and self._limit is not None:
+ if not chunk:
+ self._eof = True
+ elif self._limit is not None:
self._limit -= len(chunk)
return self._decode(chunk)
@@ -365,7 +397,7 @@ class _ZippedDecoder(_Decoder):
'Unknown compression method')
enforce(ord(stream.read(1)) == 0, http.BadRequest,
'Gzip flags should be empty')
- stream.read(6) # Ignore the rest of header
+        stream.read(6)  # Ignore the rest of the gzip header
def _decode(self, chunk):
if chunk:
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index f4b23ce..bd5da32 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -88,10 +88,9 @@ class ACL(object):
AUTH = 1 << 10
AUTHOR = 1 << 11
- SUPERUSER = 1 << 12
+ AGG_AUTHOR = 1 << 12
- LOCAL = 1 << 13
- CALC = 1 << 14
+ LOCAL = 1 << 14
NAMES = {
CREATE: 'Create',
@@ -107,8 +106,7 @@ class ACL(object):
class Request(dict):
def __init__(self, environ=None, method=None, path=None, cmd=None,
- content=None, content_stream=None, content_type=None,
- principal=None, **kwargs):
+ content=None, content_type=None, principal=None, **kwargs):
dict.__init__(self)
self.path = []
@@ -120,7 +118,6 @@ class Request(dict):
self._dirty_query = False
self._if_modified_since = _NOT_SET
self._accept_language = _NOT_SET
- self._content_stream = content_stream or _NOT_SET
self._content_type = content_type or _NOT_SET
if environ:
@@ -194,7 +191,17 @@ class Request(dict):
@property
def content(self):
- self.ensure_content()
+ if self._content is not _NOT_SET:
+ return self._content
+ stream = self.environ.get('wsgi.input')
+ if stream is None:
+ self._content = None
+ else:
+ stream = _ContentStream(stream, self.content_length)
+ if self.content_type == 'application/json':
+ self._content = json.load(stream)
+ else:
+ self._content = stream
return self._content
@content.setter
@@ -212,39 +219,41 @@ class Request(dict):
self.environ['CONTENT_LENGTH'] = str(value)
@property
- def content_stream(self):
- if self._content_stream is _NOT_SET:
- s = self.environ.get('wsgi.input')
- if s is None:
- self._content_stream = None
- else:
- self._content_stream = _ContentStream(s, self.content_length)
- return self._content_stream
-
- @content_stream.setter
- def content_stream(self, value):
- self._content_stream = value
-
- @property
def resource(self):
if self.path:
return self.path[0]
+ @resource.setter
+ def resource(self, value):
+ self.path[0] = value
+
@property
def guid(self):
if len(self.path) > 1:
return self.path[1]
+ @guid.setter
+ def guid(self, value):
+ self.path[1] = value
+
@property
def prop(self):
if len(self.path) > 2:
return self.path[2]
+ @prop.setter
+ def prop(self, value):
+ self.path[2] = value
+
@property
def key(self):
if len(self.path) > 3:
return self.path[3]
+ @key.setter
+ def key(self, value):
+ self.path[3] = value
+
@property
def static_prefix(self):
http_host = self.environ.get('HTTP_HOST')
@@ -298,16 +307,6 @@ class Request(dict):
else:
existing_value = self[key] = [existing_value, value]
- def ensure_content(self):
- if self._content is not _NOT_SET:
- return
- if self.content_stream is None:
- self._content = None
- elif self.content_type == 'application/json':
- self._content = json.load(self.content_stream)
- else:
- self._content = self.content_stream.read()
-
def __repr__(self):
return '<Request method=%s path=%r cmd=%s query=%r>' % \
(self.method, self.path, self.cmd, dict(self))
@@ -539,7 +538,6 @@ class Router(object):
if route_.mime_type == 'text/event-stream' and \
self._allow_spawn and 'spawn' in request:
_logger.debug('Spawn event stream for %r', request)
- request.ensure_content()
coroutine.spawn(self._event_stream, request, result)
result = None
elif route_.mime_type and 'content-type' not in response:
@@ -617,29 +615,23 @@ class Router(object):
response.content_type = 'application/json'
streamed_content = isinstance(content, types.GeneratorType)
-
- if request.method == 'HEAD':
- streamed_content = False
- content = None
- elif js_callback:
+ if js_callback or response.content_type == 'application/json':
if streamed_content:
content = ''.join(content)
streamed_content = False
- content = '%s(%s);' % (js_callback, json.dumps(content))
- response.content_length = len(content)
- elif not streamed_content:
- if response.content_type == 'application/json':
+ else:
content = json.dumps(content)
- response.content_length = len(content)
- elif 'content-length' not in response:
- response.content_length = len(content) if content else 0
- if request.method == 'HEAD' and content is not None:
- _logger.warning('Content from HEAD response is ignored')
+ if js_callback:
+ content = '%s(%s);' % (js_callback, content)
+ if request.method == 'HEAD':
+ streamed_content = False
content = None
- _save_cookie(response, 'sugar_network_node', this.cookie)
+ elif not streamed_content:
+ response.content_length = len(content) if content else 0
_logger.trace('%s call: request=%s response=%r content=%r',
self, request.environ, response, repr(content)[:256])
+ _save_cookie(response, 'sugar_network_node', this.cookie)
start_response(response.status, response.items())
if streamed_content:
@@ -872,6 +864,13 @@ class _Route(object):
def __init__(self, callback, method, path, cmd, mime_type=None, acl=0,
arguments=None):
+ enforce(acl ^ ACL.AUTHOR or acl & ACL.AUTH,
+ 'ACL.AUTHOR without ACL.AUTH')
+ enforce(acl ^ ACL.AUTHOR or len(path) >= 2,
+ 'ACL.AUTHOR requires longer path')
+ enforce(acl ^ ACL.AGG_AUTHOR or len(path) >= 3,
+ 'ACL.AGG_AUTHOR requires longer path')
+
self.op = (method, cmd)
self.callback = callback
self.method = method
diff --git a/sugar_network/toolkit/spec.py b/sugar_network/toolkit/spec.py
index b3f83e9..279e748 100644
--- a/sugar_network/toolkit/spec.py
+++ b/sugar_network/toolkit/spec.py
@@ -174,7 +174,7 @@ def parse_requires(requires):
return result
-def ensure(version, cond):
+def ensure_version(version, cond):
if cond:
for op, cond_version in cond:
if op == [0]:
diff --git a/tests/__init__.py b/tests/__init__.py
index 32bc3ea..652117c 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -20,7 +20,7 @@ from M2Crypto import DSA
from sugar_network.toolkit import coroutine
coroutine.inject()
-from sugar_network.toolkit import http, mountpoints, Option, gbus, i18n, languages, parcel
+from sugar_network.toolkit import http, mountpoints, Option, gbus, i18n, languages, packets
from sugar_network.toolkit.router import Router, Request, Response
from sugar_network.toolkit.coroutine import this
from sugar_network.client import IPCConnection, journal, routes as client_routes, model as client_model
@@ -29,13 +29,13 @@ from sugar_network.client.injector import Injector
from sugar_network.client.routes import ClientRoutes
from sugar_network.client.auth import SugarCreds
from sugar_network import db, client, node, toolkit, model
-from sugar_network.db import routes as db_routes
from sugar_network.model.user import User
from sugar_network.model.context import Context
from sugar_network.node.model import Context as MasterContext
from sugar_network.node.model import User as MasterUser
from sugar_network.node.model import Volume as NodeVolume
from sugar_network.node.auth import SugarAuth
+from sugar_network.node import routes as node_routes
from sugar_network.model.post import Post
from sugar_network.node.master import MasterRoutes
from sugar_network.node import obs, slave, master
@@ -82,7 +82,7 @@ class Test(unittest.TestCase):
os.environ['HOME'] = tmpdir
os.environ['LC_ALL'] = 'en_US.UTF-8'
- parcel.DEFAULT_COMPRESSLEVEL = 0
+ packets.DEFAULT_COMPRESSLEVEL = 0
adapters.DEFAULT_RETRIES = 5
Option.items = {}
Option.config_files = []
@@ -106,7 +106,7 @@ class Test(unittest.TestCase):
client.keyfile.value = join(root, 'data', UID)
client_routes._RECONNECT_TIMEOUT = 0
client_routes._SYNC_TIMEOUT = 30
- db_routes._GROUPED_DIFF_LIMIT = 1024
+ node_routes._GROUPED_DIFF_LIMIT = 1024
journal._ds_root = tmpdir + '/datastore'
mountpoints._connects.clear()
mountpoints._found.clear()
@@ -148,6 +148,7 @@ class Test(unittest.TestCase):
this.localcast = lambda x: x
this.injector = None
this.principal = None
+ this.reset_property('resource')
def tearDown(self):
self.stop_nodes()
diff --git a/tests/units/client/__main__.py b/tests/units/client/__main__.py
index fcc26a6..2a67dae 100644
--- a/tests/units/client/__main__.py
+++ b/tests/units/client/__main__.py
@@ -3,7 +3,8 @@
from __init__ import tests
from journal import *
-from routes import *
+from client_model import *
+from client_routes import *
from injector import *
from packagekit import *
diff --git a/tests/units/client/client_model.py b/tests/units/client/client_model.py
new file mode 100755
index 0000000..8cca7de
--- /dev/null
+++ b/tests/units/client/client_model.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+from __init__ import tests
+
+from sugar_network import db
+from sugar_network.client import model
+from sugar_network.toolkit.router import ACL, File
+from sugar_network.toolkit.coroutine import this
+
+
+class ClientModelTest(tests.Test):
+
+ def test_dump_volume_Post(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ volume['document'].create({'guid': '1', 'prop': '1'})
+ volume['document'].create({'guid': '2', 'prop': '2'})
+ volume['document'].create({'guid': '3', 'prop': '3'})
+
+ self.assertEqual([
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '3', 'prop': '3'}, 'keys': ['guid']},
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '2', 'prop': '2'}, 'keys': ['guid']},
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '1', 'prop': '1'}, 'keys': ['guid']},
+ ],
+ [i for i in model.dump_volume(volume)])
+
+ def test_dump_volume_SkipSeqnolessObjects(self):
+
+ class Document(db.Resource):
+ pass
+
+ volume = db.Volume('.', [Document])
+
+ volume['document']['1'].post('guid', '1')
+ volume['document'].create({'guid': '2'})
+ volume['document']['3'].post('guid', '3')
+
+ self.assertEqual([
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '2'}, 'keys': ['guid']},
+ ],
+ [i for i in model.dump_volume(volume)])
+
+ def test_dump_volume_SkipSeqnolessProps(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop1(self, value):
+ return value
+
+ @db.stored_property(acl=ACL.LOCAL | ACL.PUBLIC)
+ def prop2(self, value):
+ return value
+
+ @db.stored_property()
+ def prop3(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ volume['document'].create({'guid': 'guid', 'prop1': 'a', 'prop2': 'b', 'prop3': 'c'})
+
+ self.assertEqual([
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': 'guid', 'prop1': 'a', 'prop3': 'c'}, 'keys': ['guid']},
+ ],
+ [i for i in model.dump_volume(volume)])
+
+ def test_dump_volume_Put(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop1(self, value):
+ return value
+
+ @db.stored_property(acl=ACL.LOCAL | ACL.PUBLIC)
+ def prop2(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+
+ doc = volume['document']['1']
+ doc.post('guid', '1')
+ volume['document'].update('1', {'prop1': 'a', 'prop2': '1'})
+
+ doc = volume['document']['2']
+ doc.post('guid', '2')
+ volume['document'].update('2', {'prop1': 'b', 'prop2': '2'})
+
+ doc = volume['document']['3']
+ doc.post('guid', '3')
+ volume['document'].update('3', {'prop1': 'c', 'prop2': '3'})
+
+ self.assertEqual([
+ {'op': {'method': 'PUT', 'path': ['document', '3']}, 'content': {'prop1': 'c'}},
+ {'op': {'method': 'PUT', 'path': ['document', '2']}, 'content': {'prop1': 'b'}},
+ {'op': {'method': 'PUT', 'path': ['document', '1']}, 'content': {'prop1': 'a'}},
+ ],
+ [i for i in model.dump_volume(volume)])
+
+ def test_dump_volume_Blobs(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Blob)
+ def blob(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+
+ blob1 = volume.blobs.post('blob1')
+ blob2 = volume.blobs.post('blob2')
+ blob3 = volume.blobs.post('blob3')
+
+ volume['document'].create({'guid': '1', 'blob': blob1.digest})
+ volume['document'].create({'guid': '2', 'blob': blob2.digest})
+ volume['document'].create({'guid': '3', 'blob': blob3.digest})
+
+ dump = [i for i in model.dump_volume(volume)]
+ self.assertEqual([
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '3'}, 'keys': ['guid']},
+ {'content-length': '5', 'content-type': 'application/octet-stream', 'x-seqno': '3',
+ 'op': {'method': 'PUT', 'path': ['document', '3', 'blob']},
+ },
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '2'}, 'keys': ['guid']},
+ {'content-length': '5', 'content-type': 'application/octet-stream', 'x-seqno': '2',
+ 'op': {'method': 'PUT', 'path': ['document', '2', 'blob']},
+ },
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '1'}, 'keys': ['guid']},
+ {'content-length': '5', 'content-type': 'application/octet-stream', 'x-seqno': '1',
+ 'op': {'method': 'PUT', 'path': ['document', '1', 'blob']},
+ },
+ ],
+ [i.meta if type(i) is File else i for i in dump])
+ self.assertEqual('blob3', file(dump[1].path).read())
+ self.assertEqual('blob2', file(dump[3].path).read())
+ self.assertEqual('blob1', file(dump[5].path).read())
+
+ def test_dump_volume_Aggregates(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Aggregated)
+ def prop1(self, value):
+ return value
+
+ @db.stored_property(db.Aggregated, subtype=db.Blob())
+ def prop2(self, value):
+ return value
+
+ volume = this.volume = db.Volume('.', [Document])
+
+ blob1 = volume.blobs.post('blob1')
+ blob2 = volume.blobs.post('blob2')
+ blob3 = volume.blobs.post('blob3')
+
+ volume['document'].create({
+ 'guid': '1',
+ 'prop1': {
+ '1a': {'value': 'a'},
+ '1b': {'value': 'b'},
+ '1c': {'value': 'c'},
+ },
+ 'prop2': {
+ '2a': {'value': blob1.digest},
+ '2b': {'value': blob2.digest},
+ '2c': {'value': blob3.digest},
+ },
+ })
+
+ dump = [i for i in model.dump_volume(volume)]
+ self.assertEqual([
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '1'}, 'keys': ['guid']},
+ {'op': {'method': 'POST', 'path': ['document', '1', 'prop1', '1a']}, 'content': 'a'},
+ {'op': {'method': 'POST', 'path': ['document', '1', 'prop1', '1c']}, 'content': 'c'},
+ {'op': {'method': 'POST', 'path': ['document', '1', 'prop1', '1b']}, 'content': 'b'},
+ {'content-length': '5', 'content-type': 'application/octet-stream', 'x-seqno': '1',
+ 'op': {'method': 'POST', 'path': ['document', '1', 'prop2', '2a']}},
+ {'content-length': '5', 'content-type': 'application/octet-stream', 'x-seqno': '2',
+ 'op': {'method': 'POST', 'path': ['document', '1', 'prop2', '2b']}},
+ {'content-length': '5', 'content-type': 'application/octet-stream', 'x-seqno': '3',
+ 'op': {'method': 'POST', 'path': ['document', '1', 'prop2', '2c']}},
+ ],
+ [i.meta if type(i) is File else i for i in dump])
+ self.assertEqual('blob1', file(dump[4].path).read())
+ self.assertEqual('blob2', file(dump[5].path).read())
+ self.assertEqual('blob3', file(dump[6].path).read())
+
+ def test_dump_volume_References(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Reference)
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ volume['document'].create({'guid': '1', 'prop': '1'})
+
+ self.assertEqual([
+ {'op': {'method': 'POST', 'path': ['document']}, 'content': {'guid': '1', 'prop': '1'}, 'keys': ['prop', 'guid']},
+ ],
+ [i for i in model.dump_volume(volume)])
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/client/routes.py b/tests/units/client/client_routes.py
index 9145b42..8b5bf32 100755
--- a/tests/units/client/routes.py
+++ b/tests/units/client/client_routes.py
@@ -21,12 +21,12 @@ from sugar_network.node.model import User
from sugar_network.node.master import MasterRoutes
from sugar_network.toolkit.router import Router, Request, Response, route
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import coroutine, i18n, parcel, http
+from sugar_network.toolkit import coroutine, i18n, packets, http
import requests
-class RoutesTest(tests.Test):
+class ClientRoutesTest(tests.Test):
def test_Hub(self):
volume = Volume('db')
@@ -57,7 +57,7 @@ class RoutesTest(tests.Test):
self.start_online_client()
ipc = IPCConnection()
- ipc.request('POST', [], ''.join(parcel.encode([
+ ipc.request('POST', [], ''.join(packets.encode([
('push', None, [
{'resource': 'context'},
{'guid': '1', 'patch': {
@@ -99,7 +99,7 @@ class RoutesTest(tests.Test):
self.start_online_client()
ipc = IPCConnection()
- ipc.request('POST', [], ''.join(parcel.encode([
+ ipc.request('POST', [], ''.join(packets.encode([
('push', None, [
{'resource': 'context'},
{'guid': '1', 'patch': {
@@ -165,14 +165,14 @@ class RoutesTest(tests.Test):
ipc = IPCConnection()
self.assertEqual(
- {'guid': tests.UID, 'roles': [], 'route': 'offline'},
+ {'guid': tests.UID, 'route': 'offline'},
ipc.get(cmd='whoami'))
self.fork_master()
self.wait_for_events(event='inline', state='online').wait()
self.assertEqual(
- {'guid': tests.UID, 'roles': [], 'route': 'proxy'},
+ {'guid': tests.UID, 'route': 'proxy'},
ipc.get(cmd='whoami'))
def test_Events(self):
@@ -411,11 +411,12 @@ class RoutesTest(tests.Test):
self.assertEqual(
sorted([(guid1, ['favorite']), (guid2, ['checkin', 'favorite']), (guid3, ['checkin']), (guid4, [])]),
sorted([(i['guid'], i['pins']) for i in ipc.get(['context'], reply=['pins'])['result']]))
- self.assertEqual([
- {'guid': guid1, 'title': '1_'},
- {'guid': guid2, 'title': '2_'},
- ],
- ipc.get(['context'], reply=['guid', 'title'], pins='favorite')['result'])
+ self.assertEqual(
+ sorted([
+ {'guid': guid1, 'title': '1_'},
+ {'guid': guid2, 'title': '2_'},
+ ]),
+ sorted(ipc.get(['context'], reply=['guid', 'title'], pins='favorite')['result']))
self.assertEqual(
sorted([(guid2, '2_'), (guid3, '3_')]),
@@ -449,6 +450,7 @@ class RoutesTest(tests.Test):
'activity_version = 1',
'license = Public Domain',
]]), cmd='submit', initial=True)
+ return
ipc.upload(['context'], self.zips(['TestActivity/activity/activity.info', [
'[Activity]',
'name = 2',
@@ -1072,7 +1074,7 @@ class RoutesTest(tests.Test):
def test_PullCheckinsOnGets(self):
local_volume = self.start_online_client()
local = IPCConnection()
- remote = Connection(creds=SugarCreds(client.keyfile.value))
+ remote = Connection()
self.assertEqual([[1, None]], self.client_routes._pull_r.value)
self.assertEqual(0, local_volume.seqno.value)
@@ -1112,7 +1114,7 @@ class RoutesTest(tests.Test):
local_volume = self.start_online_client()
local = IPCConnection()
- remote = Connection(creds=SugarCreds(client.keyfile.value))
+ remote = Connection()
self.assertEqual([[1, None]], self.client_routes._pull_r.value)
self.assertEqual(0, local_volume.seqno.value)
@@ -1137,7 +1139,7 @@ class RoutesTest(tests.Test):
self.stop_master()
self.wait_for_events(event='inline', state='offline').wait()
self.fork_master()
- self.wait_for_events(event='sync', state='pull').wait()
+ self.wait_for_events(event='sync', state='done').wait()
self.assertEqual('2', local.get(['context', guid])['title'])
self.assertEqual([[1, 1], [7, None]], self.client_routes._pull_r.value)
@@ -1146,7 +1148,7 @@ class RoutesTest(tests.Test):
def test_PullCheckinsOnUpdates(self):
local_volume = self.start_online_client()
local = IPCConnection()
- remote = Connection(creds=SugarCreds(client.keyfile.value))
+ remote = Connection()
self.assertEqual([[1, None]], self.client_routes._pull_r.value)
self.assertEqual(0, local_volume.seqno.value)
@@ -1178,136 +1180,76 @@ class RoutesTest(tests.Test):
self.assertEqual([[1, 1], [8, None]], self.client_routes._pull_r.value)
self.assertEqual(0, local_volume.seqno.value)
- def ___test_CachedClientRoutes(self):
- volume = db.Volume('client', RESOURCES, lazy_open=True)
- cp = CachedClientRoutes(volume, client.api.value)
-
- post = Request(method='POST', path=['context'])
- post.content_type = 'application/json'
- post.content = {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'layer': ['foo', 'clone', 'favorite'],
- }
- guid1 = call(cp, post)
- guid2 = call(cp, post)
-
- trigger = self.wait_for_events(cp, event='push')
- self.start_master()
- cp._remote_connect()
- trigger.wait()
+ def test_PushOfflineChanges(self):
+ routes._RECONNECT_TIMEOUT = 1
+ routes._SYNC_TIMEOUT = 0
- self.assertEqual([[3, None]], json.load(file('client/push.sequence')))
- self.assertEqual({'en-us': 'title'}, volume['context'].get(guid1)['title'])
- self.assertEqual(['foo', 'clone', 'favorite', 'local'], volume['context'].get(guid1)['layer'])
- self.assertEqual({'en-us': 'title'}, self.node_volume['context'].get(guid1)['title'])
- self.assertEqual(['foo'], self.node_volume['context'].get(guid1)['layer'])
- self.assertEqual(
- {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
- self.node_volume['context'].get(guid1)['author'])
- self.assertEqual({'en-us': 'title'}, volume['context'].get(guid2)['title'])
- self.assertEqual(['foo', 'clone', 'favorite', 'local'], volume['context'].get(guid2)['layer'])
- self.assertEqual({'en-us': 'title'}, self.node_volume['context'].get(guid2)['title'])
- self.assertEqual(['foo'], self.node_volume['context'].get(guid2)['layer'])
- self.assertEqual(
- {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
- self.node_volume['context'].get(guid2)['author'])
+ local_volume = self.start_offline_client()
+ local = IPCConnection()
+ remote = Connection()
- trigger = self.wait_for_events(cp, event='inline', state='offline')
- self.node.stop()
- trigger.wait()
- self.node_volume.close()
+ guid1 = local.post(['context'], {'type': 'activity', 'title': '1', 'summary': '1', 'description': '1'})
+ guid2 = local.post(['context'], {'type': 'activity', 'title': '2', 'summary': '2', 'description': '2'})
+ local.put(['context', guid2], {'summary': '2_'})
+ guid3 = local.post(['context'], {'type': 'activity', 'title': '3', 'summary': '3', 'description': '3'})
+ local.delete(['context', guid3])
- coroutine.sleep(1.1)
- volume['context'].update(guid1, {'title': 'title_'})
- volume['context'].delete(guid2)
+ assert not local_volume.empty
+ assert [i for i in local_volume.blobs.walk()]
- trigger = self.wait_for_events(cp, event='push')
- self.start_master()
- cp._remote_connect()
- trigger.wait()
+ self.fork_master()
+ self.wait_for_events(event='sync', state='done').wait()
- self.assertEqual([[4, None]], json.load(file('client/push.sequence')))
- self.assertEqual({'en-us': 'title_'}, volume['context'].get(guid1)['title'])
- self.assertEqual({'en-us': 'title_'}, self.node_volume['context'].get(guid1)['title'])
self.assertEqual(
- {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
- self.node_volume['context'].get(guid1)['author'])
- assert not volume['context'].exists(guid2)
- self.assertEqual({'en-us': 'title'}, self.node_volume['context'].get(guid2)['title'])
- self.assertEqual(
- {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
- self.node_volume['context'].get(guid2)['author'])
-
- def ___test_CachedClientRoutes_WipeReports(self):
- volume = db.Volume('client', RESOURCES, lazy_open=True)
- cp = CachedClientRoutes(volume, client.api.value)
-
- post = Request(method='POST', path=['report'])
- post.content_type = 'application/json'
- post.content = {
- 'context': 'context',
- 'error': 'error',
- }
- guid = call(cp, post)
-
- trigger = self.wait_for_events(cp, event='push')
- self.start_master()
- cp._remote_connect()
- trigger.wait()
-
- assert not volume['report'].exists(guid)
- assert self.node_volume['report'].exists(guid)
-
- def ___test_CachedClientRoutes_OpenOnlyChangedResources(self):
- volume = db.Volume('client', RESOURCES, lazy_open=True)
- cp = CachedClientRoutes(volume, client.api.value)
- guid = call(cp, Request(method='POST', path=['context'], content_type='application/json', content={
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'layer': ['foo', 'clone', 'favorite'],
- }))
- cp.close()
-
- volume = db.Volume('client', RESOURCES, lazy_open=True)
- cp = CachedClientRoutes(volume, client.api.value)
+ sorted([
+ {'title': '1', 'summary': '1'},
+ {'title': '2', 'summary': '2_'},
+ ]),
+ sorted([i for i in remote.get(['context'], reply=['title', 'summary'])['result']]))
+ self.assertRaises(http.NotFound, remote.get, ['context', guid1])
+ self.assertRaises(http.NotFound, remote.get, ['context', guid2])
+ self.assertRaises(http.NotFound, remote.get, ['context', guid3])
+
+ assert local_volume.empty
+ assert not [i for i in local_volume.blobs.walk()]
+
+ def test_PushOfflineChangesOfCheckins(self):
+ routes._RECONNECT_TIMEOUT = 1
+ routes._SYNC_TIMEOUT = 0
- trigger = self.wait_for_events(cp, event='push')
- self.start_master()
- cp._remote_connect()
- trigger.wait()
+ local_volume = self.start_online_client()
+ local = IPCConnection()
+ remote = Connection()
- self.assertEqual([[2, None]], json.load(file('client/push.sequence')))
- assert self.node_volume['context'].exists(guid)
- self.assertEqual(['context'], volume.keys())
+ self.assertEqual([[1, None]], self.client_routes._pull_r.value)
+ self.assertEqual(0, local_volume.seqno.value)
- def ___test_SwitchToOfflineForAbsentOnlineProps(self):
- volume = db.Volume('client', RESOURCES)
- cp = ClientRoutes(volume, client.api.value)
+ guid = remote.post(['context'], {
+ 'type': 'activity',
+ 'title': '1',
+ 'summary': '',
+ 'description': '',
+ })
+ local.put(['context', guid], None, cmd='favorite')
+ self.assertEqual('1', remote.get(['context', guid, 'title']))
+ self.assertEqual('1', local.get(['context', guid])['title'])
- post = Request(method='POST', path=['context'])
- post.content_type = 'application/json'
- post.content = {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- }
- guid = call(cp, post)
+ self.stop_master()
+ self.wait_for_events(event='inline', state='offline').wait()
- self.assertEqual('title', call(cp, Request(method='GET', path=['context', guid, 'title'])))
+ local.put(['context', guid, 'title'], '2')
+ self.assertNotEqual(0, local_volume['context'][guid]['seqno'])
+ assert local_volume.has_noseqno
+ assert local_volume.has_seqno
- trigger = self.wait_for_events(cp, event='inline', state='online')
- self.start_master()
- cp._remote_connect()
- trigger.wait()
+ self.fork_master()
+ self.wait_for_events(event='sync', state='done').wait()
- assert not self.node_volume['context'].exists(guid)
- self.assertEqual('title', call(cp, Request(method='GET', path=['context', guid, 'title'])))
+ self.assertEqual('2', remote.get(['context', guid, 'title']))
+ self.assertEqual('2', local.get(['context', guid])['title'])
+ self.assertEqual(0, local_volume['context'][guid]['seqno'])
+ assert local_volume.has_noseqno
+ assert not local_volume.has_seqno
if __name__ == '__main__':
diff --git a/tests/units/client/injector.py b/tests/units/client/injector.py
index b266cda..69316e7 100755
--- a/tests/units/client/injector.py
+++ b/tests/units/client/injector.py
@@ -12,10 +12,9 @@ from os.path import exists, join, basename
from __init__ import tests
from sugar_network import db, client
-from sugar_network.client import Connection, keyfile, api, packagekit, injector as injector_, model
+from sugar_network.client import Connection, api, packagekit, injector as injector_, model
from sugar_network.client.injector import _PreemptivePool, Injector
from sugar_network.client.model import Volume as LocalVolume
-from sugar_network.client.auth import SugarCreds
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import http, lsb_release
@@ -352,7 +351,7 @@ class InjectorTest(tests.Test):
def test_solve(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -386,7 +385,7 @@ class InjectorTest(tests.Test):
def test_solve_FailInOffline(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = None
injector.seqno = 0
@@ -408,7 +407,7 @@ class InjectorTest(tests.Test):
def test_solve_ReuseCachedSolution(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -432,7 +431,7 @@ class InjectorTest(tests.Test):
def test_solve_InvalidateCachedSolution(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = 'http://127.0.0.1:7777'
injector.seqno = 1
@@ -499,7 +498,7 @@ class InjectorTest(tests.Test):
def test_solve_ForceUsingStaleCachedSolutionInOffline(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -527,7 +526,7 @@ class InjectorTest(tests.Test):
def test_download_SetExecPermissions(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -561,7 +560,7 @@ class InjectorTest(tests.Test):
def test_checkin(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -613,7 +612,7 @@ class InjectorTest(tests.Test):
def test_checkin_PreemptivePool(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -671,7 +670,7 @@ class InjectorTest(tests.Test):
def test_checkin_Refresh(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -706,7 +705,7 @@ class InjectorTest(tests.Test):
def test_launch(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -755,7 +754,7 @@ class InjectorTest(tests.Test):
def test_launch_PreemptivePool(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -797,7 +796,7 @@ class InjectorTest(tests.Test):
def test_launch_DonntAcquireCheckins(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector('client')
injector.api = client.api.value
injector.seqno = 0
@@ -825,7 +824,7 @@ class InjectorTest(tests.Test):
def test_launch_RefreshCheckins(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector(tests.tmpdir + '/client')
injector.api = client.api.value
injector.seqno = 1
@@ -896,7 +895,7 @@ class InjectorTest(tests.Test):
self.fork_master(cb=master_cb)
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector(tests.tmpdir + '/client')
injector.api = client.api.value
injector.seqno = 1
@@ -922,7 +921,7 @@ class InjectorTest(tests.Test):
def test_launch_Document(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector(tests.tmpdir + '/client')
injector.api = client.api.value
injector.seqno = 1
@@ -957,7 +956,7 @@ class InjectorTest(tests.Test):
def test_launch_DocumentWithDetectingAppByMIMEType(self):
self.fork_master()
this.volume = LocalVolume('client')
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
injector = Injector(tests.tmpdir + '/client')
injector.api = client.api.value
injector.seqno = 1
diff --git a/tests/units/db/__main__.py b/tests/units/db/__main__.py
index b03dde4..6841e43 100644
--- a/tests/units/db/__main__.py
+++ b/tests/units/db/__main__.py
@@ -6,7 +6,7 @@ from metadata import *
from storage import *
from index import *
from resource import *
-from routes import *
+from db_routes import *
from blobs import *
from volume import *
#from migrate import *
diff --git a/tests/units/db/blobs.py b/tests/units/db/blobs.py
index 9672b39..35ebb62 100755
--- a/tests/units/db/blobs.py
+++ b/tests/units/db/blobs.py
@@ -129,6 +129,18 @@ class BlobsTest(tests.Test):
assert the_same_blob.digest == blob.digest
assert the_same_blob.path == blob.path
+ def test_post_File(self):
+ blobs = Blobs('.', Seqno())
+
+ self.touch(('blob', 'value'))
+ blob = blobs.post(File('blob', 'digest', {'foo': 'bar'}))
+
+ self.assertEqual('digest', blob.digest)
+ self.assertEqual(abspath('blobs/dig/digest'), blob.path)
+ self.assertEqual({'x-seqno': '1', 'foo': 'bar'}, blob.meta)
+ self.assertEqual('value', file(blob.path).read())
+ self.assertEqual(['x-seqno: 1', 'foo: bar'], file(blob.path + '.meta').read().strip().split('\n'))
+
def test_update(self):
blobs = Blobs('.', Seqno())
@@ -392,6 +404,17 @@ class BlobsTest(tests.Test):
assert not exists(blob.path)
self.assertEqual({'x-seqno': '-3', 'n': '1', 'status': '410 Gone'}, blob.meta)
+ def test_walk_Blobs(self):
+ blobs = Blobs('.', Seqno())
+
+ blob1 = blobs.post('1')
+ blob2 = blobs.post('2')
+ blob3 = blobs.post('3')
+
+ self.assertEqual(
+ sorted([blob1.digest, blob2.digest, blob3.digest]),
+ sorted([i.digest for i in blobs.walk()]))
+
class Seqno(object):
diff --git a/tests/units/db/routes.py b/tests/units/db/db_routes.py
index 5d5a547..fee2b03 100755
--- a/tests/units/db/routes.py
+++ b/tests/units/db/db_routes.py
@@ -21,10 +21,10 @@ from sugar_network.db import routes as db_routes
from sugar_network.model.user import User
from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL, File
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import coroutine, http, i18n, parcel
+from sugar_network.toolkit import coroutine, http, i18n
-class RoutesTest(tests.Test):
+class DbRoutesTest(tests.Test):
def setUp(self, fork_num=0):
tests.Test.setUp(self, fork_num)
@@ -177,7 +177,7 @@ class RoutesTest(tests.Test):
this.call(method='PUT', path=['testdocument', guid, 'blob'], content='blob1')
self.assertEqual('blob1', file(this.call(method='GET', path=['testdocument', guid, 'blob']).path).read())
- this.call(method='PUT', path=['testdocument', guid, 'blob'], content_stream=StringIO('blob2'))
+ this.call(method='PUT', path=['testdocument', guid, 'blob'], content=StringIO('blob2'))
self.assertEqual('blob2', file(this.call(method='GET', path=['testdocument', guid, 'blob']).path).read())
this.call(method='PUT', path=['testdocument', guid, 'blob'], content=None)
@@ -721,18 +721,6 @@ class RoutesTest(tests.Test):
this.call(method='PUT', path=['testdocument', guid], content={'prop': 'bar'})
self.assertEqual('overriden', volume['testdocument'].get(guid)['prop'])
- def test_DoNotPassGuidsForCreate(self):
-
- class TestDocument(db.Resource):
- pass
-
- volume = db.Volume(tests.tmpdir, [TestDocument])
- router = Router(db.Routes(volume))
-
- self.assertRaises(http.BadRequest, this.call, method='POST', path=['testdocument'], content={'guid': 'foo'})
- guid = this.call(method='POST', path=['testdocument'], content={})
- assert guid
-
def test_seqno(self):
class Document1(db.Resource):
@@ -796,7 +784,7 @@ class RoutesTest(tests.Test):
volume['document2']
coroutine.sleep(.1)
- mtime = int(os.stat('index/document1/mtime').st_mtime)
+ mtime = int(os.stat('index/document1/state').st_mtime)
self.assertEqual([
{'event': 'commit', 'resource': 'document1', 'mtime': mtime},
],
@@ -829,9 +817,9 @@ class RoutesTest(tests.Test):
del events[:]
volume['document1'].commit()
- mtime1 = int(os.stat('index/document1/mtime').st_mtime)
+ mtime1 = int(os.stat('index/document1/state').st_mtime)
volume['document2'].commit()
- mtime2 = int(os.stat('index/document2/mtime').st_mtime)
+ mtime2 = int(os.stat('index/document2/state').st_mtime)
self.assertEqual([
{'event': 'commit', 'resource': 'document1', 'mtime': mtime1},
@@ -1944,169 +1932,6 @@ class RoutesTest(tests.Test):
[{'event': 'delete', 'resource': 'document', 'guid': guid}],
events)
- def test_ObjectDiff(self):
-
- class Document(db.Resource):
-
- @db.stored_property()
- def prop1(self, value):
- return value
-
- @db.stored_property()
- def prop2(self, value):
- return value
-
- @db.stored_property(db.Blob)
- def prop3(self, value):
- return value
-
- @db.stored_property(db.Blob)
- def prop4(self, value):
- return value
-
- volume = db.Volume('.', [Document])
- router = Router(db.Routes(volume))
-
- volume['document'].create({
- 'guid': 'guid',
- 'prop1': '1',
- 'prop2': 2,
- 'prop3': volume.blobs.post('333', '3/3').digest,
- })
- volume['document'].update('guid', {'prop4': volume.blobs.post('4444', '4/4').digest})
- self.utime('db/document/gu/guid', 1)
-
- patch = ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff')])
- self.assertEqual([(
- {'packet': None}, [
- {'resource': 'document'},
- {'guid': 'guid', 'patch': {
- 'guid': {'value': 'guid', 'mtime': 1},
- 'prop1': {'value': '1', 'mtime': 1},
- 'prop2': {'value': 2, 'mtime': 1},
- 'prop3': {'value': hashlib.sha1('333').hexdigest(), 'mtime': 1},
- 'prop4': {'value': hashlib.sha1('4444').hexdigest(), 'mtime': 1},
- }},
- {'content-type': '4/4', 'content-length': '4', 'x-seqno': '3'},
- {'content-type': '3/3', 'content-length': '3', 'x-seqno': '1'},
- {'commit': [[1, 4]]},
- ],
- )],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(StringIO(patch))])
-
- patch = ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
- 'HTTP_X_RANGE': json.dumps([[1, 1]]),
- })])
- self.assertEqual([(
- {'packet': None}, [],
- )],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(StringIO(patch))])
-
- patch = ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
- 'HTTP_X_RANGE': json.dumps([[2, 2]]),
- })])
- self.assertEqual([(
- {'packet': None}, [
- {'resource': 'document'},
- {'guid': 'guid', 'patch': {
- 'guid': {'value': 'guid', 'mtime': 1},
- 'prop1': {'value': '1', 'mtime': 1},
- 'prop2': {'value': 2, 'mtime': 1},
- 'prop3': {'value': hashlib.sha1('333').hexdigest(), 'mtime': 1},
- }},
- {'content-type': '3/3', 'content-length': '3', 'x-seqno': '1'},
- {'commit': [[1, 2]]},
- ],
- )],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(StringIO(patch))])
-
- patch = ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
- 'HTTP_X_RANGE': json.dumps([[3, 3]]),
- })])
- self.assertEqual([(
- {'packet': None}, [],
- )],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(StringIO(patch))])
-
- patch = ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
- 'HTTP_X_RANGE': json.dumps([[4, 4]]),
- })])
- self.assertEqual([(
- {'packet': None}, [
- {'resource': 'document'},
- {'guid': 'guid', 'patch': {
- 'prop4': {'value': hashlib.sha1('4444').hexdigest(), 'mtime': 1},
- }},
- {'content-type': '4/4', 'content-length': '4', 'x-seqno': '3'},
- {'commit': [[3, 4]]},
- ],
- )],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(StringIO(patch))])
-
- def test_GroupedDiff(self):
-
- class Document(db.Resource):
-
- @db.stored_property()
- def prop(self, value):
- return value
-
- volume = db.Volume('.', [Document])
- router = Router(db.Routes(volume))
-
- volume['document'].create({'guid': '1', 'prop': 'q'})
- volume['document'].create({'guid': '2', 'prop': 'w'})
- volume['document'].create({'guid': '3', 'prop': 'w'})
- volume['document'].create({'guid': '4', 'prop': 'e'})
- volume['document'].create({'guid': '5', 'prop': 'e'})
- volume['document'].create({'guid': '6', 'prop': 'e'})
- self.utime('db/document', 0)
-
- self.assertEqual({
- '1': [[1, 1]],
- '2': [[2, 2]],
- '3': [[3, 3]],
- '4': [[4, 4]],
- '5': [[5, 5]],
- '6': [[6, 6]],
- },
- this.call(method='GET', path=['document'], cmd='diff'))
-
- self.assertEqual({
- 'q': [[1, 1]],
- 'w': [[2, 3]],
- 'e': [[4, 6]],
- },
- this.call(method='GET', path=['document'], cmd='diff', key='prop'))
-
- def test_GroupedDiffLimit(self):
- db_routes._GROUPED_DIFF_LIMIT = 2
-
- class Document(db.Resource):
- pass
-
- volume = db.Volume('.', [Document])
- router = Router(db.Routes(volume))
-
- volume['document'].create({'guid': '1'})
- volume['document'].create({'guid': '2'})
- volume['document'].create({'guid': '3'})
- volume['document'].create({'guid': '4'})
- volume['document'].create({'guid': '5'})
- self.utime('db/document', 0)
-
- self.assertEqual({
- '1': [[1, 1]],
- '2': [[2, 2]],
- },
- this.call(method='GET', path=['document'], cmd='diff'))
-
- self.assertEqual({
- '3': [[3, 3]],
- '4': [[4, 4]],
- },
- this.call(method='GET', path=['document'], cmd='diff', environ={'HTTP_X_RANGE': json.dumps([[3, None]])}))
-
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/db/index.py b/tests/units/db/index.py
index c0072c1..4d1f659 100755
--- a/tests/units/db/index.py
+++ b/tests/units/db/index.py
@@ -439,28 +439,6 @@ class IndexTest(tests.Test):
db.delete('1', lambda *args: deleted.append(args))
self.assertEqual(1, len(deleted))
- def test_mtime(self):
- # No index at start; checkpoint didn't happen
- db = Index({})
- self.assertEqual(0, db.mtime)
- db.store('1', {})
- db.commit()
- db.close()
-
- # Index exists at start; commit did happen
- db = Index({})
- self.assertNotEqual(0, db.mtime)
- db.close()
-
- # Index exists at start; mtime is outdated
- os.utime('index/mtime', (1, 1))
- db = Index({})
- self.assertEqual(1, db.mtime)
- db.store('3', {})
- db.commit()
- self.assertNotEqual(1, db.mtime)
- db.close()
-
def test_find_OrderByGUIDAllTime(self):
db = Index({'prop': Property('prop', 1, 'P')})
diff --git a/tests/units/db/resource.py b/tests/units/db/resource.py
index 4bf80b7..2e12a3c 100755
--- a/tests/units/db/resource.py
+++ b/tests/units/db/resource.py
@@ -17,7 +17,7 @@ import gobject
from __init__ import tests
-from sugar_network import db
+from sugar_network import db, toolkit
from sugar_network.db import storage, index
from sugar_network.db import directory as directory_
from sugar_network.db.directory import Directory
@@ -134,6 +134,23 @@ class ResourceTest(tests.Test):
self.assertEqual(0, directory.find(query='foo')[-1])
self.assertEqual(1, directory.find(query='bar')[-1])
+ def test_create(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno(), this.broadcast)
+ assert not directory.has_seqno
+ assert not directory.has_noseqno
+
+ guid = directory.create({'prop': '1'})
+ self.assertEqual(1, directory[guid]['seqno'])
+ assert directory.has_seqno
+ assert not directory.has_noseqno
+
def test_update(self):
class Document(db.Resource):
@@ -147,16 +164,24 @@ class ResourceTest(tests.Test):
return value
directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno(), this.broadcast)
+ assert not directory.has_seqno
+ assert not directory.has_noseqno
guid = directory.create({'prop_1': '1', 'prop_2': '2'})
self.assertEqual(
[('1', '2')],
[(i.prop_1, i.prop_2) for i in directory.find()[0]])
+ self.assertEqual(1, directory[guid]['seqno'])
+ assert directory.has_seqno
+ assert not directory.has_noseqno
directory.update(guid, {'prop_1': '3', 'prop_2': '4'})
self.assertEqual(
[('3', '4')],
[(i.prop_1, i.prop_2) for i in directory.find()[0]])
+ self.assertEqual(2, directory[guid]['seqno'])
+ assert directory.has_seqno
+ assert not directory.has_noseqno
def test_delete(self):
@@ -204,31 +229,31 @@ class ResourceTest(tests.Test):
('db/document/1/1/ctime', '{"value": 1}'),
('db/document/1/1/mtime', '{"value": 1}'),
('db/document/1/1/prop', '{"value": "prop-1"}'),
- ('db/document/1/1/seqno', '{"value": 0}'),
+ ('db/document/1/1/seqno', '{"value": 1}'),
('db/document/2/2/guid', '{"value": "2"}'),
('db/document/2/2/ctime', '{"value": 2}'),
('db/document/2/2/mtime', '{"value": 2}'),
('db/document/2/2/prop', '{"value": "prop-2"}'),
- ('db/document/2/2/seqno', '{"value": 0}'),
+ ('db/document/2/2/seqno', '{"value": 2}'),
)
directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno(), this.broadcast)
- self.assertEqual(0, directory._index.mtime)
for i in directory.populate():
pass
- self.assertNotEqual(0, directory._index.mtime)
doc = directory.get('1')
self.assertEqual(1, doc['ctime'])
self.assertEqual(1, doc['mtime'])
self.assertEqual('prop-1', doc['prop'])
+ self.assertEqual(1, directory['1']['seqno'])
doc = directory.get('2')
self.assertEqual(2, doc['ctime'])
self.assertEqual(2, doc['mtime'])
self.assertEqual('prop-2', doc['prop'])
+ self.assertEqual(2, directory['2']['seqno'])
self.assertEqual(
[
@@ -236,6 +261,37 @@ class ResourceTest(tests.Test):
(2, 2, 'prop-2'),
],
[(i.ctime, i.mtime, i.prop) for i in directory.find()[0]])
+ assert directory.has_seqno
+ assert not directory.has_noseqno
+
+ def test_populate_NoSeqnoStatus(self):
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ self.touch(
+ ('db/document/1/1/guid', '{"value": "1"}'),
+ ('db/document/1/1/ctime', '{"value": 1}'),
+ ('db/document/1/1/mtime', '{"value": 1}'),
+ ('db/document/1/1/prop', '{"value": "prop-1"}'),
+ )
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno(), this.broadcast)
+ assert not directory.has_seqno
+ assert not directory.has_noseqno
+
+ for i in directory.populate():
+ pass
+
+ doc = directory.get('1')
+ self.assertEqual(1, doc['ctime'])
+ self.assertEqual(1, doc['mtime'])
+ self.assertEqual('prop-1', doc['prop'])
+ self.assertEqual(0, directory['1']['seqno'])
+ assert not directory.has_seqno
+ assert directory.has_noseqno
def test_populate_IgnoreBadDocuments(self):
@@ -522,6 +578,38 @@ class ResourceTest(tests.Test):
directory[guid].diff([[1, None]], out_r))
self.assertEqual([[1, 3]], out_r)
+ def test_CommitLastSeqno(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno(), this.broadcast)
+ directory.create({'prop': '1'})
+ assert directory.has_seqno
+ directory.commit()
+ directory.close()
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno(), this.broadcast)
+ assert directory.has_seqno
+
+ def test_IterateDirectory(self):
+
+ class Document(db.Resource):
+ pass
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno(), this.broadcast)
+
+ guid1 = directory.create({})
+ guid2 = directory.create({})
+ guid3 = directory.create({})
+
+ self.assertEqual(
+ sorted([guid1, guid2, guid3]),
+ sorted([i.guid for i in directory]))
+
class _SessionSeqno(object):
diff --git a/tests/units/db/volume.py b/tests/units/db/volume.py
index a770a35..0ebd33f 100755
--- a/tests/units/db/volume.py
+++ b/tests/units/db/volume.py
@@ -33,747 +33,6 @@ class VolumeTest(tests.Test):
tests.Test.setUp(self, fork_num)
this.localcast = lambda x: x
- def test_diff(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1)
- def prop(self, value):
- return value
-
- volume = db.Volume('.', [Document])
-
- volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- self.utime('db/document/1/1', 1)
- volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
- self.utime('db/document/2/2', 2)
- volume['document'].create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
- self.utime('db/document/3/3', 3)
- volume.blobs.post('1')
- self.touch(('files/foo/2', '22'))
- self.touch(('files/bar/3', '333'))
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'prop': {'value': '1', 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'guid': '2', 'patch': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'prop': {'value': '2', 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- }},
- {'guid': '3', 'patch': {
- 'guid': {'value': '3', 'mtime': 3},
- 'ctime': {'value': 3, 'mtime': 3},
- 'prop': {'value': '3', 'mtime': 3},
- 'mtime': {'value': 3, 'mtime': 3},
- }},
- {'content-type': 'application/octet-stream', 'content-length': '1'},
- {'content-type': 'application/octet-stream', 'content-length': '2', 'path': 'foo/2'},
- {'commit': [[1, 5]]},
- ],
- [i.meta if isinstance(i, File) else i for i in volume.diff(r, files=['foo'])])
- self.assertEqual([[6, None]], r)
-
- r = [[2, 2]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '2', 'patch': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'prop': {'value': '2', 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- }},
- {'commit': [[2, 2]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([], r)
-
- r = [[6, None]]
- self.assertEqual([
- {'resource': 'document'},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[6, None]], r)
-
- volume['document'].update('2', {'prop': '22'})
-
- r = [[6, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '2', 'patch': {
- 'prop': {'value': '22', 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
- }},
- {'commit': [[6, 6]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[7, None]], r)
-
- volume.blobs.post('4444')
- self.touch(('files/foo/2', '2222'))
-
- r = [[7, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'content-type': 'application/octet-stream', 'content-length': '4'},
- {'content-type': 'application/octet-stream', 'content-length': '4', 'path': 'foo/2'},
- {'content-type': 'application/octet-stream', 'content-length': '3', 'path': 'bar/3'},
- {'commit': [[7, 9]]},
- ],
- [i.meta if isinstance(i, File) else i for i in volume.diff(r, files=['foo', 'bar'])])
- self.assertEqual([[10, None]], r)
-
- def test_diff_SyncUsecase(self):
-
- class Document(db.Resource):
-
- @db.stored_property()
- def prop1(self, value):
- return value
-
- @db.stored_property()
- def prop2(self, value):
- return value
-
- volume = db.Volume('.', [Document])
-
- volume['document'].create({'guid': 'guid', 'ctime': 1, 'mtime': 1, 'prop1': 1, 'prop2': 1})
- self.utime('db/document/gu/guid', 1)
-
- # Fresh update to pull
- volume['document'].update('guid', {'prop1': 2})
- self.utime('db/document/gu/guid/prop1', 2)
-
- # Recently pushed
- volume['document'].update('guid', {'prop2': 2})
- self.utime('db/document/gu/guid/prop2', 2)
-
- # Exclude `prop2` ack from the pull ranges
- r = [[2, None]]
- ranges.exclude(r, 3, 3)
- self.assertEqual([
- {'resource': 'document'},
- ],
- [dict(i) for i in volume.diff(r)])
- self.assertEqual([[2, 2], [4, None]], r)
-
- # Pass `prop2` ack in `exclude`
- r = [[2, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': 'guid', 'patch': {
- 'prop1': {'value': 2, 'mtime': 2},
- }},
- {'commit': [[2, 2]]},
- ],
- [dict(i) for i in volume.diff(r, [[3, 3]])])
- self.assertEqual([[4, None]], r)
-
- def test_diff_Partial(self):
- self.override(time, 'time', lambda: 0)
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1)
- def prop(self, value):
- return value
-
- volume = db.Volume('.', [Document])
- volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- self.utime('db/document/1/1', 1)
- volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
- self.utime('db/document/2/2', 2)
-
- r = [[1, None]]
- patch = volume.diff(r)
- self.assertEqual({'resource': 'document'}, next(patch))
- self.assertEqual('1', next(patch)['guid'])
- self.assertRaises(StopIteration, patch.throw, StopIteration)
- self.assertRaises(StopIteration, patch.next)
- self.assertEqual([[1, None]], r)
-
- r = [[1, None]]
- patch = volume.diff(r)
- self.assertEqual({'resource': 'document'}, next(patch))
- self.assertEqual('1', next(patch)['guid'])
- self.assertEqual('2', next(patch)['guid'])
- self.assertEqual({'commit': [[1, 1]]}, patch.throw(StopIteration()))
- self.assertRaises(StopIteration, patch.next)
- self.assertEqual([[2, None]], r)
-
- r = [[1, None]]
- patch = volume.diff(r)
- self.assertEqual({'resource': 'document'}, next(patch))
- self.assertEqual('1', next(patch)['guid'])
- self.assertEqual('2', next(patch)['guid'])
- self.assertEqual({'commit': [[1, 2]]}, next(patch))
- self.assertRaises(StopIteration, patch.next)
- self.assertEqual([[3, None]], r)
-
- def test_diff_IgnoreCalcProps(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1, acl=ACL.PUBLIC | ACL.CALC)
- def prop(self, value):
- return value
-
- volume = db.Volume('.', [Document])
- volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- self.utime('db/document/1/1', 1)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'commit': [[1, 1]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[2, None]], r)
-
- volume['document'].update('1', {'prop': '2'})
- self.assertEqual([
- {'resource': 'document'},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[2, None]], r)
-
- volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
- self.utime('db/document/2/2', 2)
-
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '2', 'patch': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- }},
- {'commit': [[2, 3]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[4, None]], r)
-
- def test_diff_IgnoreOneWayResources(self):
-
- class Document(db.Resource):
- one_way = True
-
- volume = db.Volume('.', [Document])
- volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
- self.utime('db/document/1/1', 1)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'commit': [[1, 1]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[2, None]], r)
-
- r = [[1, None]]
- self.assertEqual([
- ],
- [i for i in volume.diff(r, one_way=True)])
- self.assertEqual([[1, None]], r)
-
- def test_diff_TheSameInSeqForAllDocuments(self):
- self.override(time, 'time', lambda: 0)
-
- class Document1(db.Resource):
- pass
-
- class Document2(db.Resource):
- pass
-
- class Document3(db.Resource):
- pass
-
- volume = db.Volume('.', [Document1, Document2, Document3])
- volume['document1'].create({'guid': '3', 'ctime': 3, 'mtime': 3})
- self.utime('db/document1/3/3', 3)
- volume['document2'].create({'guid': '2', 'ctime': 2, 'mtime': 2})
- self.utime('db/document2/2/2', 2)
- volume['document3'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
- self.utime('db/document3/1/1', 1)
-
- r = [[1, None]]
- patch = volume.diff(r)
- self.assertEqual({'resource': 'document1'}, patch.send(None))
- self.assertEqual('3', patch.send(None)['guid'])
- self.assertEqual({'resource': 'document2'}, patch.send(None))
- self.assertEqual('2', patch.send(None)['guid'])
- self.assertEqual({'resource': 'document3'}, patch.send(None))
- self.assertEqual('1', patch.send(None)['guid'])
- self.assertEqual({'commit': [[1, 3]]}, patch.send(None))
- self.assertRaises(StopIteration, patch.next)
- self.assertEqual([[4, None]], r)
-
- def test_patch_New(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1)
- def prop(self, value):
- return value
-
- volume1 = db.Volume('1', [Document])
- volume1['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- self.utime('1/db/document/1/1', 1)
- volume1['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
- self.utime('1/db/document/2/2', 2)
- volume1['document'].create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
- self.utime('1/db/document/3/3', 3)
- volume1.blobs.post('1')
- self.touch(('1/files/foo/2', '22'))
- self.touch(('1/files/bar/3', '333'))
-
- volume2 = db.Volume('2', [Document])
- volume2.patch(volume1.diff([[1, None]], files=['foo']))
-
- self.assertEqual(
- sorted([
- (1, '1', 1, '1'),
- (2, '2', 2, '2'),
- (3, '3', 3, '3'),
- ]),
- sorted([(i['ctime'], i['prop'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]]))
-
- doc = volume2['document'].get('1')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(1, doc.meta('guid')['mtime'])
- self.assertEqual(1, doc.meta('ctime')['mtime'])
- self.assertEqual(1, doc.meta('prop')['mtime'])
- self.assertEqual(1, doc.meta('mtime')['mtime'])
-
- doc = volume2['document'].get('2')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual(2, doc.meta('mtime')['mtime'])
-
- doc = volume2['document'].get('3')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(3, doc.meta('guid')['mtime'])
- self.assertEqual(3, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('prop')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
-
- blob = volume2.blobs.get(hashlib.sha1('1').hexdigest())
- self.assertEqual({
- 'x-seqno': '1',
- 'content-length': '1',
- 'content-type': 'application/octet-stream',
- },
- blob.meta)
- self.assertEqual('1', file(blob.path).read())
-
- blob = volume2.blobs.get('foo/2')
- self.assertEqual({
- 'x-seqno': '1',
- 'content-length': '2',
- 'content-type': 'application/octet-stream',
- },
- blob.meta)
- self.assertEqual('22', file(blob.path).read())
-
- assert volume2.blobs.get('bar/3') is None
-
- def test_patch_Update(self):
-
- class Document(db.Resource):
-
- @db.stored_property(default='')
- def prop(self, value):
- return value
-
- volume1 = db.Volume('1', [Document])
- volume1['document'].create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
- volume1['document'].update('guid', {'prop': '1'})
- self.utime('1/db/document/gu/guid', 1)
-
- volume2 = db.Volume('2', [Document])
- volume2['document'].create({'guid': 'guid', 'ctime': 2, 'mtime': 2})
- volume2['document'].update('guid', {'prop': '2'})
- self.utime('2/db/document/gu/guid', 2)
-
- self.assertEqual(
- [(2, 2, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
- doc = volume2['document'].get('guid')
- self.assertEqual(2, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(2, doc.meta('mtime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual('2', doc.meta('prop')['value'])
-
- volume2.patch(volume1.diff([[1, None]]))
-
- self.assertEqual(
- [(2, 2, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
- doc = volume2['document'].get('guid')
- self.assertEqual(2, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(2, doc.meta('mtime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual('2', doc.meta('prop')['value'])
-
- os.utime('1/db/document/gu/guid/mtime', (3, 3))
- volume2.patch(volume1.diff([[1, None]]))
-
- self.assertEqual(
- [(2, 1, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
- doc = volume2['document'].get('guid')
- self.assertEqual(3, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual('2', doc.meta('prop')['value'])
-
- os.utime('1/db/document/gu/guid/prop', (4, 4))
- volume2.patch(volume1.diff([[1, None]]))
-
- self.assertEqual(
- [(2, 1, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
- doc = volume2['document'].get('guid')
- self.assertEqual(4, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
- self.assertEqual(4, doc.meta('prop')['mtime'])
- self.assertEqual('1', doc.meta('prop')['value'])
-
- def test_diff_AggProps(self):
-
- class Document(db.Resource):
-
- @db.stored_property(db.Aggregated, db.Property())
- def prop(self, value):
- return value
-
- volume = db.Volume('.', [Document])
- volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1, 'prop': {'1': {'prop': 1}}})
- self.utime('db/document/1/1', 1)
- volume['document'].create({'guid': '2', 'ctime': 2, 'mtime': 2, 'prop': {'2': {'prop': 2}}})
- self.utime('db/document/2/2', 2)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
- }},
- {'guid': '2', 'patch': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
- }},
- {'commit': [[1, 2]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[3, None]], r)
-
- r = [[1, 1]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
- }},
- {'commit': [[1, 1]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([], r)
-
- r = [[2, 2]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '2', 'patch': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
- }},
- {'commit': [[2, 2]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([], r)
-
- r = [[3, None]]
- self.assertEqual([
- {'resource': 'document'},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[3, None]], r)
-
- self.assertEqual({
- '1': {'seqno': 1, 'prop': 1},
- },
- volume['document'].get('1')['prop'])
- self.assertEqual({
- '2': {'seqno': 2, 'prop': 2},
- },
- volume['document'].get('2')['prop'])
-
- volume['document'].update('2', {'prop': {'2': {}, '3': {'prop': 3}}})
- r = [[3, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '2', 'patch': {
- 'prop': {'value': {'2': {}, '3': {'prop': 3}}, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
- }},
- {'commit': [[3, 3]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[4, None]], r)
-
- self.assertEqual({
- '2': {'seqno': 3},
- '3': {'seqno': 3, 'prop': 3},
- },
- volume['document'].get('2')['prop'])
-
- volume['document'].update('1', {'prop': {'1': {'foo': 'bar'}}})
- r = [[4, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'prop': {'value': {'1': {'foo': 'bar'}}, 'mtime': int(os.stat('db/document/1/1/prop').st_mtime)},
- }},
- {'commit': [[4, 4]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[5, None]], r)
-
- self.assertEqual({
- '1': {'seqno': 4, 'foo': 'bar'},
- },
- volume['document'].get('1')['prop'])
-
- volume['document'].update('2', {'prop': {'2': {'restore': True}}})
- r = [[5, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '2', 'patch': {
- 'prop': {'value': {'2': {'restore': True}}, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
- }},
- {'commit': [[5, 5]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[6, None]], r)
-
- self.assertEqual({
- '2': {'seqno': 5, 'restore': True},
- '3': {'seqno': 3, 'prop': 3},
- },
- volume['document'].get('2')['prop'])
-
- volume['document'].update('2', {'ctime': 0})
- r = [[6, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '2', 'patch': {
- 'ctime': {'value': 0, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
- }},
- {'commit': [[6, 6]]},
- ],
- [i for i in volume.diff(r)])
- self.assertEqual([[7, None]], r)
-
- self.assertEqual({
- '2': {'seqno': 5, 'restore': True},
- '3': {'seqno': 3, 'prop': 3},
- },
- volume['document'].get('2')['prop'])
-
- def test_patch_Aggprops(self):
-
- class Document(db.Resource):
-
- @db.stored_property(db.Aggregated, db.Property())
- def prop(self, value):
- return value
-
- volume = db.Volume('.', [Document])
-
- volume.patch([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'mtime': 1, 'value': '1'},
- 'ctime': {'mtime': 1, 'value': 1},
- 'mtime': {'mtime': 1, 'value': 1},
- 'prop': {'mtime': 1, 'value': {'1': {}}},
- }},
- ])
- self.assertEqual({
- '1': {'seqno': 1},
- },
- volume['document'].get('1')['prop'])
-
- volume.patch([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'prop': {'mtime': 1, 'value': {'1': {'probe': False}}},
- }},
- ])
- self.assertEqual({
- '1': {'seqno': 1},
- },
- volume['document'].get('1')['prop'])
-
- volume.patch([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'prop': {'mtime': 2, 'value': {'1': {'probe': True}}},
- }},
- ])
- self.assertEqual({
- '1': {'seqno': 2, 'probe': True},
- },
- volume['document'].get('1')['prop'])
-
- volume.patch([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'prop': {'mtime': 3, 'value': {'2': {'foo': 'bar'}}},
- }},
- ])
- self.assertEqual({
- '1': {'seqno': 2, 'probe': True},
- '2': {'seqno': 3, 'foo': 'bar'},
- },
- volume['document'].get('1')['prop'])
-
- volume.patch([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'prop': {'mtime': 4, 'value': {'2': {}, '3': {'foo': 'bar'}}},
- }},
- ])
- self.assertEqual({
- '1': {'seqno': 2, 'probe': True},
- '2': {'seqno': 4},
- '3': {'seqno': 4, 'foo': 'bar'},
- },
- volume['document'].get('1')['prop'])
-
- def test_patch_Ranges(self):
-
- class Document(db.Resource):
-
- @db.stored_property(default='')
- def prop(self, value):
- return value
-
- volume1 = db.Volume('db1', [Document])
- volume2 = db.Volume('db2', [Document])
-
- seqno, committed = volume2.patch(volume1.diff([[1, None]]))
- self.assertEqual([], committed)
- self.assertEqual(None, seqno)
-
- volume1['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
- seqno, committed = volume2.patch(volume1.diff([[1, None]]))
- self.assertEqual([[1, 1]], committed)
- self.assertEqual(1, seqno)
- seqno, committed = volume2.patch(volume1.diff([[1, None]]))
- self.assertEqual([[1, 1]], committed)
- self.assertEqual(None, seqno)
-
- volume1['document'].update('1', {'prop': '1'})
- seqno, committed = volume2.patch(volume1.diff([[1, None]]))
- self.assertEqual([[1, 2]], committed)
- self.assertEqual(2, seqno)
- seqno, committed = volume2.patch(volume1.diff([[1, None]]))
- self.assertEqual([[1, 2]], committed)
- self.assertEqual(None, seqno)
-
- volume3 = db.Volume('db3', [Document])
- seqno, committed = volume3.patch(volume1.diff([[1, None]]))
- self.assertEqual([[1, 2]], committed)
- self.assertEqual(1, seqno)
- seqno, committed = volume3.patch(volume1.diff([[1, None]]))
- self.assertEqual([[1, 2]], committed)
- self.assertEqual(None, seqno)
-
- def test_patch_CallSetters(self):
-
- class Document(db.Resource):
-
- @db.stored_property(db.Numeric)
- def prop(self, value):
- return value
-
- @prop.setter
- def prop(self, value):
- return value + 1
-
- directory = Directory('document', Document, IndexWriter, _SessionSeqno(), this.localcast)
-
- directory.patch('1', {
- 'guid': {'mtime': 1, 'value': '1'},
- 'ctime': {'mtime': 1, 'value': 1},
- 'mtime': {'mtime': 1, 'value': 1},
- 'prop': {'mtime': 1, 'value': 1},
- })
- self.assertEqual(2, directory.get('1')['prop'])
-
- def test_patch_MultipleCommits(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1)
- def prop(self, value):
- return value
-
- self.touch(('var/seqno', '100'))
- volume = db.Volume('.', [Document])
-
- def generator():
- for i in [
- {'resource': 'document'},
- {'commit': [[1, 1]]},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1.0},
- 'ctime': {'value': 2, 'mtime': 2.0},
- 'mtime': {'value': 3, 'mtime': 3.0},
- 'prop': {'value': '4', 'mtime': 4.0},
- }},
- {'commit': [[2, 3]]},
- ]:
- yield i
-
- patch = generator()
- self.assertEqual((101, [[1, 3]]), volume.patch(patch))
- assert volume['document']['1'].exists
-
def test_EditLocalProps(self):
class Document(db.Resource):
@@ -856,78 +115,6 @@ class VolumeTest(tests.Test):
{'seqno': 3, 'value': '4', 'mtime': 0},
directory['1'].meta('prop3'))
- def test_DiffLocalProps(self):
-
- class Document(db.Resource):
-
- @db.stored_property()
- def prop1(self, value):
- return value
-
- @db.stored_property(acl=ACL.PUBLIC | ACL.LOCAL)
- def prop2(self, value):
- return value
-
- @db.stored_property()
- def prop3(self, value):
- return value
-
- volume = db.Volume('.', [Document])
-
- volume['document'].create({'guid': '1', 'prop1': '1', 'prop2': '1', 'prop3': '1', 'ctime': 1, 'mtime': 1})
- self.utime('db/document/1/1', 0)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 0},
- 'ctime': {'value': 1, 'mtime': 0},
- 'prop1': {'value': '1', 'mtime': 0},
- 'prop3': {'value': '1', 'mtime': 0},
- 'mtime': {'value': 1, 'mtime': 0},
- }},
- {'commit': [[1, 1]]},
- ],
- [dict(i) for i in volume.diff(r, files=['foo'])])
- self.assertEqual([[2, None]], r)
-
- volume['document'].update('1', {'prop1': '2'})
- self.utime('db/document', 0)
-
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'prop1': {'value': '2', 'mtime': 0},
- }},
- {'commit': [[2, 2]]},
- ],
- [dict(i) for i in volume.diff(r, files=['foo'])])
- self.assertEqual([[3, None]], r)
-
- volume['document'].update('1', {'prop2': '3'})
- self.utime('db/document', 0)
-
- self.assertEqual([
- {'resource': 'document'},
- ],
- [dict(i) for i in volume.diff(r, files=['foo'])])
- self.assertEqual([[3, None]], r)
-
- volume['document'].update('1', {'prop1': '4', 'prop2': '4', 'prop3': '4'})
- self.utime('db/document', 0)
-
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'prop1': {'value': '4', 'mtime': 0},
- 'prop3': {'value': '4', 'mtime': 0},
- }},
- {'commit': [[3, 3]]},
- ],
- [dict(i) for i in volume.diff(r, files=['foo'])])
- self.assertEqual([[4, None]], r)
-
def test_DoNotShiftSeqnoForLocalProps(self):
class Document(db.Resource):
@@ -978,126 +165,27 @@ class VolumeTest(tests.Test):
{'value': '2', 'mtime': 0},
directory['1'].meta('prop2'))
- def test_patch_SeqnoLess(self):
+ def test_patch_CallSetters(self):
class Document(db.Resource):
- @db.indexed_property(slot=1)
+ @db.stored_property(db.Numeric)
def prop(self, value):
return value
- volume1 = db.Volume('1', [Document])
- volume1['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- self.utime('1/db/document/1/1', 1)
- volume1.blobs.post('1')
-
- volume2 = db.Volume('2', [Document])
- volume2.patch(volume1.diff([[1, None]]), shift_seqno=False)
-
- self.assertEqual(
- [(1, '1', 1, '1')],
- [(i['ctime'], i['prop'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
-
- doc = volume2['document'].get('1')
- self.assertEqual(0, doc.get('seqno'))
- assert 'seqno' not in doc.meta('guid')
- assert 'seqno' not in doc.meta('ctime')
- assert 'seqno' not in doc.meta('mtime')
- assert 'seqno' not in doc.meta('prop')
-
- blob = volume2.blobs.get(hashlib.sha1('1').hexdigest())
- self.assertEqual({
- 'x-seqno': '0',
- 'content-length': '1',
- 'content-type': 'application/octet-stream',
- },
- blob.meta)
- self.assertEqual('1', file(blob.path).read())
-
- def test_diff_IgnoreSeqnolessUpdates(self):
-
- class Document(db.Resource):
-
- @db.stored_property()
- def prop1(self, value):
- return value
-
- @db.stored_property(acl=ACL.PUBLIC | ACL.LOCAL)
- def prop2(self, value):
- return value
-
- volume = db.Volume('.', [Document])
-
- volume['document'].create({'guid': '1', 'prop1': '1', 'prop2': '1', 'ctime': 1, 'mtime': 1})
- self.utime('db/document/1/1', 1)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'prop1': {'value': '1', 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'commit': [[1, 1]]},
- ],
- [i.meta if isinstance(i, File) else i for i in volume.diff(r)])
- self.assertEqual([[2, None]], r)
-
- volume['document'].update('1', {'prop2': '2'})
- self.utime('db/document/1/1', 1)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'prop1': {'value': '1', 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'commit': [[1, 1]]},
- ],
- [i.meta if isinstance(i, File) else i for i in volume.diff(r)])
- self.assertEqual([[2, None]], r)
-
- volume['document'].update('1', {'prop1': '2'})
- self.utime('db/document/1/1', 1)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'prop1': {'value': '2', 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'commit': [[1, 2]]},
- ],
- [i.meta if isinstance(i, File) else i for i in volume.diff(r)])
- self.assertEqual([[3, None]], r)
-
- self.assertEqual(False, volume['document'].patch('1', {'prop1': {'mtime': 2, 'value': '3'}}, seqno=False))
- self.assertEqual('3', volume['document']['1']['prop1'])
- self.utime('db/document/1/1', 1)
-
- r = [[1, None]]
- self.assertEqual([
- {'resource': 'document'},
- {'guid': '1', 'patch': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'commit': [[1, 2]]},
- ],
- [i.meta if isinstance(i, File) else i for i in volume.diff(r)])
- self.assertEqual([[3, None]], r)
-
+ @prop.setter
+ def prop(self, value):
+ return value + 1
+ directory = Directory('document', Document, IndexWriter, _SessionSeqno(), this.localcast)
+ directory.patch('1', {
+ 'guid': {'mtime': 1, 'value': '1'},
+ 'ctime': {'mtime': 1, 'value': 1},
+ 'mtime': {'mtime': 1, 'value': 1},
+ 'prop': {'mtime': 1, 'value': 1},
+ })
+ self.assertEqual(2, directory.get('1')['prop'])
class _SessionSeqno(object):
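
The new test_patch_CallSetters above pins down the behaviour that Directory.patch now runs incoming values through property setters before storing them. A minimal sketch of that flow, assembled from the same calls the test makes (Directory, IndexWriter, _SessionSeqno and this.localcast are fixtures of the surrounding test module, so treat the wiring as an assumption rather than public API):

    class Document(db.Resource):

        @db.stored_property(db.Numeric)
        def prop(self, value):
            return value

        @prop.setter
        def prop(self, value):
            # setters are applied to patched values too, so a patched 1 is stored as 2
            return value + 1

    directory = Directory('document', Document, IndexWriter, _SessionSeqno(), this.localcast)
    directory.patch('1', {
        'guid': {'mtime': 1, 'value': '1'},
        'ctime': {'mtime': 1, 'value': 1},
        'mtime': {'mtime': 1, 'value': 1},
        'prop': {'mtime': 1, 'value': 1},
    })
    assert directory.get('1')['prop'] == 2
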
diff --git a/tests/units/model/context.py b/tests/units/model/context.py
index d2ba27e..15315ec 100755
--- a/tests/units/model/context.py
+++ b/tests/units/model/context.py
@@ -9,8 +9,7 @@ from __init__ import tests
from sugar_network import db
from sugar_network.db import blobs
-from sugar_network.client import IPCConnection, Connection, keyfile
-from sugar_network.client.auth import SugarCreds
+from sugar_network.client import IPCConnection, Connection
from sugar_network.model.context import Context
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit.router import Request
@@ -22,7 +21,7 @@ class ContextTest(tests.Test):
def test_PackageImages(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
guid = conn.post(['context'], {
'type': 'package',
@@ -37,7 +36,7 @@ class ContextTest(tests.Test):
def test_ContextImages(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
guid = conn.post(['context'], {
'type': 'activity',
@@ -72,126 +71,6 @@ class ContextTest(tests.Test):
assert conn.request('GET', ['context', guid, 'icon']).content == svg_to_png(svg, 55).getvalue()
assert conn.request('GET', ['context', guid, 'logo']).content == svg_to_png(svg, 140).getvalue()
- def test_Releases(self):
- volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- activity_info1 = '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ])
- bundle1 = self.zips(('topdir/activity/activity.info', activity_info1))
- release1 = conn.upload(['context', context, 'releases'], StringIO(bundle1))
- assert release1 == str(hashlib.sha1(bundle1).hexdigest())
- self.assertEqual({
- release1: {
- 'seqno': 9,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {
- 'license': ['Public Domain'],
- 'announce': next(volume['post'].find(query='title:1')[0]).guid,
- 'version': [[1], 0],
- 'requires': {},
- 'commands': {'activity': {'exec': 'true'}},
- 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle1).hexdigest()), 'unpack_size': len(activity_info1)}},
- 'stability': 'stable',
- },
- },
- }, volume['context'][context]['releases'])
- assert volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
-
- activity_info2 = '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- ])
- bundle2 = self.zips(('topdir/activity/activity.info', activity_info2))
- release2 = conn.upload(['context', context, 'releases'], StringIO(bundle2))
- assert release2 == str(hashlib.sha1(bundle2).hexdigest())
- self.assertEqual({
- release1: {
- 'seqno': 9,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {
- 'license': ['Public Domain'],
- 'announce': next(volume['post'].find(query='title:1')[0]).guid,
- 'version': [[1], 0],
- 'requires': {},
- 'commands': {'activity': {'exec': 'true'}},
- 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle1).hexdigest()), 'unpack_size': len(activity_info1)}},
- 'stability': 'stable',
- },
- },
- release2: {
- 'seqno': 12,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {
- 'license': ['Public Domain'],
- 'announce': next(volume['post'].find(query='title:2')[0]).guid,
- 'version': [[2], 0],
- 'requires': {},
- 'commands': {'activity': {'exec': 'true'}},
- 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle2).hexdigest()), 'unpack_size': len(activity_info2)}},
- 'stability': 'stable',
- },
- },
- }, volume['context'][context]['releases'])
- assert volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
- assert volume.blobs.get(str(hashlib.sha1(bundle2).hexdigest())).exists
-
- conn.delete(['context', context, 'releases', release1])
- self.assertEqual({
- release1: {
- 'seqno': 14,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- },
- release2: {
- 'seqno': 12,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {
- 'license': ['Public Domain'],
- 'announce': next(volume['post'].find(query='title:2')[0]).guid,
- 'version': [[2], 0],
- 'requires': {},
- 'commands': {'activity': {'exec': 'true'}},
- 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle2).hexdigest()), 'unpack_size': len(activity_info2)}},
- 'stability': 'stable',
- },
- },
- }, volume['context'][context]['releases'])
- assert not volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
- assert volume.blobs.get(str(hashlib.sha1(bundle2).hexdigest())).exists
-
- conn.delete(['context', context, 'releases', release2])
- self.assertEqual({
- release1: {
- 'seqno': 14,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- },
- release2: {
- 'seqno': 16,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- },
- }, volume['context'][context]['releases'])
- assert not volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
- assert not volume.blobs.get(str(hashlib.sha1(bundle2).hexdigest())).exists
-
if __name__ == '__main__':
tests.main()
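
The context tests above now build their connection without explicit Sugar credentials: the SugarCreds/keyfile import is dropped and Connection() is called bare. Roughly, assuming the test harness serves the master on the default api url:

    # previously (removed above)
    conn = Connection(creds=SugarCreds(keyfile.value))

    # now the tests rely on Connection() defaults and post requests as before
    conn = Connection()
    guid = conn.post(['context'], {
        'type': 'activity',
        'title': 'Activity',
        'summary': 'summary',
        'description': 'description',
    })
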
diff --git a/tests/units/model/model.py b/tests/units/model/model.py
index e28dd51..c333653 100755
--- a/tests/units/model/model.py
+++ b/tests/units/model/model.py
@@ -8,16 +8,10 @@ import mimetypes
from __init__ import tests
from sugar_network import db
-from sugar_network.model import load_bundle
from sugar_network.model.post import Post
from sugar_network.model.context import Context
from sugar_network.node.model import User
-from sugar_network.node.auth import Principal as _Principal
-from sugar_network.client import IPCConnection, Connection, keyfile
-from sugar_network.client.auth import SugarCreds
-from sugar_network.toolkit.router import Request
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import i18n, http, coroutine, enforce
class ModelTest(tests.Test):
@@ -42,520 +36,5 @@ class ModelTest(tests.Test):
['3', '5', '2', '4', '1'],
[i.guid for i in directory.find(order_by='-rating')[0]])
- def test_load_bundle_Activity(self):
- volume = self.start_master()
- blobs = volume.blobs
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle_id = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
- activity_info = '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % bundle_id,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = developer',
- 'requires = sugar>=0.88; dep'
- ])
- changelog = "LOG"
- bundle = self.zips(
- ('topdir/activity/activity.info', activity_info),
- ('topdir/CHANGELOG', changelog),
- )
- blob = blobs.post(bundle)
-
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id])
- context, release = load_bundle(blob, bundle_id)
-
- self.assertEqual({
- 'content-type': 'application/vnd.olpc-sugar',
- 'content-disposition': 'attachment; filename="Activity-1%s"' % (mimetypes.guess_extension('application/vnd.olpc-sugar') or ''),
- 'content-length': str(len(bundle)),
- 'x-seqno': '6',
- }, blobs.get(blob.digest).meta)
- self.assertEqual(bundle_id, context)
- self.assertEqual([[1], 0], release['version'])
- self.assertEqual('developer', release['stability'])
- self.assertEqual(['Public Domain'], release['license'])
- self.assertEqual('developer', release['stability'])
- self.assertEqual({
- 'dep': [],
- 'sugar': [([1, 0], [[0, 88], 0])],
- },
- release['requires'])
- self.assertEqual({
- '*-*': {
- 'blob': blob.digest,
- 'unpack_size': len(activity_info) + len(changelog),
- },
- },
- release['bundles'])
-
- post = volume['post'][release['announce']]
- assert tests.UID in post['author']
- self.assertEqual('notification', post['type'])
- self.assertEqual({
- 'en': 'Activity 1 release',
- 'es': 'Activity 1 release',
- 'fr': 'Activity 1 release',
- }, post['title'])
- self.assertEqual({
- 'en-us': 'LOG',
- }, post['message'])
-
- def test_load_bundle_NonActivity(self):
- volume = self.start_master()
- blobs = volume.blobs
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle_id = conn.post(['context'], {
- 'type': 'book',
- 'title': 'NonActivity',
- 'summary': 'summary',
- 'description': 'description',
- })
- bundle = 'non-activity'
- blob = blobs.post(bundle)
- blob.meta['content-type'] = 'application/pdf'
-
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id], version='2', license='GPL')
- context, release = load_bundle(blob, bundle_id)
-
- self.assertEqual({
- 'content-type': 'application/pdf',
- 'content-disposition': 'attachment; filename="NonActivity-2.pdf"',
- 'content-length': str(len(bundle)),
- 'x-seqno': '6',
- }, blobs.get(blob.digest).meta)
- self.assertEqual(bundle_id, context)
- self.assertEqual([[2], 0], release['version'])
- self.assertEqual(['GPL'], release['license'])
-
- post = volume['post'][release['announce']]
- assert tests.UID in post['author']
- self.assertEqual('notification', post['type'])
- self.assertEqual({
- 'en': 'NonActivity 2 release',
- 'es': 'NonActivity 2 release',
- 'fr': 'NonActivity 2 release',
- }, post['title'])
- self.assertEqual({
- 'en-us': '',
- }, post['message'])
-
- def test_load_bundle_ReuseActivityLicense(self):
- volume = self.start_master()
- blobs = volume.blobs
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle_id = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- activity_info_wo_license = '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % bundle_id,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- ])
- bundle = self.zips(('topdir/activity/activity.info', activity_info_wo_license))
- blob_wo_license = blobs.post(bundle)
- self.assertRaises(http.BadRequest, load_bundle, blob_wo_license, bundle_id)
-
- volume['context'].update(bundle_id, {'releases': {
- 'new': {'value': {'release': 2, 'license': ['New']}},
- }})
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id])
- context, release = load_bundle(blob_wo_license, bundle_id)
- self.assertEqual(['New'], release['license'])
-
- volume['context'].update(bundle_id, {'releases': {
- 'new': {'value': {'release': 2, 'license': ['New']}},
- 'old': {'value': {'release': 1, 'license': ['Old']}},
- }})
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id])
- context, release = load_bundle(blob_wo_license, bundle_id)
- self.assertEqual(['New'], release['license'])
-
- volume['context'].update(bundle_id, {'releases': {
- 'new': {'value': {'release': 2, 'license': ['New']}},
- 'old': {'value': {'release': 1, 'license': ['Old']}},
- 'newest': {'value': {'release': 3, 'license': ['Newest']}},
- }})
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id])
- context, release = load_bundle(blob_wo_license, bundle_id)
- self.assertEqual(['Newest'], release['license'])
-
- def test_load_bundle_ReuseNonActivityLicense(self):
- volume = self.start_master()
- blobs = volume.blobs
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle_id = conn.post(['context'], {
- 'type': 'book',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- blob = blobs.post('non-activity')
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id], version='1')
- self.assertRaises(http.BadRequest, load_bundle, blob, bundle_id)
-
- volume['context'].update(bundle_id, {'releases': {
- 'new': {'value': {'release': 2, 'license': ['New']}},
- }})
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id], version='1')
- context, release = load_bundle(blob, bundle_id)
- self.assertEqual(['New'], release['license'])
-
- volume['context'].update(bundle_id, {'releases': {
- 'new': {'value': {'release': 2, 'license': ['New']}},
- 'old': {'value': {'release': 1, 'license': ['Old']}},
- }})
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id], version='1')
- context, release = load_bundle(blob, bundle_id)
- self.assertEqual(['New'], release['license'])
-
- volume['context'].update(bundle_id, {'releases': {
- 'new': {'value': {'release': 2, 'license': ['New']}},
- 'old': {'value': {'release': 1, 'license': ['Old']}},
- 'newest': {'value': {'release': 3, 'license': ['Newest']}},
- }})
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id], version='1')
- context, release = load_bundle(blob, bundle_id)
- self.assertEqual(['Newest'], release['license'])
-
- def test_load_bundle_WrontContextType(self):
- volume = self.start_master()
- blobs = volume.blobs
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle_id = conn.post(['context'], {
- 'type': 'group',
- 'title': 'NonActivity',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- blob = blobs.post('non-activity')
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id], version='2', license='GPL')
- self.assertRaises(http.BadRequest, load_bundle, blob, bundle_id)
-
- activity_info = '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % bundle_id,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = developer',
- 'requires = sugar>=0.88; dep'
- ])
- changelog = "LOG"
- bundle = self.zips(
- ('topdir/activity/activity.info', activity_info),
- ('topdir/CHANGELOG', changelog),
- )
- blob = blobs.post(bundle)
- self.assertRaises(http.BadRequest, load_bundle, blob, bundle_id)
-
- def test_load_bundle_MissedContext(self):
- volume = self.start_master()
- blobs = volume.blobs
- volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = developer',
- 'requires = sugar>=0.88; dep'
- ])))
- blob = blobs.post(bundle)
-
- this.principal = Principal(tests.UID)
- this.request = Request()
- self.assertRaises(http.NotFound, load_bundle, blob, initial=False)
-
- def test_load_bundle_CreateContext(self):
- volume = self.start_master()
- blobs = volume.blobs
- volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle = self.zips(
- ('ImageViewer.activity/activity/activity.info', '\n'.join([
- '[Activity]',
- 'bundle_id = org.laptop.ImageViewerActivity',
- 'name = Image Viewer',
- 'summary = The Image Viewer activity is a simple and fast image viewer tool',
- 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.',
- 'homepage = http://wiki.sugarlabs.org/go/Activities/Image_Viewer',
- 'activity_version = 1',
- 'license = GPLv2+',
- 'icon = activity-imageviewer',
- 'exec = true',
- 'mime_types = image/bmp;image/gif',
- ])),
- ('ImageViewer.activity/activity/activity-imageviewer.svg', ''),
- )
- blob = blobs.post(bundle)
-
- this.principal = Principal(tests.UID)
- this.request = Request()
- context, release = load_bundle(blob, initial=True)
- self.assertEqual('org.laptop.ImageViewerActivity', context)
-
- context = volume['context'].get('org.laptop.ImageViewerActivity')
- self.assertEqual({'en': 'Image Viewer'}, context['title'])
- self.assertEqual({'en': 'The Image Viewer activity is a simple and fast image viewer tool'}, context['summary'])
- self.assertEqual({'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.'}, context['description'])
- self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage'])
- self.assertEqual(['image/bmp', 'image/gif'], context['mime_types'])
- assert context['ctime'] > 0
- assert context['mtime'] > 0
- self.assertEqual({tests.UID: {'role': 3, 'name': 'user', 'order': 0}}, context['author'])
-
- post = volume['post'][release['announce']]
- assert tests.UID in post['author']
- self.assertEqual('notification', post['type'])
- self.assertEqual({
- 'en': 'Image Viewer 1 release',
- 'es': 'Image Viewer 1 release',
- 'fr': 'Image Viewer 1 release',
- }, post['title'])
-
- def test_load_bundle_UpdateContext(self):
- volume = self.start_master()
- blobs = volume.blobs
- conn = Connection(creds=SugarCreds(keyfile.value))
- self.touch(('master/etc/authorization.conf', [
- '[permissions]',
- '%s = admin' % tests.UID,
- ]))
-
- conn.post(['context'], {
- 'guid': 'org.laptop.ImageViewerActivity',
- 'type': 'activity',
- 'title': {'en': ''},
- 'summary': {'en': ''},
- 'description': {'en': ''},
- })
- svg = '\n'.join([
- '<?xml version="1.0" encoding="UTF-8"?>',
- '<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" [',
- ' <!ENTITY fill_color "#123456">',
- ' <!ENTITY stroke_color "#123456">',
- ']>',
- '<svg xmlns="http://www.w3.org/2000/svg" width="50" height="50">',
- ' <rect x="3" y="7" width="44" height="36" style="fill:&fill_color;;stroke:&stroke_color;;stroke-width:3"/>',
- ' <polyline points="15,7 25,1 35,7" style="fill:none;;stroke:&stroke_color;;stroke-width:1.25"/>',
- ' <circle cx="14" cy="19" r="4.5" style="fill:&stroke_color;;stroke:&stroke_color;;stroke-width:1.5"/>',
- ' <polyline points="3,36 16,32 26,35" style="fill:none;;stroke:&stroke_color;;stroke-width:2.5"/>',
- ' <polyline points="15,43 37,28 47,34 47,43" style="fill:&stroke_color;;stroke:&stroke_color;;stroke-width:3"/>',
- ' <polyline points="22,41.5 35,30 27,41.5" style="fill:&fill_color;;stroke:none;;stroke-width:0"/>',
- ' <polyline points="26,23 28,25 30,23" style="fill:none;;stroke:&stroke_color;;stroke-width:.9"/>',
- ' <polyline points="31.2,20 33.5,17.7 35.8,20" style="fill:none;;stroke:&stroke_color;;stroke-width:1"/>',
- ' <polyline points="36,13 38.5,15.5 41,13" style="fill:none;;stroke:&stroke_color;;stroke-width:1"/>',
- '</svg>',
- ])
- bundle = self.zips(
- ('ImageViewer.activity/activity/activity.info', '\n'.join([
- '[Activity]',
- 'bundle_id = org.laptop.ImageViewerActivity',
- 'name = Image Viewer',
- 'summary = The Image Viewer activity is a simple and fast image viewer tool',
- 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.',
- 'homepage = http://wiki.sugarlabs.org/go/Activities/Image_Viewer',
- 'activity_version = 22',
- 'license = GPLv2+',
- 'icon = activity-imageviewer',
- 'exec = true',
- 'mime_types = image/bmp;image/gif',
- ])),
- ('ImageViewer.activity/locale/ru/LC_MESSAGES/org.laptop.ImageViewerActivity.mo',
- base64.b64decode('3hIElQAAAAAMAAAAHAAAAHwAAAARAAAA3AAAAAAAAAAgAQAADwAAACEBAAAOAAAAMQEAAA0AAABAAQAACgAAAE4BAAAMAAAAWQEAAA0AAABmAQAAJwAAAHQBAAAUAAAAnAEAABAAAACxAQAABwAAAMIBAAAIAAAAygEAANEBAADTAQAAIQAAAKUDAAATAAAAxwMAABwAAADbAwAAFwAAAPgDAAAhAAAAEAQAAB0AAAAyBAAAQAAAAFAEAAA9AAAAkQQAADUAAADPBAAAFAAAAAUFAAAQAAAAGgUAAAEAAAACAAAABwAAAAAAAAADAAAAAAAAAAwAAAAJAAAAAAAAAAoAAAAEAAAAAAAAAAAAAAALAAAABgAAAAgAAAAFAAAAAENob29zZSBkb2N1bWVudABEb3dubG9hZGluZy4uLgBGaXQgdG8gd2luZG93AEZ1bGxzY3JlZW4ASW1hZ2UgVmlld2VyAE9yaWdpbmFsIHNpemUAUmV0cmlldmluZyBzaGFyZWQgaW1hZ2UsIHBsZWFzZSB3YWl0Li4uAFJvdGF0ZSBhbnRpY2xvY2t3aXNlAFJvdGF0ZSBjbG9ja3dpc2UAWm9vbSBpbgBab29tIG91dABQcm9qZWN0LUlkLVZlcnNpb246IFBBQ0tBR0UgVkVSU0lPTgpSZXBvcnQtTXNnaWQtQnVncy1UbzogClBPVC1DcmVhdGlvbi1EYXRlOiAyMDEyLTA5LTI3IDE0OjU3LTA0MDAKUE8tUmV2aXNpb24tRGF0ZTogMjAxMC0wOS0yMiAxMzo1MCswMjAwCkxhc3QtVHJhbnNsYXRvcjoga3JvbTlyYSA8a3JvbTlyYUBnbWFpbC5jb20+Ckxhbmd1YWdlLVRlYW06IExBTkdVQUdFIDxMTEBsaS5vcmc+Ckxhbmd1YWdlOiAKTUlNRS1WZXJzaW9uOiAxLjAKQ29udGVudC1UeXBlOiB0ZXh0L3BsYWluOyBjaGFyc2V0PVVURi04CkNvbnRlbnQtVHJhbnNmZXItRW5jb2Rpbmc6IDhiaXQKUGx1cmFsLUZvcm1zOiBucGx1cmFscz0zOyBwbHVyYWw9KG4lMTA9PTEgJiYgbiUxMDAhPTExID8gMCA6IG4lMTA+PTIgJiYgbiUxMDw9NCAmJiAobiUxMDA8MTAgfHwgbiUxMDA+PTIwKSA/IDEgOiAyKTsKWC1HZW5lcmF0b3I6IFBvb3RsZSAyLjAuMwoA0JLRi9Cx0LXRgNC40YLQtSDQtNC+0LrRg9C80LXQvdGCANCX0LDQs9GA0YPQt9C60LAuLi4A0KPQvNC10YHRgtC40YLRjCDQsiDQvtC60L3QtQDQn9C+0LvQvdGL0Lkg0Y3QutGA0LDQvQDQn9GA0L7RgdC80L7RgtGAINC60LDRgNGC0LjQvdC+0LoA0JjRgdGC0LjQvdC90YvQuSDRgNCw0LfQvNC10YAA0J/QvtC70YPRh9C10L3QuNC1INC40LfQvtCx0YDQsNC20LXQvdC40LksINC/0L7QtNC+0LbQtNC40YLQtS4uLgDQn9C+0LLQtdGA0L3Rg9GC0Ywg0L/RgNC+0YLQuNCyINGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC4ANCf0L7QstC10YDQvdGD0YLRjCDQv9C+INGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC1ANCf0YDQuNCx0LvQuNC30LjRgtGMANCe0YLQtNCw0LvQuNGC0YwA')),
- ('ImageViewer.activity/activity/activity-imageviewer.svg', svg),
- )
-
- blob = blobs.post(bundle)
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', 'org.laptop.ImageViewerActivity'])
- context, release = load_bundle(blob, initial=True)
-
- context = volume['context'].get('org.laptop.ImageViewerActivity')
- self.assertEqual({
- 'en': 'Image Viewer',
- 'ru': u'Просмотр картинок',
- },
- context['title'])
- self.assertEqual({
- 'en': 'The Image Viewer activity is a simple and fast image viewer tool',
- },
- context['summary'])
- self.assertEqual({
- 'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.',
- },
- context['description'])
- self.assertEqual(svg, file(blobs.get(context['artefact_icon']).path).read())
- assert context['icon'] != 'missing.png'
- assert context['logo'] != 'missing-logo.png'
- self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage'])
- self.assertEqual(['image/bmp', 'image/gif'], context['mime_types'])
-
- def test_load_bundle_3rdPartyRelease(self):
- i18n._default_langs = ['en']
- volume = self.start_master()
- blobs = volume.blobs
- volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2})
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle_id = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity2',
- 'bundle_id = %s' % bundle_id,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = developer',
- ])))
- blob = blobs.post(bundle)
- this.principal = Principal(tests.UID2)
- this.request = Request(method='POST', path=['context', bundle_id])
- context, release = load_bundle(blob, bundle_id)
-
- assert tests.UID in volume['context'][bundle_id]['author']
- assert tests.UID2 not in volume['context'][bundle_id]['author']
- self.assertEqual({'en': 'Activity'}, volume['context'][bundle_id]['title'])
-
- post = volume['post'][release['announce']]
- assert tests.UID not in post['author']
- assert tests.UID2 in post['author']
- self.assertEqual('notification', post['type'])
- self.assertEqual({
- 'en': 'Activity 1 third-party release',
- 'es': 'Activity 1 third-party release',
- 'fr': 'Activity 1 third-party release',
- }, post['title'])
-
- blobs.delete(blob.digest)
- blob = blobs.post(bundle)
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id])
- context, release = load_bundle(blob, bundle_id)
-
- assert tests.UID in volume['context'][bundle_id]['author']
- assert tests.UID2 not in volume['context'][bundle_id]['author']
- self.assertEqual({'en': 'Activity2'}, volume['context'][bundle_id]['title'])
-
- post = volume['post'][release['announce']]
- assert tests.UID in post['author']
- assert tests.UID2 not in post['author']
- self.assertEqual('notification', post['type'])
- self.assertEqual({
- 'en': 'Activity2 1 release',
- 'es': 'Activity2 1 release',
- 'fr': 'Activity2 1 release',
- }, post['title'])
-
- def test_load_bundle_PopulateRequires(self):
- volume = self.start_master()
- blobs = volume.blobs
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- bundle_id = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
- bundle = self.zips(
- ('ImageViewer.activity/activity/activity.info', '\n'.join([
- '[Activity]',
- 'bundle_id = %s' % bundle_id,
- 'name = Image Viewer',
- 'activity_version = 22',
- 'license = GPLv2+',
- 'icon = activity-imageviewer',
- 'exec = true',
- 'requires = dep1, dep2<10, dep3<=20, dep4>30, dep5>=40, dep6>5<7, dep7>=1<=3',
- ])),
- ('ImageViewer.activity/activity/activity-imageviewer.svg', ''),
- )
- blob = blobs.post(bundle)
- this.principal = Principal(tests.UID)
- this.request = Request(method='POST', path=['context', bundle_id])
- context, release = load_bundle(blob, bundle_id)
-
- self.assertEqual({
- 'dep5': [([1, 0], [[40], 0])],
- 'dep4': [([1], [[30], 0])],
- 'dep7': [([1, 0], [[1], 0]), ([-1, 0], [[3], 0])],
- 'dep6': [([1], [[5], 0]), ([-1], [[7], 0])],
- 'dep1': [],
- 'dep3': [([-1, 0], [[20], 0])],
- 'dep2': [([-1], [[10], 0])],
- },
- release['requires'])
-
- def test_load_bundle_IgnoreNotSupportedContextTypes(self):
- volume = self.start_master([User, Context])
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- context = conn.post(['context'], {
- 'type': 'package',
- 'title': '',
- 'summary': '',
- 'description': '',
- })
- this.request = Request(method='POST', path=['context', context])
- aggid = conn.post(['context', context, 'releases'], {})
- self.assertEqual({
- aggid: {'seqno': 3, 'value': {}, 'author': {tests.UID: {'role': 3, 'name': 'test', 'order': 0}}},
- }, volume['context'][context]['releases'])
-
-
-class Principal(_Principal):
-
- admin = True
-
-
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/model/routes.py b/tests/units/model/routes.py
index 06a3dc3..0489844 100755
--- a/tests/units/model/routes.py
+++ b/tests/units/model/routes.py
@@ -10,7 +10,7 @@ from os.path import exists
from __init__ import tests, src_root
from sugar_network import db, model
-from sugar_network.toolkit.router import Router, Request
+from sugar_network.toolkit.router import Router, Request, Response
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import coroutine
@@ -28,6 +28,7 @@ class RoutesTest(tests.Test):
routes = model.FrontRoutes()
volume = db.Volume('db', [Document])
events = []
+ this.response = Response()
def read_events():
for event in routes.subscribe(event='!commit'):
@@ -55,6 +56,7 @@ class RoutesTest(tests.Test):
def test_SubscribeWithPong(self):
routes = model.FrontRoutes()
+ this.response = Response()
for event in routes.subscribe():
break
self.assertEqual({'event': 'pong'}, event)
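
Both subscription tests now assign this.response before iterating FrontRoutes.subscribe(), which indicates the front routes need the per-request Response object to be in place when a subscription starts. The new calling pattern, following the test (model.FrontRoutes and Response come from the imports shown above):

    routes = model.FrontRoutes()
    this.response = Response()      # required before subscribing
    for event in routes.subscribe():
        break
    assert event == {'event': 'pong'}   # the stream still opens with the keep-alive pong
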
diff --git a/tests/units/node/__main__.py b/tests/units/node/__main__.py
index 5fba512..27f0bab 100644
--- a/tests/units/node/__main__.py
+++ b/tests/units/node/__main__.py
@@ -3,8 +3,8 @@
from __init__ import tests
from obs import *
-from model import *
-from node import *
+from node_model import *
+from node_routes import *
from master import *
from slave import *
diff --git a/tests/units/node/master.py b/tests/units/node/master.py
index ff5bc5d..fa879ef 100755
--- a/tests/units/node/master.py
+++ b/tests/units/node/master.py
@@ -16,15 +16,14 @@ import rrdtool
from __init__ import tests
-from sugar_network.client import Connection, keyfile, api
+from sugar_network.client import Connection as Connection_, api
from sugar_network.db.directory import Directory
from sugar_network import db, node, toolkit
-from sugar_network.client.auth import SugarCreds
from sugar_network.node.master import MasterRoutes
from sugar_network.node.model import User
from sugar_network.db.volume import Volume
from sugar_network.toolkit.router import Response, File
-from sugar_network.toolkit import coroutine, parcel, http
+from sugar_network.toolkit import coroutine, packets, http
class MasterTest(tests.Test):
@@ -34,7 +33,7 @@ class MasterTest(tests.Test):
def next_uuid():
self.uuid += 1
- return self.uuid
+ return str(self.uuid)
self.uuid = 0
self.override(toolkit, 'uuid', next_uuid)
@@ -45,11 +44,11 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
self.touch(('blob1', '1'))
self.touch(('blob2', '2'))
- patch = ''.join(parcel.encode([
+ patch = ''.join(packets.encode([
('push', None, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -63,7 +62,7 @@ class MasterTest(tests.Test):
]),
], header={'to': self.node_routes.guid, 'from': 'slave'}))
response = conn.request('POST', [], patch, params={'cmd': 'push'})
- reply = parcel.decode(response.raw)
+ reply = iter(packets.decode(response.raw))
assert volume['document']['1'].exists
blob = volume.blobs.get(hashlib.sha1('1').hexdigest())
@@ -72,7 +71,7 @@ class MasterTest(tests.Test):
self.assertEqual('2', ''.join(blob.iter_content()))
self.assertEqual({
- 'packet': 'ack',
+ 'segment': 'ack',
'from': self.node_routes.guid,
'to': 'slave',
'ack': [[1, 1]],
@@ -95,9 +94,9 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
- patch = ''.join(parcel.encode([
+ patch = ''.join(packets.encode([
('push', None, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -110,7 +109,7 @@ class MasterTest(tests.Test):
], header={'from': 'slave'}))
self.assertRaises(http.BadRequest, conn.request, 'POST', [], patch, params={'cmd': 'push'})
- patch = ''.join(parcel.encode([
+ patch = ''.join(packets.encode([
('push', None, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -123,7 +122,7 @@ class MasterTest(tests.Test):
], header={'to': 'fake', 'from': 'slave'}))
self.assertRaises(http.BadRequest, conn.request, 'POST', [], patch, params={'cmd': 'push'})
- patch = ''.join(parcel.encode([
+ patch = ''.join(packets.encode([
('push', None, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -141,11 +140,11 @@ class MasterTest(tests.Test):
class Document(db.Resource):
pass
- volume = self.start_master([Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ volume = self.start_master([Document, User])
+ conn = Connection()
self.touch(('blob', 'blob'))
- patch = ''.join(parcel.encode([
+ patch = ''.join(packets.encode([
('push', None, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -164,7 +163,7 @@ class MasterTest(tests.Test):
},
})),
})
- reply = parcel.decode(response.raw)
+ reply = iter(packets.decode(response.raw))
assert volume['document']['1'].exists
blob_digest = hashlib.sha1('blob').hexdigest()
@@ -172,7 +171,7 @@ class MasterTest(tests.Test):
self.assertEqual('blob', ''.join(blob.iter_content()))
self.assertEqual({
- 'packet': 'ack',
+ 'segment': 'ack',
'from': self.node_routes.guid,
'to': 'slave',
'ack': [[1, 1]],
@@ -194,24 +193,24 @@ class MasterTest(tests.Test):
class Document(db.Resource):
pass
- volume = self.start_master([Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ volume = self.start_master([Document, User])
+ conn = Connection()
self.touch(('blob', 'blob'))
- patch = ''.join(parcel.encode([
+ patch = ''.join(packets.encode([
('pull', {'ranges': [[1, None]]}, []),
('request', {'for': 1}, []),
], header={'to': self.node_routes.guid, 'from': 'slave'}))
response = conn.request('POST', [], patch, params={'cmd': 'push'})
- reply = parcel.decode(response.raw)
+ reply = iter(packets.decode(response.raw))
self.assertRaises(StopIteration, next, reply)
self.assertEqual(
'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
- 'pull': [[1, None]],
'ack': {'slave': []},
+ 'pull': [[1, None]],
'request': [
- {'to': '127.0.0.1:7777', 'from': 'slave', 'packet': 'request', 'for': 1},
+ {'to': '127.0.0.1:7777', 'segment': 'request', 'for': 1, 'from': 'slave'},
],
})),
response.headers['set-cookie'])
@@ -222,7 +221,7 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
volume['document'].create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
self.utime('master/db/document/gu/guid', 1)
@@ -235,7 +234,7 @@ class MasterTest(tests.Test):
}))
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': 'guid', 'patch': {
'ctime': {'mtime': 1, 'value': 1},
@@ -247,10 +246,10 @@ class MasterTest(tests.Test):
{'commit': [[1, 3]]},
]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
self.assertEqual(
'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
- 'id': 1,
+ 'id': '1',
'pull': [[1, None]],
})),
response.headers['set-cookie'])
@@ -267,7 +266,7 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
self.utime('master/db/document/1/1', 1)
@@ -282,12 +281,12 @@ class MasterTest(tests.Test):
}))
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [{'resource': 'document'}]),
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [{'resource': 'document'}]),
],
- [(packet.header, [record for record in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [record for record in packet]) for packet in packets.decode(response.raw)])
self.assertEqual(
'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
- 'id': 1,
+ 'id': '1',
'pull': [[1, None]],
'ack': {
'node': [[[[0, 0]], [[1, 1]]], [[[0, 0]], [[2, 2]]]],
@@ -307,7 +306,7 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
self.utime('master/db/document/1/1', 1)
@@ -326,9 +325,9 @@ class MasterTest(tests.Test):
}))
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [{'resource': 'document'}]),
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [{'resource': 'document'}]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
response = conn.request('GET', [], params={'cmd': 'pull'}, headers={
'cookie': 'sugar_network_node=%s' % b64encode(json.dumps({
@@ -340,7 +339,7 @@ class MasterTest(tests.Test):
}))
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
'guid': {'mtime': 1, 'value': '1'},
@@ -350,7 +349,7 @@ class MasterTest(tests.Test):
{'commit': [[1, 1]]},
]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
response = conn.request('GET', [], params={'cmd': 'pull'}, headers={
'cookie': 'sugar_network_node=%s' % b64encode(json.dumps({
@@ -362,13 +361,13 @@ class MasterTest(tests.Test):
}))
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'content-length': '2', 'content-type': 'application/octet-stream'},
{'commit': [[4, 4]]},
]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
def test_pull_ExcludeAckRequests(self):
@@ -376,7 +375,7 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
self.utime('master/db/document/1/1', 1)
@@ -394,10 +393,10 @@ class MasterTest(tests.Test):
],
}))
})
- reply = parcel.decode(response.raw)
+ reply = packets.decode(response.raw)
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'to': 'node2', 'packet': 'ack', 'ack': [[1, 2]]}, []),
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [{'resource': 'document'}]),
+ ({'from': '127.0.0.1:7777', 'to': 'node2', 'segment': 'ack', 'ack': [[1, 2]]}, []),
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [{'resource': 'document'}]),
],
[(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in reply])
@@ -411,7 +410,7 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1, 'prop': '.' * RECORD})
self.utime('master/db/document/1/1', 1)
@@ -426,14 +425,14 @@ class MasterTest(tests.Test):
}))
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
self.assertEqual(
'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
- 'id': 1,
+ 'id': '1',
'pull': [[1, None]],
})),
response.headers['set-cookie'])
@@ -442,7 +441,7 @@ class MasterTest(tests.Test):
'cookie': response.headers['set-cookie'],
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
'ctime': {'mtime': 1, 'value': 1},
@@ -453,10 +452,10 @@ class MasterTest(tests.Test):
{'commit': [[1, 1]]},
]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
self.assertEqual(
'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
- 'id': 1,
+ 'id': '1',
'pull': [[1, None]],
})),
response.headers['set-cookie'])
@@ -465,7 +464,7 @@ class MasterTest(tests.Test):
'cookie': response.headers['set-cookie'],
})
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': '2', 'patch': {
'ctime': {'mtime': 2, 'value': 2},
@@ -482,10 +481,10 @@ class MasterTest(tests.Test):
{'commit': [[2, 3]]},
]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
self.assertEqual(
'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
- 'id': 1,
+ 'id': '1',
'pull': [[2, None]],
})),
response.headers['set-cookie'])
@@ -504,14 +503,14 @@ class MasterTest(tests.Test):
pass
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
volume['document'].create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
self.utime('master/db/document/gu/guid', 1)
blob1 = volume.blobs.post('1')
self.touch(('blob2', 'ccc'))
- patch = ''.join(parcel.encode([
+ patch = ''.join(packets.encode([
('push', None, [
{'resource': 'document'},
{'guid': '2', 'patch': {
@@ -528,8 +527,8 @@ class MasterTest(tests.Test):
blob2 = volume.blobs.get(hashlib.sha1('ccc').hexdigest())
self.assertEqual([
- ({'from': '127.0.0.1:7777', 'to': 'node', 'packet': 'ack', 'ack': [[3, 3]], 'ranges': [[1, 2]]}, []),
- ({'from': '127.0.0.1:7777', 'packet': 'push'}, [
+ ({'from': '127.0.0.1:7777', 'to': 'node', 'segment': 'ack', 'ack': [[3, 3]], 'ranges': [[1, 2]]}, []),
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': 'guid', 'patch': {
'ctime': {'mtime': 1, 'value': 1},
@@ -540,11 +539,15 @@ class MasterTest(tests.Test):
{'commit': [[1, 2]]},
]),
],
- [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode(response.raw)])
+ [(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode(response.raw)])
assert volume['document']['2'].exists
self.assertEqual('ccc', ''.join(blob2.iter_content()))
+def Connection():
+ return http.Connection(api.value)
+
+
if __name__ == '__main__':
tests.main()
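
The master tests above follow the toolkit.parcel -> toolkit.packets rename: patches are built with packets.encode(), replies are wrapped in iter(packets.decode(...)), the per-packet header key changes from 'packet' to 'segment', and the local Connection() helper now returns a plain http.Connection(api.value) instead of a credentialed client. A condensed sketch of the push round trip as the updated tests exercise it (self.node_routes.guid and the record payload are test fixtures, and the push records are abbreviated here):

    patch = ''.join(packets.encode([
        ('push', None, [
            {'resource': 'document'},
            # ... per-document patch records go here ...
            {'commit': [[1, 1]]},
        ]),
    ], header={'to': self.node_routes.guid, 'from': 'slave'}))

    conn = http.Connection(api.value)
    response = conn.request('POST', [], patch, params={'cmd': 'push'})
    reply = iter(packets.decode(response.raw))
    ack = next(reply)
    assert ack.header['segment'] == 'ack'   # the old parcel module used 'packet' here
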
diff --git a/tests/units/node/model.py b/tests/units/node/model.py
deleted file mode 100755
index a89a92b..0000000
--- a/tests/units/node/model.py
+++ /dev/null
@@ -1,978 +0,0 @@
-#!/usr/bin/env python
-# sugar-lint: disable
-
-import os
-import time
-from cStringIO import StringIO
-
-from __init__ import tests
-
-from sugar_network import db, toolkit
-from sugar_network.client import Connection, keyfile, api
-from sugar_network.model.post import Post
-from sugar_network.model.context import Context
-from sugar_network.node import model, obs
-from sugar_network.node.model import User, Volume
-from sugar_network.node.routes import NodeRoutes
-from sugar_network.client.auth import SugarCreds
-from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit.router import Request, Router
-from sugar_network.toolkit import spec, i18n, http, coroutine, enforce
-
-
-class ModelTest(tests.Test):
-
- def test_IncrementReleasesSeqnoOnNewReleases(self):
- events = []
- volume = self.start_master()
- this.broadcast = lambda x: events.append(x)
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
- self.assertEqual([
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(0, volume.release_seqno.value)
-
- conn.put(['context', context], {
- 'summary': 'summary2',
- })
- self.assertEqual([
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(0, volume.release_seqno.value)
-
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ])))
- release = conn.upload(['context', context, 'releases'], StringIO(bundle))
- self.assertEqual([
- {'event': 'release', 'seqno': 1},
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(1, volume.release_seqno.value)
-
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- ])))
- release = conn.upload(['context', context, 'releases'], StringIO(bundle))
- self.assertEqual([
- {'event': 'release', 'seqno': 1},
- {'event': 'release', 'seqno': 2},
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(2, volume.release_seqno.value)
-
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- ])))
- release = conn.upload(['context', context, 'releases'], StringIO(bundle))
- self.assertEqual([
- {'event': 'release', 'seqno': 1},
- {'event': 'release', 'seqno': 2},
- {'event': 'release', 'seqno': 3},
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(3, volume.release_seqno.value)
-
- conn.delete(['context', context, 'releases', release])
- self.assertEqual([
- {'event': 'release', 'seqno': 1},
- {'event': 'release', 'seqno': 2},
- {'event': 'release', 'seqno': 3},
- {'event': 'release', 'seqno': 4},
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(4, volume.release_seqno.value)
-
- def test_IncrementReleasesSeqnoOnDependenciesChange(self):
- events = []
- volume = self.start_master()
- this.broadcast = lambda x: events.append(x)
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
- self.assertEqual([
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(0, volume.release_seqno.value)
-
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- ])))
- release = conn.upload(['context', context, 'releases'], StringIO(bundle))
- self.assertEqual([
- {'seqno': 1, 'event': 'release'}
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(1, volume.release_seqno.value)
- del events[:]
-
- conn.put(['context', context], {
- 'dependencies': 'dep',
- })
- self.assertEqual([
- {'event': 'release', 'seqno': 2},
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(2, volume.release_seqno.value)
-
- def test_IncrementReleasesSeqnoOnDeletes(self):
- events = []
- volume = self.start_master()
- this.broadcast = lambda x: events.append(x)
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- })
- self.assertEqual([
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(0, volume.release_seqno.value)
-
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- ])))
- release = conn.upload(['context', context, 'releases'], StringIO(bundle))
- self.assertEqual([
- {'seqno': 1, 'event': 'release'}
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(1, volume.release_seqno.value)
- del events[:]
-
- conn.delete(['context', context])
- self.assertEqual([
- {'event': 'release', 'seqno': 2},
- ], [i for i in events if i['event'] == 'release'])
- self.assertEqual(2, volume.release_seqno.value)
- del events[:]
-
- def test_RestoreReleasesSeqno(self):
- events = []
- volume = self.start_master()
- this.broadcast = lambda x: events.append(x)
- conn = Connection(creds=SugarCreds(keyfile.value))
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'Activity',
- 'summary': 'summary',
- 'description': 'description',
- 'dependencies': 'dep',
- })
- bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
- '[Activity]',
- 'name = Activity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- ])))
- release = conn.upload(['context', context, 'releases'], StringIO(bundle))
- self.assertEqual(1, volume.release_seqno.value)
-
- volume.close()
- volume = Volume('master', [])
- self.assertEqual(1, volume.release_seqno.value)
-
- def test_Packages(self):
- self.override(obs, 'get_repos', lambda: [
- {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']},
- {'lsb_id': 'Debian', 'lsb_release': '6.0', 'name': 'Debian-6.0', 'arches': ['x86']},
- {'lsb_id': 'Debian', 'lsb_release': '7.0', 'name': 'Debian-7.0', 'arches': ['x86_64']},
- ])
- self.override(obs, 'resolve', lambda repo, arch, names: {'version': '1.0'})
-
- volume = self.start_master([User, model.Context])
- conn = http.Connection(api.value, SugarCreds(keyfile.value))
-
- guid = conn.post(['context'], {
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.put(['context', guid, 'releases', '*'], {
- 'binary': ['pkg1.bin', 'pkg2.bin'],
- 'devel': 'pkg3.devel',
- })
- self.assertEqual({
- '*': {
- 'seqno': 3,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
- },
- 'resolves': {
- 'Gentoo-2.1': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
- 'Debian-6.0': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
- 'Debian-7.0': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
- },
- },
- volume['context'][guid]['releases'])
-
- guid = conn.post(['context'], {
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.put(['context', guid, 'releases', 'Gentoo'], {
- 'binary': ['pkg1.bin', 'pkg2.bin'],
- 'devel': 'pkg3.devel',
- })
- self.assertEqual({
- 'Gentoo': {
- 'seqno': 5,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
- },
- 'resolves': {
- 'Gentoo-2.1': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
- },
- },
- volume['context'][guid]['releases'])
-
- guid = conn.post(['context'], {
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.put(['context', guid, 'releases', 'Debian-6.0'], {
- 'binary': ['pkg1.bin', 'pkg2.bin'],
- 'devel': 'pkg3.devel',
- })
- self.assertEqual({
- 'Debian-6.0': {
- 'seqno': 7,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
- },
- 'resolves': {
- 'Debian-6.0': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
- },
- },
- volume['context'][guid]['releases'])
-
- def test_UnresolvedPackages(self):
- self.override(obs, 'get_repos', lambda: [
- {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']},
- ])
- self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, 'resolve failed'))
-
- volume = self.start_master([User, model.Context])
- conn = http.Connection(api.value, SugarCreds(keyfile.value))
-
- guid = conn.post(['context'], {
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.put(['context', guid, 'releases', '*'], {
- 'binary': ['pkg1.bin', 'pkg2.bin'],
- 'devel': 'pkg3.devel',
- })
- self.assertEqual({
- '*': {
- 'seqno': 3,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
- },
- 'resolves': {
- 'Gentoo-2.1': {'status': 'resolve failed'},
- },
- },
- volume['context'][guid]['releases'])
-
- def test_PackageOverrides(self):
- self.override(obs, 'get_repos', lambda: [
- {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']},
- {'lsb_id': 'Debian', 'lsb_release': '6.0', 'name': 'Debian-6.0', 'arches': ['x86']},
- {'lsb_id': 'Debian', 'lsb_release': '7.0', 'name': 'Debian-7.0', 'arches': ['x86_64']},
- ])
-
- volume = self.start_master([User, model.Context])
- conn = http.Connection(api.value, SugarCreds(keyfile.value))
- guid = conn.post(['context'], {
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '1'))
- conn.put(['context', guid, 'releases', '*'], {'binary': '1'})
- self.assertEqual({
- '*': {
- 'seqno': 3,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['1']},
- },
- 'resolves': {
- 'Gentoo-2.1': {'status': '1'},
- 'Debian-6.0': {'status': '1'},
- 'Debian-7.0': {'status': '1'},
- },
- },
- volume['context'][guid]['releases'])
-
- self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '2'))
- conn.put(['context', guid, 'releases', 'Debian'], {'binary': '2'})
- self.assertEqual({
- '*': {
- 'seqno': 3,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['1']},
- },
- 'Debian': {
- 'seqno': 4,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['2']},
- },
- 'resolves': {
- 'Gentoo-2.1': {'status': '1'},
- 'Debian-6.0': {'status': '2'},
- 'Debian-7.0': {'status': '2'},
- },
- },
- volume['context'][guid]['releases'])
-
- self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '3'))
- conn.put(['context', guid, 'releases', 'Debian-6.0'], {'binary': '3'})
- self.assertEqual({
- '*': {
- 'seqno': 3,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['1']},
- },
- 'Debian': {
- 'seqno': 4,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['2']},
- },
- 'Debian-6.0': {
- 'seqno': 5,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['3']},
- },
- 'resolves': {
- 'Gentoo-2.1': {'status': '1'},
- 'Debian-6.0': {'status': '3'},
- 'Debian-7.0': {'status': '2'},
- },
- },
- volume['context'][guid]['releases'])
-
- self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '4'))
- conn.put(['context', guid, 'releases', 'Debian'], {'binary': '4'})
- self.assertEqual({
- '*': {
- 'seqno': 3,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['1']},
- },
- 'Debian': {
- 'seqno': 6,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['4']},
- },
- 'Debian-6.0': {
- 'seqno': 5,
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'value': {'binary': ['3']},
- },
- 'resolves': {
- 'Gentoo-2.1': {'status': '1'},
- 'Debian-6.0': {'status': '3'},
- 'Debian-7.0': {'status': '4'},
- },
- },
- volume['context'][guid]['releases'])
-
- def test_solve_SortByVersions(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- context = volume['context'].create({
- 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 1}}}},
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 2}}}},
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 3}}}},
- },
- })
- self.assertEqual(
- {context: {'command': ('activity', 3), 'title': '', 'blob': '3', 'version': [[3], 0]}},
- model.solve(volume, context))
-
- context = volume['context'].create({
- 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 3}}}},
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 2}}}},
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 1}}}},
- },
- })
- self.assertEqual(
- {context: {'command': ('activity', 3), 'title': '', 'blob': '3', 'version': [[3], 0]}},
- model.solve(volume, context))
-
- def test_solve_SortByStability(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- context = volume['context'].create({
- 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'developer', 'version': [[1], 0], 'commands': {'activity': {'exec': 1}}}},
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 2}}}},
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'buggy', 'version': [[3], 0], 'commands': {'activity': {'exec': 3}}}},
- },
- })
- self.assertEqual(
- {context: {'command': ('activity', 2), 'title': '', 'blob': '2', 'version': [[2], 0]}},
- model.solve(volume, context))
-
- def test_solve_CollectDeps(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {
- 'bundles': {'*-*': {}}, 'stability': 'stable',
- 'version': [[1], 0],
- 'requires': spec.parse_requires('context2; context4'),
- 'commands': {'activity': {'exec': 'command'}},
- }},
- },
- })
- volume['context'].create({
- 'guid': 'context2', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '2': {'value': {
- 'bundles': {'*-*': {}}, 'stability': 'stable',
- 'version': [[2], 0],
- 'commands': {'activity': {'exec': 0}},
- 'requires': spec.parse_requires('context3'),
- }},
- },
- })
- volume['context'].create({
- 'guid': 'context3', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
- volume['context'].create({
- 'guid': 'context4', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
-
- self.assertEqual({
- 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'context2': {'title': '', 'blob': '2', 'version': [[2], 0]},
- 'context3': {'title': '', 'blob': '3', 'version': [[3], 0]},
- 'context4': {'title': '', 'blob': '4', 'version': [[4], 0]},
- },
- model.solve(volume, 'context1'))
-
- def test_solve_CommandDeps(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {
- 'bundles': {'*-*': {}}, 'stability': 'stable',
- 'version': [[1], 0],
- 'requires': [],
- 'commands': {
- 'activity': {'exec': 1, 'requires': spec.parse_requires('context2')},
- 'application': {'exec': 2},
- },
- }},
- },
- })
- volume['context'].create({
- 'guid': 'context2', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '2': {'value': {
- 'bundles': {'*-*': {}}, 'stability': 'stable',
- 'version': [[2], 0],
- 'commands': {'activity': {'exec': 0}},
- 'requires': [],
- }},
- },
- })
-
- self.assertEqual({
- 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('activity', 1)},
- 'context2': {'title': '', 'blob': '2', 'version': [[2], 0]},
- },
- model.solve(volume, 'context1', command='activity'))
- self.assertEqual({
- 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('application', 2)},
- },
- model.solve(volume, 'context1', command='application'))
-
- def test_solve_DepConditions(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- volume['context'].create({
- 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
- '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
- '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep < 3'),
- }},
- },
- })
- self.assertEqual({
- 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '2', 'version': [[2], 0]},
- },
- model.solve(volume, 'context1'))
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep <= 3'),
- }},
- },
- })
- self.assertEqual({
- 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
- },
- model.solve(volume, 'context1'))
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep > 2'),
- }},
- },
- })
- self.assertEqual({
- 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
- },
- model.solve(volume, 'context1'))
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep >= 2'),
- }},
- },
- })
- self.assertEqual({
- 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
- },
- model.solve(volume, 'context1'))
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep > 2; dep < 5'),
- }},
- },
- })
- self.assertEqual({
- 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
- },
- model.solve(volume, 'context1'))
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep > 2; dep <= 3'),
- }},
- },
- })
- self.assertEqual({
- 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
- },
- model.solve(volume, 'context1'))
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep = 1'),
- }},
- },
- })
- self.assertEqual({
- 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '1', 'version': [[1], 0]},
- },
- model.solve(volume, 'context1'))
-
- def test_solve_SwitchToAlternativeBranch(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- volume['context'].create({
- 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '6': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}},
- 'requires': spec.parse_requires('context4=1'), 'commands': {'activity': {'exec': 6}}}},
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}},
- 'requires': spec.parse_requires('context2'), 'commands': {'activity': {'exec': 1}}}},
- },
- })
- volume['context'].create({
- 'guid': 'context2', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}},
- 'requires': spec.parse_requires('context3; context4=1')}},
- },
- })
- volume['context'].create({
- 'guid': 'context3', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}},
- 'requires': spec.parse_requires('context4=2')}},
- },
- })
- volume['context'].create({
- 'guid': 'context4', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
- '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
-
- self.assertEqual({
- 'context1': {'title': '', 'blob': '6', 'version': [[1], 0], 'command': ('activity', 6)},
- 'context4': {'title': '', 'blob': '5', 'version': [[1], 0]},
- },
- model.solve(volume, 'context1'))
-
- def test_solve_CommonDeps(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- volume['context'].create({
- 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
- '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
- '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {},
- 'dependencies': 'dep=2',
- 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires(''),
- }},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '2', 'version': [[2], 0]},
- },
- model.solve(volume, 'context'))
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {},
- 'dependencies': 'dep<5',
- 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep>1'),
- }},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
- },
- model.solve(volume, 'context'))
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {},
- 'dependencies': 'dep<4',
- 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep<5'),
- }},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
- },
- model.solve(volume, 'context'))
-
- def test_solve_ExtraDeps(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
-
- volume['context'].create({
- 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
- '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
- '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires(''),
- }},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- },
- model.solve(volume, 'context'))
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep>1'),
- }},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
- },
- model.solve(volume, 'context'))
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep<5'),
- }},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
- },
- model.solve(volume, 'context'))
-
- def test_solve_Nothing(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
- this.request = Request()
-
- volume['context'].create({
- 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
- '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
- '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
- '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- },
- })
- self.assertEqual(None, model.solve(volume, 'context'))
-
- volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep=0'),
- }},
- },
- })
- self.assertEqual(None, model.solve(volume, 'context'))
-
- def test_solve_Packages(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
- this.request = Request()
-
- context = volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('package'),
- }},
- },
- })
- volume['context'].create({
- 'guid': 'package', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- 'resolves': {
- 'Ubuntu-10.04': {'version': [[1], 0], 'packages': ['pkg1', 'pkg2']},
- },
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'package': {'packages': ['pkg1', 'pkg2'], 'version': [[1], 0]},
- },
- model.solve(volume, context, lsb_id='Ubuntu', lsb_release='10.04'))
-
- context = volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('dep; package'),
- }},
- },
- })
- volume['context'].create({
- 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'dep': {'title': '', 'blob': '2', 'version': [[1], 0]},
- 'package': {'packages': ['pkg1', 'pkg2'], 'version': [[1], 0]},
- },
- model.solve(volume, context, lsb_id='Ubuntu', lsb_release='10.04'))
-
- def test_solve_PackagesByLsbId(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
- this.request = Request()
-
- context = volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('package1'),
- }},
- },
- })
- volume['context'].create({
- 'guid': 'package1', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- 'Ubuntu': {'value': {'binary': ['bin1', 'bin2'], 'devel': ['devel1', 'devel2']}},
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'package1': {'packages': ['bin1', 'bin2', 'devel1', 'devel2'], 'version': []},
- },
- model.solve(volume, context, lsb_id='Ubuntu'))
-
- context = volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('package2'),
- }},
- },
- })
- volume['context'].create({
- 'guid': 'package2', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- 'Ubuntu': {'value': {'binary': ['bin']}},
- 'resolves': {
- 'Ubuntu-10.04': {'version': [[1], 0], 'packages': ['pkg1', 'pkg2']},
- },
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'package2': {'packages': ['bin'], 'version': []},
- },
- model.solve(volume, context, lsb_id='Ubuntu', lsb_release='fake'))
-
- def test_solve_PackagesByCommonAlias(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
- this.request = Request()
-
- context = volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('package1'),
- }},
- },
- })
- volume['context'].create({
- 'guid': 'package1', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '*': {'value': {'binary': ['pkg1']}},
- 'Ubuntu': {'value': {'binary': ['pkg2']}},
- 'resolves': {
- 'Ubuntu-10.04': {'version': [[1], 0], 'packages': ['pkg3']},
- },
- },
- })
- self.assertEqual({
- 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'package1': {'packages': ['pkg1'], 'version': []},
- },
- model.solve(volume, context))
- self.assertEqual({
- 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'package1': {'packages': ['pkg1'], 'version': []},
- },
- model.solve(volume, context, lsb_id='Fake'))
- self.assertEqual({
- 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'package1': {'packages': ['pkg1'], 'version': []},
- },
- model.solve(volume, context, lsb_id='Fake', lsb_release='fake'))
-
- def test_solve_NoPackages(self):
- volume = db.Volume('master', [Context])
- this.volume = volume
- this.request = Request()
-
- context = volume['context'].create({
- 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
- 'requires': spec.parse_requires('package'),
- }},
- },
- })
- volume['context'].create({
- 'guid': 'package', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
- },
- })
- self.assertEqual(None, model.solve(volume, context))
-
-
-if __name__ == '__main__':
- tests.main()
diff --git a/tests/units/node/node_model.py b/tests/units/node/node_model.py
new file mode 100755
index 0000000..9bb758e
--- /dev/null
+++ b/tests/units/node/node_model.py
@@ -0,0 +1,2780 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# sugar-lint: disable
+
+import os
+import time
+import json
+import base64
+import hashlib
+import mimetypes
+from cStringIO import StringIO
+from os.path import exists
+
+from __init__ import tests
+
+from sugar_network import db, toolkit
+from sugar_network.client import Connection
+from sugar_network.model.post import Post
+from sugar_network.model.context import Context
+from sugar_network.node import model, obs
+from sugar_network.node.model import User, Volume
+from sugar_network.node.auth import Principal as _Principal
+from sugar_network.node.routes import NodeRoutes
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit.router import Request, Router, ACL, File
+from sugar_network.toolkit import spec, i18n, http, coroutine, ranges, enforce
+
+
+class NodeModelTest(tests.Test):
+
+ def test_diff_volume(self):
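+ # Walk model.diff_volume() end to end: per-document patches, blob and file
+ # metadata, and the commit ranges reported back via the mutable ranges argument.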
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+
+ volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+ volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('db/document/2/2', 2)
+ volume['document'].create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
+ self.utime('db/document/3/3', 3)
+ volume.blobs.post('1')
+ self.touch(('files/foo/2', '22'))
+ self.touch(('files/bar/3', '333'))
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'prop': {'value': '1', 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '2', 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ }},
+ {'guid': '3', 'patch': {
+ 'guid': {'value': '3', 'mtime': 3},
+ 'ctime': {'value': 3, 'mtime': 3},
+ 'prop': {'value': '3', 'mtime': 3},
+ 'mtime': {'value': 3, 'mtime': 3},
+ }},
+ {'content-type': 'application/octet-stream', 'content-length': '1'},
+ {'content-type': 'application/octet-stream', 'content-length': '2', 'path': 'foo/2'},
+ {'commit': [[1, 5]]},
+ ],
+ [i.meta if isinstance(i, File) else i for i in model.diff_volume(r, files=['foo'])])
+ self.assertEqual([[6, None]], r)
+
+ r = [[2, 2]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '2', 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ }},
+ {'commit': [[2, 2]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([], r)
+
+ r = [[6, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[6, None]], r)
+
+ volume['document'].update('2', {'prop': '22'})
+
+ r = [[6, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'prop': {'value': '22', 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[6, 6]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[7, None]], r)
+
+ volume.blobs.post('4444')
+ self.touch(('files/foo/2', '2222'))
+
+ r = [[7, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'content-type': 'application/octet-stream', 'content-length': '4'},
+ {'content-type': 'application/octet-stream', 'content-length': '4', 'path': 'foo/2'},
+ {'content-type': 'application/octet-stream', 'content-length': '3', 'path': 'bar/3'},
+ {'commit': [[7, 9]]},
+ ],
+ [i.meta if isinstance(i, File) else i for i in model.diff_volume(r, files=['foo', 'bar'])])
+ self.assertEqual([[10, None]], r)
+
+ def test_diff_volume_SyncUsecase(self):
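+ # Sync use case: changes already acknowledged by the peer (prop2) are skipped,
+ # either by excluding them from the pull ranges or via the explicit exclude argument.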
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop1(self, value):
+ return value
+
+ @db.stored_property()
+ def prop2(self, value):
+ return value
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+
+ volume['document'].create({'guid': 'guid', 'ctime': 1, 'mtime': 1, 'prop1': 1, 'prop2': 1})
+ self.utime('db/document/gu/guid', 1)
+
+ # Fresh update to pull
+ volume['document'].update('guid', {'prop1': 2})
+ self.utime('db/document/gu/guid/prop1', 2)
+
+ # Recently pushed
+ volume['document'].update('guid', {'prop2': 2})
+ self.utime('db/document/gu/guid/prop2', 2)
+
+ # Exclude the `prop2` ack from the pull ranges
+ r = [[2, None]]
+ ranges.exclude(r, 3, 3)
+ self.assertEqual([
+ {'resource': 'document'},
+ ],
+ [dict(i) for i in model.diff_volume(r)])
+ self.assertEqual([[2, 2], [4, None]], r)
+
+ # Pass `prop2` ack in `exclude`
+ r = [[2, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': 'guid', 'patch': {
+ 'prop1': {'value': 2, 'mtime': 2},
+ }},
+ {'commit': [[2, 2]]},
+ ],
+ [dict(i) for i in model.diff_volume(r, [[3, 3]])])
+ self.assertEqual([[4, None]], r)
+
+ def test_diff_volume_Partial(self):
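+ # Interrupting the patch generator with StopIteration commits only the part
+ # that was actually consumed.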
+ self.override(time, 'time', lambda: 0)
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+ volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+ volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('db/document/2/2', 2)
+
+ r = [[1, None]]
+ patch = model.diff_volume(r)
+ self.assertEqual({'resource': 'document'}, next(patch))
+ self.assertEqual('1', next(patch)['guid'])
+ self.assertRaises(StopIteration, patch.throw, StopIteration)
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[1, None]], r)
+
+ r = [[1, None]]
+ patch = model.diff_volume(r)
+ self.assertEqual({'resource': 'document'}, next(patch))
+ self.assertEqual('1', next(patch)['guid'])
+ self.assertEqual('2', next(patch)['guid'])
+ self.assertEqual({'commit': [[1, 1]]}, patch.throw(StopIteration()))
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[2, None]], r)
+
+ r = [[1, None]]
+ patch = model.diff_volume(r)
+ self.assertEqual({'resource': 'document'}, next(patch))
+ self.assertEqual('1', next(patch)['guid'])
+ self.assertEqual('2', next(patch)['guid'])
+ self.assertEqual({'commit': [[1, 2]]}, next(patch))
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[3, None]], r)
+
+ def test_diff_volume_IgnoreOneWayResources(self):
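+ # Resources flagged one_way are emitted normally but skipped when diffing
+ # with one_way=True.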
+
+ class Document(db.Resource):
+ one_way = True
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+ volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[2, None]], r)
+
+ r = [[1, None]]
+ self.assertEqual([
+ ],
+ [i for i in model.diff_volume(r, one_way=True)])
+ self.assertEqual([[1, None]], r)
+
+ def test_diff_volume_TheSameInSeqForAllDocuments(self):
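+ # One seqno range covers all resources: three documents in three resources
+ # end up in a single [[1, 3]] commit.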
+ self.override(time, 'time', lambda: 0)
+
+ class Document1(db.Resource):
+ pass
+
+ class Document2(db.Resource):
+ pass
+
+ class Document3(db.Resource):
+ pass
+
+ volume = Volume('.', [Document1, Document2, Document3])
+ this.volume = volume
+ volume['document1'].create({'guid': '3', 'ctime': 3, 'mtime': 3})
+ self.utime('db/document1/3/3', 3)
+ volume['document2'].create({'guid': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('db/document2/2/2', 2)
+ volume['document3'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document3/1/1', 1)
+
+ r = [[1, None]]
+ patch = model.diff_volume(r)
+ self.assertEqual({'resource': 'document1'}, patch.send(None))
+ self.assertEqual('3', patch.send(None)['guid'])
+ self.assertEqual({'resource': 'document2'}, patch.send(None))
+ self.assertEqual('2', patch.send(None)['guid'])
+ self.assertEqual({'resource': 'document3'}, patch.send(None))
+ self.assertEqual('1', patch.send(None)['guid'])
+ self.assertEqual({'commit': [[1, 3]]}, patch.send(None))
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[4, None]], r)
+
+ def test_diff_volume_LocalProps(self):
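+ # Properties marked ACL.LOCAL never appear in the outgoing diff, even when
+ # they are the only thing that changed.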
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop1(self, value):
+ return value
+
+ @db.stored_property(acl=ACL.PUBLIC | ACL.LOCAL)
+ def prop2(self, value):
+ return value
+
+ @db.stored_property()
+ def prop3(self, value):
+ return value
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+
+ volume['document'].create({'guid': '1', 'prop1': '1', 'prop2': '1', 'prop3': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 0)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 0},
+ 'ctime': {'value': 1, 'mtime': 0},
+ 'prop1': {'value': '1', 'mtime': 0},
+ 'prop3': {'value': '1', 'mtime': 0},
+ 'mtime': {'value': 1, 'mtime': 0},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [dict(i) for i in model.diff_volume(r, files=['foo'])])
+ self.assertEqual([[2, None]], r)
+
+ volume['document'].update('1', {'prop1': '2'})
+ self.utime('db/document', 0)
+
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop1': {'value': '2', 'mtime': 0},
+ }},
+ {'commit': [[2, 2]]},
+ ],
+ [dict(i) for i in model.diff_volume(r, files=['foo'])])
+ self.assertEqual([[3, None]], r)
+
+ volume['document'].update('1', {'prop2': '3'})
+ self.utime('db/document', 0)
+
+ self.assertEqual([
+ {'resource': 'document'},
+ ],
+ [dict(i) for i in model.diff_volume(r, files=['foo'])])
+ self.assertEqual([[3, None]], r)
+
+ volume['document'].update('1', {'prop1': '4', 'prop2': '4', 'prop3': '4'})
+ self.utime('db/document', 0)
+
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop1': {'value': '4', 'mtime': 0},
+ 'prop3': {'value': '4', 'mtime': 0},
+ }},
+ {'commit': [[3, 3]]},
+ ],
+ [dict(i) for i in model.diff_volume(r, files=['foo'])])
+ self.assertEqual([[4, None]], r)
+
+ def test_patch_volume_New(self):
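+ # Applying a freshly generated diff to an empty volume recreates the documents,
+ # the posted blob and the requested files/ subtree ('foo' but not 'bar').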
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume1 = Volume('1', [Document])
+ this.volume = volume1
+ volume1['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('1/db/document/1/1', 1)
+ volume1['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('1/db/document/2/2', 2)
+ volume1['document'].create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
+ self.utime('1/db/document/3/3', 3)
+ volume1.blobs.post('1')
+ self.touch(('1/files/foo/2', '22'))
+ self.touch(('1/files/bar/3', '333'))
+ patch = [i for i in model.diff_volume([[1, None]], files=['foo'])]
+
+ volume2 = Volume('2', [Document])
+ this.volume = volume2
+ model.patch_volume(patch)
+
+ self.assertEqual(
+ sorted([
+ (1, '1', 1, '1'),
+ (2, '2', 2, '2'),
+ (3, '3', 3, '3'),
+ ]),
+ sorted([(i['ctime'], i['prop'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]]))
+
+ doc = volume2['document'].get('1')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['mtime'])
+ self.assertEqual(1, doc.meta('ctime')['mtime'])
+ self.assertEqual(1, doc.meta('prop')['mtime'])
+ self.assertEqual(1, doc.meta('mtime')['mtime'])
+
+ doc = volume2['document'].get('2')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+
+ doc = volume2['document'].get('3')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(3, doc.meta('guid')['mtime'])
+ self.assertEqual(3, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('prop')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+
+ blob = volume2.blobs.get(hashlib.sha1('1').hexdigest())
+ self.assertEqual({
+ 'x-seqno': '1',
+ 'content-length': '1',
+ 'content-type': 'application/octet-stream',
+ },
+ blob.meta)
+ self.assertEqual('1', file(blob.path).read())
+
+ blob = volume2.blobs.get('foo/2')
+ self.assertEqual({
+ 'x-seqno': '1',
+ 'content-length': '2',
+ 'content-type': 'application/octet-stream',
+ },
+ blob.meta)
+ self.assertEqual('22', file(blob.path).read())
+
+ assert volume2.blobs.get('bar/3') is None
+
+ def test_patch_volume_Update(self):
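+ # Conflicts are resolved per property by mtime: older incoming values are
+ # ignored, newer ones overwrite the local copy.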
+
+ class Document(db.Resource):
+
+ @db.stored_property(default='')
+ def prop(self, value):
+ return value
+
+ volume1 = db.Volume('1', [Document])
+ volume1['document'].create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
+ volume1['document'].update('guid', {'prop': '1'})
+ self.utime('1/db/document/gu/guid', 1)
+
+ volume2 = db.Volume('2', [Document])
+ volume2['document'].create({'guid': 'guid', 'ctime': 2, 'mtime': 2})
+ volume2['document'].update('guid', {'prop': '2'})
+ self.utime('2/db/document/gu/guid', 2)
+
+ self.assertEqual(
+ [(2, 2, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual('2', doc.meta('prop')['value'])
+
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ model.patch_volume(patch)
+
+ self.assertEqual(
+ [(2, 2, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual('2', doc.meta('prop')['value'])
+
+ os.utime('1/db/document/gu/guid/mtime', (3, 3))
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ model.patch_volume(patch)
+
+ self.assertEqual(
+ [(2, 1, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(3, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual('2', doc.meta('prop')['value'])
+
+ os.utime('1/db/document/gu/guid/prop', (4, 4))
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ model.patch_volume(patch)
+
+ self.assertEqual(
+ [(2, 1, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(4, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(4, doc.meta('prop')['mtime'])
+ self.assertEqual('1', doc.meta('prop')['value'])
+
+ def test_patch_volume_Ranges(self):
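+ # patch_volume() returns the seqno of the last merge (None when nothing new
+ # was applied) together with the ranges it committed.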
+
+ class Document(db.Resource):
+
+ @db.stored_property(default='')
+ def prop(self, value):
+ return value
+
+ volume1 = Volume('db1', [Document])
+ volume2 = Volume('db2', [Document])
+
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ seqno, committed = model.patch_volume(patch)
+ self.assertEqual([], committed)
+ self.assertEqual(None, seqno)
+
+ volume1['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ seqno, committed = model.patch_volume(patch)
+ self.assertEqual([[1, 1]], committed)
+ self.assertEqual(1, seqno)
+
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ seqno, committed = model.patch_volume(patch)
+ self.assertEqual([[1, 1]], committed)
+ self.assertEqual(None, seqno)
+
+ volume1['document'].update('1', {'prop': '1'})
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ seqno, committed = model.patch_volume(patch)
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual(2, seqno)
+
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume2
+ seqno, committed = model.patch_volume(patch)
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual(None, seqno)
+
+ volume3 = Volume('db3', [Document])
+
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume3
+ seqno, committed = model.patch_volume(patch)
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual(1, seqno)
+
+ this.volume = volume1
+ patch = [i for i in model.diff_volume([[1, None]])]
+ this.volume = volume3
+ seqno, committed = model.patch_volume(patch)
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual(None, seqno)
+
+ def test_patch_volume_MultipleCommits(self):
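+ # Several commit chunks in one patch are merged into a single committed range;
+ # the resulting seqno continues from the value stored in var/seqno.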
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ self.touch(('var/seqno', '100'))
+ volume = Volume('.', [Document])
+ this.volume = volume
+
+ def generator():
+ for i in [
+ {'resource': 'document'},
+ {'commit': [[1, 1]]},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'ctime': {'value': 2, 'mtime': 2.0},
+ 'mtime': {'value': 3, 'mtime': 3.0},
+ 'prop': {'value': '4', 'mtime': 4.0},
+ }},
+ {'commit': [[2, 3]]},
+ ]:
+ yield i
+
+ patch = generator()
+ self.assertEqual((101, [[1, 3]]), model.patch_volume(patch))
+ assert volume['document']['1'].exists
+
+ def test_patch_volume_SeqnoLess(self):
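+ # With shift_seqno=False merged records and blobs keep zero/absent seqnos
+ # instead of advancing the local sequence.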
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume1 = Volume('1', [Document])
+ this.volume = volume1
+ volume1['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('1/db/document/1/1', 1)
+ volume1.blobs.post('1')
+ patch = [i for i in model.diff_volume([[1, None]])]
+
+ volume2 = Volume('2', [Document])
+ this.volume = volume2
+ model.patch_volume(patch, shift_seqno=False)
+
+ self.assertEqual(
+ [(1, '1', 1, '1')],
+ [(i['ctime'], i['prop'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+
+ doc = volume2['document'].get('1')
+ self.assertEqual(0, doc.get('seqno'))
+ assert 'seqno' not in doc.meta('guid')
+ assert 'seqno' not in doc.meta('ctime')
+ assert 'seqno' not in doc.meta('mtime')
+ assert 'seqno' not in doc.meta('prop')
+
+ blob = volume2.blobs.get(hashlib.sha1('1').hexdigest())
+ self.assertEqual({
+ 'x-seqno': '0',
+ 'content-length': '1',
+ 'content-type': 'application/octet-stream',
+ },
+ blob.meta)
+ self.assertEqual('1', file(blob.path).read())
+
+ def test_diff_volume_IgnoreSeqnolessUpdates(self):
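+ # Local-only (ACL.LOCAL) changes and seqno-less patches stay out of the
+ # outgoing diff.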
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop1(self, value):
+ return value
+
+ @db.stored_property(acl=ACL.PUBLIC | ACL.LOCAL)
+ def prop2(self, value):
+ return value
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+
+ volume['document'].create({'guid': '1', 'prop1': '1', 'prop2': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'prop1': {'value': '1', 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [i.meta if isinstance(i, File) else i for i in model.diff_volume(r)])
+ self.assertEqual([[2, None]], r)
+
+ volume['document'].update('1', {'prop2': '2'})
+ self.utime('db/document/1/1', 1)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'prop1': {'value': '1', 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [i.meta if isinstance(i, File) else i for i in model.diff_volume(r)])
+ self.assertEqual([[2, None]], r)
+
+ volume['document'].update('1', {'prop1': '2'})
+ self.utime('db/document/1/1', 1)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'prop1': {'value': '2', 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'commit': [[1, 2]]},
+ ],
+ [i.meta if isinstance(i, File) else i for i in model.diff_volume(r)])
+ self.assertEqual([[3, None]], r)
+
+ self.assertEqual(False, volume['document'].patch('1', {'prop1': {'mtime': 2, 'value': '3'}}, seqno=False))
+ self.assertEqual('3', volume['document']['1']['prop1'])
+ self.utime('db/document/1/1', 1)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'commit': [[1, 2]]},
+ ],
+ [i.meta if isinstance(i, File) else i for i in model.diff_volume(r)])
+ self.assertEqual([[3, None]], r)
+
+ def test_diff_volume_AggProps(self):
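+ # Aggregated properties are diffed as whole dictionaries while every item
+ # keeps its own seqno in the stored value.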
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Aggregated, db.Property())
+ def prop(self, value):
+ return value
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+ volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1, 'prop': {'1': {'prop': 1}}})
+ self.utime('db/document/1/1', 1)
+ volume['document'].create({'guid': '2', 'ctime': 2, 'mtime': 2, 'prop': {'2': {'prop': 2}}})
+ self.utime('db/document/2/2', 2)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
+ }},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
+ }},
+ {'commit': [[1, 2]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[3, None]], r)
+
+ r = [[1, 1]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([], r)
+
+ r = [[2, 2]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
+ }},
+ {'commit': [[2, 2]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([], r)
+
+ r = [[3, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[3, None]], r)
+
+ self.assertEqual({
+ '1': {'seqno': 1, 'prop': 1},
+ },
+ volume['document'].get('1')['prop'])
+ self.assertEqual({
+ '2': {'seqno': 2, 'prop': 2},
+ },
+ volume['document'].get('2')['prop'])
+
+ volume['document'].update('2', {'prop': {'2': {}, '3': {'prop': 3}}})
+ r = [[3, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'prop': {'value': {'2': {}, '3': {'prop': 3}}, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[3, 3]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[4, None]], r)
+
+ self.assertEqual({
+ '2': {'seqno': 3},
+ '3': {'seqno': 3, 'prop': 3},
+ },
+ volume['document'].get('2')['prop'])
+
+ volume['document'].update('1', {'prop': {'1': {'foo': 'bar'}}})
+ r = [[4, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'value': {'1': {'foo': 'bar'}}, 'mtime': int(os.stat('db/document/1/1/prop').st_mtime)},
+ }},
+ {'commit': [[4, 4]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[5, None]], r)
+
+ self.assertEqual({
+ '1': {'seqno': 4, 'foo': 'bar'},
+ },
+ volume['document'].get('1')['prop'])
+
+ volume['document'].update('2', {'prop': {'2': {'restore': True}}})
+ r = [[5, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'prop': {'value': {'2': {'restore': True}}, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[5, 5]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[6, None]], r)
+
+ self.assertEqual({
+ '2': {'seqno': 5, 'restore': True},
+ '3': {'seqno': 3, 'prop': 3},
+ },
+ volume['document'].get('2')['prop'])
+
+ volume['document'].update('2', {'ctime': 0})
+ r = [[6, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'ctime': {'value': 0, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[6, 6]]},
+ ],
+ [i for i in model.diff_volume(r)])
+ self.assertEqual([[7, None]], r)
+
+ self.assertEqual({
+ '2': {'seqno': 5, 'restore': True},
+ '3': {'seqno': 3, 'prop': 3},
+ },
+ volume['document'].get('2')['prop'])
+
+ def test_patch_volume_Aggprops(self):
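+ # Aggregated items merge per mtime: stale patches are ignored, newer ones
+ # update the item and its seqno.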
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Aggregated, db.Property())
+ def prop(self, value):
+ return value
+
+ volume = Volume('.', [Document])
+ this.volume = volume
+
+ model.patch_volume([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'mtime': 1, 'value': '1'},
+ 'ctime': {'mtime': 1, 'value': 1},
+ 'mtime': {'mtime': 1, 'value': 1},
+ 'prop': {'mtime': 1, 'value': {'1': {}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 1},
+ },
+ volume['document'].get('1')['prop'])
+
+ model.patch_volume([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 1, 'value': {'1': {'probe': False}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 1},
+ },
+ volume['document'].get('1')['prop'])
+
+ model.patch_volume([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 2, 'value': {'1': {'probe': True}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 2, 'probe': True},
+ },
+ volume['document'].get('1')['prop'])
+
+ model.patch_volume([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 3, 'value': {'2': {'foo': 'bar'}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 2, 'probe': True},
+ '2': {'seqno': 3, 'foo': 'bar'},
+ },
+ volume['document'].get('1')['prop'])
+
+ model.patch_volume([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 4, 'value': {'2': {}, '3': {'foo': 'bar'}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 2, 'probe': True},
+ '2': {'seqno': 4},
+ '3': {'seqno': 4, 'foo': 'bar'},
+ },
+ volume['document'].get('1')['prop'])
+
+ def test_IncrementReleasesSeqnoOnNewReleases(self):
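+ # Every release upload or removal bumps volume.release_seqno and broadcasts
+ # a 'release' event.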
+ events = []
+ volume = self.start_master()
+ this.broadcast = lambda x: events.append(x)
+ conn = Connection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.assertEqual([
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(0, volume.release_seqno.value)
+
+ conn.put(['context', context], {
+ 'summary': 'summary2',
+ })
+ self.assertEqual([
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(0, volume.release_seqno.value)
+
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])))
+ release = conn.upload(['context', context, 'releases'], StringIO(bundle))
+ self.assertEqual([
+ {'event': 'release', 'seqno': 1},
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(1, volume.release_seqno.value)
+
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ])))
+ release = conn.upload(['context', context, 'releases'], StringIO(bundle))
+ self.assertEqual([
+ {'event': 'release', 'seqno': 1},
+ {'event': 'release', 'seqno': 2},
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(2, volume.release_seqno.value)
+
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ])))
+ release = conn.upload(['context', context, 'releases'], StringIO(bundle))
+ self.assertEqual([
+ {'event': 'release', 'seqno': 1},
+ {'event': 'release', 'seqno': 2},
+ {'event': 'release', 'seqno': 3},
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(3, volume.release_seqno.value)
+
+ conn.delete(['context', context, 'releases', release])
+ self.assertEqual([
+ {'event': 'release', 'seqno': 1},
+ {'event': 'release', 'seqno': 2},
+ {'event': 'release', 'seqno': 3},
+ {'event': 'release', 'seqno': 4},
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(4, volume.release_seqno.value)
+
+ def test_IncrementReleasesSeqnoOnDependenciesChange(self):
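+ # Changing a context's dependencies also bumps release_seqno and broadcasts
+ # a 'release' event.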
+ events = []
+ volume = self.start_master()
+ this.broadcast = lambda x: events.append(x)
+ conn = Connection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.assertEqual([
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(0, volume.release_seqno.value)
+
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ])))
+ release = conn.upload(['context', context, 'releases'], StringIO(bundle))
+ self.assertEqual([
+ {'seqno': 1, 'event': 'release'}
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(1, volume.release_seqno.value)
+ del events[:]
+
+ conn.put(['context', context], {
+ 'dependencies': 'dep',
+ })
+ self.assertEqual([
+ {'event': 'release', 'seqno': 2},
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(2, volume.release_seqno.value)
+
+ def test_IncrementReleasesSeqnoOnDeletes(self):
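+ # Deleting a context counts as a release change and bumps release_seqno.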
+ events = []
+ volume = self.start_master()
+ this.broadcast = lambda x: events.append(x)
+ conn = Connection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.assertEqual([
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(0, volume.release_seqno.value)
+
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ])))
+ release = conn.upload(['context', context, 'releases'], StringIO(bundle))
+ self.assertEqual([
+ {'seqno': 1, 'event': 'release'}
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(1, volume.release_seqno.value)
+ del events[:]
+
+ conn.delete(['context', context])
+ self.assertEqual([
+ {'event': 'release', 'seqno': 2},
+ ], [i for i in events if i['event'] == 'release'])
+ self.assertEqual(2, volume.release_seqno.value)
+ del events[:]
+
+ def test_RestoreReleasesSeqno(self):
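+ # release_seqno survives closing and reopening the volume.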
+ events = []
+ volume = self.start_master()
+ this.broadcast = lambda x: events.append(x)
+ conn = Connection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'dependencies': 'dep',
+ })
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ])))
+ release = conn.upload(['context', context, 'releases'], StringIO(bundle))
+ self.assertEqual(1, volume.release_seqno.value)
+
+ volume.close()
+ volume = Volume('master', [])
+ this.volume = volume
+ self.assertEqual(1, volume.release_seqno.value)
+
+ def test_Packages(self):
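+ # Successful OBS resolves are stored per repository under 'releases/resolves',
+ # scoped by the release key ('*', lsb_id, or lsb_id-release).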
+ self.override(obs, 'get_repos', lambda: [
+ {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']},
+ {'lsb_id': 'Debian', 'lsb_release': '6.0', 'name': 'Debian-6.0', 'arches': ['x86']},
+ {'lsb_id': 'Debian', 'lsb_release': '7.0', 'name': 'Debian-7.0', 'arches': ['x86_64']},
+ ])
+ self.override(obs, 'resolve', lambda repo, arch, names: {'version': '1.0'})
+
+ volume = self.start_master([User, model.Context])
+ conn = Connection()
+
+ guid = conn.post(['context'], {
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ conn.put(['context', guid, 'releases', '*'], {
+ 'binary': ['pkg1.bin', 'pkg2.bin'],
+ 'devel': 'pkg3.devel',
+ })
+ self.assertEqual({
+ '*': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
+ },
+ 'resolves': {
+ 'Gentoo-2.1': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
+ 'Debian-6.0': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
+ 'Debian-7.0': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
+ },
+ },
+ volume['context'][guid]['releases'])
+
+ guid = conn.post(['context'], {
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ conn.put(['context', guid, 'releases', 'Gentoo'], {
+ 'binary': ['pkg1.bin', 'pkg2.bin'],
+ 'devel': 'pkg3.devel',
+ })
+ self.assertEqual({
+ 'Gentoo': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
+ },
+ 'resolves': {
+ 'Gentoo-2.1': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
+ },
+ },
+ volume['context'][guid]['releases'])
+
+ guid = conn.post(['context'], {
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ conn.put(['context', guid, 'releases', 'Debian-6.0'], {
+ 'binary': ['pkg1.bin', 'pkg2.bin'],
+ 'devel': 'pkg3.devel',
+ })
+ self.assertEqual({
+ 'Debian-6.0': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
+ },
+ 'resolves': {
+ 'Debian-6.0': {'status': 'success', 'packages': ['pkg1.bin', 'pkg2.bin', 'pkg3.devel'], 'version': [[1, 0], 0]},
+ },
+ },
+ volume['context'][guid]['releases'])
+
+ def test_UnresolvedPackages(self):
+ self.override(obs, 'get_repos', lambda: [
+ {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']},
+ ])
+ self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, 'resolve failed'))
+
+ volume = self.start_master([User, model.Context])
+ conn = Connection()
+
+ guid = conn.post(['context'], {
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ conn.put(['context', guid, 'releases', '*'], {
+ 'binary': ['pkg1.bin', 'pkg2.bin'],
+ 'devel': 'pkg3.devel',
+ })
+ self.assertEqual({
+ '*': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['pkg1.bin', 'pkg2.bin'], 'devel': ['pkg3.devel']},
+ },
+ 'resolves': {
+ 'Gentoo-2.1': {'status': 'resolve failed'},
+ },
+ },
+ volume['context'][guid]['releases'])
+
+ def test_PackageOverrides(self):
+ self.override(obs, 'get_repos', lambda: [
+ {'lsb_id': 'Gentoo', 'lsb_release': '2.1', 'name': 'Gentoo-2.1', 'arches': ['x86', 'x86_64']},
+ {'lsb_id': 'Debian', 'lsb_release': '6.0', 'name': 'Debian-6.0', 'arches': ['x86']},
+ {'lsb_id': 'Debian', 'lsb_release': '7.0', 'name': 'Debian-7.0', 'arches': ['x86_64']},
+ ])
+
+ volume = self.start_master([User, model.Context])
+ conn = Connection()
+ guid = conn.post(['context'], {
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '1'))
+ conn.put(['context', guid, 'releases', '*'], {'binary': '1'})
+ self.assertEqual({
+ '*': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['1']},
+ },
+ 'resolves': {
+ 'Gentoo-2.1': {'status': '1'},
+ 'Debian-6.0': {'status': '1'},
+ 'Debian-7.0': {'status': '1'},
+ },
+ },
+ volume['context'][guid]['releases'])
+
+ self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '2'))
+ conn.put(['context', guid, 'releases', 'Debian'], {'binary': '2'})
+ self.assertEqual({
+ '*': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['1']},
+ },
+ 'Debian': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['2']},
+ },
+ 'resolves': {
+ 'Gentoo-2.1': {'status': '1'},
+ 'Debian-6.0': {'status': '2'},
+ 'Debian-7.0': {'status': '2'},
+ },
+ },
+ volume['context'][guid]['releases'])
+
+ self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '3'))
+ conn.put(['context', guid, 'releases', 'Debian-6.0'], {'binary': '3'})
+ self.assertEqual({
+ '*': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['1']},
+ },
+ 'Debian': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['2']},
+ },
+ 'Debian-6.0': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['3']},
+ },
+ 'resolves': {
+ 'Gentoo-2.1': {'status': '1'},
+ 'Debian-6.0': {'status': '3'},
+ 'Debian-7.0': {'status': '2'},
+ },
+ },
+ volume['context'][guid]['releases'])
+
+ self.override(obs, 'resolve', lambda repo, arch, names: enforce(False, '4'))
+ conn.put(['context', guid, 'releases', 'Debian'], {'binary': '4'})
+ self.assertEqual({
+ '*': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['1']},
+ },
+ 'Debian': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['4']},
+ },
+ 'Debian-6.0': {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {'binary': ['3']},
+ },
+ 'resolves': {
+ 'Gentoo-2.1': {'status': '1'},
+ 'Debian-6.0': {'status': '3'},
+ 'Debian-7.0': {'status': '4'},
+ },
+ },
+ volume['context'][guid]['releases'])
+
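+ # solve() returns the highest version among equally stable releases, whatever order they were stored in.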
+ def test_solve_SortByVersions(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ context = volume['context'].create({
+ 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 1}}}},
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 2}}}},
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 3}}}},
+ },
+ })
+ self.assertEqual(
+ {context: {'command': ('activity', 3), 'title': '', 'blob': '3', 'version': [[3], 0]}},
+ model.solve(volume, context))
+
+ context = volume['context'].create({
+ 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 3}}}},
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 2}}}},
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 1}}}},
+ },
+ })
+ self.assertEqual(
+ {context: {'command': ('activity', 3), 'title': '', 'blob': '3', 'version': [[3], 0]}},
+ model.solve(volume, context))
+
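+ # Stability outweighs version: the stable release wins over a newer buggy one and an older developer one.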
+ def test_solve_SortByStability(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ context = volume['context'].create({
+ 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'developer', 'version': [[1], 0], 'commands': {'activity': {'exec': 1}}}},
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 2}}}},
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'buggy', 'version': [[3], 0], 'commands': {'activity': {'exec': 3}}}},
+ },
+ })
+ self.assertEqual(
+ {context: {'command': ('activity', 2), 'title': '', 'blob': '2', 'version': [[2], 0]}},
+ model.solve(volume, context))
+
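+ # Requirements are collected transitively: context1 pulls in context2 and context4, and context2 pulls in context3.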
+ def test_solve_CollectDeps(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {
+ 'bundles': {'*-*': {}}, 'stability': 'stable',
+ 'version': [[1], 0],
+ 'requires': spec.parse_requires('context2; context4'),
+ 'commands': {'activity': {'exec': 'command'}},
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'context2', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '2': {'value': {
+ 'bundles': {'*-*': {}}, 'stability': 'stable',
+ 'version': [[2], 0],
+ 'commands': {'activity': {'exec': 0}},
+ 'requires': spec.parse_requires('context3'),
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'context3', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'context4', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'context2': {'title': '', 'blob': '2', 'version': [[2], 0]},
+ 'context3': {'title': '', 'blob': '3', 'version': [[3], 0]},
+ 'context4': {'title': '', 'blob': '4', 'version': [[4], 0]},
+ },
+ model.solve(volume, 'context1'))
+
+ def test_solve_CommandDeps(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {
+ 'bundles': {'*-*': {}}, 'stability': 'stable',
+ 'version': [[1], 0],
+ 'requires': [],
+ 'commands': {
+ 'activity': {'exec': 1, 'requires': spec.parse_requires('context2')},
+ 'application': {'exec': 2},
+ },
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'context2', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '2': {'value': {
+ 'bundles': {'*-*': {}}, 'stability': 'stable',
+ 'version': [[2], 0],
+ 'commands': {'activity': {'exec': 0}},
+ 'requires': [],
+ }},
+ },
+ })
+
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('activity', 1)},
+ 'context2': {'title': '', 'blob': '2', 'version': [[2], 0]},
+ },
+ model.solve(volume, 'context1', command='activity'))
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('application', 2)},
+ },
+ model.solve(volume, 'context1', command='application'))
+
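+ # Version conditions in 'requires' (<, <=, >, >=, =) pick which release of the dependency gets solved.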
+ def test_solve_DepConditions(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ volume['context'].create({
+ 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
+ '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
+ '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep < 3'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '2', 'version': [[2], 0]},
+ },
+ model.solve(volume, 'context1'))
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep <= 3'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
+ },
+ model.solve(volume, 'context1'))
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep > 2'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
+ },
+ model.solve(volume, 'context1'))
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep >= 2'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
+ },
+ model.solve(volume, 'context1'))
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep > 2; dep < 5'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
+ },
+ model.solve(volume, 'context1'))
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep > 2; dep <= 3'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
+ },
+ model.solve(volume, 'context1'))
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep = 1'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '1', 'version': [[1], 0]},
+ },
+ model.solve(volume, 'context1'))
+
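+ # If the newest release leads to conflicting dependency constraints, the solver falls back to an alternative release branch.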
+ def test_solve_SwitchToAlternativeBranch(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ volume['context'].create({
+ 'guid': 'context1', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '6': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0],
+ 'requires': spec.parse_requires('context4=1'), 'commands': {'activity': {'exec': 6}}}},
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0],
+ 'requires': spec.parse_requires('context2'), 'commands': {'activity': {'exec': 1}}}},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'context2', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}},
+ 'requires': spec.parse_requires('context3; context4=1')}},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'context3', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}},
+ 'requires': spec.parse_requires('context4=2')}},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'context4', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
+ '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+
+ self.assertEqual({
+ 'context1': {'title': '', 'blob': '6', 'version': [[1], 0], 'command': ('activity', 6)},
+ 'context4': {'title': '', 'blob': '5', 'version': [[1], 0]},
+ },
+ model.solve(volume, 'context1'))
+
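+ # Context-level 'dependencies' are combined with per-release 'requires' when narrowing the dependency version.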
+ def test_solve_CommonDeps(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ volume['context'].create({
+ 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
+ '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
+ '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {},
+ 'dependencies': 'dep=2',
+ 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires(''),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '2', 'version': [[2], 0]},
+ },
+ model.solve(volume, 'context'))
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {},
+ 'dependencies': 'dep<5',
+ 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep>1'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
+ },
+ model.solve(volume, 'context'))
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {},
+ 'dependencies': 'dep<4',
+ 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep<5'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
+ },
+ model.solve(volume, 'context'))
+
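+ # Per-release 'requires' alone is enough to pull extra dependencies into the solution.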
+ def test_solve_ExtraDeps(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+
+ volume['context'].create({
+ 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
+ '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
+ '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires(''),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ },
+ model.solve(volume, 'context'))
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep>1'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
+ },
+ model.solve(volume, 'context'))
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep<5'),
+ }},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
+ },
+ model.solve(volume, 'context'))
+
+ def test_solve_Nothing(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+ this.request = Request()
+
+ volume['context'].create({
+ 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[2], 0], 'commands': {'activity': {'exec': 0}}}},
+ '3': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[3], 0], 'commands': {'activity': {'exec': 0}}}},
+ '4': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[4], 0], 'commands': {'activity': {'exec': 0}}}},
+ '5': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[5], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ },
+ })
+ self.assertEqual(None, model.solve(volume, 'context'))
+
+ volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '10': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep=0'),
+ }},
+ },
+ })
+ self.assertEqual(None, model.solve(volume, 'context'))
+
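+ # Package-type dependencies are resolved to distro packages using the caller's lsb_id and lsb_release.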
+ def test_solve_Packages(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+ this.request = Request()
+
+ context = volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('package'),
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'package', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ 'resolves': {
+ 'Ubuntu-10.04': {'version': [[1], 0], 'packages': ['pkg1', 'pkg2']},
+ },
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'package': {'packages': ['pkg1', 'pkg2'], 'version': [[1], 0]},
+ },
+ model.solve(volume, context, lsb_id='Ubuntu', lsb_release='10.04'))
+
+ context = volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('dep; package'),
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'dep', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '2': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 0}}}},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'dep': {'title': '', 'blob': '2', 'version': [[1], 0]},
+ 'package': {'packages': ['pkg1', 'pkg2'], 'version': [[1], 0]},
+ },
+ model.solve(volume, context, lsb_id='Ubuntu', lsb_release='10.04'))
+
+ def test_solve_PackagesByLsbId(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+ this.request = Request()
+
+ context = volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('package1'),
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'package1', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ 'Ubuntu': {'value': {'binary': ['bin1', 'bin2'], 'devel': ['devel1', 'devel2']}},
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'package1': {'packages': ['bin1', 'bin2', 'devel1', 'devel2'], 'version': []},
+ },
+ model.solve(volume, context, lsb_id='Ubuntu'))
+
+ context = volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('package2'),
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'package2', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ 'Ubuntu': {'value': {'binary': ['bin']}},
+ 'resolves': {
+ 'Ubuntu-10.04': {'version': [[1], 0], 'packages': ['pkg1', 'pkg2']},
+ },
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'package2': {'packages': ['bin'], 'version': []},
+ },
+ model.solve(volume, context, lsb_id='Ubuntu', lsb_release='fake'))
+
+ def test_solve_PackagesByCommonAlias(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+ this.request = Request()
+
+ context = volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('package1'),
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'package1', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '*': {'value': {'binary': ['pkg1']}},
+ 'Ubuntu': {'value': {'binary': ['pkg2']}},
+ 'resolves': {
+ 'Ubuntu-10.04': {'version': [[1], 0], 'packages': ['pkg3']},
+ },
+ },
+ })
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'package1': {'packages': ['pkg1'], 'version': []},
+ },
+ model.solve(volume, context))
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'package1': {'packages': ['pkg1'], 'version': []},
+ },
+ model.solve(volume, context, lsb_id='Fake'))
+ self.assertEqual({
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'package1': {'packages': ['pkg1'], 'version': []},
+ },
+ model.solve(volume, context, lsb_id='Fake', lsb_release='fake'))
+
+ def test_solve_NoPackages(self):
+ volume = Volume('master', [Context])
+ this.volume = volume
+ this.request = Request()
+
+ context = volume['context'].create({
+ 'guid': 'context', 'type': ['activity'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ '1': {'value': {'bundles': {'*-*': {}}, 'stability': 'stable', 'version': [[1], 0], 'commands': {'activity': {'exec': 'command'}},
+ 'requires': spec.parse_requires('package'),
+ }},
+ },
+ })
+ volume['context'].create({
+ 'guid': 'package', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ },
+ })
+ self.assertEqual(None, model.solve(volume, context))
+
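+ # load_bundle() imports an uploaded activity bundle as a release and posts a notification announcing it.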
+ def test_load_bundle_Activity(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ conn = Connection()
+
+ bundle_id = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % bundle_id,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = developer',
+ 'requires = sugar>=0.88; dep'
+ ])
+ changelog = "LOG"
+ bundle = self.zips(
+ ('topdir/activity/activity.info', activity_info),
+ ('topdir/CHANGELOG', changelog),
+ )
+ blob = blobs.post(bundle)
+
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id])
+ context, release = model.load_bundle(blob, bundle_id)
+
+ self.assertEqual({
+ 'content-type': 'application/vnd.olpc-sugar',
+ 'content-disposition': 'attachment; filename="Activity-1%s"' % (mimetypes.guess_extension('application/vnd.olpc-sugar') or ''),
+ 'content-length': str(len(bundle)),
+ 'x-seqno': '6',
+ }, blobs.get(blob.digest).meta)
+ self.assertEqual(bundle_id, context)
+ self.assertEqual([[1], 0], release['version'])
+ self.assertEqual('developer', release['stability'])
+ self.assertEqual(['Public Domain'], release['license'])
+ self.assertEqual({
+ 'dep': [],
+ 'sugar': [([1, 0], [[0, 88], 0])],
+ },
+ release['requires'])
+ self.assertEqual({
+ '*-*': {
+ 'blob': blob.digest,
+ 'unpack_size': len(activity_info) + len(changelog),
+ },
+ },
+ release['bundles'])
+
+ post = volume['post'][release['announce']]
+ assert tests.UID in post['author']
+ self.assertEqual('notification', post['type'])
+ self.assertEqual({
+ 'en': 'Activity 1 release',
+ 'es': 'Activity 1 release',
+ 'fr': 'Activity 1 release',
+ }, post['title'])
+ self.assertEqual({
+ 'en-us': 'LOG',
+ }, post['message'])
+
+ def test_load_bundle_NonActivity(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ conn = Connection()
+
+ bundle_id = conn.post(['context'], {
+ 'type': 'book',
+ 'title': 'NonActivity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ bundle = 'non-activity'
+ blob = blobs.post(bundle)
+ blob.meta['content-type'] = 'application/pdf'
+
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id], version='2', license='GPL')
+ context, release = model.load_bundle(blob, bundle_id)
+
+ self.assertEqual({
+ 'content-type': 'application/pdf',
+ 'content-disposition': 'attachment; filename="NonActivity-2.pdf"',
+ 'content-length': str(len(bundle)),
+ 'x-seqno': '6',
+ }, blobs.get(blob.digest).meta)
+ self.assertEqual(bundle_id, context)
+ self.assertEqual([[2], 0], release['version'])
+ self.assertEqual(['GPL'], release['license'])
+
+ post = volume['post'][release['announce']]
+ assert tests.UID in post['author']
+ self.assertEqual('notification', post['type'])
+ self.assertEqual({
+ 'en': 'NonActivity 2 release',
+ 'es': 'NonActivity 2 release',
+ 'fr': 'NonActivity 2 release',
+ }, post['title'])
+ self.assertEqual({
+ 'en-us': '',
+ }, post['message'])
+
+ def test_load_bundle_ReuseActivityLicense(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ conn = Connection()
+
+ bundle_id = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ activity_info_wo_license = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % bundle_id,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ ])
+ bundle = self.zips(('topdir/activity/activity.info', activity_info_wo_license))
+ blob_wo_license = blobs.post(bundle)
+ self.assertRaises(http.BadRequest, model.load_bundle, blob_wo_license, bundle_id)
+
+ volume['context'].update(bundle_id, {'releases': {
+ 'new': {'value': {'release': 2, 'license': ['New']}},
+ }})
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id])
+ context, release = model.load_bundle(blob_wo_license, bundle_id)
+ self.assertEqual(['New'], release['license'])
+
+ volume['context'].update(bundle_id, {'releases': {
+ 'new': {'value': {'release': 2, 'license': ['New']}},
+ 'old': {'value': {'release': 1, 'license': ['Old']}},
+ }})
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id])
+ context, release = model.load_bundle(blob_wo_license, bundle_id)
+ self.assertEqual(['New'], release['license'])
+
+ volume['context'].update(bundle_id, {'releases': {
+ 'new': {'value': {'release': 2, 'license': ['New']}},
+ 'old': {'value': {'release': 1, 'license': ['Old']}},
+ 'newest': {'value': {'release': 3, 'license': ['Newest']}},
+ }})
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id])
+ context, release = model.load_bundle(blob_wo_license, bundle_id)
+ self.assertEqual(['Newest'], release['license'])
+
+ def test_load_bundle_ReuseNonActivityLicense(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ conn = Connection()
+
+ bundle_id = conn.post(['context'], {
+ 'type': 'book',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ blob = blobs.post('non-activity')
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id], version='1')
+ self.assertRaises(http.BadRequest, model.load_bundle, blob, bundle_id)
+
+ volume['context'].update(bundle_id, {'releases': {
+ 'new': {'value': {'release': 2, 'license': ['New']}},
+ }})
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id], version='1')
+ context, release = model.load_bundle(blob, bundle_id)
+ self.assertEqual(['New'], release['license'])
+
+ volume['context'].update(bundle_id, {'releases': {
+ 'new': {'value': {'release': 2, 'license': ['New']}},
+ 'old': {'value': {'release': 1, 'license': ['Old']}},
+ }})
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id], version='1')
+ context, release = model.load_bundle(blob, bundle_id)
+ self.assertEqual(['New'], release['license'])
+
+ volume['context'].update(bundle_id, {'releases': {
+ 'new': {'value': {'release': 2, 'license': ['New']}},
+ 'old': {'value': {'release': 1, 'license': ['Old']}},
+ 'newest': {'value': {'release': 3, 'license': ['Newest']}},
+ }})
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id], version='1')
+ context, release = model.load_bundle(blob, bundle_id)
+ self.assertEqual(['Newest'], release['license'])
+
+ def test_load_bundle_WrongContextType(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ conn = Connection()
+
+ bundle_id = conn.post(['context'], {
+ 'type': 'group',
+ 'title': 'NonActivity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ blob = blobs.post('non-activity')
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id], version='2', license='GPL')
+ self.assertRaises(http.BadRequest, model.load_bundle, blob, bundle_id)
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % bundle_id,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = developer',
+ 'requires = sugar>=0.88; dep'
+ ])
+ changelog = "LOG"
+ bundle = self.zips(
+ ('topdir/activity/activity.info', activity_info),
+ ('topdir/CHANGELOG', changelog),
+ )
+ blob = blobs.post(bundle)
+ self.assertRaises(http.BadRequest, model.load_bundle, blob, bundle_id)
+
+ def test_load_bundle_MissedContext(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
+ conn = Connection()
+
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = developer',
+ 'requires = sugar>=0.88; dep'
+ ])))
+ blob = blobs.post(bundle)
+
+ this.principal = Principal(tests.UID)
+ this.request = Request()
+ self.assertRaises(http.NotFound, model.load_bundle, blob, initial=False)
+
+ def test_load_bundle_CreateContext(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
+ conn = Connection()
+
+ bundle = self.zips(
+ ('ImageViewer.activity/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'bundle_id = org.laptop.ImageViewerActivity',
+ 'name = Image Viewer',
+ 'summary = The Image Viewer activity is a simple and fast image viewer tool',
+ 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.',
+ 'homepage = http://wiki.sugarlabs.org/go/Activities/Image_Viewer',
+ 'activity_version = 1',
+ 'license = GPLv2+',
+ 'icon = activity-imageviewer',
+ 'exec = true',
+ 'mime_types = image/bmp;image/gif',
+ ])),
+ ('ImageViewer.activity/activity/activity-imageviewer.svg', ''),
+ )
+ blob = blobs.post(bundle)
+
+ this.principal = Principal(tests.UID)
+ this.request = Request()
+ context, release = model.load_bundle(blob, initial=True)
+ self.assertEqual('org.laptop.ImageViewerActivity', context)
+
+ context = volume['context'].get('org.laptop.ImageViewerActivity')
+ self.assertEqual({'en': 'Image Viewer'}, context['title'])
+ self.assertEqual({'en': 'The Image Viewer activity is a simple and fast image viewer tool'}, context['summary'])
+ self.assertEqual({'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.'}, context['description'])
+ self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage'])
+ self.assertEqual(['image/bmp', 'image/gif'], context['mime_types'])
+ assert context['ctime'] > 0
+ assert context['mtime'] > 0
+ self.assertEqual({tests.UID: {'role': 3, 'name': 'user', 'order': 0}}, context['author'])
+
+ post = volume['post'][release['announce']]
+ assert tests.UID in post['author']
+ self.assertEqual('notification', post['type'])
+ self.assertEqual({
+ 'en': 'Image Viewer 1 release',
+ 'es': 'Image Viewer 1 release',
+ 'fr': 'Image Viewer 1 release',
+ }, post['title'])
+
+ def test_load_bundle_UpdateContext(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ conn = Connection()
+ self.touch(('master/etc/authorization.conf', [
+ '[permissions]',
+ '%s = admin' % tests.UID,
+ ]))
+
+ conn.post(['context'], {
+ 'guid': 'org.laptop.ImageViewerActivity',
+ 'type': 'activity',
+ 'title': {'en': ''},
+ 'summary': {'en': ''},
+ 'description': {'en': ''},
+ })
+ svg = '\n'.join([
+ '<?xml version="1.0" encoding="UTF-8"?>',
+ '<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" [',
+ ' <!ENTITY fill_color "#123456">',
+ ' <!ENTITY stroke_color "#123456">',
+ ']>',
+ '<svg xmlns="http://www.w3.org/2000/svg" width="50" height="50">',
+ ' <rect x="3" y="7" width="44" height="36" style="fill:&fill_color;;stroke:&stroke_color;;stroke-width:3"/>',
+ ' <polyline points="15,7 25,1 35,7" style="fill:none;;stroke:&stroke_color;;stroke-width:1.25"/>',
+ ' <circle cx="14" cy="19" r="4.5" style="fill:&stroke_color;;stroke:&stroke_color;;stroke-width:1.5"/>',
+ ' <polyline points="3,36 16,32 26,35" style="fill:none;;stroke:&stroke_color;;stroke-width:2.5"/>',
+ ' <polyline points="15,43 37,28 47,34 47,43" style="fill:&stroke_color;;stroke:&stroke_color;;stroke-width:3"/>',
+ ' <polyline points="22,41.5 35,30 27,41.5" style="fill:&fill_color;;stroke:none;;stroke-width:0"/>',
+ ' <polyline points="26,23 28,25 30,23" style="fill:none;;stroke:&stroke_color;;stroke-width:.9"/>',
+ ' <polyline points="31.2,20 33.5,17.7 35.8,20" style="fill:none;;stroke:&stroke_color;;stroke-width:1"/>',
+ ' <polyline points="36,13 38.5,15.5 41,13" style="fill:none;;stroke:&stroke_color;;stroke-width:1"/>',
+ '</svg>',
+ ])
+ bundle = self.zips(
+ ('ImageViewer.activity/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'bundle_id = org.laptop.ImageViewerActivity',
+ 'name = Image Viewer',
+ 'summary = The Image Viewer activity is a simple and fast image viewer tool',
+ 'description = It has features one would expect of a standard image viewer, like zoom, rotate, etc.',
+ 'homepage = http://wiki.sugarlabs.org/go/Activities/Image_Viewer',
+ 'activity_version = 22',
+ 'license = GPLv2+',
+ 'icon = activity-imageviewer',
+ 'exec = true',
+ 'mime_types = image/bmp;image/gif',
+ ])),
+ ('ImageViewer.activity/locale/ru/LC_MESSAGES/org.laptop.ImageViewerActivity.mo',
+ base64.b64decode('3hIElQAAAAAMAAAAHAAAAHwAAAARAAAA3AAAAAAAAAAgAQAADwAAACEBAAAOAAAAMQEAAA0AAABAAQAACgAAAE4BAAAMAAAAWQEAAA0AAABmAQAAJwAAAHQBAAAUAAAAnAEAABAAAACxAQAABwAAAMIBAAAIAAAAygEAANEBAADTAQAAIQAAAKUDAAATAAAAxwMAABwAAADbAwAAFwAAAPgDAAAhAAAAEAQAAB0AAAAyBAAAQAAAAFAEAAA9AAAAkQQAADUAAADPBAAAFAAAAAUFAAAQAAAAGgUAAAEAAAACAAAABwAAAAAAAAADAAAAAAAAAAwAAAAJAAAAAAAAAAoAAAAEAAAAAAAAAAAAAAALAAAABgAAAAgAAAAFAAAAAENob29zZSBkb2N1bWVudABEb3dubG9hZGluZy4uLgBGaXQgdG8gd2luZG93AEZ1bGxzY3JlZW4ASW1hZ2UgVmlld2VyAE9yaWdpbmFsIHNpemUAUmV0cmlldmluZyBzaGFyZWQgaW1hZ2UsIHBsZWFzZSB3YWl0Li4uAFJvdGF0ZSBhbnRpY2xvY2t3aXNlAFJvdGF0ZSBjbG9ja3dpc2UAWm9vbSBpbgBab29tIG91dABQcm9qZWN0LUlkLVZlcnNpb246IFBBQ0tBR0UgVkVSU0lPTgpSZXBvcnQtTXNnaWQtQnVncy1UbzogClBPVC1DcmVhdGlvbi1EYXRlOiAyMDEyLTA5LTI3IDE0OjU3LTA0MDAKUE8tUmV2aXNpb24tRGF0ZTogMjAxMC0wOS0yMiAxMzo1MCswMjAwCkxhc3QtVHJhbnNsYXRvcjoga3JvbTlyYSA8a3JvbTlyYUBnbWFpbC5jb20+Ckxhbmd1YWdlLVRlYW06IExBTkdVQUdFIDxMTEBsaS5vcmc+Ckxhbmd1YWdlOiAKTUlNRS1WZXJzaW9uOiAxLjAKQ29udGVudC1UeXBlOiB0ZXh0L3BsYWluOyBjaGFyc2V0PVVURi04CkNvbnRlbnQtVHJhbnNmZXItRW5jb2Rpbmc6IDhiaXQKUGx1cmFsLUZvcm1zOiBucGx1cmFscz0zOyBwbHVyYWw9KG4lMTA9PTEgJiYgbiUxMDAhPTExID8gMCA6IG4lMTA+PTIgJiYgbiUxMDw9NCAmJiAobiUxMDA8MTAgfHwgbiUxMDA+PTIwKSA/IDEgOiAyKTsKWC1HZW5lcmF0b3I6IFBvb3RsZSAyLjAuMwoA0JLRi9Cx0LXRgNC40YLQtSDQtNC+0LrRg9C80LXQvdGCANCX0LDQs9GA0YPQt9C60LAuLi4A0KPQvNC10YHRgtC40YLRjCDQsiDQvtC60L3QtQDQn9C+0LvQvdGL0Lkg0Y3QutGA0LDQvQDQn9GA0L7RgdC80L7RgtGAINC60LDRgNGC0LjQvdC+0LoA0JjRgdGC0LjQvdC90YvQuSDRgNCw0LfQvNC10YAA0J/QvtC70YPRh9C10L3QuNC1INC40LfQvtCx0YDQsNC20LXQvdC40LksINC/0L7QtNC+0LbQtNC40YLQtS4uLgDQn9C+0LLQtdGA0L3Rg9GC0Ywg0L/RgNC+0YLQuNCyINGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC4ANCf0L7QstC10YDQvdGD0YLRjCDQv9C+INGH0LDRgdC+0LLQvtC5INGB0YLRgNC10LvQutC1ANCf0YDQuNCx0LvQuNC30LjRgtGMANCe0YLQtNCw0LvQuNGC0YwA')),
+ ('ImageViewer.activity/activity/activity-imageviewer.svg', svg),
+ )
+
+ blob = blobs.post(bundle)
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', 'org.laptop.ImageViewerActivity'])
+ context, release = model.load_bundle(blob, initial=True)
+
+ context = volume['context'].get('org.laptop.ImageViewerActivity')
+ self.assertEqual({
+ 'en': 'Image Viewer',
+ 'ru': u'Просмотр картинок',
+ },
+ context['title'])
+ self.assertEqual({
+ 'en': 'The Image Viewer activity is a simple and fast image viewer tool',
+ },
+ context['summary'])
+ self.assertEqual({
+ 'en': 'It has features one would expect of a standard image viewer, like zoom, rotate, etc.',
+ },
+ context['description'])
+ self.assertEqual(svg, file(blobs.get(context['artefact_icon']).path).read())
+ assert context['icon'] != 'missing.png'
+ assert context['logo'] != 'missing-logo.png'
+ self.assertEqual('http://wiki.sugarlabs.org/go/Activities/Image_Viewer', context['homepage'])
+ self.assertEqual(['image/bmp', 'image/gif'], context['mime_types'])
+
+ def test_load_bundle_3rdPartyRelease(self):
+ i18n._default_langs = ['en']
+ volume = self.start_master()
+ blobs = volume.blobs
+ volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2})
+ conn = Connection()
+
+ bundle_id = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ bundle = self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity2',
+ 'bundle_id = %s' % bundle_id,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = developer',
+ ])))
+ blob = blobs.post(bundle)
+ this.principal = Principal(tests.UID2)
+ this.request = Request(method='POST', path=['context', bundle_id])
+ context, release = model.load_bundle(blob, bundle_id)
+
+ assert tests.UID in volume['context'][bundle_id]['author']
+ assert tests.UID2 not in volume['context'][bundle_id]['author']
+ self.assertEqual({'en': 'Activity'}, volume['context'][bundle_id]['title'])
+
+ post = volume['post'][release['announce']]
+ assert tests.UID not in post['author']
+ assert tests.UID2 in post['author']
+ self.assertEqual('notification', post['type'])
+ self.assertEqual({
+ 'en': 'Activity 1 third-party release',
+ 'es': 'Activity 1 third-party release',
+ 'fr': 'Activity 1 third-party release',
+ }, post['title'])
+
+ blobs.delete(blob.digest)
+ blob = blobs.post(bundle)
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id])
+ context, release = model.load_bundle(blob, bundle_id)
+
+ assert tests.UID in volume['context'][bundle_id]['author']
+ assert tests.UID2 not in volume['context'][bundle_id]['author']
+ self.assertEqual({'en': 'Activity2'}, volume['context'][bundle_id]['title'])
+
+ post = volume['post'][release['announce']]
+ assert tests.UID in post['author']
+ assert tests.UID2 not in post['author']
+ self.assertEqual('notification', post['type'])
+ self.assertEqual({
+ 'en': 'Activity2 1 release',
+ 'es': 'Activity2 1 release',
+ 'fr': 'Activity2 1 release',
+ }, post['title'])
+
+ def test_load_bundle_PopulateRequires(self):
+ volume = self.start_master()
+ blobs = volume.blobs
+ conn = Connection()
+
+ bundle_id = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ bundle = self.zips(
+ ('ImageViewer.activity/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'bundle_id = %s' % bundle_id,
+ 'name = Image Viewer',
+ 'activity_version = 22',
+ 'license = GPLv2+',
+ 'icon = activity-imageviewer',
+ 'exec = true',
+ 'requires = dep1, dep2<10, dep3<=20, dep4>30, dep5>=40, dep6>5<7, dep7>=1<=3',
+ ])),
+ ('ImageViewer.activity/activity/activity-imageviewer.svg', ''),
+ )
+ blob = blobs.post(bundle)
+ this.principal = Principal(tests.UID)
+ this.request = Request(method='POST', path=['context', bundle_id])
+ context, release = model.load_bundle(blob, bundle_id)
+
+ self.assertEqual({
+ 'dep5': [([1, 0], [[40], 0])],
+ 'dep4': [([1], [[30], 0])],
+ 'dep7': [([1, 0], [[1], 0]), ([-1, 0], [[3], 0])],
+ 'dep6': [([1], [[5], 0]), ([-1], [[7], 0])],
+ 'dep1': [],
+ 'dep3': [([-1, 0], [[20], 0])],
+ 'dep2': [([-1], [[10], 0])],
+ },
+ release['requires'])
+
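+ # apply_batch() replays queued offline requests under the recorded principal and drops the batch files once everything succeeded.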
+ def test_apply_batch(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = self.start_master([User, Document])
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'prop': '1'},
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'prop': '2'},
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ model.apply_batch('./batch')
+
+ self.assertEqual(sorted([
+ {'prop': '1', 'author': [{'name': 'test', 'role': ACL.ORIGINAL}]},
+ {'prop': '2', 'author': [{'name': 'test', 'role': ACL.ORIGINAL}]},
+ ]),
+ sorted(this.call(method='GET', path=['document'], reply=['prop', 'author'])['result']))
+ assert not exists('batch')
+ assert not exists('batch.meta')
+
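+ # Client-generated GUIDs are remapped to server-side ones when listed in 'keys'; a mismatch between 'keys' and the posted content is rejected.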
+ def test_apply_batch_MapPK(self):
+
+ class Document(db.Resource):
+ pass
+
+ volume = self.start_master([User, Document])
+ self.override(toolkit, 'uuid', lambda: 'local')
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {},
+ 'keys': ['guid'],
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ self.assertRaises(http.BadRequest, model.apply_batch, './batch')
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote'},
+ 'keys': ['guid'],
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ model.apply_batch('./batch')
+
+ self.assertEqual(sorted([
+ {'guid': 'local'},
+ ]),
+ sorted(this.call(method='GET', path=['document'])['result']))
+
+ def test_apply_batch_MapFKAfterCreatingPK(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = self.start_master([User, Document])
+ self.uuid = 0
+
+ def uuid():
+ self.uuid += 1
+ return 'local%s' % self.uuid
+ self.override(toolkit, 'uuid', uuid)
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote', 'prop': ''},
+ 'keys': ['guid'],
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'prop': 'remote'},
+ 'keys': ['prop'],
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ model.apply_batch('./batch')
+
+ self.assertEqual(sorted([
+ {'prop': '', 'guid': 'local1'},
+ {'prop': 'local1', 'guid': 'local2'},
+ ]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+
+ def test_apply_batch_MapFKBeforeCreatingPK(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = self.start_master([User, Document])
+ self.uuid = 0
+
+ def uuid():
+ self.uuid += 1
+ return 'local%s' % self.uuid
+ self.override(toolkit, 'uuid', uuid)
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'prop': 'remote'},
+ 'keys': ['prop'],
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote', 'prop': ''},
+ 'keys': ['guid'],
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ model.apply_batch('./batch')
+
+ self.assertEqual(sorted([
+ {'prop': '', 'guid': 'local1'},
+ {'prop': 'local1', 'guid': 'local2'},
+ ]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+
+ def test_apply_batch_NoPKInPropsWithoutMap(self):
+
+ class Document(db.Resource):
+ pass
+
+ volume = self.start_master([User, Document])
+ self.override(toolkit, 'uuid', lambda: 'local')
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote'},
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ self.assertRaises(http.BadRequest, model.apply_batch, './batch')
+
+ def test_apply_batch_MapPathAfterCreatingPK(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = self.start_master([User, Document])
+ self.override(toolkit, 'uuid', lambda: 'local')
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote', 'prop': '1'},
+ 'keys': ['guid'],
+ }),
+ json.dumps({
+ 'op': {'method': 'PUT', 'path': ['document', 'remote']},
+ 'content': {'prop': '2'},
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ model.apply_batch('./batch')
+
+ self.assertEqual(sorted([
+ {'prop': '2', 'guid': 'local'},
+ ]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+
+ def test_apply_batch_MapPathBeforeCreatingPK(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = self.start_master([User, Document])
+ self.override(toolkit, 'uuid', lambda: 'local')
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'PUT', 'path': ['document', 'remote']},
+ 'content': {'prop': '2'},
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote', 'prop': '1'},
+ 'keys': ['guid'],
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ model.apply_batch('./batch')
+
+ self.assertEqual(sorted([
+ {'prop': '1', 'guid': 'local'},
+ ]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+ self.assertEqual({
+ 'guid_map': {'remote': 'local'},
+ 'failed': [[1, 1]],
+ 'principal': ['test', 0xF],
+ },
+ json.load(file('batch.meta')))
+
+ def test_apply_batch_Blobs(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Blob)
+ def blob(self, value):
+ return value
+
+ volume = self.start_master([User, Document])
+ self.override(toolkit, 'uuid', lambda: 'local')
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote'},
+ 'keys': ['guid'],
+ }),
+ json.dumps({
+ 'op': {'method': 'PUT', 'path': ['document', 'remote', 'blob']},
+ 'content': 'file',
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+ model.apply_batch('./batch')
+
+ digest = hashlib.sha1('file').hexdigest()
+ self.assertEqual(digest, volume['document']['local']['blob'])
+ self.assertEqual('file', file(volume.blobs.get(digest).path).read())
+
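+ # Failed requests stay queued: batch.meta records the failed range so apply_batch() can be re-run until everything applies.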
+ def test_apply_batch_Fails(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self_, value):
+ return value
+
+ @prop.setter
+ def prop(self_, value):
+ if value >= self.prop_to_fail:
+ raise RuntimeError()
+ return value
+
+ volume = self.start_master([User, Document])
+
+ self.touch(('batch', [
+ json.dumps({
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote1', 'prop': 1},
+ 'keys': ['guid'],
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote2', 'prop': 2},
+ 'keys': ['guid'],
+ }),
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'guid': 'remote3', 'prop': 3},
+ 'keys': ['guid'],
+ }),
+ ]))
+ self.touch(('batch.meta', [
+ json.dumps({
+ 'principal': ['test', 0xF],
+ }),
+ ]))
+
+ def uuid():
+ self.uuid += 1
+ return 'local%s' % self.uuid
+ self.uuid = 0
+ self.override(toolkit, 'uuid', uuid)
+
+ self.prop_to_fail = 1
+ model.apply_batch('./batch')
+ self.assertEqual(
+ sorted([]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+ self.assertEqual({
+ 'guid_map': {'remote1': 'local1', 'remote2': 'local2', 'remote3': 'local3'},
+ 'failed': [[1, 3]],
+ 'principal': ['test', 0xF],
+ },
+ json.load(file('batch.meta')))
+ assert exists('batch')
+
+ self.prop_to_fail = 2
+ model.apply_batch('./batch')
+ self.assertEqual(
+ sorted([{'guid': 'local1', 'prop': 1}]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+ self.assertEqual({
+ 'guid_map': {'remote1': 'local1', 'remote2': 'local2', 'remote3': 'local3'},
+ 'failed': [[2, 3]],
+ 'principal': ['test', 0xF],
+ },
+ json.load(file('batch.meta')))
+ assert exists('batch')
+
+ self.prop_to_fail = 3
+ model.apply_batch('./batch')
+ self.assertEqual(
+ sorted([{'guid': 'local1', 'prop': 1}, {'guid': 'local2', 'prop': 2}]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+ self.assertEqual({
+ 'guid_map': {'remote1': 'local1', 'remote2': 'local2', 'remote3': 'local3'},
+ 'failed': [[3, 3]],
+ 'principal': ['test', 0xF],
+ },
+ json.load(file('batch.meta')))
+ assert exists('batch')
+
+ self.prop_to_fail = 4
+ model.apply_batch('./batch')
+ self.assertEqual(
+ sorted([{'guid': 'local1', 'prop': 1}, {'guid': 'local2', 'prop': 2}, {'guid': 'local3', 'prop': 3}]),
+ sorted(this.call(method='GET', path=['document'], reply=['guid', 'prop'])['result']))
+ assert not exists('batch.meta')
+ assert not exists('batch')
+
+
+class Principal(_Principal):
+
+ admin = True
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/node/node.py b/tests/units/node/node_routes.py
index 9e5206f..5981c2f 100755
--- a/tests/units/node/node.py
+++ b/tests/units/node/node_routes.py
@@ -16,24 +16,23 @@ from os.path import exists, join
from __init__ import tests
from sugar_network import db, node, model, client
-from sugar_network.client import Connection, keyfile, api
+from sugar_network.client import Connection
from sugar_network.toolkit import http, coroutine
-from sugar_network.client.auth import SugarCreds
+from sugar_network.node import routes as node_routes
from sugar_network.node.routes import NodeRoutes
from sugar_network.node.master import MasterRoutes
from sugar_network.model.context import Context
from sugar_network.node.model import User
from sugar_network.node.auth import Principal
-from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL, route
+from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL, route, File
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import http
+from sugar_network.toolkit import http, packets
-class NodeTest(tests.Test):
+class NodeRoutesTest(tests.Test):
def test_RegisterUser(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
guid = this.call(method='POST', path=['user'], environ=auth_env(tests.UID2), content={
'name': 'user',
@@ -61,7 +60,6 @@ class NodeTest(tests.Test):
pass
volume = self.start_master([Document, User], Routes)
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
guid = this.call(method='POST', path=['document'], environ=auth_env(tests.UID), content={})
@@ -79,7 +77,7 @@ class NodeTest(tests.Test):
def __init__(self, **kwargs):
NodeRoutes.__init__(self, 'node', **kwargs)
- @route('GET', [None, None], cmd='probe1', acl=ACL.AUTHOR)
+ @route('GET', [None, None], cmd='probe1', acl=ACL.AUTH | ACL.AUTHOR)
def probe1(self):
pass
@@ -91,8 +89,6 @@ class NodeTest(tests.Test):
pass
volume = self.start_master([Document, User], Routes)
- conn = Connection(creds=SugarCreds(keyfile.value))
-
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2})
@@ -106,7 +102,6 @@ class NodeTest(tests.Test):
def test_ForbiddenCommandsForUserResource(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
this.call(method='POST', path=['user'], environ=auth_env(tests.UID2), content={
'name': 'user1',
@@ -134,12 +129,13 @@ class NodeTest(tests.Test):
def __init__(self, **kwargs):
NodeRoutes.__init__(self, 'node', **kwargs)
- @route('PROBE', acl=ACL.SUPERUSER)
+ @route('PROBE', acl=ACL.AUTH)
def probe(self):
+ if not this.principal.cap_create_with_guid:
+ raise http.Forbidden()
return 'ok'
volume = self.start_master([User], Routes)
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
volume['user'].create({'guid': tests.UID2, 'name': 'test', 'pubkey': tests.PUBKEY2})
@@ -156,7 +152,6 @@ class NodeTest(tests.Test):
return value
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
volume['user'].create({'guid': tests.UID2, 'name': 'user', 'pubkey': tests.PUBKEY2})
@@ -172,12 +167,11 @@ class NodeTest(tests.Test):
class Document(db.Resource):
- @db.indexed_property(slot=1, acl=ACL.PUBLIC | ACL.AUTHOR)
+ @db.indexed_property(slot=1, acl=ACL.PUBLIC | ACL.AUTH | ACL.AUTHOR)
def prop(self, value):
return value
volume = self.start_master([User, Document])
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
volume['user'].create({'guid': tests.UID2, 'name': 'user', 'pubkey': tests.PUBKEY2})
@@ -196,12 +190,12 @@ class NodeTest(tests.Test):
def __init__(self, **kwargs):
NodeRoutes.__init__(self, 'node', **kwargs)
- @route('PROBE', acl=ACL.SUPERUSER)
+ @route('PROBE', acl=ACL.AUTH)
def probe(self):
- pass
+ if not this.principal.cap_create_with_guid:
+ raise http.Forbidden()
volume = self.start_master([User], Routes)
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
self.assertRaises(http.Forbidden, this.call, method='PROBE', environ=auth_env(tests.UID))
@@ -219,19 +213,13 @@ class NodeTest(tests.Test):
def __init__(self, **kwargs):
NodeRoutes.__init__(self, 'node', **kwargs)
- @route('PROBE1', acl=ACL.AUTH)
- def probe1(self, request):
- pass
-
- @route('PROBE2', acl=ACL.SUPERUSER)
- def probe2(self, request):
+ @route('PROBE', acl=ACL.AUTH)
+ def probe(self, request):
pass
volume = self.start_master([User], Routes)
- conn = Connection(creds=SugarCreds(keyfile.value))
- self.assertRaises(http.Unauthorized, this.call, method='PROBE1')
- self.assertRaises(http.Unauthorized, this.call, method='PROBE2')
+ self.assertRaises(http.Unauthorized, this.call, method='PROBE')
def test_authorize_DefaultPermissions(self):
@@ -240,12 +228,12 @@ class NodeTest(tests.Test):
def __init__(self, **kwargs):
NodeRoutes.__init__(self, 'node', **kwargs)
- @route('PROBE', acl=ACL.SUPERUSER)
+ @route('PROBE', acl=ACL.AUTH)
def probe(self, request):
- pass
+ if not this.principal.cap_create_with_guid:
+ raise http.Forbidden()
volume = self.start_master([User], Routes)
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
self.assertRaises(http.Forbidden, this.call, method='PROBE', environ=auth_env(tests.UID))
@@ -259,7 +247,6 @@ class NodeTest(tests.Test):
def test_SetUser(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
guid = this.call(method='POST', path=['context'], environ=auth_env(tests.UID), content={
@@ -274,7 +261,6 @@ class NodeTest(tests.Test):
def test_find_MaxLimit(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
this.call(method='POST', path=['context'], environ=auth_env(tests.UID), content={
@@ -305,7 +291,6 @@ class NodeTest(tests.Test):
def test_DeletedDocuments(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
guid = this.call(method='POST', path=['context'], environ=auth_env(tests.UID), content={
@@ -327,24 +312,22 @@ class NodeTest(tests.Test):
self.assertEqual([], this.call(method='GET', path=['context'])['result'])
def test_CreateGUID(self):
- # TODO Temporal security hole, see TODO
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
+
this.call(method='POST', path=['context'], environ=auth_env(tests.UID), content={
'guid': 'foo',
'type': 'activity',
'title': 'title',
'summary': 'summary',
'description': 'description',
- }, principal=Admin('admin'))
+ }, principal=Principal('admin', 0xF))
self.assertEqual(
{'guid': 'foo', 'title': 'title'},
this.call(method='GET', path=['context', 'foo'], reply=['guid', 'title']))
def test_CreateMalformedGUID(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
self.assertRaises(http.BadRequest, this.call, method='POST', path=['context'], environ=auth_env(tests.UID), content={
@@ -353,11 +336,10 @@ class NodeTest(tests.Test):
'title': 'title',
'summary': 'summary',
'description': 'description',
- }, principal=Admin('admin'))
+ }, principal=Principal('admin', 0xF))
def test_FailOnExistedGUID(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
guid = this.call(method='POST', path=['context'], environ=auth_env(tests.UID), content={
@@ -373,11 +355,11 @@ class NodeTest(tests.Test):
'title': 'title',
'summary': 'summary',
'description': 'description',
- }, principal=Admin('admin'))
+ }, principal=Principal('admin', 0xF))
def test_PackagesRoute(self):
volume = self.start_master()
- client = Connection(creds=SugarCreds(keyfile.value))
+ client = Connection()
self.touch(('master/files/packages/repo/arch/package', 'file'))
volume.blobs.populate()
@@ -389,7 +371,7 @@ class NodeTest(tests.Test):
def test_PackageUpdatesRoute(self):
volume = self.start_master()
- ipc = Connection(creds=SugarCreds(keyfile.value))
+ ipc = Connection()
self.touch('master/files/packages/repo/1', 'master/files/packages/repo/1.1')
volume.blobs.populate()
@@ -424,9 +406,122 @@ class NodeTest(tests.Test):
sorted(json.loads(response.content)))
assert 'last-modified' not in response.headers
- def test_release(self):
+ def test_SubmitReleasesViaAggpropsIface(self):
volume = self.start_master()
- conn = Connection(creds=SugarCreds(keyfile.value))
+ conn = Connection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'Activity',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ activity_info1 = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ bundle1 = self.zips(('topdir/activity/activity.info', activity_info1))
+ release1 = conn.upload(['context', context, 'releases'], StringIO(bundle1))
+ assert release1 == str(hashlib.sha1(bundle1).hexdigest())
+ self.assertEqual({
+ release1: {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {
+ 'license': ['Public Domain'],
+ 'announce': next(volume['post'].find(query='title:1')[0]).guid,
+ 'version': [[1], 0],
+ 'requires': {},
+ 'commands': {'activity': {'exec': 'true'}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle1).hexdigest()), 'unpack_size': len(activity_info1)}},
+ 'stability': 'stable',
+ },
+ },
+ }, volume['context'][context]['releases'])
+ assert volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
+
+ activity_info2 = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = %s' % context,
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ])
+ bundle2 = self.zips(('topdir/activity/activity.info', activity_info2))
+ release2 = conn.upload(['context', context, 'releases'], StringIO(bundle2))
+ assert release2 == str(hashlib.sha1(bundle2).hexdigest())
+ self.assertEqual({
+ release1: {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {
+ 'license': ['Public Domain'],
+ 'announce': next(volume['post'].find(query='title:1')[0]).guid,
+ 'version': [[1], 0],
+ 'requires': {},
+ 'commands': {'activity': {'exec': 'true'}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle1).hexdigest()), 'unpack_size': len(activity_info1)}},
+ 'stability': 'stable',
+ },
+ },
+ release2: {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {
+ 'license': ['Public Domain'],
+ 'announce': next(volume['post'].find(query='title:2')[0]).guid,
+ 'version': [[2], 0],
+ 'requires': {},
+ 'commands': {'activity': {'exec': 'true'}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle2).hexdigest()), 'unpack_size': len(activity_info2)}},
+ 'stability': 'stable',
+ },
+ },
+ }, volume['context'][context]['releases'])
+ assert volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
+ assert volume.blobs.get(str(hashlib.sha1(bundle2).hexdigest())).exists
+
+ conn.delete(['context', context, 'releases', release1])
+ self.assertEqual({
+ release1: {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ },
+ release2: {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ 'value': {
+ 'license': ['Public Domain'],
+ 'announce': next(volume['post'].find(query='title:2')[0]).guid,
+ 'version': [[2], 0],
+ 'requires': {},
+ 'commands': {'activity': {'exec': 'true'}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle2).hexdigest()), 'unpack_size': len(activity_info2)}},
+ 'stability': 'stable',
+ },
+ },
+ }, volume['context'][context]['releases'])
+ assert not volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
+ assert volume.blobs.get(str(hashlib.sha1(bundle2).hexdigest())).exists
+
+ conn.delete(['context', context, 'releases', release2])
+ self.assertEqual({
+ release1: {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ },
+ release2: {
+ 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
+ },
+ }, volume['context'][context]['releases'])
+ assert not volume.blobs.get(str(hashlib.sha1(bundle1).hexdigest())).exists
+ assert not volume.blobs.get(str(hashlib.sha1(bundle2).hexdigest())).exists
+
+ def test_submit(self):
+ volume = self.start_master()
+ conn = Connection()
activity_info = '\n'.join([
'[Activity]',
@@ -448,7 +543,6 @@ class NodeTest(tests.Test):
self.assertEqual({
release: {
- 'seqno': 8,
'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
'value': {
'license': ['Public Domain'],
@@ -476,7 +570,7 @@ class NodeTest(tests.Test):
def test_Solve(self):
volume = self.start_master()
- conn = http.Connection(api.value, SugarCreds(keyfile.value))
+ conn = Connection()
activity_unpack = '\n'.join([
'[Activity]',
@@ -509,7 +603,7 @@ class NodeTest(tests.Test):
'title': 'title',
'summary': 'summary',
'description': 'description',
- }, principal=Admin('admin'))
+ }, principal=Principal('admin', 0xF))
conn.put(['context', 'package', 'releases', '*'], {'binary': ['package.bin']})
self.assertEqual({
@@ -539,7 +633,7 @@ class NodeTest(tests.Test):
def test_SolveWithArguments(self):
volume = self.start_master()
- conn = http.Connection(api.value, SugarCreds(keyfile.value))
+ conn = Connection()
activity_unpack = '\n'.join([
'[Activity]',
@@ -585,7 +679,7 @@ class NodeTest(tests.Test):
'title': 'title',
'summary': 'summary',
'description': 'description',
- }, principal=Admin('admin'))
+ }, principal=Principal('admin', 0xF))
volume['context'].update('package', {'releases': {
'resolves': {
'Ubuntu-10.04': {'version': [[1], 0], 'packages': ['package.bin']},
@@ -620,7 +714,7 @@ class NodeTest(tests.Test):
def test_Resolve(self):
volume = self.start_master()
- conn = http.Connection(api.value, SugarCreds(keyfile.value))
+ conn = Connection()
activity_info = '\n'.join([
'[Activity]',
@@ -651,7 +745,7 @@ class NodeTest(tests.Test):
'title': 'title',
'summary': 'summary',
'description': 'description',
- }, principal=Admin('admin'))
+ }, principal=Principal('admin', 0xF))
conn.put(['context', 'package', 'releases', '*'], {'binary': ['package.bin']})
response = Response()
@@ -671,7 +765,6 @@ class NodeTest(tests.Test):
return value
volume = self.start_master([Document, User])
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user1', 'pubkey': tests.PUBKEY})
volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2})
@@ -693,6 +786,42 @@ class NodeTest(tests.Test):
},
volume['document'][guid]['prop2'])
+ def test_AggpropReplaceAccess(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Aggregated, acl=ACL.READ | ACL.INSERT | ACL.REPLACE)
+ def prop1(self, value):
+ return value
+
+ @db.stored_property(db.Aggregated, acl=ACL.READ | ACL.INSERT | ACL.REPLACE | ACL.AUTHOR)
+ def prop2(self, value):
+ return value
+
+ volume = self.start_master([Document, User])
+ volume['user'].create({'guid': tests.UID, 'name': 'user1', 'pubkey': tests.PUBKEY})
+ volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2})
+
+ guid = this.call(method='POST', path=['document'], environ=auth_env(tests.UID), content={})
+ self.override(time, 'time', lambda: 0)
+
+ agg1 = this.call(method='POST', path=['document', guid, 'prop1'], environ=auth_env(tests.UID))
+ agg2 = this.call(method='POST', path=['document', guid, 'prop1'], environ=auth_env(tests.UID2))
+ self.assertEqual({
+ agg1: {'seqno': 4, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}, 'value': None},
+ agg2: {'seqno': 5, 'author': {tests.UID2: {'name': 'user2', 'order': 0, 'role': 1}}, 'value': None},
+ },
+ volume['document'][guid]['prop1'])
+ self.assertRaises(http.Forbidden, this.call, method='PUT', path=['document', guid, 'prop1', agg1], environ=auth_env(tests.UID2))
+ this.call(method='PUT', path=['document', guid, 'prop1', agg2], environ=auth_env(tests.UID2))
+
+ agg3 = this.call(method='POST', path=['document', guid, 'prop2'], environ=auth_env(tests.UID))
+ self.assertRaises(http.Forbidden, this.call, method='POST', path=['document', guid, 'prop2'], environ=auth_env(tests.UID2))
+ self.assertEqual({
+ agg3: {'seqno': 7, 'author': {tests.UID: {'name': 'user1', 'order': 0, 'role': 3}}, 'value': None},
+ },
+ volume['document'][guid]['prop2'])
+
def test_AggpropRemoveAccess(self):
class Document(db.Resource):
@@ -706,7 +835,6 @@ class NodeTest(tests.Test):
return value
volume = self.start_master([Document, User])
- conn = Connection(creds=SugarCreds(keyfile.value))
volume['user'].create({'guid': tests.UID, 'name': 'user1', 'pubkey': tests.PUBKEY})
volume['user'].create({'guid': tests.UID2, 'name': 'user2', 'pubkey': tests.PUBKEY2})
@@ -758,6 +886,252 @@ class NodeTest(tests.Test):
},
volume['document'][guid]['prop2'])
+ def test_diff_resource(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop1(self, value):
+ return value
+
+ @db.stored_property()
+ def prop2(self, value):
+ return value
+
+ @db.stored_property(db.Blob)
+ def prop3(self, value):
+ return value
+
+ @db.stored_property(db.Blob)
+ def prop4(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ router = Router(NodeRoutes('node', volume=volume))
+
+ volume['document'].create({
+ 'guid': 'guid',
+ 'prop1': '1',
+ 'prop2': 2,
+ 'prop3': volume.blobs.post('333', '3/3').digest,
+ })
+ volume['document'].update('guid', {'prop4': volume.blobs.post('4444', '4/4').digest})
+ self.utime('db/document/gu/guid', 1)
+
+ packet = packets.decode(StringIO(
+ ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff')]),
+ ))
+ self.assertEqual({
+ 'ranges': [[1, 4]],
+ 'patch': {
+ 'guid': {'value': 'guid', 'mtime': 1},
+ 'prop1': {'value': '1', 'mtime': 1},
+ 'prop2': {'value': 2, 'mtime': 1},
+ 'prop3': {'value': hashlib.sha1('333').hexdigest(), 'mtime': 1},
+ 'prop4': {'value': hashlib.sha1('4444').hexdigest(), 'mtime': 1},
+ },
+ },
+ packet.header)
+ self.assertEqual(sorted([
+ {'content-type': '4/4', 'content-length': '4', 'x-seqno': '3'},
+ {'content-type': '3/3', 'content-length': '3', 'x-seqno': '1'},
+ ]),
+ sorted([i.meta for i in packet]))
+
+ packet = packets.decode(StringIO(
+ ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
+ 'HTTP_X_RANGES': json.dumps([[1, 1]]),
+ })])))
+ self.assertEqual({
+ },
+ packet.header)
+ self.assertEqual(sorted([
+ ]),
+ sorted([i.meta for i in packet]))
+
+ packet = packets.decode(StringIO(
+ ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
+ 'HTTP_X_RANGES': json.dumps([[2, 2]]),
+ })])))
+ self.assertEqual({
+ 'ranges': [[1, 2]],
+ 'patch': {
+ 'guid': {'value': 'guid', 'mtime': 1},
+ 'prop1': {'value': '1', 'mtime': 1},
+ 'prop2': {'value': 2, 'mtime': 1},
+ 'prop3': {'value': hashlib.sha1('333').hexdigest(), 'mtime': 1},
+ },
+ },
+ packet.header)
+ self.assertEqual(sorted([
+ {'content-type': '3/3', 'content-length': '3', 'x-seqno': '1'},
+ ]),
+ sorted([i.meta for i in packet]))
+
+ packet = packets.decode(StringIO(
+ ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
+ 'HTTP_X_RANGES': json.dumps([[3, 3]]),
+ })])))
+ self.assertEqual({
+ },
+ packet.header)
+ self.assertEqual(sorted([
+ ]),
+ sorted([i.meta for i in packet]))
+
+ packet = packets.decode(StringIO(
+ ''.join([i for i in this.call(method='GET', path=['document', 'guid'], cmd='diff', environ={
+ 'HTTP_X_RANGES': json.dumps([[4, 4]]),
+ })])))
+ self.assertEqual({
+ 'ranges': [[3, 4]],
+ 'patch': {
+ 'prop4': {'value': hashlib.sha1('4444').hexdigest(), 'mtime': 1},
+ },
+ },
+ packet.header)
+ self.assertEqual(sorted([
+ {'content-type': '4/4', 'content-length': '4', 'x-seqno': '3'},
+ ]),
+ sorted([i.meta for i in packet]))
+
+ def test_diff_resource_NotForUsers(self):
+
+ class User(db.Resource):
+ pass
+
+ volume = db.Volume('.', [User])
+ router = Router(NodeRoutes('node', volume=volume))
+ volume['user'].create({'guid': 'guid'})
+
+ self.assertRaises(http.BadRequest, this.call, method='GET', path=['user', 'guid'], cmd='diff')
+
+ def test_grouped_diff(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ router = Router(NodeRoutes('node', volume=volume))
+
+ volume['document'].create({'guid': '1', 'prop': 'q'})
+ volume['document'].create({'guid': '2', 'prop': 'w'})
+ volume['document'].create({'guid': '3', 'prop': 'w'})
+ volume['document'].create({'guid': '4', 'prop': 'e'})
+ volume['document'].create({'guid': '5', 'prop': 'e'})
+ volume['document'].create({'guid': '6', 'prop': 'e'})
+ self.utime('db/document', 0)
+
+ self.assertEqual({
+ '1': [[1, 1]],
+ '2': [[2, 2]],
+ '3': [[3, 3]],
+ '4': [[4, 4]],
+ '5': [[5, 5]],
+ '6': [[6, 6]],
+ },
+ this.call(method='GET', path=['document'], cmd='diff'))
+
+ self.assertEqual({
+ 'q': [[1, 1]],
+ 'w': [[2, 3]],
+ 'e': [[4, 6]],
+ },
+ this.call(method='GET', path=['document'], cmd='diff', key='prop'))
+
+ def test_grouped_diff_Limits(self):
+ node_routes._GROUPED_DIFF_LIMIT = 2
+
+ class Document(db.Resource):
+ pass
+
+ volume = db.Volume('.', [Document])
+ router = Router(NodeRoutes('node', volume=volume))
+
+ volume['document'].create({'guid': '1'})
+ volume['document'].create({'guid': '2'})
+ volume['document'].create({'guid': '3'})
+ volume['document'].create({'guid': '4'})
+ volume['document'].create({'guid': '5'})
+ self.utime('db/document', 0)
+
+ self.assertEqual({
+ '1': [[1, 1]],
+ '2': [[2, 2]],
+ },
+ this.call(method='GET', path=['document'], cmd='diff'))
+
+ self.assertEqual({
+ '3': [[3, 3]],
+ '4': [[4, 4]],
+ },
+ this.call(method='GET', path=['document'], cmd='diff', environ={'HTTP_X_RANGES': json.dumps([[3, None]])}))
+
+ self.assertEqual({
+ '5': [[5, 5]],
+ },
+ this.call(method='GET', path=['document'], cmd='diff', environ={'HTTP_X_RANGES': json.dumps([[5, None]])}))
+
+ self.assertEqual({
+ },
+ this.call(method='GET', path=['document'], cmd='diff', environ={'HTTP_X_RANGES': json.dumps([[6, None]])}))
+
+ def test_grouped_diff_NotForUsers(self):
+
+ class User(db.Resource):
+ pass
+
+ volume = db.Volume('.', [User])
+ router = Router(NodeRoutes('node', volume=volume))
+ volume['user'].create({'guid': '1'})
+
+ self.assertRaises(http.BadRequest, this.call, method='GET', path=['user'], cmd='diff')
+
+ def test_apply(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property()
+ def prop(self, value):
+ return value
+
+ volume = self.start_master([Document, User])
+ conn = Connection()
+
+ conn.upload(cmd='apply', data=
+ json.dumps({
+ }) + '\n' +
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'prop': '1'},
+ }) + '\n' +
+ json.dumps({
+ 'op': {'method': 'POST', 'path': ['document']},
+ 'content': {'prop': '2'},
+ }) + '\n'
+ )
+
+ self.assertEqual(sorted([
+ {'prop': '1', 'author': [{'guid': tests.UID, 'name': 'test', 'role': ACL.ORIGINAL | ACL.INSYSTEM}]},
+ {'prop': '2', 'author': [{'guid': tests.UID, 'name': 'test', 'role': ACL.ORIGINAL | ACL.INSYSTEM}]},
+ ]),
+ sorted(this.call(method='GET', path=['document'], reply=['prop', 'author'])['result']))
+
+ def test_DoNotPassGuidsForCreate(self):
+
+ class TestDocument(db.Resource):
+ pass
+
+ volume = self.start_master([TestDocument, User])
+ volume['user'].create({'guid': tests.UID, 'name': 'user', 'pubkey': tests.PUBKEY})
+
+ self.assertRaises(http.BadRequest, this.call, method='POST', path=['testdocument'], content={'guid': 'foo'}, environ=auth_env(tests.UID))
+ guid = this.call(method='POST', path=['testdocument'], content={}, environ=auth_env(tests.UID))
+ assert guid
+
def auth_env(uid):
key = RSA.load_key(join(tests.root, 'data', uid))
@@ -769,10 +1143,5 @@ def auth_env(uid):
return {'HTTP_AUTHORIZATION': authorization}
-class Admin(Principal):
-
- admin = True
-
-
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/node/slave.py b/tests/units/node/slave.py
index 10e5742..9265a7b 100755
--- a/tests/units/node/slave.py
+++ b/tests/units/node/slave.py
@@ -10,8 +10,7 @@ from os.path import exists
from __init__ import tests
from sugar_network import db, toolkit
-from sugar_network.client import Connection, keyfile
-from sugar_network.client.auth import SugarCreds
+from sugar_network.client import Connection
from sugar_network.node import master_api
from sugar_network.node.master import MasterRoutes
from sugar_network.node.slave import SlaveRoutes
@@ -19,7 +18,7 @@ from sugar_network.node.auth import SugarAuth
from sugar_network.node.model import User
from sugar_network.db.volume import Volume
from sugar_network.toolkit.router import Router, File
-from sugar_network.toolkit import coroutine, http, parcel
+from sugar_network.toolkit import coroutine, http, packets
class SlaveTest(tests.Test):
@@ -58,8 +57,8 @@ class SlaveTest(tests.Test):
def test_online_sync_Push(self):
self.fork_master([User, self.Document])
- master = Connection('http://127.0.0.1:7777', creds=SugarCreds(keyfile.value))
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ master = Connection('http://127.0.0.1:7777')
+ slave = Connection('http://127.0.0.1:8888')
slave.post(cmd='online_sync')
self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
@@ -122,9 +121,10 @@ class SlaveTest(tests.Test):
def test_online_sync_Pull(self):
self.fork_master([User, self.Document])
- master = Connection('http://127.0.0.1:7777', creds=SugarCreds(keyfile.value))
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ master = Connection('http://127.0.0.1:7777')
+ slave = Connection('http://127.0.0.1:8888')
+ coroutine.sleep(1)
slave.post(cmd='online_sync')
self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
self.assertEqual([[1, None]], json.load(file('slave/var/push.ranges')))
@@ -186,9 +186,10 @@ class SlaveTest(tests.Test):
def test_online_sync_PullBlobs(self):
self.fork_master([User, self.Document])
- master = Connection('http://127.0.0.1:7777', creds=SugarCreds(keyfile.value))
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ master = Connection('http://127.0.0.1:7777')
+ slave = Connection('http://127.0.0.1:8888')
+ coroutine.sleep(1)
slave.post(cmd='online_sync')
self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
self.assertEqual([[1, None]], json.load(file('slave/var/push.ranges')))
@@ -203,8 +204,8 @@ class SlaveTest(tests.Test):
def test_online_sync_PullFromPreviouslyMergedRecord(self):
self.fork_master([User, self.Document])
- master = Connection('http://127.0.0.1:7777', creds=SugarCreds(keyfile.value))
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ master = Connection('http://127.0.0.1:7777')
+ slave = Connection('http://127.0.0.1:8888')
slave.post(cmd='online_sync')
self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
@@ -224,11 +225,11 @@ class SlaveTest(tests.Test):
self.assertEqual('1_', slave.get(['document', guid, 'title']))
def test_offline_sync_Import(self):
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ slave = Connection('http://127.0.0.1:8888')
self.touch(('blob1', 'a'))
self.touch(('blob2', 'bb'))
- parcel.encode_dir([
+ packets.encode_dir([
('push', {'from': '127.0.0.1:7777'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -255,7 +256,7 @@ class SlaveTest(tests.Test):
self.assertEqual(
sorted([
- ({'from': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
'guid': {'value': '1', 'mtime': 0},
@@ -268,22 +269,22 @@ class SlaveTest(tests.Test):
{'content-length': '2', 'path': 'foo/bar'},
{'commit': [[1, 2]]},
]),
- ({'ack': [[101, 103]], 'from': '127.0.0.1:7777', 'packet': 'ack', 'ranges': [[1, 3]], 'to': self.slave_routes.guid}, [
+ ({'ack': [[101, 103]], 'from': '127.0.0.1:7777', 'segment': 'ack', 'ranges': [[1, 3]], 'to': self.slave_routes.guid}, [
]),
- ({'from': self.slave_routes.guid, 'packet': 'push', 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'push', 'to': '127.0.0.1:7777'}, [
{'resource': 'document'},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[3, 100], [104, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[3, 100], [104, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
def test_offline_sync_ImportPush(self):
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ slave = Connection('http://127.0.0.1:8888')
self.touch(('blob1', 'a'))
self.touch(('blob2', 'bb'))
- parcel.encode_dir([
+ packets.encode_dir([
('push', {'from': '127.0.0.1:7777'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -309,7 +310,7 @@ class SlaveTest(tests.Test):
self.assertEqual(
sorted([
- ({'from': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
'guid': {'value': '1', 'mtime': 0},
@@ -322,18 +323,18 @@ class SlaveTest(tests.Test):
{'content-length': '2', 'path': 'foo/bar'},
{'commit': [[1, 2]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'push', 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'push', 'to': '127.0.0.1:7777'}, [
{'resource': 'document'},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[3, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[3, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
def test_offline_sync_ImportAck(self):
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ slave = Connection('http://127.0.0.1:8888')
- parcel.encode_dir([
+ packets.encode_dir([
('ack', {'ack': [[101, 103]], 'ranges': [[1, 3]], 'from': '127.0.0.1:7777', 'to': self.slave_routes.guid}, []),
],
root='sync', limit=99999999)
@@ -344,19 +345,19 @@ class SlaveTest(tests.Test):
self.assertEqual(
sorted([
- ({'ack': [[101, 103]], 'from': '127.0.0.1:7777', 'packet': 'ack', 'ranges': [[1, 3]], 'to': self.slave_routes.guid}, [
+ ({'ack': [[101, 103]], 'from': '127.0.0.1:7777', 'segment': 'ack', 'ranges': [[1, 3]], 'to': self.slave_routes.guid}, [
]),
- ({'from': self.slave_routes.guid, 'packet': 'push', 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'push', 'to': '127.0.0.1:7777'}, [
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, 100], [104, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, 100], [104, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
def test_offline_sync_GenerateRequestAfterImport(self):
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ slave = Connection('http://127.0.0.1:8888')
- parcel.encode_dir([
+ packets.encode_dir([
('push', {'from': 'another-slave'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
@@ -378,7 +379,7 @@ class SlaveTest(tests.Test):
self.assertEqual(
sorted([
- ({'from': 'another-slave', 'packet': u'push'}, [
+ ({'from': 'another-slave', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': '1', 'patch': {
'guid': {'value': '1', 'mtime': 0},
@@ -389,18 +390,18 @@ class SlaveTest(tests.Test):
}},
{'commit': [[1, 1]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'request', 'to': '127.0.0.1:7777', 'origin': 'another-slave', 'ranges': [[1, 1]]}, [
+ ({'from': self.slave_routes.guid, 'segment': 'request', 'to': '127.0.0.1:7777', 'origin': 'another-slave', 'ranges': [[1, 1]]}, [
]),
- ({'from': self.slave_routes.guid, 'packet': 'push', 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'push', 'to': '127.0.0.1:7777'}, [
{'resource': 'document'},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
def test_offline_sync_Export(self):
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ slave = Connection('http://127.0.0.1:8888')
class statvfs(object):
@@ -423,7 +424,7 @@ class SlaveTest(tests.Test):
self.assertEqual(
sorted([
- ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': guid, 'patch': {
'mtime': {'value': 0, 'mtime': self.slave_volume['document'].get(guid).meta('mtime')['mtime']},
@@ -433,13 +434,13 @@ class SlaveTest(tests.Test):
{'content-length': '1', 'content-type': 'application/octet-stream'},
{'commit': [[push_seqno, push_seqno + 1]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
def test_offline_sync_ContinuousExport(self):
- slave = Connection('http://127.0.0.1:8888', creds=SugarCreds(keyfile.value))
+ slave = Connection('http://127.0.0.1:8888')
class statvfs(object):
@@ -457,12 +458,12 @@ class SlaveTest(tests.Test):
RECORD = 1024 * 1024
slave.put(['document', guid1, 'title'], '.' * RECORD)
slave.put(['document', guid2, 'title'], '.' * RECORD)
- statvfs.f_bfree = parcel._RESERVED_DISK_SPACE + RECORD * 1.5
+ statvfs.f_bfree = packets._RESERVED_DISK_SPACE + RECORD * 1.5
slave.post(cmd='offline_sync', path=tests.tmpdir + '/sync')
self.assertEqual(
sorted([
- ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': guid1, 'patch': {
'mtime': {'value': 0, 'mtime': self.slave_volume['document'].get(guid1).meta('mtime')['mtime']},
@@ -470,15 +471,15 @@ class SlaveTest(tests.Test):
}},
{'commit': [[push_seqno, push_seqno]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
slave.post(cmd='offline_sync', path=tests.tmpdir + '/sync')
self.assertEqual(
sorted([
- ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': guid1, 'patch': {
'mtime': {'value': 0, 'mtime': self.slave_volume['document'].get(guid1).meta('mtime')['mtime']},
@@ -486,9 +487,9 @@ class SlaveTest(tests.Test):
}},
{'commit': [[push_seqno, push_seqno]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
- ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': guid2, 'patch': {
'mtime': {'value': 0, 'mtime': self.slave_volume['document'].get(guid2).meta('mtime')['mtime']},
@@ -497,15 +498,15 @@ class SlaveTest(tests.Test):
{'resource': 'user'},
{'commit': [[push_seqno + 1, push_seqno + 1]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
slave.post(cmd='offline_sync', path=tests.tmpdir + '/sync')
self.assertEqual(
sorted([
- ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': guid1, 'patch': {
'mtime': {'value': 0, 'mtime': self.slave_volume['document'].get(guid1).meta('mtime')['mtime']},
@@ -513,9 +514,9 @@ class SlaveTest(tests.Test):
}},
{'commit': [[push_seqno, push_seqno]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
- ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'guid': guid2, 'patch': {
'mtime': {'value': 0, 'mtime': self.slave_volume['document'].get(guid2).meta('mtime')['mtime']},
@@ -524,16 +525,16 @@ class SlaveTest(tests.Test):
{'resource': 'user'},
{'commit': [[push_seqno + 1, push_seqno + 1]]},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
- ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'packet': u'push'}, [
+ ({'from': self.slave_routes.guid, 'to': '127.0.0.1:7777', 'segment': 'push'}, [
{'resource': 'document'},
{'resource': 'user'},
]),
- ({'from': self.slave_routes.guid, 'packet': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
+ ({'from': self.slave_routes.guid, 'segment': 'pull', 'ranges': [[1, None]], 'to': '127.0.0.1:7777'}, [
]),
]),
- sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in parcel.decode_dir('sync')]))
+ sorted([(packet.header, [i.meta if isinstance(i, File) else i for i in packet]) for packet in packets.decode_dir('sync')]))
if __name__ == '__main__':
diff --git a/tests/units/toolkit/__main__.py b/tests/units/toolkit/__main__.py
index ee726bd..2d8530f 100644
--- a/tests/units/toolkit/__main__.py
+++ b/tests/units/toolkit/__main__.py
@@ -13,7 +13,7 @@ from router import *
from gbus import *
from i18n import *
from sat import *
-from parcel import *
+from packets import *
from ranges import *
if __name__ == '__main__':
diff --git a/tests/units/toolkit/http.py b/tests/units/toolkit/http.py
index cdf9198..f05da0d 100755
--- a/tests/units/toolkit/http.py
+++ b/tests/units/toolkit/http.py
@@ -108,6 +108,155 @@ class HTTPTest(tests.Test):
})
self.assertEqual('result', json.load(client.call(request)))
+ def test_call_SendGeneratorTypeData(self):
+
+ class Routes(object):
+
+ @route('POST', mime_type='application/json')
+ def probe(self):
+ return this.request.content.read()
+
+ self.server = coroutine.WSGIServer(('127.0.0.1', local.ipc_port.value), Router(Routes()))
+ coroutine.spawn(self.server.serve_forever)
+ coroutine.dispatch()
+ conn = http.Connection('http://127.0.0.1:%s' % local.ipc_port.value)
+
+ def data():
+ yield '1'
+ yield '2'
+ yield '3'
+
+ request = Request({
+ 'REQUEST_METHOD': 'POST',
+ 'PATH_INFO': '/',
+ }, content=data())
+ self.assertEqual('123', conn.call(request))
+
+ def test_DoNotRepostOn401(self):
+ requests = []
+
+ class Creds(object):
+
+ def logon(self, challenge):
+ return {'login': 'ok'}
+
+ class Routes(object):
+
+ @route('GET', mime_type='application/json')
+ def get(self):
+ requests.append(repr(this.request))
+ if this.request.environ.get('HTTP_LOGIN') != 'ok':
+ raise http.Unauthorized()
+ return this.request.content.read()
+
+ @route('POST', mime_type='application/json')
+ def post(self):
+ requests.append(repr(this.request))
+ if this.request.environ.get('HTTP_LOGIN') != 'ok':
+ raise http.Unauthorized()
+ return this.request.content.read()
+
+ self.server = coroutine.WSGIServer(('127.0.0.1', local.ipc_port.value), Router(Routes()))
+ coroutine.spawn(self.server.serve_forever)
+ coroutine.dispatch()
+
+ conn = http.Connection('http://127.0.0.1:%s' % local.ipc_port.value, creds=Creds())
+
+ request = Request({
+ 'REQUEST_METHOD': 'GET',
+ 'PATH_INFO': '/',
+ })
+ self.assertEqual('', conn.call(request))
+ self.assertEqual([
+ '<Request method=GET path=[] cmd=None query={}>',
+ '<Request method=GET path=[] cmd=None query={}>',
+ ], requests)
+ del requests[:]
+
+ request = Request({
+ 'REQUEST_METHOD': 'POST',
+ 'PATH_INFO': '/',
+ }, content='probe')
+ self.assertEqual('probe', conn.call(request))
+ self.assertEqual([
+ '<Request method=POST path=[] cmd=None query={}>',
+ ], requests)
+ del requests[:]
+
+ conn = http.Connection('http://127.0.0.1:%s' % local.ipc_port.value, creds=Creds())
+
+ request = Request({
+ 'REQUEST_METHOD': 'POST',
+ 'PATH_INFO': '/',
+ }, content='probe')
+ self.assertRaises(RuntimeError, conn.call, request)
+ self.assertEqual([
+ '<Request method=POST path=[] cmd=None query={}>',
+ ], requests)
+ del requests[:]
+
+ def test_AuthBeforePosting(self):
+ challenges = []
+ requests = []
+
+ class Creds(object):
+
+ def logon(self, challenge):
+ challenges.append(challenge)
+ return {'login': 'ok'}
+
+ class Routes(object):
+
+ @route('LOGIN')
+ def get(self):
+ requests.append(repr(this.request))
+ if this.request.environ.get('HTTP_LOGIN') != 'ok':
+ this.response['www-authenticate'] = 'login'
+ raise http.Unauthorized()
+
+ @route('POST', mime_type='application/json')
+ def post(self):
+ requests.append(repr(this.request))
+ if this.request.environ.get('HTTP_LOGIN') != 'ok':
+ this.response['www-authenticate'] = 'fail'
+ raise http.Unauthorized()
+ return this.request.content.read()
+
+ self.server = coroutine.WSGIServer(('127.0.0.1', local.ipc_port.value), Router(Routes()))
+ coroutine.spawn(self.server.serve_forever)
+ coroutine.dispatch()
+
+ conn = http.Connection('http://127.0.0.1:%s' % local.ipc_port.value, creds=Creds(), auth_request={'method': 'LOGIN'})
+
+ request = Request({
+ 'REQUEST_METHOD': 'POST',
+ 'PATH_INFO': '/',
+ }, content='probe')
+ self.assertEqual('probe', conn.call(request))
+ self.assertEqual([
+ '<Request method=LOGIN path=[] cmd=None query={}>',
+ '<Request method=LOGIN path=[] cmd=None query={}>',
+ '<Request method=POST path=[] cmd=None query={}>',
+ ], requests)
+ del requests[:]
+ self.assertEqual([
+ 'login',
+ ], challenges)
+ del challenges[:]
+
+ request = Request({
+ 'REQUEST_METHOD': 'POST',
+ 'PATH_INFO': '/',
+ }, content='probe')
+ self.assertEqual('probe', conn.call(request))
+ self.assertEqual([
+ '<Request method=POST path=[] cmd=None query={}>',
+ ], requests)
+ del requests[:]
+ self.assertEqual([
+ ], challenges)
+ del challenges[:]
+
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/toolkit/parcel.py b/tests/units/toolkit/packets.py
index 17fa146..6a181b6 100755
--- a/tests/units/toolkit/parcel.py
+++ b/tests/units/toolkit/packets.py
@@ -13,54 +13,54 @@ from __init__ import tests
from sugar_network import db, toolkit, client
from sugar_network.toolkit.router import File, route, Router
-from sugar_network.toolkit import parcel, http, coroutine
+from sugar_network.toolkit import packets, http, coroutine
-class ParcelTest(tests.Test):
+class PacketsTest(tests.Test):
def test_decode_Zipped(self):
stream = zips(
json.dumps({'foo': 'bar'}) + '\n'
)
- packets_iter = parcel.decode(stream)
- self.assertRaises(EOFError, packets_iter.next)
+ packets_iter = iter(packets.decode(stream))
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = zips(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n'
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
self.assertEqual('foo', packet['bar'])
packet_iter = iter(packet)
- self.assertRaises(EOFError, packet_iter.next)
- self.assertRaises(EOFError, packets_iter.next)
+ self.assertRaises(StopIteration, packet_iter.next)
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = zips(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n' +
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n' +
json.dumps({'payload': 1}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
packet_iter = iter(packet)
self.assertEqual({'payload': 1}, next(packet_iter))
- self.assertRaises(EOFError, packet_iter.next)
- self.assertRaises(EOFError, packets_iter.next)
+ self.assertRaises(StopIteration, packet_iter.next)
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = zips(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n' +
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n' +
json.dumps({'payload': 1}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'payload': 2}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
packet_iter = iter(packet)
@@ -70,19 +70,18 @@ class ParcelTest(tests.Test):
self.assertEqual(2, packet.name)
packet_iter = iter(packet)
self.assertEqual({'payload': 2}, next(packet_iter))
- self.assertRaises(EOFError, packet_iter.next)
- self.assertRaises(EOFError, packets_iter.next)
+ self.assertRaises(StopIteration, packet_iter.next)
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = zips(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n' +
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n' +
json.dumps({'payload': 1}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
- json.dumps({'payload': 2}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 2}) + '\n' +
+ json.dumps({'payload': 2}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
packet_iter = iter(packet)
@@ -100,45 +99,45 @@ class ParcelTest(tests.Test):
stream = StringIO(
json.dumps({'foo': 'bar'}) + '\n'
)
- packets_iter = parcel.decode(stream)
- self.assertRaises(EOFError, packets_iter.next)
+ packets_iter = iter(packets.decode(stream))
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = StringIO(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n'
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
self.assertEqual('foo', packet['bar'])
packet_iter = iter(packet)
- self.assertRaises(EOFError, packet_iter.next)
- self.assertRaises(EOFError, packets_iter.next)
+ self.assertRaises(StopIteration, packet_iter.next)
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = StringIO(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n' +
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n' +
json.dumps({'payload': 1}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
packet_iter = iter(packet)
self.assertEqual({'payload': 1}, next(packet_iter))
- self.assertRaises(EOFError, packet_iter.next)
- self.assertRaises(EOFError, packets_iter.next)
+ self.assertRaises(StopIteration, packet_iter.next)
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = StringIO(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n' +
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n' +
json.dumps({'payload': 1}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'payload': 2}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
packet_iter = iter(packet)
@@ -148,19 +147,18 @@ class ParcelTest(tests.Test):
self.assertEqual(2, packet.name)
packet_iter = iter(packet)
self.assertEqual({'payload': 2}, next(packet_iter))
- self.assertRaises(EOFError, packet_iter.next)
- self.assertRaises(EOFError, packets_iter.next)
+ self.assertRaises(StopIteration, packet_iter.next)
+ self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = StringIO(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1, 'bar': 'foo'}) + '\n' +
+ json.dumps({'segment': 1, 'bar': 'foo'}) + '\n' +
json.dumps({'payload': 1}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
- json.dumps({'payload': 2}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 2}) + '\n' +
+ json.dumps({'payload': 2}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
packet_iter = iter(packet)
@@ -177,73 +175,68 @@ class ParcelTest(tests.Test):
def test_decode_ZippedWithLimit(self):
payload = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 'first'}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 'first'}) + '\n'
).getvalue()
tail = '.' * 100
stream = StringIO(payload + tail)
- for i in parcel.decode(stream):
+ for i in packets.decode(stream):
pass
self.assertEqual(len(payload + tail), stream.tell())
stream = StringIO(payload + tail)
- for i in parcel.decode(stream, limit=len(payload)):
+ for i in packets.decode(stream, limit=len(payload)):
pass
self.assertEqual(len(payload), stream.tell())
def test_decode_NotZippedWithLimit(self):
payload = StringIO(
json.dumps({}) + '\n' +
- json.dumps({'packet': 'first'}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 'first'}) + '\n'
).getvalue()
tail = '.' * 100
stream = StringIO(payload + tail)
- for i in parcel.decode(stream):
- pass
+ self.assertRaises(ValueError, lambda: [i for i in packets.decode(stream)])
self.assertEqual(len(payload + tail), stream.tell())
stream = StringIO(payload + tail)
- for i in parcel.decode(stream, limit=len(payload)):
+ for i in packets.decode(stream, limit=len(payload)):
pass
self.assertEqual(len(payload), stream.tell())
def test_decode_Empty(self):
- self.assertRaises(http.BadRequest, parcel.decode(StringIO()).next)
+ self.assertRaises(http.BadRequest, packets.decode, StringIO())
stream = zips(
''
)
- self.assertRaises(EOFError, parcel.decode(stream).next)
+ self.assertRaises(StopIteration, iter(packets.decode(stream)).next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = zips(
json.dumps({'foo': 'bar'}) + '\n'
)
- self.assertRaises(EOFError, parcel.decode(stream).next)
+ self.assertRaises(StopIteration, iter(packets.decode(stream)).next)
self.assertEqual(len(stream.getvalue()), stream.tell())
stream = zips(
- json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'foo': 'bar'}) + '\n'
)
- self.assertRaises(StopIteration, parcel.decode(stream).next)
+ self.assertRaises(StopIteration, iter(packets.decode(stream)).next)
self.assertEqual(len(stream.getvalue()), stream.tell())
def test_decode_SkipPackets(self):
stream = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'payload': 1}) + '\n' +
json.dumps({'payload': 11}) + '\n' +
json.dumps({'payload': 111}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
- json.dumps({'payload': 2}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 2}) + '\n' +
+ json.dumps({'payload': 2}) + '\n'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
next(packets_iter)
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
@@ -254,7 +247,7 @@ class ParcelTest(tests.Test):
self.assertEqual(len(stream.getvalue()), stream.tell())
stream.seek(0)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
packet_iter = iter(packet)
@@ -270,17 +263,16 @@ class ParcelTest(tests.Test):
def test_decode_Blobs(self):
stream = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'num': 1, 'content-length': 1}) + '\n' +
'a' +
json.dumps({'num': 2, 'content-length': 2}) + '\n' +
'bb' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'num': 3, 'content-length': 3}) + '\n' +
- 'ccc' +
- json.dumps({'packet': 'last'}) + '\n'
+ 'ccc'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
self.assertEqual([
@@ -300,17 +292,16 @@ class ParcelTest(tests.Test):
def test_decode_EmptyBlobs(self):
stream = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'num': 1, 'content-length': 1}) + '\n' +
'a' +
json.dumps({'num': 2, 'content-length': 0}) + '\n' +
'' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'num': 3, 'content-length': 3}) + '\n' +
- 'ccc' +
- json.dumps({'packet': 'last'}) + '\n'
+ 'ccc'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
self.assertEqual([
@@ -330,116 +321,107 @@ class ParcelTest(tests.Test):
def test_decode_SkipNotReadBlobs(self):
stream = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'num': 1, 'content-length': 1}) + '\n' +
'a' +
json.dumps({'num': 2, 'content-length': 2}) + '\n' +
'bb' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'num': 3, 'content-length': 3}) + '\n' +
- 'ccc' +
- json.dumps({'packet': 'last'}) + '\n'
+ 'ccc'
)
- packets_iter = parcel.decode(stream)
+ packets_iter = iter(packets.decode(stream))
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
self.assertEqual([1, 2], [i.meta['num'] for i in packet])
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
self.assertEqual([3], [i.meta['num'] for i in packet])
- self.assertRaises(StopIteration, packets_iter.next)
+ self.assertRaises(StopIteration, next, packets_iter)
self.assertEqual(len(stream.getvalue()), stream.tell())
def test_encode_Zipped(self):
- stream = ''.join([i for i in parcel.encode([])])
+ stream = ''.join([i for i in packets.encode([])])
self.assertEqual(
- json.dumps({}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({}) + '\n',
unzips(stream))
- stream = ''.join([i for i in parcel.encode([(None, None, None)], header={'foo': 'bar'})])
+ stream = ''.join([i for i in packets.encode([(None, None, None)], header={'foo': 'bar'})])
self.assertEqual(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': None}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({'segment': None}) + '\n',
unzips(stream))
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, {}, None),
('2', {'n': 2}, []),
('3', {'n': 3}, iter([])),
])])
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
- json.dumps({'packet': '2', 'n': 2}) + '\n' +
- json.dumps({'packet': '3', 'n': 3}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({'segment': 1}) + '\n' +
+ json.dumps({'segment': '2', 'n': 2}) + '\n' +
+ json.dumps({'segment': '3', 'n': 3}) + '\n',
unzips(stream))
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, None, [{1: 1}]),
(2, None, [{2: 2}, {2: 2}]),
(3, None, [{3: 3}, {3: 3}, {3: 3}]),
])])
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({1: 1}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({2: 2}) + '\n' +
json.dumps({2: 2}) + '\n' +
- json.dumps({'packet': 3}) + '\n' +
+ json.dumps({'segment': 3}) + '\n' +
json.dumps({3: 3}) + '\n' +
json.dumps({3: 3}) + '\n' +
- json.dumps({3: 3}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({3: 3}) + '\n',
unzips(stream))
def test_encode_NotZipped(self):
- stream = ''.join([i for i in parcel.encode([], compresslevel=0)])
+ stream = ''.join([i for i in packets.encode([], compresslevel=0)])
self.assertEqual(
- json.dumps({}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({}) + '\n',
stream)
- stream = ''.join([i for i in parcel.encode([(None, None, None)], header={'foo': 'bar'}, compresslevel=0)])
+ stream = ''.join([i for i in packets.encode([(None, None, None)], header={'foo': 'bar'}, compresslevel=0)])
self.assertEqual(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': None}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({'segment': None}) + '\n',
stream)
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, {}, None),
('2', {'n': 2}, []),
('3', {'n': 3}, iter([])),
], compresslevel=0)])
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
- json.dumps({'packet': '2', 'n': 2}) + '\n' +
- json.dumps({'packet': '3', 'n': 3}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({'segment': 1}) + '\n' +
+ json.dumps({'segment': '2', 'n': 2}) + '\n' +
+ json.dumps({'segment': '3', 'n': 3}) + '\n',
stream)
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, None, [{1: 1}]),
(2, None, [{2: 2}, {2: 2}]),
(3, None, [{3: 3}, {3: 3}, {3: 3}]),
], compresslevel=0)])
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({1: 1}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({2: 2}) + '\n' +
json.dumps({2: 2}) + '\n' +
- json.dumps({'packet': 3}) + '\n' +
+ json.dumps({'segment': 3}) + '\n' +
json.dumps({3: 3}) + '\n' +
json.dumps({3: 3}) + '\n' +
- json.dumps({3: 3}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({3: 3}) + '\n',
stream)
def test_limited_encode(self):
@@ -449,50 +431,50 @@ class ParcelTest(tests.Test):
yield {'record': '.' * RECORD}
yield {'record': '.' * RECORD}
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD / 2)]))
assert len(stream) < RECORD
- self.assertEqual(4, len(stream.strip().split('\n')))
+ self.assertEqual(3, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 1.5)]))
assert len(stream) > RECORD
assert len(stream) < RECORD * 2
- self.assertEqual(5, len(stream.strip().split('\n')))
+ self.assertEqual(4, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 2
assert len(stream) < RECORD * 3
- self.assertEqual(6, len(stream.strip().split('\n')))
+ self.assertEqual(5, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 3
assert len(stream) < RECORD * 4
- self.assertEqual(7, len(stream.strip().split('\n')))
+ self.assertEqual(6, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 4
- self.assertEqual(8, len(stream.strip().split('\n')))
+ self.assertEqual(7, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
@@ -512,84 +494,84 @@ class ParcelTest(tests.Test):
yield {'record': '.' * RECORD}
yield {'record': '.' * RECORD}
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD / 2)]))
assert len(stream) > RECORD * 4
assert len(stream) < RECORD * 5
- self.assertEqual(8, len(stream.strip().split('\n')))
+ self.assertEqual(7, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 1.5)]))
assert len(stream) > RECORD * 5
assert len(stream) < RECORD * 6
- self.assertEqual(9, len(stream.strip().split('\n')))
+ self.assertEqual(8, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
- self.assertEqual(10, len(stream.strip().split('\n')))
+ self.assertEqual(9, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
- self.assertEqual(10, len(stream.strip().split('\n')))
+ self.assertEqual(9, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
- self.assertEqual(10, len(stream.strip().split('\n')))
+ self.assertEqual(9, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 5.5)]))
assert len(stream) > RECORD * 7
assert len(stream) < RECORD * 8
- self.assertEqual(11, len(stream.strip().split('\n')))
+ self.assertEqual(10, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 6.5)]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
- self.assertEqual(12, len(stream.strip().split('\n')))
+ self.assertEqual(11, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
)]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
- self.assertEqual(12, len(stream.strip().split('\n')))
+ self.assertEqual(11, len(stream.strip().split('\n')))
def test_encode_Blobs(self):
self.touch(('a', 'a'))
self.touch(('b', 'bb'))
self.touch(('c', 'ccc'))
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, None, [
File('a', 'digest', [('num', 1)]),
File('b', 'digest', [('num', 2)]),
@@ -601,15 +583,14 @@ class ParcelTest(tests.Test):
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'num': 1}) + '\n' +
'a' + '\n' +
json.dumps({'num': 2}) + '\n' +
'bb' + '\n' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'num': 3}) + '\n' +
- 'ccc' + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ 'ccc' + '\n',
unzips(stream))
def test_encode_BlobWithUrls(self):
@@ -625,39 +606,36 @@ class ParcelTest(tests.Test):
coroutine.dispatch()
url = 'http://127.0.0.1:%s' % client.ipc_port.value
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, None, [File(None, meta={'location': 'fake'})]),
])])
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
- json.dumps({'location': 'fake'}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({'segment': 1}) + '\n' +
+ json.dumps({'location': 'fake'}) + '\n',
unzips(stream))
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, None, [File(None, meta={'location': 'fake', 'content-length': '0'})]),
])])
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
- json.dumps({'location': 'fake', 'content-length': '0'}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ json.dumps({'segment': 1}) + '\n' +
+ json.dumps({'location': 'fake', 'content-length': '0'}) + '\n',
unzips(stream))
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, None, [File(None, meta={'location': url, 'content-length': str(len('probe'))})]),
])])
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'location': url, 'content-length': str(len('probe'))}) + '\n' +
- 'probe' + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
+ 'probe' + '\n',
unzips(stream))
def encode():
- stream = ''.join([i for i in parcel.encode([
+ stream = ''.join([i for i in packets.encode([
(1, None, [File(None, meta={'location': 'http://127.0.0.1:108', 'content-length': str(len('probe'))})]),
])])
self.assertRaises(http.ConnectionError, encode)
@@ -670,56 +648,56 @@ class ParcelTest(tests.Test):
yield File('blob', 'digest')
yield File('blob', 'digest')
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD / 2)]))
assert len(stream) < RECORD
- self.assertEqual(4, len(stream.strip().split('\n')))
+ self.assertEqual(3, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 1.5)]))
assert len(stream) > RECORD
assert len(stream) < RECORD * 2
- self.assertEqual(6, len(stream.strip().split('\n')))
+ self.assertEqual(5, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 2
assert len(stream) < RECORD * 3
- self.assertEqual(8, len(stream.strip().split('\n')))
+ self.assertEqual(7, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 3
assert len(stream) < RECORD * 4
- self.assertEqual(10, len(stream.strip().split('\n')))
+ self.assertEqual(9, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 4
- self.assertEqual(12, len(stream.strip().split('\n')))
+ self.assertEqual(11, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
)]))
assert len(stream) > RECORD * 4
- self.assertEqual(12, len(stream.strip().split('\n')))
+ self.assertEqual(11, len(stream.strip().split('\n')))
def test_limited_encode_FinalBlobs(self):
RECORD = 1024 * 1024
@@ -735,104 +713,102 @@ class ParcelTest(tests.Test):
yield File('blob', 'digest')
yield File('blob', 'digest')
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD / 2)]))
assert len(stream) > RECORD * 4
assert len(stream) < RECORD * 5
- self.assertEqual(12, len(stream.strip().split('\n')))
+ self.assertEqual(11, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 1.5)]))
assert len(stream) > RECORD * 5
assert len(stream) < RECORD * 6
- self.assertEqual(14, len(stream.strip().split('\n')))
+ self.assertEqual(13, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
- self.assertEqual(16, len(stream.strip().split('\n')))
+ self.assertEqual(15, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
- self.assertEqual(16, len(stream.strip().split('\n')))
+ self.assertEqual(15, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
- self.assertEqual(16, len(stream.strip().split('\n')))
+ self.assertEqual(15, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 5.5)]))
assert len(stream) > RECORD * 7
assert len(stream) < RECORD * 8
- self.assertEqual(18, len(stream.strip().split('\n')))
+ self.assertEqual(17, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
limit=RECORD * 6.5)]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
- self.assertEqual(20, len(stream.strip().split('\n')))
+ self.assertEqual(19, len(stream.strip().split('\n')))
- stream = unzips(''.join([i for i in parcel.encode([
+ stream = unzips(''.join([i for i in packets.encode([
('first', None, content()),
('second', None, content()),
],
)]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
- self.assertEqual(20, len(stream.strip().split('\n')))
+ self.assertEqual(19, len(stream.strip().split('\n')))
def test_decode_dir(self):
stream = zips(
json.dumps({'foo': 'bar'}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'payload': 1}) + '\n' +
json.dumps({'num': 1, 'content-length': '8'}) + '\n' +
'content1' + '\n' +
- json.dumps({'payload': 2}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'payload': 2}) + '\n'
)
- self.touch(('parcels/1.parcel', stream.getvalue()))
+ self.touch(('packets/1.packet', stream.getvalue()))
stream = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'num': 2, 'content-length': '8'}) + '\n' +
'content2' + '\n' +
- json.dumps({'payload': 3}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'payload': 3}) + '\n'
)
- self.touch(('parcels/2.parcel', stream.getvalue()))
+ self.touch(('packets/2.packet', stream.getvalue()))
- packets_iter = parcel.decode_dir('parcels')
+ packets_iter = iter(packets.decode_dir('packets'))
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
- self.assertEqual({'packet': 2}, packet.header)
+ self.assertEqual({'segment': 2}, packet.header)
items = iter(packet)
blob = next(items)
self.assertEqual({'num': 2, 'content-length': '8'}, blob.meta)
@@ -841,7 +817,7 @@ class ParcelTest(tests.Test):
self.assertRaises(StopIteration, items.next)
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
- self.assertEqual({'foo': 'bar', 'packet': 1}, packet.header)
+ self.assertEqual({'foo': 'bar', 'segment': 1}, packet.header)
items = iter(packet)
self.assertEqual({'payload': 1}, next(items))
blob = next(items)
@@ -851,73 +827,68 @@ class ParcelTest(tests.Test):
self.assertRaises(StopIteration, items.next)
self.assertRaises(StopIteration, packets_iter.next)
- def test_decode_dir_RemoveOutdatedParcels(self):
+ def test_decode_dir_RemoveOutdatedPackets(self):
stream = zips(
json.dumps({'from': 'principal'}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
- json.dumps({'payload': 1}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 1}) + '\n' +
+ json.dumps({'payload': 1}) + '\n'
)
- self.touch(('parcels/1.parcel', stream.getvalue()))
+ self.touch(('packets/1.packet', stream.getvalue()))
stream = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
- json.dumps({'payload': 2}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 2}) + '\n' +
+ json.dumps({'payload': 2}) + '\n'
)
- self.touch(('parcels/2.parcel', stream.getvalue()))
+ self.touch(('packets/2.packet', stream.getvalue()))
- packets_iter = parcel.decode_dir('parcels', recipient='principal')
+ packets_iter = iter(packets.decode_dir('packets', recipient='principal'))
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
self.assertRaises(StopIteration, packets_iter.next)
- assert not exists('parcels/1.parcel')
- assert exists('parcels/2.parcel')
+ assert not exists('packets/1.packet')
+ assert exists('packets/2.packet')
stream = zips(
json.dumps({'from': 'principal', 'session': 'old'}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
- json.dumps({'payload': 1}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 1}) + '\n' +
+ json.dumps({'payload': 1}) + '\n'
)
- self.touch(('parcels/3.parcel', stream.getvalue()))
+ self.touch(('packets/3.packet', stream.getvalue()))
- packets_iter = parcel.decode_dir('parcels', recipient='principal', session='new')
+ packets_iter = iter(packets.decode_dir('packets', recipient='principal', session='new'))
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
self.assertRaises(StopIteration, packets_iter.next)
- assert not exists('parcels/1.parcel')
- assert exists('parcels/2.parcel')
- assert not exists('parcels/3.parcel')
+ assert not exists('packets/1.packet')
+ assert exists('packets/2.packet')
+ assert not exists('packets/3.packet')
- def test_decode_dir_SkipTheSameSessionParcels(self):
+ def test_decode_dir_SkipTheSameSessionPackets(self):
stream = zips(
json.dumps({'from': 'principal', 'session': 'new'}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
- json.dumps({'payload': 1}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 1}) + '\n' +
+ json.dumps({'payload': 1}) + '\n'
)
- self.touch(('parcels/1.parcel', stream.getvalue()))
+ self.touch(('packets/1.packet', stream.getvalue()))
stream = zips(
json.dumps({}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
- json.dumps({'payload': 2}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n'
+ json.dumps({'segment': 2}) + '\n' +
+ json.dumps({'payload': 2}) + '\n'
)
- self.touch(('parcels/2.parcel', stream.getvalue()))
+ self.touch(('packets/2.packet', stream.getvalue()))
- packets_iter = parcel.decode_dir('parcels', recipient='principal', session='new')
+ packets_iter = iter(packets.decode_dir('packets', recipient='principal', session='new'))
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
self.assertRaises(StopIteration, packets_iter.next)
- assert exists('parcels/1.parcel')
- assert exists('parcels/2.parcel')
+ assert exists('packets/1.packet')
+ assert exists('packets/2.packet')
def test_encode_dir(self):
self.touch(('blob', 'content'))
- parcel.encode_dir([
+ packets.encode_dir([
(1, None, [
{'payload': 1},
File('blob', 'digest', [('num', 1)]),
@@ -927,23 +898,42 @@ class ParcelTest(tests.Test):
File('blob', 'digest', [('num', 2)]),
{'payload': 3},
]),
- ], path='./parcel', limit=99999999)
+ ], path='./packets', limit=99999999)
- assert exists('parcel')
+ assert exists('packets')
self.assertEqual(
json.dumps({}) + '\n' +
- json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'segment': 1}) + '\n' +
json.dumps({'payload': 1}) + '\n' +
json.dumps({'num': 1}) + '\n' +
'content' + '\n' +
json.dumps({'payload': 2}) + '\n' +
- json.dumps({'packet': 2}) + '\n' +
+ json.dumps({'segment': 2}) + '\n' +
json.dumps({'num': 2}) + '\n' +
'content' + '\n' +
- json.dumps({'payload': 3}) + '\n' +
- json.dumps({'packet': 'last'}) + '\n',
- unzips(file('parcel').read()))
+ json.dumps({'payload': 3}) + '\n',
+ unzips(file('packets').read()))
+
+ def test_decode_WithoutSegments(self):
+ stream = zips(
+ json.dumps({'foo': 'bar'}) + '\n' +
+ json.dumps({'n': 1}) + '\n' +
+ json.dumps({'n': 2}) + '\n' +
+ json.dumps({'n': 3}) + '\n'
+ )
+ packet = packets.decode(stream)
+ self.assertEqual({'foo': 'bar'}, packet.header)
+ self.assertEqual([{'n': 1}, {'n': 2}, {'n': 3}], [i for i in packet])
+
+ def test_encode_WithoutSegments(self):
+ stream = ''.join([i for i in packets.encode([{'n': 1}, {'n': 2}, {'n': 3}], header={'foo': 'bar'})])
+ self.assertEqual(
+ json.dumps({'foo': 'bar'}) + '\n' +
+ json.dumps({'n': 1}) + '\n' +
+ json.dumps({'n': 2}) + '\n' +
+ json.dumps({'n': 3}) + '\n',
+ unzips(stream))
def zips(data):
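
Editorial sketch (not part of the commit): the hunks above drop the old {'packet': 'last'} terminator and rename the per-packet marker key to 'segment', so a stream now ends at EOF. A minimal round-trip in the style of these tests, assuming the sugar_network.toolkit.packets API they exercise (the segment name 'first' and header contents are illustrative only):

    from StringIO import StringIO
    from sugar_network.toolkit import packets

    # Encode one segment; the output is gzipped by default and carries no
    # trailing {'packet': 'last'} record any more.
    stream = StringIO(''.join(packets.encode(
        [('first', None, [{'payload': 1}, {'payload': 2}])],
        header={'foo': 'bar'})))

    # Decoding yields one context-managed packet per 'segment' marker.
    segments = iter(packets.decode(stream))
    with next(segments) as segment:
        assert segment.name == 'first'
        assert [i for i in segment] == [{'payload': 1}, {'payload': 2}]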
diff --git a/tests/units/toolkit/router.py b/tests/units/toolkit/router.py
index 63d4646..7222da1 100755
--- a/tests/units/toolkit/router.py
+++ b/tests/units/toolkit/router.py
@@ -680,9 +680,9 @@ class RouterTest(tests.Test):
'CONTENT_LENGTH': '3',
'wsgi.input': Stream('123'),
})
- self.assertEqual('123', request.content_stream.read())
- self.assertEqual('', request.content_stream.read())
- self.assertEqual('', request.content_stream.read(10))
+ self.assertEqual('123', request.content.read())
+ self.assertEqual('', request.content.read())
+ self.assertEqual('', request.content.read(10))
request = Request({
'PATH_INFO': '/',
@@ -690,7 +690,7 @@ class RouterTest(tests.Test):
'CONTENT_LENGTH': '3',
'wsgi.input': Stream('123'),
})
- self.assertEqual('123', request.content_stream.read(10))
+ self.assertEqual('123', request.content.read(10))
request = Request({
'PATH_INFO': '/',
@@ -698,10 +698,10 @@ class RouterTest(tests.Test):
'CONTENT_LENGTH': '3',
'wsgi.input': Stream('123'),
})
- self.assertEqual('1', request.content_stream.read(1))
- self.assertEqual('2', request.content_stream.read(1))
- self.assertEqual('3', request.content_stream.read())
- self.assertEqual('', request.content_stream.read())
+ self.assertEqual('1', request.content.read(1))
+ self.assertEqual('2', request.content.read(1))
+ self.assertEqual('3', request.content.read())
+ self.assertEqual('', request.content.read())
def test_IntArguments(self):
@@ -1415,7 +1415,7 @@ class RouterTest(tests.Test):
@route('GET', mime_type='text/event-stream')
def get(self):
yield {'event': 'probe'}
- yield {'event': 'probe', 'request': this.request.content}
+ yield {'event': 'probe', 'request': this.request.content.read()}
events = []
def localcast(event):
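
Editorial sketch (not part of the commit): the router hunks above replace request.content_stream with the lazily read, file-like request.content. The same usage outside the test harness might look as follows; StringIO stands in for the Stream test helper and the import path is an assumption, not taken from the commit:

    from StringIO import StringIO
    from sugar_network.toolkit.router import Request

    request = Request({
        'PATH_INFO': '/',
        'REQUEST_METHOD': 'POST',
        'CONTENT_LENGTH': '3',
        'wsgi.input': StringIO('123'),
        })
    # The posted body is read on demand from the WSGI input stream.
    assert request.content.read(1) == '1'
    assert request.content.read() == '23'
    assert request.content.read() == ''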
diff --git a/tests/units/toolkit/spec.py b/tests/units/toolkit/spec.py
index 66cb2b3..a7c2af1 100755
--- a/tests/units/toolkit/spec.py
+++ b/tests/units/toolkit/spec.py
@@ -139,40 +139,40 @@ class SpecTest(tests.Test):
self.assertEqual([[1, 2], 0], spec.parse_version('1.2foo', ignore_errors=True))
def test_ensure(self):
- assert spec.ensure(spec.parse_version('1'), spec.parse_requires('dep')['dep'])
+ assert spec.ensure_version(spec.parse_version('1'), spec.parse_requires('dep')['dep'])
- assert spec.ensure(spec.parse_version('1'), spec.parse_requires('dep=1')['dep'])
- assert not spec.ensure(spec.parse_version('2'), spec.parse_requires('dep=1')['dep'])
+ assert spec.ensure_version(spec.parse_version('1'), spec.parse_requires('dep=1')['dep'])
+ assert not spec.ensure_version(spec.parse_version('2'), spec.parse_requires('dep=1')['dep'])
- assert spec.ensure(spec.parse_version('1'), spec.parse_requires('dep<2')['dep'])
- assert not spec.ensure(spec.parse_version('2'), spec.parse_requires('dep<2')['dep'])
- assert not spec.ensure(spec.parse_version('3'), spec.parse_requires('dep<2')['dep'])
+ assert spec.ensure_version(spec.parse_version('1'), spec.parse_requires('dep<2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('2'), spec.parse_requires('dep<2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('3'), spec.parse_requires('dep<2')['dep'])
- assert spec.ensure(spec.parse_version('3'), spec.parse_requires('dep>2')['dep'])
- assert not spec.ensure(spec.parse_version('2'), spec.parse_requires('dep>2')['dep'])
- assert not spec.ensure(spec.parse_version('1'), spec.parse_requires('dep>2')['dep'])
+ assert spec.ensure_version(spec.parse_version('3'), spec.parse_requires('dep>2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('2'), spec.parse_requires('dep>2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('1'), spec.parse_requires('dep>2')['dep'])
- assert spec.ensure(spec.parse_version('1'), spec.parse_requires('dep<=2')['dep'])
- assert spec.ensure(spec.parse_version('2'), spec.parse_requires('dep<=2')['dep'])
- assert not spec.ensure(spec.parse_version('3'), spec.parse_requires('dep<=2')['dep'])
+ assert spec.ensure_version(spec.parse_version('1'), spec.parse_requires('dep<=2')['dep'])
+ assert spec.ensure_version(spec.parse_version('2'), spec.parse_requires('dep<=2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('3'), spec.parse_requires('dep<=2')['dep'])
- assert spec.ensure(spec.parse_version('3'), spec.parse_requires('dep>=2')['dep'])
- assert spec.ensure(spec.parse_version('2'), spec.parse_requires('dep>=2')['dep'])
- assert not spec.ensure(spec.parse_version('1'), spec.parse_requires('dep>=2')['dep'])
+ assert spec.ensure_version(spec.parse_version('3'), spec.parse_requires('dep>=2')['dep'])
+ assert spec.ensure_version(spec.parse_version('2'), spec.parse_requires('dep>=2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('1'), spec.parse_requires('dep>=2')['dep'])
def test_ensure_StripVersionsForEQ(self):
- assert spec.ensure(spec.parse_version('1.2'), spec.parse_requires('dep=1')['dep'])
- assert not spec.ensure(spec.parse_version('1'), spec.parse_requires('dep=1.2')['dep'])
- assert spec.ensure(spec.parse_version('1.2.3'), spec.parse_requires('dep=1.2')['dep'])
+ assert spec.ensure_version(spec.parse_version('1.2'), spec.parse_requires('dep=1')['dep'])
+ assert not spec.ensure_version(spec.parse_version('1'), spec.parse_requires('dep=1.2')['dep'])
+ assert spec.ensure_version(spec.parse_version('1.2.3'), spec.parse_requires('dep=1.2')['dep'])
- assert spec.ensure(spec.parse_version('1-pre2'), spec.parse_requires('dep=1')['dep'])
- assert spec.ensure(spec.parse_version('1-post2'), spec.parse_requires('dep=1')['dep'])
+ assert spec.ensure_version(spec.parse_version('1-pre2'), spec.parse_requires('dep=1')['dep'])
+ assert spec.ensure_version(spec.parse_version('1-post2'), spec.parse_requires('dep=1')['dep'])
- assert spec.ensure(spec.parse_version('1.2-pre3'), spec.parse_requires('dep=1')['dep'])
- assert spec.ensure(spec.parse_version('1.2-post3'), spec.parse_requires('dep=1')['dep'])
+ assert spec.ensure_version(spec.parse_version('1.2-pre3'), spec.parse_requires('dep=1')['dep'])
+ assert spec.ensure_version(spec.parse_version('1.2-post3'), spec.parse_requires('dep=1')['dep'])
- assert not spec.ensure(spec.parse_version('1-pre3'), spec.parse_requires('dep=1.2')['dep'])
- assert not spec.ensure(spec.parse_version('1-post3'), spec.parse_requires('dep=1.2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('1-pre3'), spec.parse_requires('dep=1.2')['dep'])
+ assert not spec.ensure_version(spec.parse_version('1-post3'), spec.parse_requires('dep=1.2')['dep'])
if __name__ == '__main__':
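
Editorial sketch (not part of the commit): spec.ensure() is renamed to spec.ensure_version() with unchanged semantics. A minimal check of one parsed version against one requirement, mirroring the assertions above (module path assumed):

    from sugar_network.toolkit import spec

    # parse_requires() maps dependency names to their version constraints.
    dep = spec.parse_requires('dep>=2')['dep']
    assert spec.ensure_version(spec.parse_version('3'), dep)
    assert spec.ensure_version(spec.parse_version('2'), dep)
    assert not spec.ensure_version(spec.parse_version('1'), dep)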