Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAleksey Lim <alsroot@sugarlabs.org>2014-03-06 15:33:04 (GMT)
committer Aleksey Lim <alsroot@sugarlabs.org>2014-03-07 04:58:00 (GMT)
commit90f74541ec4925bad47466e39517c22ff7eadfe4 (patch)
treef8fca9c302904981a46e275fcaa5a2305ea99f8d
parent1028755053ef3d8c538138b37e61ece13b9c1a23 (diff)
Keep data synchronization in db module; use blobs storage to keep standalone files
-rw-r--r--TODO20
-rw-r--r--sugar_network/db/blobs.py314
-rw-r--r--sugar_network/db/directory.py96
-rw-r--r--sugar_network/db/index.py2
-rw-r--r--sugar_network/db/metadata.py43
-rw-r--r--sugar_network/db/resource.py18
-rw-r--r--sugar_network/db/routes.py23
-rw-r--r--sugar_network/db/storage.py10
-rw-r--r--sugar_network/db/volume.py91
-rw-r--r--sugar_network/model/__init__.py16
-rw-r--r--sugar_network/model/report.py2
-rw-r--r--sugar_network/model/routes.py5
-rw-r--r--sugar_network/toolkit/__init__.py235
-rw-r--r--sugar_network/toolkit/http.py6
-rw-r--r--sugar_network/toolkit/parcel.py52
-rw-r--r--sugar_network/toolkit/ranges.py198
-rw-r--r--sugar_network/toolkit/router.py93
-rw-r--r--tests/__init__.py15
-rw-r--r--tests/units/db/__main__.py1
-rwxr-xr-xtests/units/db/blobs.py625
-rwxr-xr-xtests/units/db/resource.py693
-rwxr-xr-xtests/units/db/routes.py298
-rwxr-xr-xtests/units/db/storage.py7
-rwxr-xr-xtests/units/db/volume.py750
-rw-r--r--tests/units/toolkit/__main__.py1
-rwxr-xr-xtests/units/toolkit/parcel.py129
-rwxr-xr-xtests/units/toolkit/ranges.py442
-rwxr-xr-xtests/units/toolkit/router.py102
-rwxr-xr-xtests/units/toolkit/toolkit.py319
29 files changed, 2502 insertions, 2104 deletions
diff --git a/TODO b/TODO
index 904aacf..dc40f32 100644
--- a/TODO
+++ b/TODO
@@ -1,29 +1,19 @@
+- proxying as a tool to sort out downstream content
- push local offline changes to the node on getting online
- diff/merge while checking in node context
- deliver spawn events only to local subscribers
- test/run presolve
- if node relocates api calls, do it only once in toolkit.http
-
-
-0.10
-====
-- Context prop for Update Hub sort
-
- Remove temporal security hole with speciying guid in POST,
it was added as a fast hack to support offline creation (with later pushing to a node)
- changed pulls should take into account accept_length
-- increase granularity for sync.chunked_encode()
-- slave._Pooler might leak events if pullers are not in time to call wait()
-- sync node->local db sync
-- parse command while uploading impls; while parsing, take into accoun quotes
-- secure node-to-node offline sync
+- secure node-to-node sync
+- cache init sync pull
v2.0
====
-- Pools topics
-- proxying as a tool to sort out downstream content
- * client db is a special case of proxying
- * easy proxying from any client, e.g., from a classroom teacher
+- Pool topics
+- (?) easy proxying from any client, e.g., from a classroom teacher
- sort out contexts by targeting auditory
* students of ages from..to
* educators
diff --git a/sugar_network/db/blobs.py b/sugar_network/db/blobs.py
index a9d66e0..cd795c6 100644
--- a/sugar_network/db/blobs.py
+++ b/sugar_network/db/blobs.py
@@ -16,137 +16,215 @@
import os
import logging
import hashlib
+import mimetypes
from contextlib import contextmanager
-from os.path import exists, abspath, join, isdir, isfile
+from os.path import exists, abspath, join, dirname
from sugar_network import toolkit
from sugar_network.toolkit.router import File
-from sugar_network.toolkit import http, enforce
+from sugar_network.toolkit import http, ranges, enforce
_META_SUFFIX = '.meta'
_logger = logging.getLogger('db.blobs')
-_root = None
-
-
-def init(path):
- global _root
- _root = abspath(path)
- if not exists(_root):
- os.makedirs(_root)
-
-
-def post(content, mime_type=None, digest_to_assert=None, meta=None):
- if meta is None:
- meta = []
- meta.append(('content-type', mime_type or 'application/octet-stream'))
- else:
- meta = meta.items()
- if mime_type:
- meta.append(('content-type', mime_type))
-
- @contextmanager
- def write_blob():
- tmp_path = join(_path(), 'post')
- if hasattr(content, 'read'):
- with toolkit.new_file(tmp_path) as blob:
- digest = hashlib.sha1()
- while True:
- chunk = content.read(toolkit.BUFFER_SIZE)
- if not chunk:
- break
- blob.write(chunk)
- digest.update(chunk)
- yield blob, digest.hexdigest()
- elif isinstance(content, dict):
- enforce('location' in content, http.BadRequest, 'No location')
- enforce('digest' in content, http.BadRequest, 'No digest')
- meta.append(('status', '301 Moved Permanently'))
- meta.append(('location', content['location']))
- with toolkit.new_file(tmp_path) as blob:
- yield blob, content['digest']
- else:
- with toolkit.new_file(tmp_path) as blob:
- blob.write(content)
- yield blob, hashlib.sha1(content).hexdigest()
-
- with write_blob() as (blob, digest):
- if digest_to_assert and digest != digest_to_assert:
- blob.unlink()
- raise http.BadRequest('Digest mismatch')
- path = _path(digest)
- meta.append(('content-length', str(blob.tell())))
- with toolkit.new_file(path + _META_SUFFIX) as f:
- for key, value in meta:
- f.write('%s: %s\n' % (key, value))
- blob.name = path
-
- return File(path, digest, meta)
-
-
-def update(digest, meta):
- path = _path(digest) + _META_SUFFIX
- enforce(exists(path), http.NotFound, 'No such blob')
- meta_content = ''
- for key, value in meta.items():
- meta_content += '%s: %s\n' % (key, value)
- with toolkit.new_file(path) as f:
- f.write(meta_content)
-
-def get(digest):
- path = _path(digest)
- if not exists(path) or not exists(path + _META_SUFFIX):
- return None
- meta = []
- with file(path + _META_SUFFIX) as f:
- for line in f:
- key, value = line.split(':', 1)
- meta.append((key, value.strip()))
- return File(path, digest, meta)
+class Blobs(object):
-def delete(digest):
- path = _path(digest)
- if exists(path + _META_SUFFIX):
- os.unlink(path + _META_SUFFIX)
- if exists(path):
- os.unlink(path)
+ def __init__(self, root, seqno):
+ self._root = abspath(root)
+ self._seqno = seqno
+ def path(self, *args):
+ if len(args) == 1 and len(args[0]) == 40 and '.' not in args[0]:
+ return self._blob_path(args[0])
+ else:
+ return join(self._root, 'files', *args)
-def diff(in_seq, out_seq=None):
- if out_seq is None:
- out_seq = toolkit.Sequence([])
- is_the_only_seq = not out_seq
-
- try:
- root = _path()
- for name in os.listdir(root):
- dirpath = join(root, name)
- if not isdir(dirpath) or os.stat(dirpath).st_ctime not in in_seq:
+ def post(self, content, mime_type=None, digest_to_assert=None, meta=None):
+ if meta is None:
+ meta = []
+ meta.append(('content-type',
+ mime_type or 'application/octet-stream'))
+ else:
+ meta = meta.items()
+ if mime_type:
+ meta.append(('content-type', mime_type))
+
+ @contextmanager
+ def write_blob():
+ tmp_path = join(self._blob_path(), 'post')
+ if hasattr(content, 'read'):
+ with toolkit.new_file(tmp_path) as blob:
+ digest = hashlib.sha1()
+ while True:
+ chunk = content.read(toolkit.BUFFER_SIZE)
+ if not chunk:
+ break
+ blob.write(chunk)
+ digest.update(chunk)
+ yield blob, digest.hexdigest()
+ elif isinstance(content, dict):
+ enforce('location' in content, http.BadRequest, 'No location')
+ enforce('digest' in content, http.BadRequest, 'No digest')
+ meta.append(('status', '301 Moved Permanently'))
+ meta.append(('location', content['location']))
+ with toolkit.new_file(tmp_path) as blob:
+ yield blob, content['digest']
+ else:
+ with toolkit.new_file(tmp_path) as blob:
+ blob.write(content)
+ yield blob, hashlib.sha1(content).hexdigest()
+
+ with write_blob() as (blob, digest):
+ if digest_to_assert and digest != digest_to_assert:
+ blob.unlink()
+ raise http.BadRequest('Digest mismatch')
+ path = self._blob_path(digest)
+ seqno = self._seqno.next()
+ meta.append(('content-length', str(blob.tell())))
+ meta.append(('x-seqno', str(seqno)))
+ _write_meta(path, meta, seqno)
+ blob.name = path
+ os.utime(path, (seqno, seqno))
+
+ _logger.debug('Post %r file', path)
+
+ return File(path, digest, meta)
+
+ def update(self, path, meta):
+ path = self.path(path)
+ enforce(exists(path), http.NotFound, 'No such blob')
+ orig_meta = _read_meta(path)
+ orig_meta.update(meta)
+ _write_meta(path, orig_meta, None)
+
+ def get(self, digest):
+ path = self.path(digest)
+ if exists(path + _META_SUFFIX):
+ return File(path, digest, _read_meta(path))
+
+ def delete(self, path):
+ self._delete(path, None)
+
+ def diff(self, r, path=None, recursive=True):
+ if path is None:
+ is_files = False
+ root = self._blob_path()
+ else:
+ path = path.strip('/').split('/')
+ enforce(not [i for i in path if i == '..'],
+ http.BadRequest, 'Relative paths are not allowed')
+ is_files = True
+ root = self.path(*path)
+ checkin_seqno = None
+
+ for root, __, files in os.walk(root):
+ if not ranges.contains(r, int(os.stat(root).st_mtime)):
continue
- for digest in os.listdir(dirpath):
- if len(digest) != 40:
- continue
- path = join(dirpath, digest)
- if not isfile(path):
+ rel_root = root[len(self._root) + 7:] if is_files else None
+ for filename in files:
+ path = join(root, filename)
+ if filename.endswith(_META_SUFFIX):
+ seqno = int(os.stat(path).st_mtime)
+ path = path[:-len(_META_SUFFIX)]
+ meta = None
+ if exists(path):
+ stat = os.stat(path)
+ if seqno != int(stat.st_mtime):
+ _logger.debug('Found updated %r file', path)
+ seqno = self._seqno.next()
+ meta = _read_meta(path)
+ meta['x-seqno'] = str(seqno)
+ meta['content-length'] = str(stat.st_size)
+ _write_meta(path, meta, seqno)
+ os.utime(path, (seqno, seqno))
+ if not ranges.contains(r, seqno):
+ continue
+ if meta is None:
+ meta = _read_meta(path)
+ if is_files:
+ digest = join(rel_root, filename[:-len(_META_SUFFIX)])
+ meta['path'] = digest
+ else:
+ digest = filename[:-len(_META_SUFFIX)]
+ elif not is_files or exists(path + _META_SUFFIX):
continue
- ctime = int(os.stat(path).st_ctime)
- if ctime not in in_seq:
- continue
- blob = get(digest)
- if blob is None:
- continue
- yield blob
- out_seq.include(ctime, ctime)
- if is_the_only_seq:
- # There is only one diff, so, we can stretch it to remove all holes
- out_seq.stretch()
- except StopIteration:
- pass
-
-
-def _path(digest=None):
- enforce(_root is not None, 'Blobs storage is not initialized')
- return join(_root, digest[:3], digest) if digest else _root
+ else:
+ _logger.debug('Found new %r file', path)
+ mime_type = mimetypes.guess_type(filename)[0] or \
+ 'application/octet-stream'
+ if checkin_seqno is None:
+ checkin_seqno = self._seqno.next()
+ seqno = checkin_seqno
+ meta = [('content-type', mime_type),
+ ('content-length', str(os.stat(path).st_size)),
+ ('x-seqno', str(seqno)),
+ ]
+ _write_meta(path, meta, seqno)
+ os.utime(path, (seqno, seqno))
+ if not ranges.contains(r, seqno):
+ continue
+ digest = join(rel_root, filename)
+ meta.append(('path', digest))
+ yield File(path, digest, meta)
+ if not recursive:
+ break
+
+ def patch(self, patch, seqno):
+ if 'path' in patch:
+ path = self.path(patch.pop('path'))
+ else:
+ path = self._blob_path(patch.digest)
+ if not patch.size:
+ self._delete(path, seqno)
+ return
+ if not exists(dirname(path)):
+ os.makedirs(dirname(path))
+ os.rename(patch.path, path)
+ if exists(path + _META_SUFFIX):
+ meta = _read_meta(path)
+ meta.update(patch)
+ else:
+ meta = patch
+ meta['x-seqno'] = str(seqno)
+ _write_meta(path, meta, seqno)
+ os.utime(path, (seqno, seqno))
+
+ def _delete(self, path, seqno):
+ path = self.path(path)
+ if exists(path + _META_SUFFIX):
+ if seqno is None:
+ seqno = self._seqno.next()
+ meta = _read_meta(path)
+ meta['status'] = '410 Gone'
+ meta['x-seqno'] = str(seqno)
+ _write_meta(path, meta, seqno)
+ if exists(path):
+ _logger.debug('Delete %r file', path)
+ os.unlink(path)
+
+ def _blob_path(self, digest=None):
+ if not digest:
+ return join(self._root, 'blobs')
+ return join(self._root, 'blobs', digest[:3], digest)
+
+
+def _write_meta(path, meta, seqno):
+ path += _META_SUFFIX
+ with toolkit.new_file(path) as f:
+ for key, value in meta.items() if isinstance(meta, dict) else meta:
+ if seqno is None and key == 'x-seqno':
+ seqno = int(value)
+ f.write('%s: %s\n' % (key, value))
+ os.utime(path, (seqno, seqno))
+
+
+def _read_meta(path):
+ meta = {}
+ with file(path + _META_SUFFIX) as f:
+ for line in f:
+ key, value = line.split(':', 1)
+ meta[key] = value.strip()
+ return meta
diff --git a/sugar_network/db/directory.py b/sugar_network/db/directory.py
index c6957d7..3ef4b91 100644
--- a/sugar_network/db/directory.py
+++ b/sugar_network/db/directory.py
@@ -13,7 +13,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import os
import shutil
import logging
from os.path import exists, join
@@ -33,7 +32,7 @@ _logger = logging.getLogger('db.directory')
class Directory(object):
- def __init__(self, root, resource_class, index_class, seqno=None):
+ def __init__(self, root, resource, index_class, seqno):
"""
:param index_class:
what class to use to access to indexes, for regular casses
@@ -41,19 +40,16 @@ class Directory(object):
keep writer in separate process).
"""
- if not exists(root):
- os.makedirs(root)
-
- if resource_class.metadata is None:
+ if resource.metadata is None:
# Metadata cannot be recreated
- resource_class.metadata = Metadata(resource_class)
- resource_class.metadata['guid'] = Guid()
- self.metadata = resource_class.metadata
+ resource.metadata = Metadata(resource)
+ resource.metadata['guid'] = Guid()
+ self.metadata = resource.metadata
- self.resource_class = resource_class
+ self.resource = resource
self._index_class = index_class
self._root = root
- self._seqno = _SessionSeqno() if seqno is None else seqno
+ self._seqno = seqno
self._storage = None
self._index = None
@@ -62,7 +58,10 @@ class Directory(object):
def wipe(self):
self.close()
_logger.debug('Wipe %r directory', self.metadata.name)
- shutil.rmtree(self._root, ignore_errors=True)
+ shutil.rmtree(join(self._root, 'index', self.metadata.name),
+ ignore_errors=True)
+ shutil.rmtree(join(self._root, 'db', self.metadata.name),
+ ignore_errors=True)
self._open()
def close(self):
@@ -129,7 +128,7 @@ class Directory(object):
enforce(cached_props or record.exists, http.NotFound,
'Resource %r does not exist in %r',
guid, self.metadata.name)
- return self.resource_class(guid, record, cached_props)
+ return self.resource(guid, record, cached_props)
def __getitem__(self, guid):
return self.get(guid)
@@ -141,7 +140,7 @@ class Directory(object):
for hit in mset:
guid = hit.document.get_value(0)
record = self._storage.get(guid)
- yield self.resource_class(guid, record)
+ yield self.resource(guid, record)
return iterate(), mset.get_matches_estimated()
@@ -186,74 +185,52 @@ class Directory(object):
self._save_layout()
self.commit()
- def diff(self, seq, exclude_seq=None, **params):
- if exclude_seq is not None:
- for start, end in exclude_seq:
- seq.exclude(start, end)
- if 'group_by' in params:
- # Pickup only most recent change
- params['order_by'] = '-seqno'
- else:
- params['order_by'] = 'seqno'
- params['no_cache'] = True
-
- for start, end in seq:
- query = 'seqno:%s..' % start
- if end:
- query += str(end)
- documents, __ = self.find(query=query, **params)
- for doc in documents:
- yield doc.guid, doc.diff(seq)
-
- def merge(self, guid, diff):
+ def patch(self, guid, patch, seqno=None):
"""Apply changes for documents."""
- doc = self.resource_class(guid, self._storage.get(guid))
+ doc = self.resource(guid, self._storage.get(guid))
- for prop, meta in diff.items():
+ for prop, meta in patch.items():
orig_meta = doc.meta(prop)
if orig_meta and orig_meta['mtime'] >= meta['mtime']:
continue
if doc.post_seqno is None:
- doc.post_seqno = self._seqno.next()
+ if seqno is None:
+ seqno = self._seqno.next()
+ doc.post_seqno = seqno
doc.post(prop, **meta)
- if doc.post_seqno is None:
- return None, False
-
- if doc.exists:
+ if doc.post_seqno is not None and doc.exists:
# No need in after-merge event, further commit event
# is enough to avoid increasing events flow
self._index.store(guid, doc.props, self._preindex)
- return doc.post_seqno, True
+ return seqno
def _open(self):
- if not exists(self._root):
- os.makedirs(self._root)
- index_path = join(self._root, 'index')
+ index_path = join(self._root, 'index', self.metadata.name)
if self._is_layout_stale():
if exists(index_path):
_logger.warning('%r layout is stale, remove index',
self.metadata.name)
shutil.rmtree(index_path, ignore_errors=True)
self._save_layout()
- self._storage = Storage(self._root, self.metadata)
self._index = self._index_class(index_path, self.metadata,
self._postcommit)
- _logger.debug('Open %r resource', self.resource_class)
+ self._storage = Storage(join(self._root, 'db', self.metadata.name))
+ _logger.debug('Open %r resource', self.resource)
def _broadcast(self, event):
event['resource'] = self.metadata.name
this.broadcast(event)
def _preindex(self, guid, changes):
- doc = self.resource_class(guid, self._storage.get(guid), changes)
+ doc = self.resource(guid, self._storage.get(guid), changes)
for prop in self.metadata:
enforce(doc[prop] is not None, 'Empty %r property', prop)
return doc.props
def _prestore(self, guid, changes, event):
- doc = self.resource_class(guid, self._storage.get(guid))
+ doc = self.resource(guid, self._storage.get(guid))
doc.post_seqno = self._seqno.next()
for prop in self.metadata.keys():
value = changes.get(prop)
@@ -272,31 +249,14 @@ class Directory(object):
self._broadcast({'event': 'commit', 'mtime': self._index.mtime})
def _save_layout(self):
- path = join(self._root, 'layout')
+ path = join(self._root, 'index', self.metadata.name, 'layout')
with toolkit.new_file(path) as f:
f.write(str(_LAYOUT_VERSION))
def _is_layout_stale(self):
- path = join(self._root, 'layout')
+ path = join(self._root, 'index', self.metadata.name, 'layout')
if not exists(path):
return True
with file(path) as f:
version = f.read()
return not version.isdigit() or int(version) != _LAYOUT_VERSION
-
-
-class _SessionSeqno(object):
-
- def __init__(self):
- self._value = 0
-
- @property
- def value(self):
- return self._value
-
- def next(self):
- self._value += 1
- return self._value
-
- def commit(self):
- pass
diff --git a/sugar_network/db/index.py b/sugar_network/db/index.py
index b44bdfb..eb8f0cb 100644
--- a/sugar_network/db/index.py
+++ b/sugar_network/db/index.py
@@ -123,7 +123,7 @@ class IndexReader(object):
raise NotImplementedError()
def find(self, offset=0, limit=None, query='', reply=('guid',),
- order_by=None, no_cache=False, group_by=None, **request):
+ order_by=None, group_by=None, **request):
"""Search resources within the index.
The result will be an array of dictionaries with found documents'
diff --git a/sugar_network/db/metadata.py b/sugar_network/db/metadata.py
index 9ba5998..ecefdab 100644
--- a/sugar_network/db/metadata.py
+++ b/sugar_network/db/metadata.py
@@ -16,7 +16,6 @@
import xapian
from sugar_network import toolkit
-from sugar_network.db import blobs
from sugar_network.toolkit.router import ACL, File
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import i18n, http, enforce
@@ -304,49 +303,27 @@ class Blob(Property):
return value
enforce(value is None or isinstance(value, basestring) or
- isinstance(value, dict) and value or hasattr(value, 'read'),
- 'Inappropriate blob value')
+ hasattr(value, 'read'),
+ http.BadRequest, 'Inappropriate blob value')
if not value:
return ''
- if not isinstance(value, dict):
- mime_type = None
- if this.request.prop == self.name:
- mime_type = this.request.content_type
- if not mime_type:
- mime_type = self.mime_type
- return blobs.post(value, mime_type).digest
-
- digest = this.resource[self.name] if self.name else None
- if digest:
- orig = blobs.get(digest)
- enforce('digest' not in value or value.pop('digest') == digest,
- "Inappropriate 'digest' value")
- enforce(orig.path or 'location' in orig or 'location' in value,
- 'Blob points to nothing')
- if 'location' in value and orig.path:
- blobs.delete(digest)
- orig.update(value)
- value = orig
- else:
- enforce('location' in value, 'Blob points to nothing')
- enforce('digest' in value, "Missed 'digest' value")
- if 'content-type' not in value:
- value['content-type'] = self.mime_type
- digest = value.pop('digest')
-
- blobs.update(digest, value)
- return digest
+ mime_type = None
+ if this.request.prop == self.name:
+ mime_type = this.request.content_type
+ if not mime_type:
+ mime_type = self.mime_type
+ return this.volume.blobs.post(value, mime_type).digest
def reprcast(self, value):
if not value:
return File.AWAY
- return blobs.get(value)
+ return this.volume.blobs.get(value)
def teardown(self, value):
if value:
- blobs.delete(value)
+ this.volume.blobs.delete(value)
def assert_access(self, mode, value=None):
if mode == ACL.WRITE and not value:
diff --git a/sugar_network/db/resource.py b/sugar_network/db/resource.py
index 2636dca..71a3efd 100644
--- a/sugar_network/db/resource.py
+++ b/sugar_network/db/resource.py
@@ -18,6 +18,7 @@ from sugar_network.db.metadata import Numeric, List, Authors
from sugar_network.db.metadata import Composite, Aggregated
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit.router import ACL
+from sugar_network.toolkit import ranges
class Resource(object):
@@ -25,6 +26,8 @@ class Resource(object):
#: `Metadata` object that describes the document
metadata = None
+ #: Whether these resources should be migrated from slave-to-master only
+ one_way = False
def __init__(self, guid, record, cached_props=None):
self.props = cached_props or {}
@@ -118,7 +121,9 @@ class Resource(object):
if self.record is not None:
return self.record.get(prop)
- def diff(self, seq):
+ def diff(self, r):
+ patch = {}
+ last_seqno = None
for name, prop in self.metadata.items():
if name == 'seqno' or prop.acl & ACL.CALC:
continue
@@ -126,19 +131,20 @@ class Resource(object):
if meta is None:
continue
seqno = meta.get('seqno')
- if seqno not in seq:
+ if not ranges.contains(r, seqno):
continue
+ last_seqno = max(seqno, last_seqno)
value = meta.get('value')
if isinstance(prop, Aggregated):
value_ = {}
for key, agg in value.items():
- if agg.pop('seqno') in seq:
+ if ranges.contains(r, agg.pop('seqno')):
value_[key] = agg
value = value_
- meta = {'mtime': meta['mtime'], 'value': value}
- yield name, meta, seqno
+ patch[name] = {'mtime': meta['mtime'], 'value': value}
+ return last_seqno, patch
- def patch(self, props):
+ def format_patch(self, props):
if not props:
return {}
patch = {}
diff --git a/sugar_network/db/routes.py b/sugar_network/db/routes.py
index d8d2fb4..153e0a7 100644
--- a/sugar_network/db/routes.py
+++ b/sugar_network/db/routes.py
@@ -19,10 +19,8 @@ import logging
from contextlib import contextmanager
from sugar_network import toolkit
-from sugar_network.db import blobs
from sugar_network.db.metadata import Aggregated
-from sugar_network.toolkit.router import ACL, File
-from sugar_network.toolkit.router import route, preroute, fallbackroute
+from sugar_network.toolkit.router import ACL, File, route, fallbackroute
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import http, enforce
@@ -39,11 +37,6 @@ class Routes(object):
self._find_limit = find_limit
this.volume = self.volume
- @preroute
- def __preroute__(self, op, request, response):
- this.request = request
- this.response = response
-
@route('POST', [None], acl=ACL.AUTH, mime_type='application/json')
def create(self, request):
with self._post(request, ACL.CREATE) as doc:
@@ -91,14 +84,6 @@ class Routes(object):
self.volume[request.resource].update(doc.guid, doc.props)
self.after_post(doc)
- @route('GET', [None, None], cmd='diff', mime_type='application/json')
- def diff(self, request):
- result = {}
- res = self.volume[request.resource][request.guid]
- for prop, meta, __ in res.diff(toolkit.Sequence([[0, None]])):
- result[prop] = meta
- return result
-
@route('PUT', [None, None, None], acl=ACL.AUTH | ACL.AUTHOR)
def update_prop(self, request):
if request.content is None:
@@ -187,8 +172,8 @@ class Routes(object):
directory.update(request.guid, {'author': authors})
@fallbackroute('GET', ['blobs'])
- def blobs(self, request):
- return blobs.get(request.guid)
+ def blobs(self):
+ return this.volume.blobs.get(this.request.guid)
def on_create(self, request, props):
ts = int(time.time())
@@ -215,7 +200,7 @@ class Routes(object):
directory = self.volume[request.resource]
if access == ACL.CREATE:
- doc = directory.resource_class(None, None)
+ doc = directory.resource(None, None)
if 'guid' in content:
# TODO Temporal security hole, see TODO
guid = content['guid']
diff --git a/sugar_network/db/storage.py b/sugar_network/db/storage.py
index 72cbcf7..bbb50db 100644
--- a/sugar_network/db/storage.py
+++ b/sugar_network/db/storage.py
@@ -25,9 +25,8 @@ from sugar_network import toolkit
class Storage(object):
"""Get access to documents' data storage."""
- def __init__(self, root, metadata):
+ def __init__(self, root):
self._root = root
- self.metadata = metadata
def get(self, guid):
"""Get access to particular document's properties.
@@ -50,12 +49,7 @@ class Storage(object):
path = self._path(guid)
if not exists(path):
return
- try:
- shutil.rmtree(path)
- except Exception, error:
- toolkit.exception()
- raise RuntimeError('Cannot delete %r document from %r: %s' %
- (guid, self.metadata.name, error))
+ shutil.rmtree(path)
def walk(self, mtime):
"""Generator function to enumerate all existing documents.
diff --git a/sugar_network/db/volume.py b/sugar_network/db/volume.py
index 6457b93..5ec5683 100644
--- a/sugar_network/db/volume.py
+++ b/sugar_network/db/volume.py
@@ -15,12 +15,14 @@
import os
import logging
+from copy import deepcopy
from os.path import exists, join, abspath
from sugar_network import toolkit
from sugar_network.db.directory import Directory
from sugar_network.db.index import IndexWriter
-from sugar_network.toolkit import http, coroutine, enforce
+from sugar_network.db.blobs import Blobs
+from sugar_network.toolkit import http, coroutine, ranges, enforce
_logger = logging.getLogger('db.volume')
@@ -44,8 +46,10 @@ class Volume(dict):
if not exists(root):
os.makedirs(root)
self._index_class = index_class
- self.seqno = toolkit.Seqno(join(self._root, 'db.seqno'))
- self.releases_seqno = toolkit.Seqno(join(self._root, 'releases.seqno'))
+ self.seqno = toolkit.Seqno(join(self._root, 'var', 'db.seqno'))
+ self.releases_seqno = toolkit.Seqno(
+ join(self._root, 'var', 'releases.seqno'))
+ self.blobs = Blobs(root, self.seqno)
for document in documents:
if isinstance(document, basestring):
@@ -72,6 +76,74 @@ class Volume(dict):
for __ in cls.populate():
coroutine.dispatch()
+ def diff(self, r, files=None, one_way=False):
+ last_seqno = None
+ try:
+ for resource, directory in self.items():
+ if one_way and directory.resource.one_way:
+ continue
+ directory.commit()
+ yield {'resource': resource}
+ for start, end in r:
+ query = 'seqno:%s..' % start
+ if end:
+ query += str(end)
+ docs, __ = directory.find(query=query, order_by='seqno')
+ for doc in docs:
+ seqno, patch = doc.diff(r)
+ if not patch:
+ continue
+ yield {'guid': doc.guid, 'patch': patch}
+ last_seqno = max(last_seqno, seqno)
+ for blob in self.blobs.diff(r):
+ seqno = int(blob.pop('x-seqno'))
+ yield blob
+ last_seqno = max(last_seqno, seqno)
+ for dirpath in files or []:
+ for blob in self.blobs.diff(r, dirpath):
+ seqno = int(blob.pop('x-seqno'))
+ yield blob
+ last_seqno = max(last_seqno, seqno)
+ except StopIteration:
+ pass
+
+ if last_seqno:
+ commit_r = deepcopy(r)
+ ranges.exclude(commit_r, last_seqno + 1, None)
+ ranges.exclude(r, None, last_seqno)
+ yield {'commit': commit_r}
+
+ def patch(self, records):
+ directory = None
+ commit_r = []
+ merged_r = []
+ seqno = None
+
+ for record in records:
+ resource_ = record.get('resource')
+ if resource_:
+ directory = self[resource_]
+ continue
+
+ if 'guid' in record:
+ seqno = directory.patch(record['guid'], record['patch'], seqno)
+ continue
+
+ if 'content-length' in record:
+ if seqno is None:
+ seqno = self.seqno.next()
+ self.blobs.patch(record, seqno)
+ continue
+
+ commit = record.get('commit')
+ if commit is not None:
+ ranges.include(commit_r, commit)
+ continue
+
+ if seqno is not None:
+ ranges.include(merged_r, seqno, seqno)
+ return commit_r, merged_r
+
def __enter__(self):
return self
@@ -79,8 +151,8 @@ class Volume(dict):
self.close()
def __getitem__(self, name):
- directory = self.get(name)
- if directory is None:
+ dir_ = self.get(name)
+ if dir_ is None:
enforce(name in self.resources, http.BadRequest,
'Unknown %r resource', name)
resource = self.resources[name]
@@ -89,11 +161,10 @@ class Volume(dict):
cls = getattr(mod, name.capitalize())
else:
cls = resource
- directory = Directory(join(self._root, name), cls,
- self._index_class, self.seqno)
- self._populators.spawn(self._populate, directory)
- self[name] = directory
- return directory
+ dir_ = Directory(self._root, cls, self._index_class, self.seqno)
+ self._populators.spawn(self._populate, dir_)
+ self[name] = dir_
+ return dir_
def _populate(self, directory):
for __ in directory.populate():
diff --git a/sugar_network/model/__init__.py b/sugar_network/model/__init__.py
index 5b7a245..bd7405d 100644
--- a/sugar_network/model/__init__.py
+++ b/sugar_network/model/__init__.py
@@ -22,7 +22,6 @@ from os.path import join
import xapian
from sugar_network import toolkit, db
-from sugar_network.db import blobs
from sugar_network.model.routes import FrontRoutes
from sugar_network.toolkit.spec import parse_version, parse_requires
from sugar_network.toolkit.spec import EMPTY_LICENSE
@@ -81,7 +80,7 @@ class Release(object):
return release
if not isinstance(release, dict):
__, release = load_bundle(
- blobs.post(release, this.request.content_type),
+ this.volume.blobs.post(release, this.request.content_type),
context=this.request.guid)
return release['bundles']['*-*']['blob'], release
@@ -91,7 +90,7 @@ class Release(object):
'book' not in this.resource['type']:
return
for bundle in release['bundles'].values():
- blobs.delete(bundle['blob'])
+ this.volume.blobs.delete(bundle['blob'])
def encode(self, value):
return []
@@ -123,6 +122,7 @@ def populate_context_images(props, svg):
if 'guid' in props:
from sugar_network.toolkit.sugar import color_svg
svg = color_svg(svg, props['guid'])
+ blobs = this.volume.blobs
props['artifact_icon'] = blobs.post(svg, 'image/svg+xml').digest
props['icon'] = blobs.post(svg_to_png(svg, 55, 55), 'image/png').digest
props['logo'] = blobs.post(svg_to_png(svg, 140, 140), 'image/png').digest
@@ -212,10 +212,10 @@ def load_bundle(blob, context=None, initial=False, extra_deps=None):
_logger.debug('Load %r release: %r', context, release)
if this.request.principal in context_doc['author']:
- diff = context_doc.patch(context_meta)
- if diff:
- this.call(method='PUT', path=['context', context], content=diff)
- context_doc.props.update(diff)
+ patch = context_doc.format_patch(context_meta)
+ if patch:
+ this.call(method='PUT', path=['context', context], content=patch)
+ context_doc.props.update(patch)
# TRANS: Release notes title
title = i18n._('%(name)s %(version)s release')
else:
@@ -237,7 +237,7 @@ def load_bundle(blob, context=None, initial=False, extra_deps=None):
''.join(i18n.decode(context_doc['title']).split()),
version, mimetypes.guess_extension(blob.get('content-type')) or '',
)
- blobs.update(blob.digest, blob)
+ this.volume.blobs.update(blob.digest, blob)
return context, release
diff --git a/sugar_network/model/report.py b/sugar_network/model/report.py
index 980c3ff..be9fd9f 100644
--- a/sugar_network/model/report.py
+++ b/sugar_network/model/report.py
@@ -32,6 +32,8 @@ class _Solution(db.Property):
class Report(db.Resource):
+ one_way = True
+
@db.indexed_property(prefix='C', acl=ACL.CREATE | ACL.READ)
def context(self, value):
return value
diff --git a/sugar_network/model/routes.py b/sugar_network/model/routes.py
index 35c56a9..af19023 100644
--- a/sugar_network/model/routes.py
+++ b/sugar_network/model/routes.py
@@ -15,7 +15,6 @@
import logging
-from sugar_network.db import blobs
from sugar_network.toolkit.router import route
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import coroutine
@@ -60,8 +59,8 @@ class FrontRoutes(object):
return 'User-agent: *\nDisallow: /\n'
@route('GET', ['favicon.ico'])
- def favicon(self, request, response):
- return blobs.get('favicon.ico')
+ def favicon(self):
+ return this.volume.blobs.get('favicon.ico')
def _broadcast(self, event):
_logger.debug('Broadcast event: %r', event)
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index 792267a..d3d9b88 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -21,6 +21,7 @@ import shutil
import logging
import tempfile
import collections
+from copy import deepcopy
from cStringIO import StringIO
from os.path import exists, join, islink, isdir, dirname, basename, abspath
from os.path import lexists, isfile
@@ -487,231 +488,71 @@ class NamedTemporaryFile(object):
return getattr(self._file, name)
-class Seqno(object):
- """Sequence number counter with persistent storing in a file."""
+class Bin(object):
+ """Store variable in a file."""
- def __init__(self, path):
- """
- :param path:
- path to file to [re]store seqno value
+ def __init__(self, path, default_value=None):
+ self._path = abspath(path)
+ self.value = default_value
+ self._orig_value = None
- """
- self._path = path
- self._value = 0
- if exists(path):
- with file(path) as f:
- self._value = int(f.read().strip())
- self._orig_value = self._value
+ if exists(self._path):
+ with file(self._path) as f:
+ self.value = json.load(f)
+ else:
+ self.commit()
+ self._orig_value = deepcopy(self.value)
@property
- def value(self):
- """Current seqno value."""
- return self._value
-
- def next(self):
- """Incerement seqno.
-
- :returns:
- new seqno value
-
- """
- self._value += 1
- return self._value
+ def mtime(self):
+ if exists(self._path):
+ return os.stat(self._path).st_mtime
+ else:
+ return 0
def commit(self):
- """Store current seqno value in a file.
+ """Store current value in a file.
:returns:
`True` if commit was happened
"""
- if self._value == self._orig_value:
+ if self.value == self._orig_value:
return False
with new_file(self._path) as f:
- f.write(str(self._value))
+ json.dump(self.value, f)
f.flush()
os.fsync(f.fileno())
- self._orig_value = self._value
+ self._orig_value = self.value
return True
+ def __enter__(self):
+ return self
-class Sequence(list):
- """List of sorted and non-overlapping ranges.
-
- List items are ranges, [`start`, `stop']. If `start` or `stop`
- is `None`, it means the beginning or ending of the entire sequence.
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.commit()
- """
- def __init__(self, value=None, empty_value=None):
- """
- :param value:
- default value to initialize range
- :param empty_value:
- if not `None`, the initial value for empty range
+class Seqno(Bin):
+ """Sequence number counter with persistent storing in a file."""
+ def __init__(self, path):
"""
- if empty_value is None:
- self._empty_value = []
- else:
- self._empty_value = [empty_value]
-
- if value:
- self.extend(value)
- else:
- self.clear()
-
- def __contains__(self, value):
- for start, end in self:
- if value >= start and (end is None or value <= end):
- return True
- else:
- return False
-
- @property
- def empty(self):
- """Is timeline in the initial state."""
- return self == self._empty_value
-
- def clear(self):
- """Reset range to the initial value."""
- self[:] = self._empty_value
-
- def stretch(self):
- """Remove all holes between the first and the last items."""
- if self:
- self[:] = [[self[0][0], self[-1][-1]]]
-
- def include(self, start, end=None):
- """Include specified range.
-
- :param start:
- either including range start or a list of
- (`start`, `end`) pairs
- :param end:
- including range end
+ :param path:
+ path to file to [re]store seqno value
"""
- if issubclass(type(start), collections.Iterable):
- for range_start, range_end in start:
- self._include(range_start, range_end)
- elif start is not None:
- self._include(start, end)
+ Bin.__init__(self, path, 0)
- def exclude(self, start, end=None):
- """Exclude specified range.
+ def next(self):
+ """Increment seqno.
- :param start:
- either excluding range start or a list of
- (`start`, `end`) pairs
- :param end:
- excluding range end
+ :returns:
+ new seqno value
"""
- if issubclass(type(start), collections.Iterable):
- for range_start, range_end in start:
- self._exclude(range_start, range_end)
- else:
- enforce(end is not None)
- self._exclude(start, end)
-
- def _include(self, range_start, range_end):
- if range_start is None:
- range_start = 1
-
- range_start_new = None
- range_start_i = 0
-
- for range_start_i, (start, end) in enumerate(self):
- if range_end is not None and start - 1 > range_end:
- break
- if (range_end is None or start - 1 <= range_end) and \
- (end is None or end + 1 >= range_start):
- range_start_new = min(start, range_start)
- break
- else:
- range_start_i += 1
-
- if range_start_new is None:
- self.insert(range_start_i, [range_start, range_end])
- return
-
- range_end_new = range_end
- range_end_i = range_start_i
- for i, (start, end) in enumerate(self[range_start_i:]):
- if range_end is not None and start - 1 > range_end:
- break
- if range_end is None or end is None:
- range_end_new = None
- else:
- range_end_new = max(end, range_end)
- range_end_i = range_start_i + i
-
- del self[range_start_i:range_end_i]
- self[range_start_i] = [range_start_new, range_end_new]
-
- def _exclude(self, range_start, range_end):
- if range_start is None:
- range_start = 1
- enforce(range_end is not None)
- enforce(range_start <= range_end and range_start > 0,
- 'Start value %r is less than 0 or not less than %r',
- range_start, range_end)
-
- for i, interval in enumerate(self):
- start, end = interval
-
- if end is not None and end < range_start:
- # Current `interval` is below new one
- continue
-
- if range_end is not None and range_end < start:
- # Current `interval` is above new one
- continue
-
- if end is None or end > range_end:
- # Current `interval` will exist after changing
- self[i] = [range_end + 1, end]
- if start < range_start:
- self.insert(i, [start, range_start - 1])
- else:
- if start < range_start:
- self[i] = [start, range_start - 1]
- else:
- del self[i]
-
- if end is not None:
- range_start = end + 1
- if range_start < range_end:
- self.exclude(range_start, range_end)
- break
-
-
-class PersistentSequence(Sequence):
-
- def __init__(self, path, empty_value=None):
- Sequence.__init__(self, empty_value=empty_value)
- self._path = path
-
- if exists(self._path):
- with file(self._path) as f:
- self[:] = json.load(f)
-
- @property
- def mtime(self):
- if exists(self._path):
- return os.stat(self._path).st_mtime
- else:
- return 0
-
- def commit(self):
- dir_path = dirname(self._path)
- if dir_path and not exists(dir_path):
- os.makedirs(dir_path)
- with new_file(self._path) as f:
- json.dump(self, f)
- f.flush()
- os.fsync(f.fileno())
+ self.value += 1
+ return self.value
class Pool(object):
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index 47f13bc..d280035 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -378,14 +378,16 @@ class SugarAuth(object):
from M2Crypto import RSA
from base64 import b64encode
+ key_dir = dirname(self._key_path)
if exists(self._key_path):
+ if os.stat(key_dir).st_mode & 077:
+ os.chmod(key_dir, 0700)
self._key = RSA.load_key(self._key_path)
return
- key_dir = dirname(self._key_path)
if not exists(key_dir):
os.makedirs(key_dir)
- os.chmod(key_dir, 0700)
+ os.chmod(key_dir, 0700)
_logger.info('Generate RSA private key at %r', self._key_path)
self._key = RSA.gen_key(1024, 65537, lambda *args: None)
diff --git a/sugar_network/toolkit/parcel.py b/sugar_network/toolkit/parcel.py
index 457ea07..43e6960 100644
--- a/sugar_network/toolkit/parcel.py
+++ b/sugar_network/toolkit/parcel.py
@@ -19,15 +19,19 @@ import zlib
import time
import json
import struct
+import hashlib
import logging
from types import GeneratorType
from os.path import dirname, exists, join
from sugar_network import toolkit
from sugar_network.toolkit.router import File
+from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import http, coroutine, BUFFER_SIZE, enforce
+DEFAULT_COMPRESSLEVEL = 6
+
_FILENAME_SUFFIX = '.parcel'
_RESERVED_DISK_SPACE = 1024 * 1024
@@ -48,15 +52,19 @@ def decode(stream, limit=None):
packet.next()
if packet.name == 'last':
break
- packet.props.update(header)
+ packet.header.update(header)
yield packet
-def encode(packets, limit=None, header=None, compresslevel=6):
+def encode(packets, limit=None, header=None, compresslevel=None,
+ on_complete=None):
_logger.debug('Encode %r packets limit=%r header=%r',
packets, limit, header)
ostream = _ZipStream(compresslevel)
+ # In case of downloading blobs
+ # (?) reuse current `this.http`
+ this.http = http.Connection()
if limit is None:
limit = sys.maxint
@@ -87,12 +95,13 @@ def encode(packets, limit=None, header=None, compresslevel=6):
record = next(content)
continue
blob_len = 0
- if isinstance(record, File) and record.path:
+ if isinstance(record, File):
blob_len = record.size
chunk = ostream.write_record(record,
None if finalizing else limit - blob_len)
if chunk is None:
_logger.debug('Reach the encoding limit')
+ on_complete = None
if not isinstance(content, GeneratorType):
raise StopIteration()
finalizing = True
@@ -101,22 +110,21 @@ def encode(packets, limit=None, header=None, compresslevel=6):
if chunk:
yield chunk
if blob_len:
- with file(record.path, 'rb') as blob:
- while True:
- chunk = blob.read(BUFFER_SIZE)
- if not chunk:
- break
- blob_len -= len(chunk)
- if not blob_len:
- chunk += '\n'
- chunk = ostream.write(chunk)
- if chunk:
- yield chunk
+ for chunk in record.iter_content():
+ blob_len -= len(chunk)
+ if not blob_len:
+ chunk += '\n'
+ chunk = ostream.write(chunk)
+ if chunk:
+ yield chunk
enforce(blob_len == 0, EOFError, 'Blob size mismatch')
record = next(content)
except StopIteration:
pass
+ if on_complete is not None:
+ on_complete()
+
chunk = ostream.write_record({'packet': 'last'})
if chunk:
yield chunk
@@ -173,7 +181,7 @@ class _DecodeIterator(object):
def __init__(self, stream):
self._stream = stream
- self.props = {}
+ self.header = {}
self._name = None
self._shift = True
@@ -190,10 +198,10 @@ class _DecodeIterator(object):
self._shift = True
def __repr__(self):
- return '<Packet %r>' % self.props
+ return '<Packet %r>' % self.header
def __getitem__(self, key):
- return self.props.get(key)
+ return self.header.get(key)
def __iter__(self):
while True:
@@ -203,7 +211,7 @@ class _DecodeIterator(object):
raise EOFError()
if 'packet' in record:
self._name = record['packet'] or ''
- self.props = record
+ self.header = record
self._shift = False
break
blob_len = record.get('content-length')
@@ -212,13 +220,15 @@ class _DecodeIterator(object):
continue
blob_len = int(blob_len)
with toolkit.NamedTemporaryFile() as blob:
+ digest = hashlib.sha1()
while blob_len:
chunk = self._stream.read(min(blob_len, BUFFER_SIZE))
enforce(chunk, 'Blob size mismatch')
blob.write(chunk)
blob_len -= len(chunk)
+ digest.update(chunk)
blob.flush()
- yield File(blob.name, meta=record)
+ yield File(blob.name, digest=digest.hexdigest(), meta=record)
def __enter__(self):
return self
@@ -229,7 +239,9 @@ class _DecodeIterator(object):
class _ZipStream(object):
- def __init__(self, compresslevel=6):
+ def __init__(self, compresslevel=None):
+ if compresslevel is None:
+ compresslevel = DEFAULT_COMPRESSLEVEL
self._zipper = zlib.compressobj(compresslevel,
zlib.DEFLATED, -_ZLIB_WBITS, zlib.DEF_MEM_LEVEL, 0)
self._offset = 0
diff --git a/sugar_network/toolkit/ranges.py b/sugar_network/toolkit/ranges.py
new file mode 100644
index 0000000..247944e
--- /dev/null
+++ b/sugar_network/toolkit/ranges.py
@@ -0,0 +1,198 @@
+# Copyright (C) 2011-2014 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Routines to handle lists of sorted and non-overlapping ranges.
+
+List items are [`start`, `stop`] ranges. If `start` or `stop` is `None`,
+it means the beginning or ending of the entire list.
+
+"""
+import sys
+import collections
+
+from sugar_network.toolkit import enforce
+
+
+def contains(r, value):
+ """Whether the specified value is included in one of the ranges."""
+ for start, end in r:
+ if value >= start and (end is None or value <= end):
+ return True
+ else:
+ return False
+
+
+def stretch(r):
+ """Remove all holes between the first and the last ranges."""
+ if r:
+ r[:] = [[r[0][0], r[-1][-1]]]
+
+
+def include(r, start, end=None):
+ """Insert specified range.
+
+ :param start:
+ either including range start or a list of
+ (`start`, `end`) pairs
+ :param end:
+ including range end
+
+ """
+ if issubclass(type(start), collections.Iterable):
+ for range_start, range_end in start:
+ _include(r, range_start, range_end)
+ elif start is not None:
+ _include(r, start, end)
+
+
+def exclude(r, start, end=None):
+ """Remove specified range.
+
+ :param start:
+ either excluding range start or a list of
+ (`start`, `end`) pairs
+ :param end:
+ excluding range end
+
+ """
+ if issubclass(type(start), collections.Iterable):
+ for range_start, range_end in start:
+ _exclude(r, range_start, range_end)
+ else:
+ _exclude(r, start, end)
+
+
+def intersect(r1, r2):
+ """Return an intersection between two range sets."""
+ result = []
+ for start1, end1 in r1:
+ if end1 is None:
+ end1 = sys.maxint
+ for start2, end2 in r2:
+ if end2 is None:
+ end2 = sys.maxint
+ start = max(start1, start2)
+ end = min(end1, end2)
+ if start > end:
+ continue
+ if end == sys.maxint:
+ result.append([start, None])
+ break
+ result.append([start, end])
+ return result
+
+
+def _include(r, range_start, range_end):
+ if range_start is None:
+ range_start = 1
+
+ range_start_new = None
+ range_start_i = 0
+
+ for range_start_i, (start, end) in enumerate(r):
+ if range_end is not None and start - 1 > range_end:
+ break
+ if (range_end is None or start - 1 <= range_end) and \
+ (end is None or end + 1 >= range_start):
+ range_start_new = min(start, range_start)
+ break
+ else:
+ range_start_i += 1
+
+ if range_start_new is None:
+ r.insert(range_start_i, [range_start, range_end])
+ return
+
+ range_end_new = range_end
+ range_end_i = range_start_i
+ for i, (start, end) in enumerate(r[range_start_i:]):
+ if range_end is not None and start - 1 > range_end:
+ break
+ if range_end is None or end is None:
+ range_end_new = None
+ else:
+ range_end_new = max(end, range_end)
+ range_end_i = range_start_i + i
+
+ del r[range_start_i:range_end_i]
+ r[range_start_i] = [range_start_new, range_end_new]
+
+
+def _exclude(r, range_start, range_end):
+ enforce(range_start is not None or range_end is not None)
+
+ if range_start is None:
+ for i, interval in enumerate(r):
+ start, end = interval
+ if range_end < start:
+ del r[:i]
+ return
+ if end is not None:
+ if range_end == end:
+ del r[:i + 1]
+ return
+ if range_end < end:
+ interval[0] = min(range_end + 1, end)
+ del r[:i]
+ return
+ if r and r[-1][1] is None:
+ r[:] = [[range_end + 1, None]]
+ else:
+ del r[:]
+ return
+
+ if range_end is None:
+ for i, interval in enumerate(r):
+ start, end = interval
+ if end is None or range_start <= end:
+ if range_start <= start:
+ del r[i:]
+ else:
+ interval[1] = range_start - 1
+ del r[i + 1:]
+ return
+ return
+
+ enforce(range_start <= range_end and range_start > 0,
+ 'Start value %r is less than 0 or not less than %r',
+ range_start, range_end)
+
+ for i, interval in enumerate(r):
+ start, end = interval
+
+ if end is not None and end < range_start:
+ # Current `interval` is below new one
+ continue
+
+ if range_end is not None and range_end < start:
+ # Current `interval` is above new one
+ continue
+
+ if end is None or end > range_end:
+ # Current `interval` will exist after changing
+ r[i] = [range_end + 1, end]
+ if start < range_start:
+ r.insert(i, [start, range_start - 1])
+ else:
+ if start < range_start:
+ r[i] = [start, range_start - 1]
+ else:
+ del r[i]
+
+ if end is not None:
+ range_start = end + 1
+ if range_start < range_end:
+ exclude(r, range_start, range_end)
+ break
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index 4206121..48a04fe 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -20,12 +20,13 @@ import time
import types
import logging
import calendar
-from base64 import b64decode
+from base64 import b64decode, b64encode
from bisect import bisect_left
from urllib import urlencode
+from Cookie import SimpleCookie
from urlparse import parse_qsl, urlsplit
from email.utils import parsedate, formatdate
-from os.path import isfile
+from os.path import isfile, basename, exists
from sugar_network import toolkit
from sugar_network.toolkit.coroutine import this
@@ -357,11 +358,11 @@ class CaseInsensitiveDict(dict):
def __setitem__(self, key, value):
return self.set(key.lower(), value)
- def __delitem__(self, key, value):
+ def __delitem__(self, key):
self.remove(key.lower())
- def get(self, key):
- return dict.get(self, key)
+ def get(self, key, default=None):
+ return dict.get(self, key, default)
def set(self, key, value):
dict.__setitem__(self, key, value)
@@ -426,17 +427,21 @@ class File(CaseInsensitiveDict):
pass
def __init__(self, path, digest=None, meta=None):
- CaseInsensitiveDict.__init__(self)
+ CaseInsensitiveDict.__init__(self, meta or [])
self.path = path
self.digest = File.Digest(digest) if digest else None
- if meta is not None:
- for key, value in meta.items() if isinstance(meta, dict) else meta:
- self[key] = value
self._stat = None
@property
+ def exists(self):
+ return self.path and exists(self.path)
+
+ @property
def size(self):
if self._stat is None:
+ if not self.exists:
+ size = self.get('content-length', 0)
+ return int(size) if size else 0
self._stat = os.stat(self.path)
return self._stat.st_size
@@ -453,9 +458,37 @@ class File(CaseInsensitiveDict):
return self.get('location') or \
'%s/blobs/%s' % (this.request.static_prefix, self.digest)
+ @property
+ def name(self):
+ if self.path:
+ return basename(self.path)
+
def __repr__(self):
return '<File %r>' % self.url
+ def iter_content(self):
+ if self.path:
+ return self._iter_content()
+ url = self.get('location')
+ enforce(url, http.NotFound, 'No location')
+ blob = this.http.request('GET', url, allow_redirects=True,
+ # Request for uncompressed data
+ headers={'accept-encoding': ''})
+ self.clear()
+ for tag in ('content-length', 'content-type', 'content-disposition'):
+ value = blob.headers.get(tag)
+ if value:
+ self[tag] = value
+ return blob.iter_content(toolkit.BUFFER_SIZE)
+
+ def _iter_content(self):
+ with file(self.path, 'rb') as f:
+ while True:
+ chunk = f.read(toolkit.BUFFER_SIZE)
+ if not chunk:
+ break
+ yield chunk
+
class Router(object):
@@ -532,6 +565,8 @@ class Router(object):
if response is None:
response = Response()
+ this.request = request
+ this.response = response
route_ = self._resolve_route(request)
for arg, cast in route_.arguments.items():
@@ -592,6 +627,8 @@ class Router(object):
content = None
try:
+ this.cookie = _load_cookie(request, 'sugar_network_node')
+
if 'HTTP_ORIGIN' in request.environ:
enforce(self._assert_origin(request.environ), http.Forbidden,
'Cross-site is not allowed for %r origin',
@@ -655,10 +692,10 @@ class Router(object):
content = json.dumps(content)
if 'content-length' not in response:
response.content_length = len(content) if content else 0
-
if request.method == 'HEAD' and content is not None:
_logger.warning('Content from HEAD response is ignored')
content = None
+ _save_cookie(response, 'sugar_network_node', this.cookie)
_logger.trace('%s call: request=%s response=%r content=%r',
self, request.environ, response, repr(content)[:256])
@@ -845,6 +882,42 @@ def _parse_accept_language(value):
return langs
+def _load_cookie(request, name):
+ cookie_str = request.environ.get('HTTP_COOKIE')
+ if not cookie_str:
+ return _Cookie()
+ cookie = SimpleCookie()
+ cookie.load(cookie_str)
+ if name not in cookie:
+ return _Cookie()
+ raw_value = cookie.get(name).value
+ if raw_value == 'unset_%s' % name:
+ _logger.debug('Found unset %r cookie', name)
+ return _Cookie()
+ value = _Cookie(json.loads(b64decode(raw_value)))
+ value.loaded = True
+ _logger.debug('Found %r cookie value=%r', name, value)
+ return value
+
+
+def _save_cookie(response, name, value, age=3600):
+ if value:
+ _logger.debug('Set %r cookie value=%r age=%s', name, value, age)
+ raw_value = b64encode(json.dumps(value))
+ else:
+ if not value.loaded:
+ return
+ _logger.debug('Unset %r cookie', name)
+ raw_value = 'unset_%s' % name
+ cookie = '%s=%s; Max-Age=%s; HttpOnly' % (name, raw_value, age)
+ response.setdefault('set-cookie', []).append(cookie)
+
+
+class _Cookie(dict):
+
+ loaded = False
+
+
class _Routes(dict):
def __init__(self, parent=None):
diff --git a/tests/__init__.py b/tests/__init__.py
index 96fb8db..cc9ec01 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -17,10 +17,9 @@ from os.path import dirname, join, exists, abspath, isfile
from M2Crypto import DSA
from gevent import monkey
-from sugar_network.toolkit import coroutine, http, mountpoints, Option, gbus, i18n, languages
+from sugar_network.toolkit import coroutine, http, mountpoints, Option, gbus, i18n, languages, parcel
from sugar_network.toolkit.router import Router, Request
from sugar_network.toolkit.coroutine import this
-from sugar_network.db import blobs
from sugar_network.client import IPCConnection, journal, routes as client_routes
from sugar_network.client.routes import ClientRoutes, _Auth
from sugar_network import db, client, node, toolkit, model
@@ -28,7 +27,7 @@ from sugar_network.model.user import User
from sugar_network.model.context import Context
from sugar_network.model.post import Post
from sugar_network.node.master import MasterRoutes
-from sugar_network.node import stats_user, obs, slave, downloads
+from sugar_network.node import stats_user, obs, slave
from requests import adapters
@@ -76,9 +75,8 @@ class Test(unittest.TestCase):
os.environ['SUGAR_LOGGER_LEVEL'] = 'all'
os.environ['HOME'] = tmpdir
os.environ['LC_ALL'] = 'en_US.UTF-8'
- profile_dir = join(tmpdir, '.sugar', 'default')
- os.makedirs(profile_dir)
+ parcel.DEFAULT_COMPRESSLEVEL = 0
adapters.DEFAULT_RETRIES = 5
Option.items = {}
Option.config_files = []
@@ -89,8 +87,6 @@ class Test(unittest.TestCase):
db.index_flush_threshold.value = 1
node.find_limit.value = 1024
node.data_root.value = tmpdir
- node.files_root.value = None
- node.sync_layers.value = None
node.stats_root.value = tmpdir + '/stats'
node.port.value = 8888
db.index_write_queue.value = 10
@@ -115,7 +111,6 @@ class Test(unittest.TestCase):
http._RECONNECTION_NUMBER = 0
toolkit.cachedir.value = tmpdir + '/tmp'
journal._ds_root = tmpdir + '/datastore'
- downloads._POOL_SIZE = 256
gbus.join()
db.Volume.model = [
@@ -143,8 +138,6 @@ class Test(unittest.TestCase):
this.call = None
this.broadcast = lambda x: x
- blobs.init('blobs')
-
def tearDown(self):
self.stop_nodes()
while db.Volume._flush_pool:
@@ -273,7 +266,7 @@ class Test(unittest.TestCase):
if classes is None:
classes = [User, Context, Post]
self.node_volume = db.Volume('master', classes)
- self.node_routes = routes('guid', volume=self.node_volume)
+ self.node_routes = routes('master', volume=self.node_volume)
self.node_router = Router(self.node_routes)
self.node = coroutine.WSGIServer(('127.0.0.1', 8888), self.node_router)
coroutine.spawn(self.node.serve_forever)
diff --git a/tests/units/db/__main__.py b/tests/units/db/__main__.py
index cff8f52..b03dde4 100644
--- a/tests/units/db/__main__.py
+++ b/tests/units/db/__main__.py
@@ -8,6 +8,7 @@ from index import *
from resource import *
from routes import *
from blobs import *
+from volume import *
#from migrate import *
if __name__ == '__main__':
diff --git a/tests/units/db/blobs.py b/tests/units/db/blobs.py
index 9a68402..cee8667 100755
--- a/tests/units/db/blobs.py
+++ b/tests/units/db/blobs.py
@@ -10,19 +10,17 @@ from os.path import exists, abspath
from __init__ import tests
from sugar_network import toolkit
-from sugar_network.db import blobs
-from sugar_network.toolkit.router import Request
+from sugar_network.db.blobs import Blobs
+from sugar_network.toolkit.router import Request, File
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import http
class BlobsTest(tests.Test):
- def setUp(self):
- tests.Test.setUp(self)
- blobs.init('.')
-
def test_post(self):
+ blobs = Blobs('.', Seqno())
+
content = 'probe'
blob = blobs.post(content)
@@ -30,11 +28,12 @@ class BlobsTest(tests.Test):
hashlib.sha1(content).hexdigest(),
blob.digest)
self.assertEqual(
- abspath('%s/%s' % (blob.digest[:3], blob.digest)),
+ abspath('blobs/%s/%s' % (blob.digest[:3], blob.digest)),
blob.path)
self.assertEqual({
'content-type': 'application/octet-stream',
'content-length': str(len(content)),
+ 'x-seqno': '1',
},
blob)
@@ -44,6 +43,7 @@ class BlobsTest(tests.Test):
self.assertEqual([
'content-type: application/octet-stream',
'content-length: %s' % len(content),
+ 'x-seqno: 1',
],
file(blob.path + '.meta').read().strip().split('\n'))
@@ -54,6 +54,8 @@ class BlobsTest(tests.Test):
assert the_same_blob.path == blob.path
def test_post_Stream(self):
+ blobs = Blobs('.', Seqno())
+
content = 'probe'
blob = blobs.post(StringIO(content))
@@ -61,11 +63,12 @@ class BlobsTest(tests.Test):
hashlib.sha1(content).hexdigest(),
blob.digest)
self.assertEqual(
- abspath('%s/%s' % (blob.digest[:3], blob.digest)),
+ abspath('blobs/%s/%s' % (blob.digest[:3], blob.digest)),
blob.path)
self.assertEqual({
'content-type': 'application/octet-stream',
'content-length': str(len(content)),
+ 'x-seqno': '1',
},
blob)
@@ -75,6 +78,7 @@ class BlobsTest(tests.Test):
self.assertEqual([
'content-type: application/octet-stream',
'content-length: %s' % len(content),
+ 'x-seqno: 1',
],
file(blob.path + '.meta').read().strip().split('\n'))
@@ -85,34 +89,39 @@ class BlobsTest(tests.Test):
assert the_same_blob.path == blob.path
def test_post_Url(self):
+ blobs = Blobs('.', Seqno())
+
self.assertRaises(http.BadRequest, blobs.post, {})
self.assertRaises(http.BadRequest, blobs.post, {'digest': 'digest'})
- blob = blobs.post({'location': 'location', 'digest': 'digest', 'foo': 'bar'})
+ blob = blobs.post({'location': 'location', 'digest': '0000000000000000000000000000000000000000', 'foo': 'bar'})
self.assertEqual(
- 'digest',
+ '0000000000000000000000000000000000000000',
blob.digest)
self.assertEqual(
- abspath('%s/%s' % (blob.digest[:3], blob.digest)),
+ abspath('blobs/%s/%s' % (blob.digest[:3], blob.digest)),
blob.path)
self.assertEqual({
'status': '301 Moved Permanently',
'location': 'location',
'content-type': 'application/octet-stream',
'content-length': '0',
+ 'x-seqno': '1',
},
blob)
self.assertEqual(
'',
file(blob.path).read())
- self.assertEqual([
- 'status: 301 Moved Permanently',
- 'location: location',
- 'content-type: application/octet-stream',
- 'content-length: 0',
- ],
- file(blob.path + '.meta').read().strip().split('\n'))
+ self.assertEqual(
+ sorted([
+ 'status: 301 Moved Permanently',
+ 'location: location',
+ 'content-type: application/octet-stream',
+ 'content-length: 0',
+ 'x-seqno: 1',
+ ]),
+ sorted(file(blob.path + '.meta').read().strip().split('\n')))
the_same_blob = blobs.get(blob.digest)
assert the_same_blob is not blob
@@ -121,419 +130,281 @@ class BlobsTest(tests.Test):
assert the_same_blob.path == blob.path
def test_update(self):
+ blobs = Blobs('.', Seqno())
+
blob = blobs.post('probe')
self.assertEqual({
'content-type': 'application/octet-stream',
'content-length': str(len('probe')),
+ 'x-seqno': '1',
},
blob)
blobs.update(blob.digest, {'foo': 'bar'})
self.assertEqual({
+ 'content-type': 'application/octet-stream',
+ 'content-length': str(len('probe')),
+ 'x-seqno': '1',
'foo': 'bar',
},
blobs.get(blob.digest))
def test_delete(self):
+ blobs = Blobs('.', Seqno())
+
blob = blobs.post('probe')
assert exists(blob.path)
assert exists(blob.path + '.meta')
+ self.assertEqual({
+ 'content-length': '5',
+ 'content-type': 'application/octet-stream',
+ 'x-seqno': '1',
+ },
+ dict(blobs.get(blob.digest)))
blobs.delete(blob.digest)
assert not exists(blob.path)
- assert not exists(blob.path + '.meta')
- assert blobs.get(blob.digest) is None
+ assert exists(blob.path + '.meta')
+ self.assertEqual({
+ 'content-length': '5',
+ 'content-type': 'application/octet-stream',
+ 'status': '410 Gone',
+ 'x-seqno': '2',
+ },
+ dict(blobs.get(blob.digest)))
- def test_diff(self):
- blobs.init('blobs')
+ def test_diff_Blobs(self):
+ blobs = Blobs('.', Seqno())
this.request = Request()
- self.touch(
- 'blobs/100/1000000000000000000000000000000000000001', ('blobs/100/1000000000000000000000000000000000000001.meta', ''),
- 'blobs/100/1000000000000000000000000000000000000002', ('blobs/100/1000000000000000000000000000000000000002.meta', ''),
- 'blobs/200/2000000000000000000000000000000000000003', ('blobs/200/2000000000000000000000000000000000000003.meta', ''),
- )
- in_seq1 = toolkit.Sequence([[0, None]])
- out_seq1 = toolkit.Sequence([])
+ self.touch('blobs/100/1000000000000000000000000000000000000001',
+ ('blobs/100/1000000000000000000000000000000000000001.meta', 'n: 1\nx-seqno: 1'))
+ self.utime('blobs/100/1000000000000000000000000000000000000001', 1)
+ self.utime('blobs/100/1000000000000000000000000000000000000001.meta', 1)
+ self.touch('blobs/100/1000000000000000000000000000000000000002',
+ ('blobs/100/1000000000000000000000000000000000000002.meta', 'n: 2\nx-seqno: 2'))
+ self.utime('blobs/100/1000000000000000000000000000000000000002', 2)
+ self.utime('blobs/100/1000000000000000000000000000000000000002.meta', 2)
+ self.touch('blobs/200/2000000000000000000000000000000000000003',
+ ('blobs/200/2000000000000000000000000000000000000003.meta', 'n: 3\nx-seqno: 3'))
+ self.utime('blobs/200/2000000000000000000000000000000000000003', 3)
+ self.utime('blobs/200/2000000000000000000000000000000000000003.meta', 3)
+
self.assertEqual([
- '2000000000000000000000000000000000000003',
- '1000000000000000000000000000000000000002',
- '1000000000000000000000000000000000000001',
+ ('2000000000000000000000000000000000000003', {'n': '3', 'x-seqno': '3'}),
+ ('1000000000000000000000000000000000000002', {'n': '2', 'x-seqno': '2'}),
+ ('1000000000000000000000000000000000000001', {'n': '1', 'x-seqno': '1'}),
],
- [i.digest for i in blobs.diff(in_seq1, out_seq1)])
- ctimes1 = [
- int(os.stat('blobs/100/1000000000000000000000000000000000000001').st_ctime),
- int(os.stat('blobs/200/2000000000000000000000000000000000000003').st_ctime),
- ]
- self.assertEqual(
- [[min(ctimes1), max(ctimes1)]],
- out_seq1)
-
- in_seq2 = toolkit.Sequence([[0, None]])
- in_seq2.exclude(out_seq1)
- out_seq2 = toolkit.Sequence([])
+ [(i.digest, dict(i)) for i in blobs.diff([[1, None]])])
self.assertEqual([
],
- [i.digest for i in blobs.diff(in_seq2, out_seq2)])
- self.assertEqual(
- [],
- out_seq2)
+ [(i.digest, dict(i)) for i in blobs.diff([[4, None]])])
- time.sleep(1.1)
- self.touch(
- 'blobs/200/2000000000000000000000000000000000000004', ('blobs/200/2000000000000000000000000000000000000004.meta', ''),
- 'blobs/300/3000000000000000000000000000000000000005', ('blobs/300/3000000000000000000000000000000000000005.meta', ''),
- )
+ self.touch('blobs/200/2000000000000000000000000000000000000004',
+ ('blobs/200/2000000000000000000000000000000000000004.meta', 'n: 4\nx-seqno: 4'))
+ self.utime('blobs/200/2000000000000000000000000000000000000004', 4)
+ self.utime('blobs/200/2000000000000000000000000000000000000004.meta', 4)
+ self.touch('blobs/300/3000000000000000000000000000000000000005',
+ ('blobs/300/3000000000000000000000000000000000000005.meta', 'n: 5\nx-seqno: 5'))
+ self.utime('blobs/300/3000000000000000000000000000000000000005', 5)
+ self.utime('blobs/300/3000000000000000000000000000000000000005.meta', 5)
self.assertEqual([
- '3000000000000000000000000000000000000005',
- '2000000000000000000000000000000000000004',
+ ('3000000000000000000000000000000000000005', {'n': '5', 'x-seqno': '5'}),
+ ('2000000000000000000000000000000000000004', {'n': '4', 'x-seqno': '4'}),
],
- [i.digest for i in blobs.diff(in_seq2, out_seq2)])
- ctimes2 = [
- int(os.stat('blobs/200/2000000000000000000000000000000000000004').st_ctime),
- int(os.stat('blobs/300/3000000000000000000000000000000000000005').st_ctime),
- ]
- self.assertEqual(
- [[min(ctimes2), max(ctimes2)]],
- out_seq2)
-
- in_seq3 = toolkit.Sequence([[0, None]])
- out_seq3 = toolkit.Sequence([])
+ [(i.digest, dict(i)) for i in blobs.diff([[4, None]])])
self.assertEqual([
- '3000000000000000000000000000000000000005',
- '2000000000000000000000000000000000000004',
- '2000000000000000000000000000000000000003',
- '1000000000000000000000000000000000000002',
- '1000000000000000000000000000000000000001',
-
],
- [i.digest for i in blobs.diff(in_seq3, out_seq3)])
- self.assertEqual(
- [[min(ctimes1 + ctimes2), max(ctimes1 + ctimes2)]],
- out_seq3)
-
-"""
- def test_diff_WithBlobsSetByUrl(self):
- URL = 'http://src.sugarlabs.org/robots.txt'
- URL_content = urllib2.urlopen(URL).read()
-
- class Document(db.Resource):
+ [i for i in blobs.diff([[6, None]])])
- @db.blob_property()
- def blob(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': '1', 'ctime': 1, 'mtime': 1})
- directory.update('1', {'blob': {'url': URL}})
- self.utime('1/1', 1)
-
- out_seq = Sequence()
self.assertEqual([
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- 'blob': {
- 'url': URL,
- 'mtime': 1,
- },
- }},
+ ('3000000000000000000000000000000000000005', {'n': '5', 'x-seqno': '5'}),
+ ('2000000000000000000000000000000000000004', {'n': '4', 'x-seqno': '4'}),
+ ('2000000000000000000000000000000000000003', {'n': '3', 'x-seqno': '3'}),
+ ('1000000000000000000000000000000000000002', {'n': '2', 'x-seqno': '2'}),
+ ('1000000000000000000000000000000000000001', {'n': '1', 'x-seqno': '1'}),
],
- [i for i in diff(directory, [[0, None]], out_seq)])
- self.assertEqual([[1, 2]], out_seq)
-
- def test_merge_AvoidCalculatedBlobs(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def blob(self, value):
- return {'url': 'http://foo/bar', 'mime_type': 'image/png'}
-
- directory1 = Directory('document1', Document, IndexWriter)
- directory1.create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
- for i in os.listdir('document1/gu/guid'):
- os.utime('document1/gu/guid/%s' % i, (1, 1))
-
- directory2 = Directory('document2', Document, IndexWriter)
- for patch in diff(directory1, [[0, None]], Sequence()):
- directory2.merge(**patch)
-
- doc = directory2.get('guid')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(1, doc.meta('guid')['mtime'])
- assert not exists('document2/gu/guid/blob')
-
- def test_merge_Blobs(self):
+ [(i.digest, dict(i)) for i in blobs.diff([[1, None]])])
- class Document(db.Resource):
-
- @db.blob_property()
- def blob(self, value):
- return value
-
- directory = Directory('document', Document, IndexWriter)
- self.touch(('blob', 'blob-1'))
- directory.merge('1', {
- 'guid': {'mtime': 1, 'value': '1'},
- 'ctime': {'mtime': 2, 'value': 2},
- 'mtime': {'mtime': 3, 'value': 3},
- 'blob': {'mtime': 4, 'blob': 'blob'},
- })
-
- self.assertEqual(
- [(2, 3, '1')],
- [(i['ctime'], i['mtime'], i['guid']) for i in directory.find()[0]])
-
- doc = directory.get('1')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(1, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
- self.assertEqual(4, doc.meta('blob')['mtime'])
- self.assertEqual('blob-1', file('document/1/1/blob.blob').read())
-
- self.touch(('blob', 'blob-2'))
- directory.merge('1', {
- 'blob': {'mtime': 5, 'blob': 'blob'},
- })
-
- self.assertEqual(5, doc.meta('blob')['mtime'])
- self.assertEqual('blob-2', file('document/1/1/blob.blob').read())
-
-
- def test_DeleteOldBlobOnUpdate(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def blob(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': 'guid', 'blob': 'foo'})
- assert exists('gu/guid/blob.blob')
- directory.update('guid', {'blob': {'url': 'foo'}})
- assert not exists('gu/guid/blob.blob')
-
- directory.update('guid', {'blob': 'foo'})
- assert exists('gu/guid/blob.blob')
- directory.update('guid', {'blob': {}})
- assert not exists('gu/guid/blob.blob')
-
- def test_diff_Blobs(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def prop(self, value):
- return value
-
- volume = db.Volume('db', [Document])
- cp = NodeRoutes('guid', volume)
+ def test_diff_Files(self):
+ blobs = Blobs('.', Seqno())
+ this.request = Request()
- guid = call(cp, method='POST', document='document', content={})
- call(cp, method='PUT', document='document', guid=guid, content={'prop': 'payload'})
- self.utime('db', 0)
+ self.touch('foo/bar', ('foo/bar.meta', 'n: -1\nx-seqno: -1'))
+ self.utime('foo/bar', 1)
+ self.utime('foo/bar.meta', 1)
- patch = diff(volume, toolkit.Sequence([[1, None]]))
- self.assertEqual(
- {'resource': 'document'},
- next(patch))
- record = next(patch)
- self.assertEqual('payload', ''.join([i for i in record.pop('blob')]))
self.assertEqual(
- {'guid': guid, 'blob_size': len('payload'), 'diff': {
- 'prop': {
- 'digest': hashlib.sha1('payload').hexdigest(),
- 'blob_size': len('payload'),
- 'mime_type': 'application/octet-stream',
- 'mtime': 0,
- },
- }},
- record)
- self.assertEqual(
- {'guid': guid, 'diff': {
- 'guid': {'value': guid, 'mtime': 0},
- 'author': {'mtime': 0, 'value': {}},
- 'layer': {'mtime': 0, 'value': []},
- 'tags': {'mtime': 0, 'value': []},
- 'mtime': {'value': 0, 'mtime': 0},
- 'ctime': {'value': 0, 'mtime': 0},
- }},
- next(patch))
- self.assertEqual(
- {'commit': [[1, 2]]},
- next(patch))
- self.assertRaises(StopIteration, next, patch)
-
- def test_diff_BlobUrls(self):
- url = 'http://src.sugarlabs.org/robots.txt'
- blob = urllib2.urlopen(url).read()
-
- class Document(db.Resource):
+ sorted([]),
+ sorted([i.digest for i in blobs.diff([[1, None]])]))
+
+ self.touch('files/1', ('files/1.meta', 'n: 1\nx-seqno: 1'))
+ self.utime('files/1', 1)
+ self.utime('files/1.meta', 1)
+ self.touch('files/2/3', ('files/2/3.meta', 'n: 2\nx-seqno: 2'))
+ self.utime('files/2/3', 2)
+ self.utime('files/2/3.meta', 2)
+ self.touch('files/2/4/5', ('files/2/4/5.meta', 'n: 3\nx-seqno: 3'))
+ self.utime('files/2/4/5', 3)
+ self.utime('files/2/4/5.meta', 3)
+ self.touch('files/6', ('files/6.meta', 'n: 4\nx-seqno: 4'))
+ self.utime('files/6', 4)
+ self.utime('files/6.meta', 4)
+
+ self.assertEqual(sorted([
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]])]))
+ self.assertEqual(sorted([
+ ('1', {'n': '1', 'path': '1', 'x-seqno': '1'}),
+ ('2/3', {'n': '2', 'path': '2/3', 'x-seqno': '2'}),
+ ('2/4/5', {'n': '3', 'path': '2/4/5', 'x-seqno': '3'}),
+ ('6', {'n': '4', 'path': '6', 'x-seqno': '4'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '')]))
+ self.assertEqual(sorted([
+ ('2/3', {'n': '2', 'path': '2/3', 'x-seqno': '2'}),
+ ('2/4/5', {'n': '3', 'path': '2/4/5', 'x-seqno': '3'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '2')]))
+ self.assertEqual(sorted([
+ ('2/4/5', {'n': '3', 'path': '2/4/5', 'x-seqno': '3'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '2/4')]))
+ self.assertEqual(sorted([
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], 'foo')]))
+ self.assertEqual(sorted([
+ ('1', {'n': '1', 'path': '1', 'x-seqno': '1'}),
+ ('6', {'n': '4', 'path': '6', 'x-seqno': '4'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '', False)]))
+
+ def test_diff_FailOnRelativePaths(self):
+ blobs = Blobs('.', Seqno())
+
+ self.assertRaises(http.BadRequest, lambda: [i for i in blobs.diff([[1, None]], '..')])
+ self.assertRaises(http.BadRequest, lambda: [i for i in blobs.diff([[1, None]], '/..')])
+ self.assertRaises(http.BadRequest, lambda: [i for i in blobs.diff([[1, None]], '/../foo')])
+ self.assertRaises(http.BadRequest, lambda: [i for i in blobs.diff([[1, None]], 'foo/..')])
+
+ def test_diff_CheckinFiles(self):
+ blobs = Blobs('.', Seqno())
+ this.request = Request()
- @db.blob_property()
- def prop(self, value):
- return value
+ self.touch(
+ ('files/1.pdf', '1'),
+ ('files/2/3.txt', '22'),
+ ('files/2/4/5.svg', '333'),
+ ('files/6.png', '4444'),
+ )
+
+ self.assertEqual(0, blobs._seqno.value)
+ self.assertEqual(sorted([
+ ('1.pdf', {'content-type': 'application/pdf', 'content-length': '1', 'x-seqno': '1', 'path': '1.pdf'}),
+ ('2/3.txt', {'content-type': 'text/plain', 'content-length': '2', 'x-seqno': '1', 'path': '2/3.txt'}),
+ ('2/4/5.svg', {'content-type': 'image/svg+xml', 'content-length': '3', 'x-seqno': '1', 'path': '2/4/5.svg'}),
+ ('6.png', {'content-type': 'image/png', 'content-length': '4', 'x-seqno': '1', 'path': '6.png'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '')]))
+ self.assertEqual(1, blobs._seqno.value)
+
+ self.assertEqual(sorted([
+ ('1.pdf', {'content-type': 'application/pdf', 'content-length': '1', 'x-seqno': '1', 'path': '1.pdf'}),
+ ('2/3.txt', {'content-type': 'text/plain', 'content-length': '2', 'x-seqno': '1', 'path': '2/3.txt'}),
+ ('2/4/5.svg', {'content-type': 'image/svg+xml', 'content-length': '3', 'x-seqno': '1', 'path': '2/4/5.svg'}),
+ ('6.png', {'content-type': 'image/png', 'content-length': '4', 'x-seqno': '1', 'path': '6.png'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '')]))
+ self.assertEqual(1, blobs._seqno.value)
+
+ def test_diff_HandleUpdates(self):
+ blobs = Blobs('.', Seqno())
+ this.request = Request()
- volume = db.Volume('db', [Document])
- cp = NodeRoutes('guid', volume)
+ self.touch('blobs/000/0000000000000000000000000000000000000001',
+ ('blobs/000/0000000000000000000000000000000000000001.meta', 'n: 1\nx-seqno: 1'))
+ self.utime('blobs/000/0000000000000000000000000000000000000001', 100)
+ self.utime('blobs/000/0000000000000000000000000000000000000001.meta', 1)
- guid = call(cp, method='POST', document='document', content={})
- call(cp, method='PUT', document='document', guid=guid, content={'prop': {'url': url}})
- self.utime('db', 1)
+ self.touch('files/2', ('files/2.meta', 'n: 2\nx-seqno: 2'))
+ self.utime('files/2', 200)
+ self.utime('files/2.meta', 2)
+ blobs._seqno.value = 10
+ self.assertEqual([
+ ('0000000000000000000000000000000000000001', {'n': '1', 'content-length': '50', 'x-seqno': '11'}),
+ ],
+ [(i.digest, dict(i)) for i in blobs.diff([[1, None]])])
+ self.assertEqual(11, blobs._seqno.value)
self.assertEqual([
- {'resource': 'document'},
- {'guid': guid,
- 'diff': {
- 'guid': {'value': guid, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- 'prop': {'url': url, 'mtime': 1},
- },
- },
- {'commit': [[1, 2]]},
+ ('0000000000000000000000000000000000000001', {'n': '1', 'content-length': '50', 'x-seqno': '11'}),
],
- [i for i in diff(volume, toolkit.Sequence([[1, None]]))])
+ [(i.digest, dict(i)) for i in blobs.diff([[1, None]])])
+ self.assertEqual(11, blobs._seqno.value)
+
+ self.assertEqual(sorted([
+ ('2', {'n': '2', 'path': '2', 'content-length': '7', 'x-seqno': '12'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '')]))
+ self.assertEqual(12, blobs._seqno.value)
+ self.assertEqual(sorted([
+ ('2', {'n': '2', 'path': '2', 'content-length': '7', 'x-seqno': '12'}),
+ ]),
+ sorted([(i.digest, dict(i)) for i in blobs.diff([[1, None]], '')]))
+ self.assertEqual(12, blobs._seqno.value)
+
+ def test_patch_Blob(self):
+ blobs = Blobs('.', Seqno())
+
+ self.touch(('blob', '1'))
+ blobs.patch(File('./blob', '0000000000000000000000000000000000000001', {'n': 1}), -1)
+ blob = blobs.get('0000000000000000000000000000000000000001')
+ self.assertEqual(tests.tmpdir + '/blobs/000/0000000000000000000000000000000000000001', blob.path)
+ self.assertEqual('0000000000000000000000000000000000000001', blob.digest)
+ self.assertEqual('1', file(blob.path).read())
+ self.assertEqual({'x-seqno': '-1', 'n': '1'}, blob)
+ assert not exists('blob')
+
+ blobs.patch(File('./fake', '0000000000000000000000000000000000000002', {'n': 2, 'content-length': '0'}), -2)
+ assert blobs.get('0000000000000000000000000000000000000002') is None
+
+ blobs.patch(File('./fake', '0000000000000000000000000000000000000001', {'n': 3, 'content-length': '0'}), -3)
+ blob = blobs.get('0000000000000000000000000000000000000001')
+ assert not exists(blob.path)
+ self.assertEqual({'x-seqno': '-3', 'n': '1', 'status': '410 Gone'}, dict(blob))
- patch = diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)
- self.assertEqual(
- {'resource': 'document'},
- next(patch))
- record = next(patch)
- self.assertEqual(blob, ''.join([i for i in record.pop('blob')]))
- self.assertEqual(
- {'guid': guid, 'blob_size': len(blob), 'diff': {'prop': {'mtime': 1}}},
- record)
- self.assertEqual(
- {'guid': guid, 'diff': {
- 'guid': {'value': guid, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- }},
- next(patch))
- self.assertEqual(
- {'commit': [[1, 2]]},
- next(patch))
- self.assertRaises(StopIteration, next, patch)
+ def test_patch_File(self):
+ blobs = Blobs('.', Seqno())
- def test_diff_SkipBrokenBlobUrls(self):
+ self.touch(('file', '1'))
+ blobs.patch(File('./file', '1', {'n': 1, 'path': 'foo/bar'}), -1)
+ blob = blobs.get('foo/bar')
+ self.assertEqual('1', file(blob.path).read())
+ self.assertEqual({'x-seqno': '-1', 'n': '1'}, blob)
+ assert not exists('file')
- class Document(db.Resource):
+ blobs.patch(File('./fake', 'bar/foo', {'n': 2, 'content-length': '0'}), -2)
+ assert blobs.get('bar/foo') is None
- @db.blob_property()
- def prop(self, value):
- return value
+ blobs.patch(File('./fake', 'foo/bar', {'n': 3, 'content-length': '0', 'path': 'foo/bar'}), -3)
+ blob = blobs.get('foo/bar')
+ assert not exists(blob.path)
+ self.assertEqual({'x-seqno': '-3', 'n': '1', 'status': '410 Gone'}, dict(blob))
- volume = db.Volume('db', [Document])
- cp = NodeRoutes('guid', volume)
- guid1 = call(cp, method='POST', document='document', content={})
- call(cp, method='PUT', document='document', guid=guid1, content={'prop': {'url': 'http://foo/bar'}})
- guid2 = call(cp, method='POST', document='document', content={})
- self.utime('db', 1)
+class Seqno(object):
- self.assertEqual([
- {'resource': 'document'},
- {'guid': guid1,
- 'diff': {
- 'guid': {'value': guid1, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- 'prop': {'url': 'http://foo/bar', 'mtime': 1},
- },
- },
- {'guid': guid2,
- 'diff': {
- 'guid': {'value': guid2, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- },
- },
- {'commit': [[1, 3]]},
- ],
- [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=False)])
+ def __init__(self):
+ self.value = 0
+
+ def next(self):
+ self.value += 1
+ return self.value
+
+ def commit(self):
+ pass
- self.assertEqual([
- {'resource': 'document'},
- {'guid': guid1,
- 'diff': {
- 'guid': {'value': guid1, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- },
- },
- {'guid': guid2,
- 'diff': {
- 'guid': {'value': guid2, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- },
- },
- {'commit': [[1, 3]]},
- ],
- [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)])
-
- def test_merge_Blobs(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def prop(self, value):
- return value
-
- volume = db.Volume('db', [Document])
-
- merge(volume, [
- {'resource': 'document'},
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 1.0},
- 'ctime': {'value': 2, 'mtime': 2.0},
- 'mtime': {'value': 3, 'mtime': 3.0},
- 'prop': {
- 'blob': StringIO('payload'),
- 'blob_size': len('payload'),
- 'digest': hashlib.sha1('payload').hexdigest(),
- 'mime_type': 'foo/bar',
- 'mtime': 1,
- },
- }},
- {'commit': [[1, 1]]},
- ])
-
- assert volume['document'].exists('1')
- blob = volume['document'].get('1')['prop']
- self.assertEqual(1, blob['mtime'])
- self.assertEqual('foo/bar', blob['mime_type'])
- self.assertEqual(hashlib.sha1('payload').hexdigest(), blob['digest'])
- self.assertEqual(tests.tmpdir + '/db/document/1/1/prop.blob', blob['blob'])
- self.assertEqual('payload', file(blob['blob']).read())
-
-"""
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/db/resource.py b/tests/units/db/resource.py
index ef305ec..4870418 100755
--- a/tests/units/db/resource.py
+++ b/tests/units/db/resource.py
@@ -24,7 +24,7 @@ from sugar_network.db.directory import Directory
from sugar_network.db.index import IndexWriter
from sugar_network.toolkit.router import ACL
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import http, Sequence
+from sugar_network.toolkit import http
class ResourceTest(tests.Test):
@@ -45,7 +45,7 @@ class ResourceTest(tests.Test):
def not_slotted(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
self.assertEqual(1, directory.metadata['slotted'].slot)
directory.create({'slotted': 'slotted', 'not_slotted': 'not_slotted'})
@@ -70,7 +70,7 @@ class ResourceTest(tests.Test):
def prop_2(self, value):
return value
- self.assertRaises(RuntimeError, Directory, tests.tmpdir, Document, IndexWriter)
+ self.assertRaises(RuntimeError, Directory, tests.tmpdir, Document, IndexWriter, _SessionSeqno())
def test_ActiveProperty_Terms(self):
@@ -84,7 +84,7 @@ class ResourceTest(tests.Test):
def not_term(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
self.assertEqual('T', directory.metadata['term'].prefix)
guid = directory.create({'term': 'term', 'not_term': 'not_term'})
@@ -110,7 +110,7 @@ class ResourceTest(tests.Test):
def prop_2(self, value):
return value
- self.assertRaises(RuntimeError, Directory, tests.tmpdir, Document, IndexWriter)
+ self.assertRaises(RuntimeError, Directory, tests.tmpdir, Document, IndexWriter, _SessionSeqno())
def test_ActiveProperty_FullTextSearch(self):
@@ -124,7 +124,7 @@ class ResourceTest(tests.Test):
def yes(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
self.assertEqual(False, directory.metadata['no'].full_text)
self.assertEqual(True, directory.metadata['yes'].full_text)
@@ -145,7 +145,7 @@ class ResourceTest(tests.Test):
def prop_2(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
guid = directory.create({'prop_1': '1', 'prop_2': '2'})
self.assertEqual(
@@ -165,7 +165,7 @@ class ResourceTest(tests.Test):
def prop(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
guid_1 = directory.create({'prop': '1'})
guid_2 = directory.create({'prop': '2'})
@@ -199,20 +199,20 @@ class ResourceTest(tests.Test):
return value
self.touch(
- ('1/1/guid', '{"value": "1"}'),
- ('1/1/ctime', '{"value": 1}'),
- ('1/1/mtime', '{"value": 1}'),
- ('1/1/prop', '{"value": "prop-1"}'),
- ('1/1/seqno', '{"value": 0}'),
-
- ('2/2/guid', '{"value": "2"}'),
- ('2/2/ctime', '{"value": 2}'),
- ('2/2/mtime', '{"value": 2}'),
- ('2/2/prop', '{"value": "prop-2"}'),
- ('2/2/seqno', '{"value": 0}'),
+ ('db/document/1/1/guid', '{"value": "1"}'),
+ ('db/document/1/1/ctime', '{"value": 1}'),
+ ('db/document/1/1/mtime', '{"value": 1}'),
+ ('db/document/1/1/prop', '{"value": "prop-1"}'),
+ ('db/document/1/1/seqno', '{"value": 0}'),
+
+ ('db/document/2/2/guid', '{"value": "2"}'),
+ ('db/document/2/2/ctime', '{"value": 2}'),
+ ('db/document/2/2/mtime', '{"value": 2}'),
+ ('db/document/2/2/prop', '{"value": "prop-2"}'),
+ ('db/document/2/2/seqno', '{"value": 0}'),
)
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
self.assertEqual(0, directory._index.mtime)
for i in directory.populate():
@@ -245,26 +245,26 @@ class ResourceTest(tests.Test):
return value
self.touch(
- ('1/1/guid', '{"value": "1"}'),
- ('1/1/ctime', '{"value": 1}'),
- ('1/1/mtime', '{"value": 1}'),
- ('1/1/prop', '{"value": "prop-1"}'),
- ('1/1/seqno', '{"value": 0}'),
-
- ('2/2/guid', '{"value": "2"}'),
- ('2/2/ctime', ''),
- ('2/2/mtime', '{"value": 2}'),
- ('2/2/prop', '{"value": "prop-2"}'),
- ('2/2/seqno', '{"value": 0}'),
-
- ('3/3/guid', ''),
- ('3/3/ctime', ''),
- ('3/3/mtime', ''),
- ('3/3/prop', ''),
- ('3/3/seqno', ''),
+ ('db/document/1/1/guid', '{"value": "1"}'),
+ ('db/document/1/1/ctime', '{"value": 1}'),
+ ('db/document/1/1/mtime', '{"value": 1}'),
+ ('db/document/1/1/prop', '{"value": "prop-1"}'),
+ ('db/document/1/1/seqno', '{"value": 0}'),
+
+ ('db/document/2/2/guid', '{"value": "2"}'),
+ ('db/document/2/2/ctime', ''),
+ ('db/document/2/2/mtime', '{"value": 2}'),
+ ('db/document/2/2/prop', '{"value": "prop-2"}'),
+ ('db/document/2/2/seqno', '{"value": 0}'),
+
+ ('db/document/3/3/guid', ''),
+ ('db/document/3/3/ctime', ''),
+ ('db/document/3/3/mtime', ''),
+ ('db/document/3/3/prop', ''),
+ ('db/document/3/3/seqno', ''),
)
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
populated = 0
for i in directory.populate():
@@ -273,9 +273,9 @@ class ResourceTest(tests.Test):
self.assertEqual(
sorted(['1']),
sorted([i.guid for i in directory.find()[0]]))
- assert exists('1/1/guid')
- assert not exists('2/2/guid')
- assert not exists('3/3/guid')
+ assert exists('db/document/1/1/guid')
+ assert not exists('db/document/2/2/guid')
+ assert not exists('db/document/3/3/guid')
def test_create_with_guid(self):
@@ -285,7 +285,7 @@ class ResourceTest(tests.Test):
def prop(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
guid = directory.create({'guid': 'guid', 'prop': 'foo'})
self.assertEqual(
@@ -305,39 +305,39 @@ class ResourceTest(tests.Test):
def prop(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
guid_1 = directory.create({'prop': 'value'})
seqno = directory.get(guid_1).get('seqno')
self.assertEqual(1, seqno)
self.assertEqual(
- json.load(file('%s/%s/guid' % (guid_1[:2], guid_1)))['seqno'],
+ json.load(file('db/document/%s/%s/guid' % (guid_1[:2], guid_1)))['seqno'],
seqno)
self.assertEqual(
- json.load(file('%s/%s/prop' % (guid_1[:2], guid_1)))['seqno'],
+ json.load(file('db/document/%s/%s/prop' % (guid_1[:2], guid_1)))['seqno'],
seqno)
guid_2 = directory.create({'prop': 'value'})
seqno = directory.get(guid_2).get('seqno')
self.assertEqual(2, seqno)
self.assertEqual(
- json.load(file('%s/%s/guid' % (guid_2[:2], guid_2)))['seqno'],
+ json.load(file('db/document/%s/%s/guid' % (guid_2[:2], guid_2)))['seqno'],
seqno)
self.assertEqual(
- json.load(file('%s/%s/prop' % (guid_2[:2], guid_2)))['seqno'],
+ json.load(file('db/document/%s/%s/prop' % (guid_2[:2], guid_2)))['seqno'],
seqno)
directory.update(guid_1, {'prop': 'new'})
seqno = directory.get(guid_1).get('seqno')
self.assertEqual(3, seqno)
self.assertEqual(
- json.load(file('%s/%s/guid' % (guid_1[:2], guid_1)))['seqno'],
+ json.load(file('db/document/%s/%s/guid' % (guid_1[:2], guid_1)))['seqno'],
1)
self.assertEqual(
- json.load(file('%s/%s/prop' % (guid_1[:2], guid_1)))['seqno'],
+ json.load(file('db/document/%s/%s/prop' % (guid_1[:2], guid_1)))['seqno'],
seqno)
- def test_patch(self):
+ def test_format_patch(self):
class Document(db.Resource):
@@ -349,16 +349,16 @@ class ResourceTest(tests.Test):
def prop2(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
guid = directory.create({'guid': '1', 'prop1': '1', 'prop2': '2'})
doc = directory.get(guid)
- self.assertEqual({}, doc.patch({}))
- self.assertEqual({}, doc.patch({'prop1': '1', 'prop2': '2'}))
- self.assertEqual({'prop1': '1_'}, doc.patch({'prop1': '1_', 'prop2': '2'}))
- self.assertEqual({'prop1': '1_', 'prop2': '2_'}, doc.patch({'prop1': '1_', 'prop2': '2_'}))
+ self.assertEqual({}, doc.format_patch({}))
+ self.assertEqual({}, doc.format_patch({'prop1': '1', 'prop2': '2'}))
+ self.assertEqual({'prop1': '1_'}, doc.format_patch({'prop1': '1_', 'prop2': '2'}))
+ self.assertEqual({'prop1': '1_', 'prop2': '2_'}, doc.format_patch({'prop1': '1_', 'prop2': '2_'}))
- def test_patch_LocalizedProps(self):
+ def test_format_patch_LocalizedProps(self):
class Document(db.Resource):
@@ -366,579 +366,48 @@ class ResourceTest(tests.Test):
def prop(self, value):
return value
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
guid = directory.create({'guid': '1', 'prop': {'ru': 'ru'}})
doc = directory.get(guid)
- self.assertEqual({}, doc.patch({'prop': {'ru': 'ru'}}))
- self.assertEqual({'prop': {'ru': 'ru_'}}, doc.patch({'prop': {'ru': 'ru_'}}))
- self.assertEqual({'prop': {'en': 'en'}}, doc.patch({'prop': {'en': 'en'}}))
- self.assertEqual({'prop': {'ru': 'ru', 'en': 'en'}}, doc.patch({'prop': {'ru': 'ru', 'en': 'en'}}))
- self.assertEqual({'prop': {'ru': 'ru_', 'en': 'en'}}, doc.patch({'prop': {'ru': 'ru_', 'en': 'en'}}))
-
- def test_diff(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1)
- def prop(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- for i in os.listdir('1/1'):
- os.utime('1/1/%s' % i, (1, 1))
-
- directory.create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
- for i in os.listdir('2/2'):
- os.utime('2/2/%s' % i, (2, 2))
-
- directory.create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
- for i in os.listdir('3/3'):
- os.utime('3/3/%s' % i, (3, 3))
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'prop': {'value': '1', 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- {'guid': '2', 'diff': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'prop': {'value': '2', 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- }},
- {'guid': '3', 'diff': {
- 'guid': {'value': '3', 'mtime': 3},
- 'ctime': {'value': 3, 'mtime': 3},
- 'prop': {'value': '3', 'mtime': 3},
- 'mtime': {'value': 3, 'mtime': 3},
- }},
- ],
- [i for i in diff(directory, [[0, None]], out_seq)])
- self.assertEqual([[1, 3]], out_seq)
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'prop': {'value': '2', 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- }},
- ],
- [i for i in diff(directory, [[2, 2]], out_seq)])
- self.assertEqual([[2, 2]], out_seq)
-
- out_seq = Sequence()
- self.assertEqual([
- ],
- [i for i in diff(directory, [[4, 100]], out_seq)])
- self.assertEqual([], out_seq)
- directory.update('2', {'prop': '22'})
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'prop': {'value': '22', 'mtime': int(os.stat('2/2/prop').st_mtime)},
- }},
- ],
- [i for i in diff(directory, [[4, 100]], out_seq)])
- self.assertEqual([[4, 4]], out_seq)
-
- def test_diff_IgnoreCalcProps(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1, acl=ACL.PUBLIC | ACL.CALC)
- def prop(self, value):
- return value
-
- directory = Directory('.', Document, IndexWriter)
-
- directory.create({'guid': 'guid', 'prop': '1', 'ctime': 1, 'mtime': 1})
- self.utime('.', 1)
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': 'guid', 'diff': {
- 'guid': {'value': 'guid', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- }},
- ],
- [i for i in diff(directory, [[0, None]], out_seq)])
- self.assertEqual([[1, 1]], out_seq)
-
- directory.update('guid', {'prop': '2'})
- out_seq = Sequence()
- self.assertEqual([
- ],
- [i for i in diff(directory, [[6, 100]], out_seq)])
- self.assertEqual([], out_seq)
-
- def test_diff_Exclude(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1)
- def prop(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- directory.create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
- directory.create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
- directory.update('2', {'prop': '2_'})
- self.utime('.', 0)
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 0},
- 'ctime': {'value': 1, 'mtime': 0},
- 'prop': {'value': '1', 'mtime': 0},
- 'mtime': {'value': 1, 'mtime': 0},
- }},
- {'guid': '2', 'diff': {
- 'prop': {'value': '2_', 'mtime': 0},
- }},
- ],
- [i for i in diff(directory, [[0, None]], out_seq, [[2, 3]])])
-
- self.assertEqual([[1, 1], [4, 4]], out_seq)
-
- def test_diff_Filter(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(prefix='P')
- def prop(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': '1', 'ctime': 1, 'mtime': 1, 'prop': '1'})
- directory.create({'guid': '2', 'ctime': 2, 'mtime': 2, 'prop': '2'})
- for i in os.listdir('2/2'):
- os.utime('2/2/%s' % i, (2, 2))
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- 'prop': {'value': '2', 'mtime': 2},
- }},
- ],
- [i for i in diff(directory, [[0, None]], out_seq, prop='2')])
- self.assertEqual([[2, 2]], out_seq)
-
- def test_diff_GroupBy(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1, prefix='P')
- def prop(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': '1', 'ctime': 1, 'mtime': 1, 'prop': '0'})
- for i in os.listdir('1/1'):
- os.utime('1/1/%s' % i, (1, 1))
- directory.create({'guid': '2', 'ctime': 2, 'mtime': 2, 'prop': '0'})
- for i in os.listdir('2/2'):
- os.utime('2/2/%s' % i, (2, 2))
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- 'prop': {'value': '0', 'mtime': 2},
- }},
- ],
- [i for i in diff(directory, [[0, None]], out_seq, group_by='prop')])
- self.assertEqual([[2, 2]], out_seq)
-
- def test_diff_Aggprops(self):
-
- class Document(db.Resource):
-
- @db.stored_property(db.Aggregated)
- def prop(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': '1', 'prop': {'1': {'prop': 1}}, 'ctime': 1, 'mtime': 1})
- for i in os.listdir('1/1'):
- os.utime('1/1/%s' % i, (1, 1))
-
- directory.create({'guid': '2', 'prop': {'2': {'prop': 2}}, 'ctime': 2, 'mtime': 2})
- for i in os.listdir('2/2'):
- os.utime('2/2/%s' % i, (2, 2))
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
- }},
- {'guid': '2', 'diff': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
- }},
- ],
- [i for i in diff(directory, [[0, None]], out_seq)])
- self.assertEqual([[1, 2]], out_seq)
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
- }},
- ],
- [i for i in diff(directory, [[1, 1]], out_seq)])
- self.assertEqual([[1, 1]], out_seq)
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'guid': {'value': '2', 'mtime': 2},
- 'ctime': {'value': 2, 'mtime': 2},
- 'mtime': {'value': 2, 'mtime': 2},
- 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
- }},
- ],
- [i for i in diff(directory, [[2, 2]], out_seq)])
- self.assertEqual([[2, 2]], out_seq)
-
- out_seq = Sequence()
- self.assertEqual([
- ],
- [i for i in diff(directory, [[3, None]], out_seq)])
- self.assertEqual([], out_seq)
-
- self.assertEqual({
- '1': {'seqno': 1, 'prop': 1},
- },
- directory.get('1')['prop'])
- self.assertEqual({
- '2': {'seqno': 2, 'prop': 2},
- },
- directory.get('2')['prop'])
-
- out_seq = Sequence()
- directory.update('2', {'prop': {'2': {}, '3': {'prop': 3}}})
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'prop': {'value': {'2': {}, '3': {'prop': 3}}, 'mtime': int(os.stat('2/2/prop').st_mtime)},
- }},
- ],
- [i for i in diff(directory, [[3, None]], out_seq)])
- self.assertEqual([[3, 3]], out_seq)
-
- self.assertEqual({
- '2': {'seqno': 3},
- '3': {'seqno': 3, 'prop': 3},
- },
- directory.get('2')['prop'])
-
- out_seq = Sequence()
- directory.update('1', {'prop': {'1': {'foo': 'bar'}}})
- self.assertEqual([
- {'guid': '1', 'diff': {
- 'prop': {'value': {'1': {'foo': 'bar'}}, 'mtime': int(os.stat('1/1/prop').st_mtime)},
- }},
- ],
- [i for i in diff(directory, [[4, None]], out_seq)])
- self.assertEqual([[4, 4]], out_seq)
-
- self.assertEqual({
- '1': {'seqno': 4, 'foo': 'bar'},
- },
- directory.get('1')['prop'])
-
- out_seq = Sequence()
- directory.update('2', {'prop': {'2': {'restore': True}}})
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'prop': {'value': {'2': {'restore': True}}, 'mtime': int(os.stat('2/2/prop').st_mtime)},
- }},
- ],
- [i for i in diff(directory, [[5, None]], out_seq)])
- self.assertEqual([[5, 5]], out_seq)
-
- self.assertEqual({
- '2': {'seqno': 5, 'restore': True},
- '3': {'seqno': 3, 'prop': 3},
- },
- directory.get('2')['prop'])
-
- out_seq = Sequence()
- directory.update('2', {'ctime': 0})
- self.assertEqual([
- {'guid': '2', 'diff': {
- 'ctime': {'value': 0, 'mtime': int(os.stat('2/2/prop').st_mtime)},
- }},
- ],
- [i for i in diff(directory, [[6, None]], out_seq)])
- self.assertEqual([[6, 6]], out_seq)
-
- self.assertEqual({
- '2': {'seqno': 5, 'restore': True},
- '3': {'seqno': 3, 'prop': 3},
- },
- directory.get('2')['prop'])
-
- def test_merge_New(self):
-
- class Document(db.Resource):
-
- @db.indexed_property(slot=1)
- def prop(self, value):
- return value
-
- directory1 = Directory('document1', Document, IndexWriter)
-
- directory1.create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
- for i in os.listdir('document1/1/1'):
- os.utime('document1/1/1/%s' % i, (1, 1))
-
- directory1.create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
- for i in os.listdir('document1/2/2'):
- os.utime('document1/2/2/%s' % i, (2, 2))
-
- directory1.create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
- for i in os.listdir('document1/3/3'):
- os.utime('document1/3/3/%s' % i, (3, 3))
-
- directory2 = Directory('document2', Document, IndexWriter)
- for patch in diff(directory1, [[0, None]], Sequence()):
- directory2.merge(**patch)
-
- self.assertEqual(
- sorted([
- (1, '1', 1, '1'),
- (2, '2', 2, '2'),
- (3, '3', 3, '3'),
- ]),
- sorted([(i['ctime'], i['prop'], i['mtime'], i['guid']) for i in directory2.find()[0]]))
-
- doc = directory2.get('1')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(1, doc.meta('guid')['mtime'])
- self.assertEqual(1, doc.meta('ctime')['mtime'])
- self.assertEqual(1, doc.meta('prop')['mtime'])
- self.assertEqual(1, doc.meta('mtime')['mtime'])
-
- doc = directory2.get('2')
- self.assertEqual(2, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual(2, doc.meta('mtime')['mtime'])
-
- doc = directory2.get('3')
- self.assertEqual(3, doc.get('seqno'))
- self.assertEqual(3, doc.meta('guid')['mtime'])
- self.assertEqual(3, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('prop')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
-
- def test_merge_Update(self):
-
- class Document(db.Resource):
-
- @db.stored_property(default='')
- def prop(self, value):
- return value
-
- directory1 = Directory('document1', Document, IndexWriter)
- directory2 = Directory('document2', Document, IndexWriter)
-
- directory1.create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
- directory1.update('guid', {'prop': '1'})
- for i in os.listdir('document1/gu/guid'):
- os.utime('document1/gu/guid/%s' % i, (1, 1))
-
- directory2.create({'guid': 'guid', 'ctime': 2, 'mtime': 2})
- directory2.update('guid', {'prop': '2'})
-
- for i in os.listdir('document2/gu/guid'):
- os.utime('document2/gu/guid/%s' % i, (2, 2))
-
- self.assertEqual(
- [(2, 2, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find()[0]])
- doc = directory2.get('guid')
- self.assertEqual(2, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(2, doc.meta('mtime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual('2', doc.meta('prop')['value'])
-
- for patch in diff(directory1, [[0, None]], Sequence()):
- directory2.merge(**patch)
-
- self.assertEqual(
- [(2, 2, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find()[0]])
- doc = directory2.get('guid')
- self.assertEqual(2, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(2, doc.meta('mtime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual('2', doc.meta('prop')['value'])
-
- os.utime('document1/gu/guid/mtime', (3, 3))
- for patch in diff(directory1, [[0, None]], Sequence()):
- directory2.merge(**patch)
-
- self.assertEqual(
- [(2, 1, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find()[0]])
- doc = directory2.get('guid')
- self.assertEqual(3, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
- self.assertEqual(2, doc.meta('prop')['mtime'])
- self.assertEqual('2', doc.meta('prop')['value'])
-
- os.utime('document1/gu/guid/prop', (4, 4))
- for patch in diff(directory1, [[0, None]], Sequence()):
- directory2.merge(**patch)
-
- self.assertEqual(
- [(2, 1, 'guid')],
- [(i['ctime'], i['mtime'], i['guid']) for i in directory2.find()[0]])
- doc = directory2.get('guid')
- self.assertEqual(4, doc.get('seqno'))
- self.assertEqual(2, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
- self.assertEqual(4, doc.meta('prop')['mtime'])
- self.assertEqual('1', doc.meta('prop')['value'])
-
- def test_merge_Aggprops(self):
-
- class Document(db.Resource):
-
- @db.stored_property(db.Aggregated)
- def prop(self, value):
- return value
-
- directory = Directory('document', Document, IndexWriter)
-
- directory.merge('1', {
- 'guid': {'mtime': 1, 'value': '1'},
- 'ctime': {'mtime': 1, 'value': 1},
- 'mtime': {'mtime': 1, 'value': 1},
- 'prop': {'mtime': 1, 'value': {'1': {}}},
- })
- self.assertEqual({
- '1': {'seqno': 1},
- },
- directory.get('1')['prop'])
-
- directory.merge('1', {
- 'prop': {'mtime': 1, 'value': {'1': {'probe': False}}},
- })
- self.assertEqual({
- '1': {'seqno': 1},
- },
- directory.get('1')['prop'])
-
- directory.merge('1', {
- 'prop': {'mtime': 2, 'value': {'1': {'probe': True}}},
- })
- self.assertEqual({
- '1': {'seqno': 2, 'probe': True},
- },
- directory.get('1')['prop'])
-
- directory.merge('1', {
- 'prop': {'mtime': 3, 'value': {'2': {'foo': 'bar'}}},
- })
- self.assertEqual({
- '1': {'seqno': 2, 'probe': True},
- '2': {'seqno': 3, 'foo': 'bar'},
- },
- directory.get('1')['prop'])
-
- directory.merge('1', {
- 'prop': {'mtime': 4, 'value': {'2': {}, '3': {'foo': 'bar'}}},
- })
- self.assertEqual({
- '1': {'seqno': 2, 'probe': True},
- '2': {'seqno': 4},
- '3': {'seqno': 4, 'foo': 'bar'},
- },
- directory.get('1')['prop'])
-
- def test_merge_CallSetters(self):
-
- class Document(db.Resource):
-
- @db.stored_property(db.Numeric)
- def prop(self, value):
- return value
-
- @prop.setter
- def prop(self, value):
- return value + 1
-
- directory = Directory('document', Document, IndexWriter)
-
- directory.merge('1', {
- 'guid': {'mtime': 1, 'value': '1'},
- 'ctime': {'mtime': 1, 'value': 1},
- 'mtime': {'mtime': 1, 'value': 1},
- 'prop': {'mtime': 1, 'value': 1},
- })
- self.assertEqual(2, directory.get('1')['prop'])
+ self.assertEqual({}, doc.format_patch({'prop': {'ru': 'ru'}}))
+ self.assertEqual({'prop': {'ru': 'ru_'}}, doc.format_patch({'prop': {'ru': 'ru_'}}))
+ self.assertEqual({'prop': {'en': 'en'}}, doc.format_patch({'prop': {'en': 'en'}}))
+ self.assertEqual({'prop': {'ru': 'ru', 'en': 'en'}}, doc.format_patch({'prop': {'ru': 'ru', 'en': 'en'}}))
+ self.assertEqual({'prop': {'ru': 'ru_', 'en': 'en'}}, doc.format_patch({'prop': {'ru': 'ru_', 'en': 'en'}}))
def test_wipe(self):
class Document(db.Resource):
pass
- directory = Directory(tests.tmpdir, Document, IndexWriter)
+ directory = Directory(tests.tmpdir, Document, IndexWriter, _SessionSeqno())
guid = directory.create({'prop': '1'})
self.assertEqual([guid], [i.guid for i in directory.find()[0]])
directory.commit()
- assert exists('index/mtime')
+ assert exists('index/document')
+ assert exists('db/document')
directory.wipe()
self.assertEqual([], [i.guid for i in directory.find()[0]])
- assert not exists('index/mtime')
+ assert not exists('db/document')
+
+
+class _SessionSeqno(object):
+
+ def __init__(self):
+ self._value = 0
+
+ @property
+ def value(self):
+ return self._value
+ def next(self):
+ self._value += 1
+ return self._value
-def diff(directory, in_seq, out_seq, exclude_seq=None, **kwargs):
- for guid, patch in directory.diff(Sequence(in_seq), Sequence(exclude_seq) if exclude_seq else None, **kwargs):
- diff = {}
- for prop, meta, seqno in patch:
- diff[prop] = meta
- out_seq.include(seqno, seqno)
- if diff:
- yield {'guid': guid, 'diff': diff}
+ def commit(self):
+ pass
if __name__ == '__main__':
diff --git a/tests/units/db/routes.py b/tests/units/db/routes.py
index 24499d4..2e1cabb 100755
--- a/tests/units/db/routes.py
+++ b/tests/units/db/routes.py
@@ -16,7 +16,6 @@ src_root = abspath(dirname(__file__))
from __init__ import tests
from sugar_network import db, toolkit
-from sugar_network.db import blobs
from sugar_network.model.user import User
from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL, File
from sugar_network.toolkit.coroutine import this
@@ -25,43 +24,6 @@ from sugar_network.toolkit import coroutine, http, i18n
class RoutesTest(tests.Test):
- def setUp(self):
- tests.Test.setUp(self)
- self.blobs = {}
-
- def files_post(content, mime_type=None, digest_to_assert=None):
- if hasattr(content, 'read'):
- content = content.read()
- digest = File.Digest(hash(content))
- if digest_to_assert:
- assert digest == digest_to_assert
- path = join('blobs', digest)
- with file(path, 'w') as f:
- f.write(content)
- self.blobs[digest] = {'content-type': mime_type or 'application/octet-stream'}
- return File(path, digest, self.blobs[digest].items())
-
- def files_update(digest, meta):
- self.blobs.setdefault(digest, {}).update(meta)
-
- def files_get(digest):
- if digest not in self.blobs:
- return None
- path = join('blobs', digest)
- return File(path, digest, self.blobs[digest].items())
-
- def files_delete(digest):
- path = join('blobs', digest)
- if exists(path):
- os.unlink(path)
- if digest in self.blobs:
- del self.blobs[digest]
-
- self.override(blobs, 'post', files_post)
- self.override(blobs, 'update', files_update)
- self.override(blobs, 'get', files_get)
- self.override(blobs, 'delete', files_delete)
-
def test_PostDefaults(self):
class Document(db.Resource):
@@ -90,15 +52,15 @@ class RoutesTest(tests.Test):
def test_Populate(self):
self.touch(
- ('document/1/1/guid', '{"value": "1"}'),
- ('document/1/1/ctime', '{"value": 1}'),
- ('document/1/1/mtime', '{"value": 1}'),
- ('document/1/1/seqno', '{"value": 0}'),
-
- ('document/2/2/guid', '{"value": "2"}'),
- ('document/2/2/ctime', '{"value": 2}'),
- ('document/2/2/mtime', '{"value": 2}'),
- ('document/2/2/seqno', '{"value": 0}'),
+ ('db/document/1/1/guid', '{"value": "1"}'),
+ ('db/document/1/1/ctime', '{"value": 1}'),
+ ('db/document/1/1/mtime', '{"value": 1}'),
+ ('db/document/1/1/seqno', '{"value": 0}'),
+
+ ('db/document/2/2/guid', '{"value": "2"}'),
+ ('db/document/2/2/ctime', '{"value": 2}'),
+ ('db/document/2/2/mtime', '{"value": 2}'),
+ ('db/document/2/2/seqno', '{"value": 0}'),
)
class Document(db.Resource):
@@ -112,7 +74,7 @@ class RoutesTest(tests.Test):
sorted(['1', '2']),
sorted([i.guid for i in volume['document'].find()[0]]))
- shutil.rmtree('document/index')
+ shutil.rmtree('index/document')
class Document(db.Resource):
pass
@@ -235,62 +197,12 @@ class RoutesTest(tests.Test):
content={'location': 'foo'}, content_type='application/json')
self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob'])
- this.call(method='PUT', path=['testdocument', guid, 'blob'],
- content={'location': 'url', 'digest': 'digest', 'foo': 'bar', 'content-type': 'foo/bar'}, content_type='application/json')
- self.assertEqual(
- {'blob': 'url'},
- this.call(method='GET', path=['testdocument', guid], reply='blob'))
- response = []
- [i for i in router({
- 'REQUEST_METHOD': 'HEAD',
- 'PATH_INFO': '/testdocument/%s/blob' % guid,
- }, lambda status, headers: response.extend([status, headers]))]
- self.assertEqual('303 See Other', response[0])
- self.assertEqual(
- sorted([
- ('last-modified', formatdate(os.stat('testdocument/%s/%s/mtime' % (guid[:2], guid)).st_mtime, localtime=False, usegmt=True)),
- ('location', 'url'),
- ('content-type', 'foo/bar'),
- ('foo', 'bar'),
- ]),
- sorted(response[1]))
-
- def test_UpdateUrlBLOBsWithMeta(self):
-
- class TestDocument(db.Resource):
-
- @db.stored_property(db.Blob)
- def blob(self, value):
- return value
-
- volume = db.Volume(tests.tmpdir, [TestDocument])
- router = Router(db.Routes(volume))
-
- guid = this.call(method='POST', path=['testdocument'], content={'blob': {'digest': 'digest', 'location': 'url'}})
- self.assertEqual({
- 'content-type': 'application/octet-stream',
- 'location': 'url',
- },
- this.call(method='GET', path=['testdocument', guid, 'blob']))
-
self.assertRaises(http.BadRequest, this.call, method='PUT', path=['testdocument', guid, 'blob'],
- content={'digest': 'fake'}, content_type='application/json')
- self.assertEqual({
- 'content-type': 'application/octet-stream',
- 'location': 'url',
- },
- this.call(method='GET', path=['testdocument', guid, 'blob']))
-
- this.call(method='PUT', path=['testdocument', guid, 'blob'],
- content={'foo': 'bar'}, content_type='application/json')
- self.assertEqual({
- 'content-type': 'application/octet-stream',
- 'location': 'url',
- 'foo': 'bar',
- },
- this.call(method='GET', path=['testdocument', guid, 'blob']))
+ content={'location': 'url', 'digest': 'digest', 'foo': 'bar', 'content-type': 'foo/bar'},
+ content_type='application/json')
+ self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob'])
- def test_UpdateFileBLOBsWithMeta(self):
+ def test_UpdateBLOBsWithMeta(self):
class TestDocument(db.Resource):
@@ -305,69 +217,23 @@ class RoutesTest(tests.Test):
blob = this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'})
self.assertEqual({
'content-type': 'application/octet-stream',
+ 'content-length': '4',
+ 'x-seqno': '1',
},
- blob)
+ dict(blob))
self.assertEqual('blob', file(blob.path).read())
self.assertRaises(http.BadRequest, this.call, method='PUT', path=['testdocument', guid, 'blob'],
- content={'digest': 'fake'}, content_type='application/json')
- blob = this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'})
- self.assertEqual({
- 'content-type': 'application/octet-stream',
- },
- blob)
- self.assertEqual('blob', file(blob.path).read())
-
- this.call(method='PUT', path=['testdocument', guid, 'blob'],
content={'foo': 'bar'}, content_type='application/json')
blob = this.call(method='GET', path=['testdocument', guid, 'blob'], environ={'HTTP_HOST': 'localhost'})
self.assertEqual({
'content-type': 'application/octet-stream',
- 'foo': 'bar',
+ 'content-length': '4',
+ 'x-seqno': '1',
},
- blob)
+ dict(blob))
self.assertEqual('blob', file(blob.path).read())
- def test_SwitchBLOBsType(self):
-
- class TestDocument(db.Resource):
-
- @db.stored_property(db.Blob)
- def blob(self, value):
- return value
-
- volume = db.Volume(tests.tmpdir, [TestDocument])
- router = Router(db.Routes(volume))
- guid = this.call(method='POST', path=['testdocument'], content={'blob': 'blob'})
-
- self.assertEqual(
- {'blob': 'http://localhost/blobs/%s' % hash('blob')},
- this.call(method='GET', path=['testdocument', guid], reply='blob', environ={'HTTP_HOST': 'localhost'}))
- self.assertEqual(
- ['blob'],
- [i for i in router({
- 'REQUEST_METHOD': 'GET',
- 'PATH_INFO': '/testdocument/%s/blob' % guid,
- }, lambda *args: None)])
- assert exists(this.call(method='GET', path=['testdocument', guid, 'blob']).path)
-
- this.call(method='PUT', path=['testdocument', guid, 'blob'],
- content={'location': 'url'}, content_type='application/json')
- self.assertEqual(
- {'blob': 'url'},
- this.call(method='GET', path=['testdocument', guid], reply='blob', environ={'HTTP_HOST': 'localhost'}))
- assert not exists(this.call(method='GET', path=['testdocument', guid, 'blob']).path)
-
- this.call(method='PUT', path=['testdocument', guid, 'blob'],
- content='new_blob', content_type='application/octet-stream', environ={'HTTP_HOST': 'localhost'})
- self.assertEqual(
- ['new_blob'],
- [i for i in router({
- 'REQUEST_METHOD': 'GET',
- 'PATH_INFO': '/testdocument/%s/blob' % guid,
- }, lambda *args: None)])
- assert exists(this.call(method='GET', path=['testdocument', guid, 'blob']).path)
-
def test_RemoveBLOBs(self):
class TestDocument(db.Resource):
@@ -428,7 +294,7 @@ class RoutesTest(tests.Test):
self.assertRaises(RuntimeError, this.call, method='PUT', path=['testdocument', guid, 'blob'], content='probe')
self.assertRaises(http.NotFound, this.call, method='GET', path=['testdocument', guid, 'blob'])
- assert not exists('blobs/%s' % hash('probe'))
+ assert not exists('blobs/%s' % hashlib.sha1('probe').hexdigest())
def test_SetBLOBsWithMimeType(self):
@@ -483,7 +349,7 @@ class RoutesTest(tests.Test):
guid = this.call(method='POST', path=['testdocument'], content={})
blob = 'blob'
this.call(method='PUT', path=['testdocument', guid, 'blob'], content=blob)
- digest = str(hash(blob))
+ digest = str(hashlib.sha1(blob).hexdigest())
blob_path = 'blobs/%s' % digest
self.assertEqual('blob', file(this.call(method='GET', path=['testdocument', guid, 'blob']).path).read())
@@ -514,25 +380,36 @@ class RoutesTest(tests.Test):
self.assertEqual(
{'blob': ''},
this.call(method='GET', path=['testdocument', guid1], reply=['blob'], environ={'HTTP_HOST': '127.0.0.1'}))
+ self.assertEqual(
+ sorted([
+ '',
+ ]),
+ sorted([i['blob'] for i in this.call(method='GET', path=['testdocument'], reply=['blob'],
+ environ={'HTTP_HOST': '127.0.0.1'})['result']]))
blob = 'file'
+ digest = hashlib.sha1(blob).hexdigest()
guid2 = this.call(method='POST', path=['testdocument'], content={'blob': blob})
self.assertEqual(
- 'http://127.0.0.1/blobs/%s' % hash(blob),
+ 'http://127.0.0.1/blobs/%s' % digest,
this.call(method='GET', path=['testdocument', guid2], reply=['blob'], environ={'HTTP_HOST': '127.0.0.1'})['blob'])
-
- guid3 = this.call(method='POST', path=['testdocument'], content={'blob': {'location': 'http://foo', 'digest': 'digest'}}, content_type='application/json')
self.assertEqual(
- 'http://foo',
- this.call(method='GET', path=['testdocument', guid3, 'blob'])['location'])
+ sorted([
+ '',
+ 'http://127.0.0.1/blobs/%s' % digest,
+ ]),
+ sorted([i['blob'] for i in this.call(method='GET', path=['testdocument'], reply=['blob'],
+ environ={'HTTP_HOST': '127.0.0.1'})['result']]))
+
+ volume.blobs.update(digest, {
+ 'location': 'http://foo',
+ })
self.assertEqual(
'http://foo',
- this.call(method='GET', path=['testdocument', guid3], reply=['blob'], environ={'HTTP_HOST': '127.0.0.1'})['blob'])
-
+ this.call(method='GET', path=['testdocument', guid2], reply=['blob'], environ={'HTTP_HOST': '127.0.0.1'})['blob'])
self.assertEqual(
sorted([
'',
- 'http://127.0.0.1/blobs/%s' % hash(blob),
'http://foo',
]),
sorted([i['blob'] for i in this.call(method='GET', path=['testdocument'], reply=['blob'],
@@ -744,37 +621,9 @@ class RoutesTest(tests.Test):
volume = db.Volume('.', ['foo.bar'])
volume['bar'].find()
- assert exists('bar/index')
+ assert exists('index/bar')
volume.close()
- def test_Command_GetBlobSetByUrl(self):
-
- class TestDocument(db.Resource):
-
- @db.indexed_property(slot=1, default='')
- def prop(self, value):
- return value
-
- @db.stored_property(db.Blob)
- def blob(self, value):
- return value
-
- @db.indexed_property(db.Localized, prefix='L', default={})
- def localized_prop(self, value):
- return value
-
- volume = db.Volume(tests.tmpdir, [TestDocument])
- router = Router(db.Routes(volume))
-
- guid = this.call(method='POST', path=['testdocument'], content={})
- this.call(method='PUT', path=['testdocument', guid, 'blob'], content={
- 'digest': 'digest',
- 'location': 'http://sugarlabs.org',
- }, content_type='application/json')
- self.assertEqual(
- 'http://sugarlabs.org',
- this.call(method='GET', path=['testdocument', guid, 'blob'])['location'])
-
def test_on_create(self):
class TestDocument(db.Resource):
@@ -902,7 +751,6 @@ class RoutesTest(tests.Test):
volume = db.Volume(tests.tmpdir, [Document1, Document2])
router = Router(db.Routes(volume))
- assert not exists('seqno')
self.assertEqual(0, volume.seqno.value)
volume['document1'].create({'guid': '1'})
@@ -915,9 +763,7 @@ class RoutesTest(tests.Test):
self.assertEqual(4, volume['document2'].get('2')['seqno'])
self.assertEqual(4, volume.seqno.value)
- assert not exists('seqno')
volume.seqno.commit()
- assert exists('db.seqno')
volume = db.Volume(tests.tmpdir, [Document1, Document2])
self.assertEqual(4, volume.seqno.value)
@@ -942,11 +788,11 @@ class RoutesTest(tests.Test):
return value
self.touch(
- ('document1/1/1/guid', '{"value": "1"}'),
- ('document1/1/1/ctime', '{"value": 1}'),
- ('document1/1/1/mtime', '{"value": 1}'),
- ('document1/1/1/prop', '{"value": ""}'),
- ('document1/1/1/seqno', '{"value": 0}'),
+ ('db/document1/1/1/guid', '{"value": "1"}'),
+ ('db/document1/1/1/ctime', '{"value": 1}'),
+ ('db/document1/1/1/mtime', '{"value": 1}'),
+ ('db/document1/1/1/prop', '{"value": ""}'),
+ ('db/document1/1/1/seqno', '{"value": 0}'),
)
events = []
@@ -956,7 +802,7 @@ class RoutesTest(tests.Test):
volume['document2']
coroutine.sleep(.1)
- mtime = int(os.stat('document1/index/mtime').st_mtime)
+ mtime = int(os.stat('index/document1/mtime').st_mtime)
self.assertEqual([
{'event': 'commit', 'resource': 'document1', 'mtime': mtime},
],
@@ -989,9 +835,9 @@ class RoutesTest(tests.Test):
del events[:]
volume['document1'].commit()
- mtime1 = int(os.stat('document1/index/mtime').st_mtime)
+ mtime1 = int(os.stat('index/document1/mtime').st_mtime)
volume['document2'].commit()
- mtime2 = int(os.stat('document2/index/mtime').st_mtime)
+ mtime2 = int(os.stat('index/document2/mtime').st_mtime)
self.assertEqual([
{'event': 'commit', 'resource': 'document1', 'mtime': mtime1},
@@ -1248,7 +1094,7 @@ class RoutesTest(tests.Test):
'set',
this.call(method='GET', path=['testdocument', guid, 'prop']))
- os.unlink('testdocument/%s/%s/prop' % (guid[:2], guid))
+ os.unlink('db/testdocument/%s/%s/prop' % (guid[:2], guid))
self.assertEqual(
[{'prop': 'default'}],
@@ -1853,49 +1699,53 @@ class RoutesTest(tests.Test):
router = Router(db.Routes(volume))
guid = this.call(method='POST', path=['document'], content={})
+ digest1 = hashlib.sha1('blob1').hexdigest()
+ digest2 = hashlib.sha1('blob2').hexdigest()
+ digest3 = hashlib.sha1('blob3').hexdigest()
+
agg1 = this.call(method='POST', path=['document', guid, 'blobs'], content='blob1')
self.assertEqual({
- agg1: {'seqno': 2, 'value': str(hash('blob1'))},
+ agg1: {'seqno': 3, 'value': digest1},
},
volume['document'].get(guid)['blobs'])
- assert blobs.get(str(hash('blob1')))
+ assert volume.blobs.get(digest1)
agg2 = this.call(method='POST', path=['document', guid, 'blobs'], content='blob2')
self.assertEqual({
- agg1: {'seqno': 2, 'value': str(hash('blob1'))},
- agg2: {'seqno': 3, 'value': str(hash('blob2'))},
+ agg1: {'seqno': 3, 'value': digest1},
+ agg2: {'seqno': 5, 'value': digest2},
},
volume['document'].get(guid)['blobs'])
- assert blobs.get(str(hash('blob2')))
+ assert volume.blobs.get(digest2)
this.call(method='DELETE', path=['document', guid, 'blobs', agg1])
self.assertEqual({
- agg1: {'seqno': 4},
- agg2: {'seqno': 3, 'value': str(hash('blob2'))},
+ agg1: {'seqno': 7},
+ agg2: {'seqno': 5, 'value': digest2},
},
volume['document'].get(guid)['blobs'])
- assert blobs.get(str(hash('blob1'))) is None
- assert blobs.get(str(hash('blob2')))
+ assert not volume.blobs.get(digest1).exists
+ assert volume.blobs.get(digest2)
this.call(method='DELETE', path=['document', guid, 'blobs', agg2])
self.assertEqual({
- agg1: {'seqno': 4},
- agg2: {'seqno': 5},
+ agg1: {'seqno': 7},
+ agg2: {'seqno': 9},
},
volume['document'].get(guid)['blobs'])
- assert blobs.get(str(hash('blob1'))) is None
- assert blobs.get(str(hash('blob2'))) is None
+ assert not volume.blobs.get(digest1).exists
+ assert not volume.blobs.get(digest2).exists
agg3 = this.call(method='POST', path=['document', guid, 'blobs'], content='blob3')
self.assertEqual({
- agg1: {'seqno': 4},
- agg2: {'seqno': 5},
- agg3: {'seqno': 6, 'value': str(hash('blob3'))},
+ agg1: {'seqno': 7},
+ agg2: {'seqno': 9},
+ agg3: {'seqno': 11, 'value': digest3},
},
volume['document'].get(guid)['blobs'])
- assert blobs.get(str(hash('blob1'))) is None
- assert blobs.get(str(hash('blob2'))) is None
- assert blobs.get(str(hash('blob3')))
+ assert not volume.blobs.get(digest1).exists
+ assert not volume.blobs.get(digest2).exists
+ assert volume.blobs.get(digest3)
def test_AggregatedSearch(self):
diff --git a/tests/units/db/storage.py b/tests/units/db/storage.py
index bb61f8a..5bd1e5e 100755
--- a/tests/units/db/storage.py
+++ b/tests/units/db/storage.py
@@ -11,7 +11,7 @@ from os.path import exists
from __init__ import tests
-from sugar_network.db.metadata import Metadata, Property
+from sugar_network.db.metadata import Property
from sugar_network.db.storage import Storage
from sugar_network.toolkit import BUFFER_SIZE
@@ -23,10 +23,7 @@ class StorageTest(tests.Test):
class Test(object):
pass
- metadata = Metadata(Test)
- for i in props:
- metadata[i.name] = i
- return Storage(tests.tmpdir, metadata)
+ return Storage(tests.tmpdir)
def test_Record_get(self):
storage = self.storage([Property('prop')])
diff --git a/tests/units/db/volume.py b/tests/units/db/volume.py
new file mode 100755
index 0000000..04bd9fc
--- /dev/null
+++ b/tests/units/db/volume.py
@@ -0,0 +1,750 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# sugar-lint: disable
+
+import os
+import json
+import sys
+import stat
+import time
+import urllib2
+import hashlib
+from base64 import b64encode
+from cStringIO import StringIO
+from os.path import join, exists
+
+import gobject
+
+from __init__ import tests
+
+from sugar_network import db
+from sugar_network.db import storage, index
+from sugar_network.db import directory as directory_
+from sugar_network.db.directory import Directory
+from sugar_network.db.index import IndexWriter
+from sugar_network.toolkit.router import ACL
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import http
+
+
+class VolumeTest(tests.Test):
+
+ def setUp(self, fork_num=0):
+ tests.Test.setUp(self, fork_num)
+ this.broadcast = lambda x: x
+
+ def test_diff(self):
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+
+ volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+ volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('db/document/2/2', 2)
+ volume['document'].create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
+ self.utime('db/document/3/3', 3)
+ volume.blobs.post('1')
+ self.touch(('files/foo/2', '22'))
+ self.touch(('files/bar/3', '333'))
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'prop': {'value': '1', 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '2', 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ }},
+ {'guid': '3', 'patch': {
+ 'guid': {'value': '3', 'mtime': 3},
+ 'ctime': {'value': 3, 'mtime': 3},
+ 'prop': {'value': '3', 'mtime': 3},
+ 'mtime': {'value': 3, 'mtime': 3},
+ }},
+ {'content-type': 'application/octet-stream', 'content-length': '1'},
+ {'content-type': 'application/octet-stream', 'content-length': '2', 'path': 'foo/2'},
+ {'commit': [[1, 5]]},
+ ],
+ [dict(i) for i in volume.diff(r, ['foo'])])
+ self.assertEqual([[6, None]], r)
+
+ r = [[2, 2]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': '2', 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ }},
+ {'commit': [[2, 2]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([], r)
+
+ r = [[6, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[6, None]], r)
+
+ volume['document'].update('2', {'prop': '22'})
+
+ r = [[6, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'prop': {'value': '22', 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[6, 6]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[7, None]], r)
+
+ volume.blobs.post('4444')
+ self.touch(('files/foo/2', '2222'))
+
+ r = [[7, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'content-type': 'application/octet-stream', 'content-length': '4'},
+ {'content-type': 'application/octet-stream', 'content-length': '4', 'path': 'foo/2'},
+ {'content-type': 'application/octet-stream', 'content-length': '3', 'path': 'bar/3'},
+ {'commit': [[7, 9]]},
+ ],
+ [dict(i) for i in volume.diff(r, ['foo', 'bar'])])
+ self.assertEqual([[10, None]], r)
+
+ def test_diff_Partial(self):
+ self.override(time, 'time', lambda: 0)
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+ volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('db/document/2/2', 2)
+
+ r = [[1, None]]
+ patch = volume.diff(r)
+ self.assertEqual({'resource': 'document'}, next(patch))
+ self.assertEqual('1', next(patch)['guid'])
+ self.assertRaises(StopIteration, patch.throw, StopIteration)
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[1, None]], r)
+
+ r = [[1, None]]
+ patch = volume.diff(r)
+ self.assertEqual({'resource': 'document'}, next(patch))
+ self.assertEqual('1', next(patch)['guid'])
+ self.assertEqual('2', next(patch)['guid'])
+ self.assertEqual({'commit': [[1, 1]]}, patch.throw(StopIteration()))
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[2, None]], r)
+
+ r = [[1, None]]
+ patch = volume.diff(r)
+ self.assertEqual({'resource': 'document'}, next(patch))
+ self.assertEqual('1', next(patch)['guid'])
+ self.assertEqual('2', next(patch)['guid'])
+ self.assertEqual({'commit': [[1, 2]]}, next(patch))
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[3, None]], r)
+
+ def test_diff_IgnoreCalcProps(self):
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1, acl=ACL.PUBLIC | ACL.CALC)
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ volume['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[2, None]], r)
+
+ volume['document'].update('1', {'prop': '2'})
+ self.assertEqual([
+ {'resource': 'document'},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[2, None]], r)
+
+ volume['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('db/document/2/2', 2)
+
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ }},
+ {'commit': [[2, 3]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[4, None]], r)
+
+ def test_diff_IgnoreOneWayResources(self):
+
+ class Document(db.Resource):
+ one_way = True
+
+ volume = db.Volume('.', [Document])
+ volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document/1/1', 1)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[2, None]], r)
+
+ r = [[1, None]]
+ self.assertEqual([
+ ],
+ [i for i in volume.diff(r, one_way=True)])
+ self.assertEqual([[1, None]], r)
+
+ def test_diff_TheSameInSeqForAllDocuments(self):
+ self.override(time, 'time', lambda: 0)
+
+ class Document1(db.Resource):
+ pass
+
+ class Document2(db.Resource):
+ pass
+
+ class Document3(db.Resource):
+ pass
+
+ volume = db.Volume('.', [Document1, Document2, Document3])
+ volume['document1'].create({'guid': '3', 'ctime': 3, 'mtime': 3})
+ self.utime('db/document1/3/3', 3)
+ volume['document2'].create({'guid': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('db/document2/2/2', 2)
+ volume['document3'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('db/document3/1/1', 1)
+
+ r = [[1, None]]
+ patch = volume.diff(r)
+ self.assertEqual({'resource': 'document1'}, patch.send(None))
+ self.assertEqual('3', patch.send(None)['guid'])
+ self.assertEqual({'resource': 'document2'}, patch.send(None))
+ self.assertEqual('2', patch.send(None)['guid'])
+ self.assertEqual({'resource': 'document3'}, patch.send(None))
+ self.assertEqual('1', patch.send(None)['guid'])
+ self.assertEqual({'commit': [[1, 3]]}, patch.send(None))
+ self.assertRaises(StopIteration, patch.next)
+ self.assertEqual([[4, None]], r)
+
+ def test_patch_New(self):
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume1 = db.Volume('1', [Document])
+ volume1['document'].create({'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1})
+ self.utime('1/db/document/1/1', 1)
+ volume1['document'].create({'guid': '2', 'prop': '2', 'ctime': 2, 'mtime': 2})
+ self.utime('1/db/document/2/2', 2)
+ volume1['document'].create({'guid': '3', 'prop': '3', 'ctime': 3, 'mtime': 3})
+ self.utime('1/db/document/3/3', 3)
+ volume1.blobs.post('1')
+ self.touch(('1/files/foo/2', '22'))
+ self.touch(('1/files/bar/3', '333'))
+
+ volume2 = db.Volume('2', [Document])
+ volume2.patch(volume1.diff([[1, None]], files=['foo']))
+
+ self.assertEqual(
+ sorted([
+ (1, '1', 1, '1'),
+ (2, '2', 2, '2'),
+ (3, '3', 3, '3'),
+ ]),
+ sorted([(i['ctime'], i['prop'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]]))
+
+ doc = volume2['document'].get('1')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['mtime'])
+ self.assertEqual(1, doc.meta('ctime')['mtime'])
+ self.assertEqual(1, doc.meta('prop')['mtime'])
+ self.assertEqual(1, doc.meta('mtime')['mtime'])
+
+ doc = volume2['document'].get('2')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+
+ doc = volume2['document'].get('3')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(3, doc.meta('guid')['mtime'])
+ self.assertEqual(3, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('prop')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+
+ blob = volume2.blobs.get(hashlib.sha1('1').hexdigest())
+ self.assertEqual({
+ 'x-seqno': '1',
+ 'content-length': '1',
+ 'content-type': 'application/octet-stream',
+ },
+ blob)
+ self.assertEqual('1', file(blob.path).read())
+
+ blob = volume2.blobs.get('foo/2')
+ self.assertEqual({
+ 'x-seqno': '1',
+ 'content-length': '2',
+ 'content-type': 'application/octet-stream',
+ },
+ blob)
+ self.assertEqual('22', file(blob.path).read())
+
+ assert volume2.blobs.get('bar/3') is None
+
+ def test_patch_Update(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(default='')
+ def prop(self, value):
+ return value
+
+ volume1 = db.Volume('1', [Document])
+ volume1['document'].create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
+ volume1['document'].update('guid', {'prop': '1'})
+ self.utime('1/db/document/gu/guid', 1)
+
+ volume2 = db.Volume('2', [Document])
+ volume2['document'].create({'guid': 'guid', 'ctime': 2, 'mtime': 2})
+ volume2['document'].update('guid', {'prop': '2'})
+ self.utime('2/db/document/gu/guid', 2)
+
+ self.assertEqual(
+ [(2, 2, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual('2', doc.meta('prop')['value'])
+
+ volume2.patch(volume1.diff([[1, None]]))
+
+ self.assertEqual(
+ [(2, 2, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(2, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(2, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual('2', doc.meta('prop')['value'])
+
+ os.utime('1/db/document/gu/guid/mtime', (3, 3))
+ volume2.patch(volume1.diff([[1, None]]))
+
+ self.assertEqual(
+ [(2, 1, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(3, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(2, doc.meta('prop')['mtime'])
+ self.assertEqual('2', doc.meta('prop')['value'])
+
+ os.utime('1/db/document/gu/guid/prop', (4, 4))
+ volume2.patch(volume1.diff([[1, None]]))
+
+ self.assertEqual(
+ [(2, 1, 'guid')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in volume2['document'].find()[0]])
+ doc = volume2['document'].get('guid')
+ self.assertEqual(4, doc.get('seqno'))
+ self.assertEqual(2, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(4, doc.meta('prop')['mtime'])
+ self.assertEqual('1', doc.meta('prop')['value'])
+
+ def test_diff_AggProps(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Aggregated)
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+ volume['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1, 'prop': {'1': {'prop': 1}}})
+ self.utime('db/document/1/1', 1)
+ volume['document'].create({'guid': '2', 'ctime': 2, 'mtime': 2, 'prop': {'2': {'prop': 2}}})
+ self.utime('db/document/2/2', 2)
+
+ r = [[1, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
+ }},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
+ }},
+ {'commit': [[1, 2]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[3, None]], r)
+
+ r = [[1, 1]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ 'prop': {'value': {'1': {'prop': 1}}, 'mtime': 1},
+ }},
+ {'commit': [[1, 1]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([], r)
+
+ r = [[2, 2]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'guid': {'value': '2', 'mtime': 2},
+ 'ctime': {'value': 2, 'mtime': 2},
+ 'mtime': {'value': 2, 'mtime': 2},
+ 'prop': {'value': {'2': {'prop': 2}}, 'mtime': 2},
+ }},
+ {'commit': [[2, 2]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([], r)
+
+ r = [[3, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[3, None]], r)
+
+ self.assertEqual({
+ '1': {'seqno': 1, 'prop': 1},
+ },
+ volume['document'].get('1')['prop'])
+ self.assertEqual({
+ '2': {'seqno': 2, 'prop': 2},
+ },
+ volume['document'].get('2')['prop'])
+
+ volume['document'].update('2', {'prop': {'2': {}, '3': {'prop': 3}}})
+ r = [[3, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'prop': {'value': {'2': {}, '3': {'prop': 3}}, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[3, 3]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[4, None]], r)
+
+ self.assertEqual({
+ '2': {'seqno': 3},
+ '3': {'seqno': 3, 'prop': 3},
+ },
+ volume['document'].get('2')['prop'])
+
+ volume['document'].update('1', {'prop': {'1': {'foo': 'bar'}}})
+ r = [[4, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'value': {'1': {'foo': 'bar'}}, 'mtime': int(os.stat('db/document/1/1/prop').st_mtime)},
+ }},
+ {'commit': [[4, 4]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[5, None]], r)
+
+ self.assertEqual({
+ '1': {'seqno': 4, 'foo': 'bar'},
+ },
+ volume['document'].get('1')['prop'])
+
+ volume['document'].update('2', {'prop': {'2': {'restore': True}}})
+ r = [[5, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'prop': {'value': {'2': {'restore': True}}, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[5, 5]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[6, None]], r)
+
+ self.assertEqual({
+ '2': {'seqno': 5, 'restore': True},
+ '3': {'seqno': 3, 'prop': 3},
+ },
+ volume['document'].get('2')['prop'])
+
+ volume['document'].update('2', {'ctime': 0})
+ r = [[6, None]]
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': '2', 'patch': {
+ 'ctime': {'value': 0, 'mtime': int(os.stat('db/document/2/2/prop').st_mtime)},
+ }},
+ {'commit': [[6, 6]]},
+ ],
+ [i for i in volume.diff(r)])
+ self.assertEqual([[7, None]], r)
+
+ self.assertEqual({
+ '2': {'seqno': 5, 'restore': True},
+ '3': {'seqno': 3, 'prop': 3},
+ },
+ volume['document'].get('2')['prop'])
+
+ def test_patch_Aggprops(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Aggregated)
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('.', [Document])
+
+ volume.patch([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'guid': {'mtime': 1, 'value': '1'},
+ 'ctime': {'mtime': 1, 'value': 1},
+ 'mtime': {'mtime': 1, 'value': 1},
+ 'prop': {'mtime': 1, 'value': {'1': {}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 1},
+ },
+ volume['document'].get('1')['prop'])
+
+ volume.patch([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 1, 'value': {'1': {'probe': False}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 1},
+ },
+ volume['document'].get('1')['prop'])
+
+ volume.patch([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 2, 'value': {'1': {'probe': True}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 2, 'probe': True},
+ },
+ volume['document'].get('1')['prop'])
+
+ volume.patch([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 3, 'value': {'2': {'foo': 'bar'}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 2, 'probe': True},
+ '2': {'seqno': 3, 'foo': 'bar'},
+ },
+ volume['document'].get('1')['prop'])
+
+ volume.patch([
+ {'resource': 'document'},
+ {'guid': '1', 'patch': {
+ 'prop': {'mtime': 4, 'value': {'2': {}, '3': {'foo': 'bar'}}},
+ }},
+ ])
+ self.assertEqual({
+ '1': {'seqno': 2, 'probe': True},
+ '2': {'seqno': 4},
+ '3': {'seqno': 4, 'foo': 'bar'},
+ },
+ volume['document'].get('1')['prop'])
+
+ def test_patch_Ranges(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(default='')
+ def prop(self, value):
+ return value
+
+ volume1 = db.Volume('db1', [Document])
+ volume2 = db.Volume('db2', [Document])
+
+ committed, patched = volume2.patch(volume1.diff([[1, None]]))
+ self.assertEqual([], committed)
+ self.assertEqual([], patched)
+
+ volume1['document'].create({'guid': '1', 'ctime': 1, 'mtime': 1})
+ committed, patched = volume2.patch(volume1.diff([[1, None]]))
+ self.assertEqual([[1, 1]], committed)
+ self.assertEqual([[1, 1]], patched)
+ committed, patched = volume2.patch(volume1.diff([[1, None]]))
+ self.assertEqual([[1, 1]], committed)
+ self.assertEqual([], patched)
+
+ volume1['document'].update('1', {'prop': '1'})
+ committed, patched = volume2.patch(volume1.diff([[1, None]]))
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual([[2, 2]], patched)
+ committed, patched = volume2.patch(volume1.diff([[1, None]]))
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual([], patched)
+
+ volume3 = db.Volume('db3', [Document])
+ committed, patched = volume3.patch(volume1.diff([[1, None]]))
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual([[1, 1]], patched)
+ committed, patched = volume3.patch(volume1.diff([[1, None]]))
+ self.assertEqual([[1, 2]], committed)
+ self.assertEqual([], patched)
+
+ def test_patch_CallSetters(self):
+
+ class Document(db.Resource):
+
+ @db.stored_property(db.Numeric)
+ def prop(self, value):
+ return value
+
+ @prop.setter
+ def prop(self, value):
+ return value + 1
+
+ directory = Directory('document', Document, IndexWriter, _SessionSeqno())
+
+ directory.patch('1', {
+ 'guid': {'mtime': 1, 'value': '1'},
+ 'ctime': {'mtime': 1, 'value': 1},
+ 'mtime': {'mtime': 1, 'value': 1},
+ 'prop': {'mtime': 1, 'value': 1},
+ })
+ self.assertEqual(2, directory.get('1')['prop'])
+
+ def test_patch_MultipleCommits(self):
+
+ class Document(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def prop(self, value):
+ return value
+
+ self.touch(('var/db.seqno', '100'))
+ volume = db.Volume('.', [Document])
+
+ def generator():
+ for i in [
+ {'resource': 'document'},
+ {'commit': [[1, 1]]},
+ {'guid': '1', 'patch': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'ctime': {'value': 2, 'mtime': 2.0},
+ 'mtime': {'value': 3, 'mtime': 3.0},
+ 'prop': {'value': '4', 'mtime': 4.0},
+ }},
+ {'commit': [[2, 3]]},
+ ]:
+ yield i
+
+ patch = generator()
+ self.assertEqual(([[1, 3]], [[101, 101]]), volume.patch(patch))
+ assert volume['document'].exists('1')
+
+
+class _SessionSeqno(object):
+
+ def __init__(self):
+ self._value = 0
+
+ @property
+ def value(self):
+ return self._value
+
+ def next(self):
+ self._value += 1
+ return self._value
+
+ def commit(self):
+ pass
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/toolkit/__main__.py b/tests/units/toolkit/__main__.py
index b44223b..89702a3 100644
--- a/tests/units/toolkit/__main__.py
+++ b/tests/units/toolkit/__main__.py
@@ -15,6 +15,7 @@ from gbus import *
from i18n import *
from sat import *
from parcel import *
+from ranges import *
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/toolkit/parcel.py b/tests/units/toolkit/parcel.py
index 93edfa4..4f57c44 100755
--- a/tests/units/toolkit/parcel.py
+++ b/tests/units/toolkit/parcel.py
@@ -5,14 +5,15 @@ import os
import gzip
import uuid
import json
+import hashlib
from StringIO import StringIO
from os.path import exists
from __init__ import tests
-from sugar_network import db, toolkit
-from sugar_network.toolkit.router import File
-from sugar_network.toolkit import parcel, http
+from sugar_network import db, toolkit, client
+from sugar_network.toolkit.router import File, route, Router
+from sugar_network.toolkit import parcel, http, coroutine
class ParcelTest(tests.Test):
@@ -187,16 +188,16 @@ class ParcelTest(tests.Test):
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
self.assertEqual([
- (1, 'a'),
- (2, 'bb'),
+ (1, hashlib.sha1('a').hexdigest(), 'a'),
+ (2, hashlib.sha1('bb').hexdigest(), 'bb'),
],
- [(i['num'], file(i.path).read()) for i in packet])
+ [(i['num'], i.digest, file(i.path).read()) for i in packet])
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
self.assertEqual([
- (3, 'ccc'),
+ (3, hashlib.sha1('ccc').hexdigest(), 'ccc'),
],
- [(i['num'], file(i.path).read()) for i in packet])
+ [(i['num'], i.digest, file(i.path).read()) for i in packet])
self.assertRaises(StopIteration, packets_iter.next)
self.assertEqual(len(stream.getvalue()), stream.tell())
@@ -310,7 +311,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD / 2, compresslevel=0)]))
+ limit=RECORD / 2)]))
assert len(stream) < RECORD
self.assertEqual(4, len(stream.strip().split('\n')))
@@ -318,7 +319,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 1.5, compresslevel=0)]))
+ limit=RECORD * 1.5)]))
assert len(stream) > RECORD
assert len(stream) < RECORD * 2
self.assertEqual(5, len(stream.strip().split('\n')))
@@ -327,7 +328,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 2.5, compresslevel=0)]))
+ limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 2
assert len(stream) < RECORD * 3
self.assertEqual(6, len(stream.strip().split('\n')))
@@ -336,7 +337,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 3.5, compresslevel=0)]))
+ limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 3
assert len(stream) < RECORD * 4
self.assertEqual(7, len(stream.strip().split('\n')))
@@ -345,7 +346,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 4.5, compresslevel=0)]))
+ limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 4
self.assertEqual(8, len(stream.strip().split('\n')))
@@ -353,7 +354,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- compresslevel=0)]))
+ )]))
assert len(stream) > RECORD * 4
def test_limited_encode_FinalRecords(self):
@@ -373,7 +374,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD / 2, compresslevel=0)]))
+ limit=RECORD / 2)]))
assert len(stream) > RECORD * 4
assert len(stream) < RECORD * 5
self.assertEqual(8, len(stream.strip().split('\n')))
@@ -382,7 +383,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 1.5, compresslevel=0)]))
+ limit=RECORD * 1.5)]))
assert len(stream) > RECORD * 5
assert len(stream) < RECORD * 6
self.assertEqual(9, len(stream.strip().split('\n')))
@@ -391,7 +392,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 2.5, compresslevel=0)]))
+ limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
self.assertEqual(10, len(stream.strip().split('\n')))
@@ -400,7 +401,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 3.5, compresslevel=0)]))
+ limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
self.assertEqual(10, len(stream.strip().split('\n')))
@@ -409,7 +410,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 4.5, compresslevel=0)]))
+ limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
self.assertEqual(10, len(stream.strip().split('\n')))
@@ -418,7 +419,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 5.5, compresslevel=0)]))
+ limit=RECORD * 5.5)]))
assert len(stream) > RECORD * 7
assert len(stream) < RECORD * 8
self.assertEqual(11, len(stream.strip().split('\n')))
@@ -427,7 +428,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 6.5, compresslevel=0)]))
+ limit=RECORD * 6.5)]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
self.assertEqual(12, len(stream.strip().split('\n')))
@@ -436,7 +437,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- compresslevel=0)]))
+ )]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
self.assertEqual(12, len(stream.strip().split('\n')))
@@ -469,6 +470,56 @@ class ParcelTest(tests.Test):
json.dumps({'packet': 'last'}) + '\n',
unzips(stream))
+ def test_encode_BlobWithUrls(self):
+
+ class Routes(object):
+
+ @route('GET')
+ def probe(self):
+ return 'probe'
+
+ server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(Routes()))
+ coroutine.spawn(server.serve_forever)
+ coroutine.dispatch()
+ url = 'http://127.0.0.1:%s' % client.ipc_port.value
+
+ stream = ''.join([i for i in parcel.encode([
+ (1, None, [File(None, meta={'location': 'fake'})]),
+ ])])
+ self.assertEqual(
+ json.dumps({}) + '\n' +
+ json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'location': 'fake'}) + '\n' +
+ json.dumps({'packet': 'last'}) + '\n',
+ unzips(stream))
+
+ stream = ''.join([i for i in parcel.encode([
+ (1, None, [File(None, meta={'location': 'fake', 'content-length': '0'})]),
+ ])])
+ self.assertEqual(
+ json.dumps({}) + '\n' +
+ json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'location': 'fake', 'content-length': '0'}) + '\n' +
+ json.dumps({'packet': 'last'}) + '\n',
+ unzips(stream))
+
+ stream = ''.join([i for i in parcel.encode([
+ (1, None, [File(None, meta={'location': url, 'content-length': str(len('probe'))})]),
+ ])])
+ self.assertEqual(
+ json.dumps({}) + '\n' +
+ json.dumps({'packet': 1}) + '\n' +
+ json.dumps({'location': url, 'content-length': str(len('probe'))}) + '\n' +
+ 'probe' + '\n' +
+ json.dumps({'packet': 'last'}) + '\n',
+ unzips(stream))
+
+ def encode():
+ stream = ''.join([i for i in parcel.encode([
+ (1, None, [File(None, meta={'location': 'http://127.0.0.1:108', 'content-length': str(len('probe'))})]),
+ ])])
+ self.assertRaises(http.ConnectionError, encode)
+
def test_limited_encode_Blobs(self):
RECORD = 1024 * 1024
self.touch(('blob', '.' * RECORD))
@@ -481,7 +532,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD / 2, compresslevel=0)]))
+ limit=RECORD / 2)]))
assert len(stream) < RECORD
self.assertEqual(4, len(stream.strip().split('\n')))
@@ -489,7 +540,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 1.5, compresslevel=0)]))
+ limit=RECORD * 1.5)]))
assert len(stream) > RECORD
assert len(stream) < RECORD * 2
self.assertEqual(6, len(stream.strip().split('\n')))
@@ -498,7 +549,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 2.5, compresslevel=0)]))
+ limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 2
assert len(stream) < RECORD * 3
self.assertEqual(8, len(stream.strip().split('\n')))
@@ -507,7 +558,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 3.5, compresslevel=0)]))
+ limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 3
assert len(stream) < RECORD * 4
self.assertEqual(10, len(stream.strip().split('\n')))
@@ -516,7 +567,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 4.5, compresslevel=0)]))
+ limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 4
self.assertEqual(12, len(stream.strip().split('\n')))
@@ -524,7 +575,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- compresslevel=0)]))
+ )]))
assert len(stream) > RECORD * 4
self.assertEqual(12, len(stream.strip().split('\n')))
@@ -546,7 +597,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD / 2, compresslevel=0)]))
+ limit=RECORD / 2)]))
assert len(stream) > RECORD * 4
assert len(stream) < RECORD * 5
self.assertEqual(12, len(stream.strip().split('\n')))
@@ -555,7 +606,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 1.5, compresslevel=0)]))
+ limit=RECORD * 1.5)]))
assert len(stream) > RECORD * 5
assert len(stream) < RECORD * 6
self.assertEqual(14, len(stream.strip().split('\n')))
@@ -564,7 +615,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 2.5, compresslevel=0)]))
+ limit=RECORD * 2.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
self.assertEqual(16, len(stream.strip().split('\n')))
@@ -573,7 +624,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 3.5, compresslevel=0)]))
+ limit=RECORD * 3.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
self.assertEqual(16, len(stream.strip().split('\n')))
@@ -582,7 +633,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 4.5, compresslevel=0)]))
+ limit=RECORD * 4.5)]))
assert len(stream) > RECORD * 6
assert len(stream) < RECORD * 7
self.assertEqual(16, len(stream.strip().split('\n')))
@@ -591,7 +642,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 5.5, compresslevel=0)]))
+ limit=RECORD * 5.5)]))
assert len(stream) > RECORD * 7
assert len(stream) < RECORD * 8
self.assertEqual(18, len(stream.strip().split('\n')))
@@ -600,7 +651,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- limit=RECORD * 6.5, compresslevel=0)]))
+ limit=RECORD * 6.5)]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
self.assertEqual(20, len(stream.strip().split('\n')))
@@ -609,7 +660,7 @@ class ParcelTest(tests.Test):
('first', None, content()),
('second', None, content()),
],
- compresslevel=0)]))
+ )]))
assert len(stream) > RECORD * 8
assert len(stream) < RECORD * 9
self.assertEqual(20, len(stream.strip().split('\n')))
@@ -639,7 +690,7 @@ class ParcelTest(tests.Test):
packets_iter = parcel.decode_dir('parcels')
with next(packets_iter) as packet:
self.assertEqual(2, packet.name)
- self.assertEqual({'packet': 2}, packet.props)
+ self.assertEqual({'packet': 2}, packet.header)
items = iter(packet)
blob = next(items)
self.assertEqual({'num': 2, 'content-length': '8'}, blob)
@@ -648,7 +699,7 @@ class ParcelTest(tests.Test):
self.assertRaises(StopIteration, items.next)
with next(packets_iter) as packet:
self.assertEqual(1, packet.name)
- self.assertEqual({'foo': 'bar', 'packet': 1}, packet.props)
+ self.assertEqual({'foo': 'bar', 'packet': 1}, packet.header)
items = iter(packet)
self.assertEqual({'payload': 1}, next(items))
blob = next(items)
@@ -734,7 +785,7 @@ class ParcelTest(tests.Test):
File('blob', 'digest', [('num', 2)]),
{'payload': 3},
]),
- ], path='./parcel')
+ ], path='./parcel', limit=99999999)
assert exists('parcel')
diff --git a/tests/units/toolkit/ranges.py b/tests/units/toolkit/ranges.py
new file mode 100755
index 0000000..4f77a9d
--- /dev/null
+++ b/tests/units/toolkit/ranges.py
@@ -0,0 +1,442 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import copy
+from os.path import exists
+from cStringIO import StringIO
+
+from __init__ import tests
+
+from sugar_network import toolkit
+from sugar_network.toolkit import ranges
+
+
+class RangesTest(tests.Test):
+
+ def test_exclude(self):
+ r = [[1, None]]
+ ranges.exclude(r, 1, 10)
+ self.assertEqual(
+ [[11, None]],
+ r)
+ r = [[1, None]]
+ ranges.exclude(r, 5, 10)
+ self.assertEqual(
+ [[1, 4], [11, None]],
+ r)
+ ranges.exclude(r, 2, 2)
+ self.assertEqual(
+ [[1, 1], [3, 4], [11, None]],
+ r)
+ ranges.exclude(r, 1, 1)
+ self.assertEqual(
+ [[3, 4], [11, None]],
+ r)
+ ranges.exclude(r, 3, 3)
+ self.assertEqual(
+ [[4, 4], [11, None]],
+ r)
+ ranges.exclude(r, 1, 20)
+ self.assertEqual(
+ [[21, None]],
+ r)
+ ranges.exclude(r, 21, 21)
+ self.assertEqual(
+ [[22, None]],
+ r)
+
+ r = [[100, None]]
+ ranges.exclude(r, [[1, 98]])
+ self.assertEqual([[100, None]], r)
+
+ r = [[1, 100]]
+ ranges.exclude(r, [[200, 300]])
+ self.assertEqual([[1, 100]], r)
+
+ def test_exclude_OpenStart(self):
+ r = [[1, None]]
+ self.assertRaises(RuntimeError, ranges.exclude, r, None, None)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, None, 1)
+ self.assertEqual([[10, 20], [30, None]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, None, 10)
+ self.assertEqual([[11, 20], [30, None]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, None, 15)
+ self.assertEqual([[16, 20], [30, None]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, None, 20)
+ self.assertEqual([[30, None]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, None, 35)
+ self.assertEqual([[36, None]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, None, 50)
+ self.assertEqual([[51, None]], r)
+
+ r = [[10, 20], [30, 40]]
+ ranges.exclude(r, None, 50)
+ self.assertEqual([], r)
+
+ r = [[2, 2]]
+ ranges.exclude(r, None, 2)
+ self.assertEqual([], r)
+
+ def test_exclude_OpenEnd(self):
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, 50, None)
+ self.assertEqual([[10, 20], [30, 49]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, 30, None)
+ self.assertEqual([[10, 20]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, 25, None)
+ self.assertEqual([[10, 20]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, 20, None)
+ self.assertEqual([[10, 19]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, 11, None)
+ self.assertEqual([[10, 10]], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, 10, None)
+ self.assertEqual([], r)
+
+ r = [[10, 20], [30, None]]
+ ranges.exclude(r, 1, None)
+ self.assertEqual([], r)
+
+ def test_include_JoinExistingItems(self):
+ r = []
+
+ ranges.include(r, 1, None)
+ self.assertEqual(
+ [[1, None]],
+ r)
+
+ ranges.include(r, 2, None)
+ self.assertEqual(
+ [[1, None]],
+ r)
+
+ ranges.include(r, 4, 5)
+ self.assertEqual(
+ [[1, None]],
+ r)
+
+ ranges.exclude(r, 2, 2)
+ ranges.exclude(r, 4, 4)
+ ranges.exclude(r, 6, 6)
+ ranges.exclude(r, 9, 9)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, 5],
+ [7, 8],
+ [10, None]],
+ r)
+
+ ranges.include(r, 10, 20)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, 5],
+ [7, 8],
+ [10, None]],
+ r)
+
+ ranges.include(r, 8, 20)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, 5],
+ [7, None]],
+ r)
+
+ ranges.include(r, 5, None)
+ self.assertEqual(
+ [[1, 1],
+ [3, 3],
+ [5, None]],
+ r)
+
+ ranges.include(r, 1, None)
+ self.assertEqual(
+ [[1, None]],
+ r)
+
+ def test_include_InsertNewItems(self):
+ r = []
+
+ ranges.include(r, 8, 10)
+ ranges.include(r, 3, 3)
+ self.assertEqual(
+ [[3, 3],
+ [8, 10]],
+ r)
+
+ ranges.include(r, 9, 11)
+ self.assertEqual(
+ [[3, 3],
+ [8, 11]],
+ r)
+
+ ranges.include(r, 7, 12)
+ self.assertEqual(
+ [[3, 3],
+ [7, 12]],
+ r)
+
+ ranges.include(r, 5, 5)
+ self.assertEqual(
+ [[3, 3],
+ [5, 5],
+ [7, 12]],
+ r)
+
+ ranges.include(r, 4, 4)
+ self.assertEqual(
+ [[3, 5],
+ [7, 12]],
+ r)
+
+ ranges.include(r, 1, 1)
+ self.assertEqual(
+ [[1, 1],
+ [3, 5],
+ [7, 12]],
+ r)
+
+ ranges.include(r, 2, None)
+ self.assertEqual(
+ [[1, None]],
+ r)
+
+ def test_Invert(self):
+ r1 = [[1, None]]
+ ranges.exclude(r1, 2, 2)
+ ranges.exclude(r1, 5, 10)
+
+ r2 = copy.deepcopy(r1)
+ r2[-1][1] = 20
+
+ self.assertEqual(
+ [
+ [1, 1],
+ [3, 4],
+ [11, None],
+ ],
+ r1)
+ ranges.exclude(r1, r2)
+ self.assertEqual(
+ [[21, None]],
+ r1)
+
+ def test_contains(self):
+ r = [[1, None]]
+
+ assert ranges.contains(r, 1)
+ assert ranges.contains(r, 4)
+
+ ranges.exclude(r, 2, 2)
+ ranges.exclude(r, 5, 10)
+
+ assert ranges.contains(r, 1)
+ assert not ranges.contains(r, 2)
+ assert ranges.contains(r, 3)
+ assert not ranges.contains(r, 5)
+ assert not ranges.contains(r, 10)
+ assert ranges.contains(r, 11)
+ assert ranges.contains(r, 12)
+
+ def test_stretch(self):
+ r = []
+ ranges.stretch(r)
+ self.assertEqual([], r)
+
+ r = [[1, None]]
+ ranges.stretch(r)
+ self.assertEqual([[1, None]], r)
+
+ r = [[1, 10]]
+ ranges.stretch(r)
+ self.assertEqual([[1, 10]], r)
+
+ r = [[1, 1], [3, 3], [5, None]]
+ ranges.stretch(r)
+ self.assertEqual([[1, None]], r)
+
+ r = [[3, 3], [5, 10]]
+ ranges.stretch(r)
+ self.assertEqual([[3, 10]], r)
+
+ def test_include(self):
+ r = []
+ ranges.include(r, 2, 2)
+ self.assertEqual(
+ [[2, 2]],
+ r)
+ ranges.include(r, 7, 10)
+ self.assertEqual(
+ [[2, 2], [7, 10]],
+ r)
+ ranges.include(r, 5, 5)
+ self.assertEqual(
+ [[2, 2], [5, 5], [7, 10]],
+ r)
+ ranges.include(r, 15, None)
+ self.assertEqual(
+ [[2, 2], [5, 5], [7, 10], [15, None]],
+ r)
+ ranges.include(r, 3, 5)
+ self.assertEqual(
+ [[2, 5], [7, 10], [15, None]],
+ r)
+ ranges.include(r, 11, 14)
+ self.assertEqual(
+ [[2, 5], [7, None]],
+ r)
+
+ r = []
+ ranges.include(r, 10, None)
+ self.assertEqual(
+ [[10, None]],
+ r)
+ ranges.include(r, 7, 8)
+ self.assertEqual(
+ [[7, 8], [10, None]],
+ r)
+ ranges.include(r, 2, 2)
+ self.assertEqual(
+ [[2, 2], [7, 8], [10, None]],
+ r)
+
+ def test_Union(self):
+ r1 = []
+ ranges.include(r1, 1, 2)
+ r2 = []
+ ranges.include(r2, 3, 4)
+ ranges.include(r1, r2)
+ self.assertEqual(
+ [[1, 4]],
+ r1)
+
+ r1 = []
+ ranges.include(r1, 1, None)
+ r2 = []
+ ranges.include(r2, 3, 4)
+ ranges.include(r1, r2)
+ self.assertEqual(
+ [[1, None]],
+ r1)
+
+ r2 = []
+ ranges.include(r2, 1, None)
+ r1 = []
+ ranges.include(r1, 3, 4)
+ ranges.include(r1, r2)
+ self.assertEqual(
+ [[1, None]],
+ r1)
+
+ r1 = []
+ ranges.include(r1, 1, None)
+ r2 = []
+ ranges.include(r2, 2, None)
+ ranges.include(r1, r2)
+ self.assertEqual(
+ [[1, None]],
+ r1)
+
+ r1 = []
+ r2 = []
+ ranges.include(r2, r1)
+ self.assertEqual([], r2)
+
+ r1 = []
+ r2 = []
+ ranges.include(r2, 1, None)
+ ranges.include(r2, r1)
+ self.assertEqual([[1, None]], r2)
+
+ r = []
+ ranges.include(r, 10, 11)
+ ranges.include(r, None)
+ self.assertEqual([[10, 11]], r)
+
+ def test_intersect_Closed(self):
+ self.assertEqual(
+ [],
+ ranges.intersect([], []))
+ self.assertEqual(
+ [],
+ ranges.intersect([[1, 1]], []))
+ self.assertEqual(
+ [],
+ ranges.intersect([], [[1, 1]]))
+
+ self.assertEqual(
+ [[1, 1]],
+ ranges.intersect([[1, 1]], [[1, 1]]))
+ self.assertEqual(
+ [[1, 1]],
+ ranges.intersect([[1, 10]], [[1, 1]]))
+ self.assertEqual(
+ [[1, 1]],
+ ranges.intersect([[1, 1]], [[1, 10]]))
+
+ self.assertEqual(
+ [[2, 5]],
+ ranges.intersect([[2, 10]], [[1, 5]]))
+ self.assertEqual(
+ [[2, 5]],
+ ranges.intersect([[1, 5]], [[2, 10]]))
+
+ self.assertEqual(
+ [[2, 3], [5, 7]],
+ ranges.intersect([[1, 10]], [[2, 3], [5, 7]]))
+ self.assertEqual(
+ [[2, 3], [5, 7]],
+ ranges.intersect([[2, 3], [5, 7]], [[1, 10]]))
+
+ self.assertEqual(
+ [[2, 2], [4, 4], [7, 7]],
+ ranges.intersect([[1, 2], [4, 5], [6, 8], [10, 11]], [[0, 0], [2, 4], [7, 7]]))
+ self.assertEqual(
+ [[2, 2], [4, 4], [7, 7]],
+ ranges.intersect([[0, 0], [2, 4], [7, 7]], [[1, 2], [4, 5], [6, 8], [10, 11]]))
+
+ def test_intersect_Open(self):
+ self.assertEqual(
+ [[1, None]],
+ ranges.intersect([[1, None]], [[1, None]]))
+
+ self.assertEqual(
+ [[2, None]],
+ ranges.intersect([[2, None]], [[1, None]]))
+ self.assertEqual(
+ [[2, None]],
+ ranges.intersect([[1, None]], [[2, None]]))
+
+ self.assertEqual(
+ [[2, 3], [5, None]],
+ ranges.intersect([[2, 3], [5, None]], [[1, None]]))
+ self.assertEqual(
+ [[2, 3], [5, None]],
+ ranges.intersect([[1, None]], [[2, 3], [5, None]]))
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/toolkit/router.py b/tests/units/toolkit/router.py
index 7d8af39..52a721e 100755
--- a/tests/units/toolkit/router.py
+++ b/tests/units/toolkit/router.py
@@ -5,6 +5,7 @@
import os
import json
from email.utils import formatdate
+from base64 import b64decode, b64encode
from cStringIO import StringIO
from __init__ import tests, src_root
@@ -1413,6 +1414,107 @@ class RouterTest(tests.Test):
self.assertEqual('бар', headers.get('фоо'))
self.assertEqual('кен', headers.get('йцу'))
+ def test_File_IterContentInFS(self):
+ self.touch(('blob', 'blob'))
+
+ self.assertEqual(
+ ['blob'],
+ [i for i in File('blob').iter_content()])
+
+ def test_File_IterContentByUrl(self):
+ this.http = http.Connection()
+
+ class Routes(object):
+
+ @route('GET')
+ def probe(self):
+ this.response['content-type'] = 'foo/bar'
+ this.response['content-length'] = str(len('probe'))
+ this.response['content-disposition'] = 'attachment; filename="foo"'
+
+ return 'probe'
+
+ server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(Routes()))
+ coroutine.spawn(server.serve_forever)
+ coroutine.dispatch()
+
+ blob = File(None, meta={'location': 'http://127.0.0.1:%s' % client.ipc_port.value})
+ self.assertEqual(
+ ['probe'],
+ [i for i in blob.iter_content()])
+ self.assertEqual({
+ 'content-length': '5',
+ 'content-type': 'foo/bar',
+ 'content-disposition': 'attachment; filename="foo"',
+ },
+ dict(blob))
+
+ def test_SetCookie(self):
+
+ class Routes(object):
+
+ @route('GET')
+ def probe(self):
+ this.cookie['foo'] = -1
+ this.cookie['bar'] = None
+
+ server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(Routes()))
+ coroutine.spawn(server.serve_forever)
+ coroutine.dispatch()
+ conn = http.Connection('http://127.0.0.1:%s' % client.ipc_port.value)
+
+ headers = conn.request('GET', []).headers
+ self.assertEqual(
+ 'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
+ 'foo': -1,
+ 'bar': None,
+ })),
+ headers.get('set-cookie'))
+
+ def test_UnsetCookie(self):
+
+ class Routes(object):
+
+ @route('GET', cmd='probe1')
+ def probe1(self):
+ pass
+
+ @route('GET', cmd='probe2')
+ def probe2(self):
+ this.cookie.clear()
+
+
+ server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(Routes()))
+ coroutine.spawn(server.serve_forever)
+ coroutine.dispatch()
+ conn = http.Connection('http://127.0.0.1:%s' % client.ipc_port.value)
+
+ headers = conn.request('GET', [], params={'cmd': 'probe1'}).headers
+ assert 'set-cookie' not in headers
+
+ headers = conn.request('GET', [], params={'cmd': 'probe2'}).headers
+ assert 'set-cookie' not in headers
+
+ headers = conn.request('GET', [], params={'cmd': 'probe1'}, headers={
+ 'cookie': 'sugar_network_node="%s"' % b64encode(json.dumps({
+ 'foo': 'bar',
+ })),
+ }).headers
+ self.assertEqual(
+ 'sugar_network_node=%s; Max-Age=3600; HttpOnly' % b64encode(json.dumps({
+ 'foo': 'bar',
+ })),
+ headers.get('set-cookie'))
+
+ headers = conn.request('GET', [], params={'cmd': 'probe2'}, headers={
+ 'cookie': 'sugar_network_node="%s"' % b64encode(json.dumps({
+ 'foo': 'bar',
+ })),
+ }).headers
+ self.assertEqual(
+ 'sugar_network_node=unset_sugar_network_node; Max-Age=3600; HttpOnly',
+ headers.get('set-cookie'))
+
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/toolkit/toolkit.py b/tests/units/toolkit/toolkit.py
index 883aac3..b3e7f6b 100755
--- a/tests/units/toolkit/toolkit.py
+++ b/tests/units/toolkit/toolkit.py
@@ -8,7 +8,7 @@ from cStringIO import StringIO
from __init__ import tests
from sugar_network import toolkit
-from sugar_network.toolkit import Seqno, Sequence
+from sugar_network.toolkit import Seqno
class ToolkitTest(tests.Test):
@@ -17,11 +17,9 @@ class ToolkitTest(tests.Test):
seqno = Seqno(tests.tmpdir + '/seqno')
self.assertEqual(False, seqno.commit())
- assert not exists('seqno')
seqno.next()
self.assertEqual(True, seqno.commit())
- assert exists('seqno')
self.assertEqual(False, seqno.commit())
seqno.next()
@@ -29,321 +27,6 @@ class ToolkitTest(tests.Test):
self.assertEqual(1, seqno.value)
self.assertEqual(False, seqno.commit())
- def test_Sequence_empty(self):
- scale = Sequence(empty_value=[1, None])
- self.assertEqual(
- [[1, None]],
- scale)
- assert scale.empty
- scale.exclude(1, 1)
- assert not scale.empty
-
- scale = Sequence()
- self.assertEqual(
- [],
- scale)
- assert scale.empty
- scale.include(1, None)
- assert not scale.empty
-
- def test_Sequence_exclude(self):
- scale = Sequence(empty_value=[1, None])
- scale.exclude(1, 10)
- self.assertEqual(
- [[11, None]],
- scale)
- scale = Sequence(empty_value=[1, None])
- scale.exclude(5, 10)
- self.assertEqual(
- [[1, 4], [11, None]],
- scale)
- scale.exclude(2, 2)
- self.assertEqual(
- [[1, 1], [3, 4], [11, None]],
- scale)
- scale.exclude(1, 1)
- self.assertEqual(
- [[3, 4], [11, None]],
- scale)
- scale.exclude(3, 3)
- self.assertEqual(
- [[4, 4], [11, None]],
- scale)
- scale.exclude(1, 20)
- self.assertEqual(
- [[21, None]],
- scale)
- scale.exclude(21, 21)
- self.assertEqual(
- [[22, None]],
- scale)
-
- seq = Sequence([[100, None]])
- seq.exclude([[1, 98]])
- self.assertEqual([[100, None]], seq)
-
- seq = Sequence([[1, 100]])
- seq.exclude([[200, 300]])
- self.assertEqual([[1, 100]], seq)
-
- def test_Sequence_include_JoinExistingItems(self):
- scale = Sequence()
-
- scale.include(1, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- scale.include(2, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- scale.include(4, 5)
- self.assertEqual(
- [[1, None]],
- scale)
-
- scale.exclude(2, 2)
- scale.exclude(4, 4)
- scale.exclude(6, 6)
- scale.exclude(9, 9)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, 5],
- [7, 8],
- [10, None]],
- scale)
-
- scale.include(10, 20)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, 5],
- [7, 8],
- [10, None]],
- scale)
-
- scale.include(8, 20)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, 5],
- [7, None]],
- scale)
-
- scale.include(5, None)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, None]],
- scale)
-
- scale.include(1, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- def test_Sequence_include_InsertNewItems(self):
- scale = Sequence()
-
- scale.include(8, 10)
- scale.include(3, 3)
- self.assertEqual(
- [[3, 3],
- [8, 10]],
- scale)
-
- scale.include(9, 11)
- self.assertEqual(
- [[3, 3],
- [8, 11]],
- scale)
-
- scale.include(7, 12)
- self.assertEqual(
- [[3, 3],
- [7, 12]],
- scale)
-
- scale.include(5, 5)
- self.assertEqual(
- [[3, 3],
- [5, 5],
- [7, 12]],
- scale)
-
- scale.include(4, 4)
- self.assertEqual(
- [[3, 5],
- [7, 12]],
- scale)
-
- scale.include(1, 1)
- self.assertEqual(
- [[1, 1],
- [3, 5],
- [7, 12]],
- scale)
-
- scale.include(2, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- def teste_Sequence_Invert(self):
- scale_1 = Sequence(empty_value=[1, None])
- scale_1.exclude(2, 2)
- scale_1.exclude(5, 10)
-
- scale_2 = copy.deepcopy(scale_1[:])
- scale_2[-1][1] = 20
-
- self.assertEqual(
- [
- [1, 1],
- [3, 4],
- [11, None],
- ],
- scale_1)
- scale_1.exclude(scale_2)
- self.assertEqual(
- [[21, None]],
- scale_1)
-
- def test_Sequence_contains(self):
- scale = Sequence(empty_value=[1, None])
-
- assert 1 in scale
- assert 4 in scale
-
- scale.exclude(2, 2)
- scale.exclude(5, 10)
-
- assert 1 in scale
- assert 2 not in scale
- assert 3 in scale
- assert 5 not in scale
- assert 10 not in scale
- assert 11 in scale
- assert 12 in scale
-
- def test_Sequence_stretch(self):
- seq = Sequence()
- seq.stretch()
- self.assertEqual([], seq)
-
- seq = Sequence([[1, None]])
- seq.stretch()
- self.assertEqual([[1, None]], seq)
-
- seq = Sequence([[1, 10]])
- seq.stretch()
- self.assertEqual([[1, 10]], seq)
-
- seq = Sequence([[1, 1], [3, 3], [5, None]])
- seq.stretch()
- self.assertEqual([[1, None]], seq)
-
- seq = Sequence([[3, 3], [5, 10]])
- seq.stretch()
- self.assertEqual([[3, 10]], seq)
-
- def test_Sequence_include(self):
- rng = Sequence()
- rng.include(2, 2)
- self.assertEqual(
- [[2, 2]],
- rng)
- rng.include(7, 10)
- self.assertEqual(
- [[2, 2], [7, 10]],
- rng)
- rng.include(5, 5)
- self.assertEqual(
- [[2, 2], [5, 5], [7, 10]],
- rng)
- rng.include(15, None)
- self.assertEqual(
- [[2, 2], [5, 5], [7, 10], [15, None]],
- rng)
- rng.include(3, 5)
- self.assertEqual(
- [[2, 5], [7, 10], [15, None]],
- rng)
- rng.include(11, 14)
- self.assertEqual(
- [[2, 5], [7, None]],
- rng)
-
- rng = Sequence()
- rng.include(10, None)
- self.assertEqual(
- [[10, None]],
- rng)
- rng.include(7, 8)
- self.assertEqual(
- [[7, 8], [10, None]],
- rng)
- rng.include(2, 2)
- self.assertEqual(
- [[2, 2], [7, 8], [10, None]],
- rng)
-
- def test_Sequence_Union(self):
- seq_1 = Sequence()
- seq_1.include(1, 2)
- seq_2 = Sequence()
- seq_2.include(3, 4)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, 4]],
- seq_1)
-
- seq_1 = Sequence()
- seq_1.include(1, None)
- seq_2 = Sequence()
- seq_2.include(3, 4)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, None]],
- seq_1)
-
- seq_2 = Sequence()
- seq_2.include(1, None)
- seq_1 = Sequence()
- seq_1.include(3, 4)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, None]],
- seq_1)
-
- seq_1 = Sequence()
- seq_1.include(1, None)
- seq_2 = Sequence()
- seq_2.include(2, None)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, None]],
- seq_1)
-
- seq_1 = Sequence()
- seq_2 = Sequence()
- seq_2.include(seq_1)
- self.assertEqual([], seq_2)
-
- seq_1 = Sequence()
- seq_2 = Sequence()
- seq_2.include(1, None)
- seq_2.include(seq_1)
- self.assertEqual([[1, None]], seq_2)
-
- seq = Sequence()
- seq.include(10, 11)
- seq.include(None)
- self.assertEqual([[10, 11]], seq)
-
def test_readline(self):
def readlines(string):