Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAleksey Lim <alsroot@sugarlabs.org>2014-02-25 13:56:12 (GMT)
committer Aleksey Lim <alsroot@sugarlabs.org>2014-02-25 13:56:12 (GMT)
commit2aed09c3b60188063623eecee4a0f79592a4719e (patch)
treeba73ab9ad425685187c4fb691bdb233f8972aa02
parent7552454f28decea7fae01af1a6d4d4faa173bf88 (diff)
Implement blobs storage
-rw-r--r--sugar_network/db/blobs.py146
-rw-r--r--sugar_network/db/files.py146
-rw-r--r--sugar_network/toolkit/__init__.py9
-rw-r--r--tests/__init__.py41
-rw-r--r--tests/units/db/__main__.py1
-rwxr-xr-xtests/units/db/blobs.py532
-rwxr-xr-xtests/units/db/files.py320
-rwxr-xr-xtests/units/db/routes.py39
-rwxr-xr-xtests/units/model/context.py27
-rwxr-xr-xtests/units/model/routes.py1
-rwxr-xr-xtests/units/node/files.py32
-rwxr-xr-xtests/units/node/model.py1
-rwxr-xr-xtests/units/node/node.py3
-rwxr-xr-xtests/units/toolkit/toolkit.py6
14 files changed, 761 insertions, 543 deletions
diff --git a/sugar_network/db/blobs.py b/sugar_network/db/blobs.py
new file mode 100644
index 0000000..da06483
--- /dev/null
+++ b/sugar_network/db/blobs.py
@@ -0,0 +1,146 @@
+# Copyright (C) 2014 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import logging
+import hashlib
+from contextlib import contextmanager
+from os.path import exists, abspath, join, isdir, isfile
+
+from sugar_network import toolkit
+from sugar_network.toolkit.router import File
+from sugar_network.toolkit import http, enforce
+
+
+_META_SUFFIX = '.meta'
+
+_logger = logging.getLogger('db.blobs')
+_root = None
+
+
def init(path):
    """Set the module-level blobs storage root, creating it when absent.

    :param path: directory to keep blobs in; a relative path is resolved
        against the current working directory
    """
    global _root
    _root = abspath(path)
    if exists(_root):
        return
    os.makedirs(_root)
+
+
def post(content, mime_type=None, digest_to_assert=None):
    """Store a new blob and return its `File` descriptor.

    :param content: a stream (anything with a ``read`` method), a raw
        string, or a dict with ``location``/``digest`` keys describing
        content hosted elsewhere
    :param mime_type: value for the ``content-type`` meta entry;
        defaults to ``application/octet-stream``
    :param digest_to_assert: optional SHA-1 hexdigest to verify the
        received content against; a mismatch raises `http.BadRequest`
    :returns: `File` built from the final path, digest, and meta pairs
    """
    meta = []

    @contextmanager
    def write_blob():
        # Write into a temporary name first; toolkit.new_file renames the
        # file to its ``name`` destination when closed (see the
        # _NewFile.name setter / dst_path in sugar_network.toolkit)
        tmp_path = join(_path(), 'post')
        if hasattr(content, 'read'):
            # Stream input: copy chunk by chunk, hashing on the fly to
            # avoid holding the whole payload in memory
            with toolkit.new_file(tmp_path) as blob:
                digest = hashlib.sha1()
                while True:
                    chunk = content.read(toolkit.BUFFER_SIZE)
                    if not chunk:
                        break
                    blob.write(chunk)
                    digest.update(chunk)
                yield blob, digest.hexdigest()
        elif isinstance(content, dict):
            # Externally hosted content: store an empty blob whose meta
            # redirects clients to the original location
            enforce('location' in content, http.BadRequest, 'No location')
            enforce('digest' in content, http.BadRequest, 'No digest')
            meta.append(('status', '301 Moved Permanently'))
            meta.append(('location', content['location']))
            with toolkit.new_file(tmp_path) as blob:
                yield blob, content['digest']
        else:
            # Raw string content
            with toolkit.new_file(tmp_path) as blob:
                blob.write(content)
                yield blob, hashlib.sha1(content).hexdigest()

    with write_blob() as (blob, digest):
        if digest_to_assert and digest != digest_to_assert:
            blob.unlink()
            raise http.BadRequest('Digest mismatch')
        path = _path(digest)
        meta.append(('content-type', mime_type or 'application/octet-stream'))
        # Meta entries are persisted as "key: value" lines next to the blob
        with toolkit.new_file(path + _META_SUFFIX) as f:
            for key, value in meta:
                f.write('%s: %s\n' % (key, value))
        # Retarget the pending temporary file at its digest-based location;
        # the actual move happens when the context manager closes it
        blob.name = path

    return File(path, digest, meta)
+
+
def update(digest, meta):
    """Rewrite the meta file of an existing blob with *meta* key/values.

    The previous meta content is fully replaced, not merged.
    Raises `http.NotFound` when no blob for *digest* exists.
    """
    meta_path = _path(digest) + _META_SUFFIX
    enforce(exists(meta_path), http.NotFound, 'No such blob')
    lines = ['%s: %s\n' % pair for pair in meta.items()]
    with toolkit.new_file(meta_path) as f:
        f.write(''.join(lines))
+
+
def get(digest):
    """Look up a stored blob by its digest.

    :returns: a `File` with the blob path and parsed meta pairs, or
        ``None`` when either the blob or its meta file is missing
    """
    blob_path = _path(digest)
    meta_path = blob_path + _META_SUFFIX
    if not (exists(blob_path) and exists(meta_path)):
        return None
    with file(meta_path) as f:
        # Meta lines look like "key: value"; split on the first colon only
        meta = [(key, value.strip())
                for key, value in (line.split(':', 1) for line in f)]
    return File(blob_path, digest, meta)
+
+
def delete(digest):
    """Remove a blob and its meta file; already-missing pieces are ignored."""
    blob_path = _path(digest)
    # Drop the meta file first so a concurrent get() never sees meta
    # without content
    for victim in (blob_path + _META_SUFFIX, blob_path):
        if exists(victim):
            os.unlink(victim)
+
+
def diff(in_seq, out_seq=None):
    """Yield `File` objects for blobs whose ctime falls within *in_seq*.

    :param in_seq: `toolkit.Sequence` of [start, end] ctime ranges to scan
    :param out_seq: optional `toolkit.Sequence` collecting the ctimes of
        yielded blobs; created on demand when not passed
    """
    if out_seq is None:
        out_seq = toolkit.Sequence([])
    # If the caller did not accumulate anything yet, this is the only diff
    # and the resulting sequence may be compacted at the end
    is_the_only_seq = not out_seq

    root = _path()
    for name in os.listdir(root):
        dirpath = join(root, name)
        # Truncate to int exactly like the per-file check below: a
        # fractional st_ctime could fall just outside an integer-bounded
        # range and wrongly skip the whole shard directory
        if not isdir(dirpath) or int(os.stat(dirpath).st_ctime) not in in_seq:
            continue
        for digest in os.listdir(dirpath):
            # Blob files are named by SHA-1 hexdigest, 40 chars long;
            # skip meta files and anything else
            if len(digest) != 40:
                continue
            path = join(dirpath, digest)
            if not isfile(path):
                continue
            ctime = int(os.stat(path).st_ctime)
            if ctime not in in_seq:
                continue
            blob = get(digest)
            if blob is None:
                continue
            yield blob
            out_seq.include(ctime, ctime)
    # NOTE(review): the original wrapped this body in
    # `try: ... except StopIteration: pass`, which is dead code inside a
    # generator (and masks bugs; cf. PEP 479) — removed.
    if is_the_only_seq:
        # There is only one diff, so, we can stretch it to remove all holes
        out_seq.stretch()
+
+
def _path(digest=None):
    """Map *digest* to its on-disk path, sharded by the first 3 hex chars.

    Without a digest (or with an empty one), return the storage root.
    Raises when `init()` has not been called yet.
    """
    enforce(_root is not None, 'Blobs storage is not initialized')
    if not digest:
        return _root
    return join(_root, digest[:3], digest)
diff --git a/sugar_network/db/files.py b/sugar_network/db/files.py
deleted file mode 100644
index a675ea3..0000000
--- a/sugar_network/db/files.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# Copyright (C) 2014 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-from sugar_network import toolkit
-from sugar_network.toolkit import http, enforce
-
-
-class Digest(str):
- pass
-
-
-def post(content, meta=None):
- # if fileobj is tmp then move files
- pass
-
-
-def update(digest, meta):
- pass
-
-
-def get(digest):
- pass
-
-
-def delete(digest):
- pass
-
-
-def path(digest):
- pass
-
-
-
-
-
-
-"""
-
-def diff(volume, in_seq, out_seq=None, exclude_seq=None, layer=None,
- fetch_blobs=False, ignore_documents=None, **kwargs):
-
- if 'blob' in meta:
- blob_path = meta.pop('blob')
- yield {'guid': guid,
- 'diff': {prop: meta},
- 'blob_size': meta['blob_size'],
- 'blob': toolkit.iter_file(blob_path),
- }
- elif fetch_blobs and 'url' in meta:
- url = meta.pop('url')
- try:
- blob = connection.request('GET', url,
- allow_redirects=True,
- # We need uncompressed size
- headers={'Accept-Encoding': ''})
- except Exception:
- _logger.exception('Cannot fetch %r for %s:%s:%s',
- url, resource, guid, prop)
- is_the_only_seq = False
- continue
- yield {'guid': guid,
- 'diff': {prop: meta},
- 'blob_size':
- int(blob.headers['Content-Length']),
- 'blob': blob.iter_content(toolkit.BUFFER_SIZE),
- }
- else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 'digest': hashlib.sha1(png.getvalue()).hexdigest(),
-
-
-
-
- if value is None:
- value = {'blob': None}
- elif isinstance(value, basestring) or hasattr(value, 'read'):
- value = _read_blob(request, prop, value)
- blobs.append(value['blob'])
- elif isinstance(value, dict):
- enforce('url' in value or 'blob' in value, 'No bundle')
- else:
- raise RuntimeError('Incorrect BLOB value')
-
-def _read_blob(request, prop, value):
- digest = hashlib.sha1()
- dst = toolkit.NamedTemporaryFile(delete=False)
-
- try:
- if isinstance(value, basestring):
- digest.update(value)
- dst.write(value)
- else:
- size = request.content_length or sys.maxint
- while size > 0:
- chunk = value.read(min(size, toolkit.BUFFER_SIZE))
- if not chunk:
- break
- dst.write(chunk)
- size -= len(chunk)
- digest.update(chunk)
- except Exception:
- os.unlink(dst.name)
- raise
- finally:
- dst.close()
-
- if request.prop and request.content_type:
- mime_type = request.content_type
- else:
- mime_type = prop.mime_type
-
- return {'blob': dst.name,
- 'digest': digest.hexdigest(),
- 'mime_type': mime_type,
- }
-
-)
-"""
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index 073ec4d..8acfe27 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -390,7 +390,10 @@ def new_file(path, mode=0644):
file object
"""
- result = _NewFile(dir=dirname(path), prefix=basename(path))
+ dirpath = dirname(path)
+ if not exists(dirpath):
+ os.makedirs(dirpath)
+ result = _NewFile(dir=dirpath, prefix=basename(path))
result.dst_path = path
os.fchmod(result.fileno(), mode)
return result
@@ -775,6 +778,10 @@ class _NewFile(object):
def name(self):
return self._file.name
+ @name.setter
+ def name(self, value):
+ self.dst_path = value
+
def close(self):
self._file.close()
if exists(self.name):
diff --git a/tests/__init__.py b/tests/__init__.py
index 6a997e7..3767614 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -18,7 +18,7 @@ from M2Crypto import DSA
from gevent import monkey
from sugar_network.toolkit import coroutine, http, mountpoints, Option, gbus, i18n, languages
-from sugar_network.toolkit.router import Router, File
+from sugar_network.toolkit.router import Router
from sugar_network.toolkit.coroutine import this
from sugar_network.db import blobs
from sugar_network.client import IPCConnection, journal, routes as client_routes
@@ -143,7 +143,7 @@ class Test(unittest.TestCase):
this.call = None
this.broadcast = lambda x: x
- self.override_files()
+ blobs.init('blobs')
def tearDown(self):
self.stop_nodes()
@@ -154,43 +154,6 @@ class Test(unittest.TestCase):
setattr(mod, name, old_handler)
sys.stdout.flush()
- def override_files(self):
- os.makedirs('blobs')
- self.blobs = {}
-
- def files_post(content, mime_type=None, digest_to_assert=None):
- if hasattr(content, 'read'):
- content = content.read()
- digest = File.Digest(hash(content))
- if digest_to_assert:
- assert digest == digest_to_assert
- path = join('blobs', digest)
- with file(path, 'w') as f:
- f.write(content)
- self.blobs[digest] = {'content-type': mime_type or 'application/octet-stream'}
- return File(path, digest, self.blobs[digest].items())
-
- def files_update(digest, meta):
- self.blobs.setdefault(digest, {}).update(meta)
-
- def files_get(digest):
- if digest not in self.blobs:
- return None
- path = join('blobs', digest)
- return File(path, digest, self.blobs[digest].items())
-
- def files_delete(digest):
- path = join('blobs', digest)
- if exists(path):
- os.unlink(path)
- if digest in self.blobs:
- del self.blobs[digest]
-
- self.override(blobs, 'post', files_post)
- self.override(blobs, 'update', files_update)
- self.override(blobs, 'get', files_get)
- self.override(blobs, 'delete', files_delete)
-
def stop_nodes(self):
if self.client is not None:
self.client.close()
diff --git a/tests/units/db/__main__.py b/tests/units/db/__main__.py
index 3b1b9ec..cff8f52 100644
--- a/tests/units/db/__main__.py
+++ b/tests/units/db/__main__.py
@@ -7,6 +7,7 @@ from storage import *
from index import *
from resource import *
from routes import *
+from blobs import *
#from migrate import *
if __name__ == '__main__':
diff --git a/tests/units/db/blobs.py b/tests/units/db/blobs.py
new file mode 100755
index 0000000..463af56
--- /dev/null
+++ b/tests/units/db/blobs.py
@@ -0,0 +1,532 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import os
+import time
+import hashlib
+from cStringIO import StringIO
+from os.path import exists, abspath
+
+from __init__ import tests
+
+from sugar_network import toolkit
+from sugar_network.db import blobs
+from sugar_network.toolkit.router import Request
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import http
+
+
+class BlobsTest(tests.Test):
+
    def setUp(self):
        # Point the blobs storage at the per-test working directory
        tests.Test.setUp(self)
        blobs.init('.')
+
    def test_post(self):
        """Posting a raw string stores content, meta, and a SHA-1 digest."""
        content = 'probe'
        blob = blobs.post(content)

        # Digest is the SHA-1 hexdigest of the content; path is sharded
        # by the first three digest characters
        self.assertEqual(
                hashlib.sha1(content).hexdigest(),
                blob.digest)
        self.assertEqual(
                abspath('%s/%s' % (blob.digest[:3], blob.digest)),
                blob.path)
        self.assertEqual({
            'content-type': 'application/octet-stream',
            },
            blob)

        # Content and meta are persisted side by side on disk
        self.assertEqual(
                content,
                file(blob.path).read())
        self.assertEqual([
            'content-type: application/octet-stream',
            ],
            file(blob.path + '.meta').read().strip().split('\n'))

        # get() reconstructs an equal (but distinct) File object
        the_same_blob = blobs.get(blob.digest)
        assert the_same_blob is not blob
        assert the_same_blob == blob
        assert the_same_blob.digest == blob.digest
        assert the_same_blob.path == blob.path
+
    def test_post_Stream(self):
        """Posting a file-like stream behaves exactly like posting a string."""
        content = 'probe'
        blob = blobs.post(StringIO(content))

        self.assertEqual(
                hashlib.sha1(content).hexdigest(),
                blob.digest)
        self.assertEqual(
                abspath('%s/%s' % (blob.digest[:3], blob.digest)),
                blob.path)
        self.assertEqual({
            'content-type': 'application/octet-stream',
            },
            blob)

        self.assertEqual(
                content,
                file(blob.path).read())
        self.assertEqual([
            'content-type: application/octet-stream',
            ],
            file(blob.path + '.meta').read().strip().split('\n'))

        the_same_blob = blobs.get(blob.digest)
        assert the_same_blob is not blob
        assert the_same_blob == blob
        assert the_same_blob.digest == blob.digest
        assert the_same_blob.path == blob.path
+
    def test_post_Url(self):
        """Posting a dict stores an empty blob with redirect meta.

        Both 'location' and 'digest' keys are mandatory; extra keys
        (e.g. 'foo') are ignored.
        """
        self.assertRaises(http.BadRequest, blobs.post, {})
        self.assertRaises(http.BadRequest, blobs.post, {'digest': 'digest'})
        blob = blobs.post({'location': 'location', 'digest': 'digest', 'foo': 'bar'})

        # The caller-provided digest is trusted as-is for external content
        self.assertEqual(
                'digest',
                blob.digest)
        self.assertEqual(
                abspath('%s/%s' % (blob.digest[:3], blob.digest)),
                blob.path)
        self.assertEqual({
            'status': '301 Moved Permanently',
            'location': 'location',
            'content-type': 'application/octet-stream',
            },
            blob)

        # The blob file itself stays empty; meta carries the redirect
        self.assertEqual(
                '',
                file(blob.path).read())
        self.assertEqual([
            'status: 301 Moved Permanently',
            'location: location',
            'content-type: application/octet-stream',
            ],
            file(blob.path + '.meta').read().strip().split('\n'))

        the_same_blob = blobs.get(blob.digest)
        assert the_same_blob is not blob
        assert the_same_blob == blob
        assert the_same_blob.digest == blob.digest
        assert the_same_blob.path == blob.path
+
    def test_update(self):
        """update() replaces the stored meta wholesale, it does not merge."""
        blob = blobs.post('probe')
        self.assertEqual({
            'content-type': 'application/octet-stream',
            },
            blob)

        blobs.update(blob.digest, {'foo': 'bar'})
        # The original content-type entry is gone after the update
        self.assertEqual({
            'foo': 'bar',
            },
            blobs.get(blob.digest))
+
    def test_delete(self):
        """delete() removes both the blob and its meta file."""
        blob = blobs.post('probe')
        assert exists(blob.path)
        assert exists(blob.path + '.meta')

        blobs.delete(blob.digest)
        assert not exists(blob.path)
        assert not exists(blob.path + '.meta')
        assert blobs.get(blob.digest) is None
+
    def test_diff(self):
        """diff() walks sharded blob dirs and filters by file ctime ranges.

        Uses hand-made 40-char digest files under 3-char shard dirs;
        out_seq accumulates the ctimes of yielded blobs.
        """
        blobs.init('blobs')
        this.request = Request()
        self.touch(
                'blobs/100/1000000000000000000000000000000000000001', ('blobs/100/1000000000000000000000000000000000000001.meta', ''),
                'blobs/100/1000000000000000000000000000000000000002', ('blobs/100/1000000000000000000000000000000000000002.meta', ''),
                'blobs/200/2000000000000000000000000000000000000003', ('blobs/200/2000000000000000000000000000000000000003.meta', ''),
                )

        # Full-range diff yields all existing blobs
        in_seq1 = toolkit.Sequence([[0, None]])
        out_seq1 = toolkit.Sequence([])
        self.assertEqual([
            '2000000000000000000000000000000000000003',
            '1000000000000000000000000000000000000002',
            '1000000000000000000000000000000000000001',
            ],
            [i.digest for i in blobs.diff(in_seq1, out_seq1)])
        ctimes1 = [
            int(os.stat('blobs/100/1000000000000000000000000000000000000001').st_ctime),
            int(os.stat('blobs/200/2000000000000000000000000000000000000003').st_ctime),
            ]
        self.assertEqual(
                [[min(ctimes1), max(ctimes1)]],
                out_seq1)

        # Excluding the already-seen range yields nothing new
        in_seq2 = toolkit.Sequence([[0, None]])
        in_seq2.exclude(out_seq1)
        out_seq2 = toolkit.Sequence([])
        self.assertEqual([
            ],
            [i.digest for i in blobs.diff(in_seq2, out_seq2)])
        self.assertEqual(
                [],
                out_seq2)

        # Sleep past the 1-second ctime granularity so new blobs land
        # in a later, not-yet-excluded range
        time.sleep(1.1)
        self.touch(
                'blobs/200/2000000000000000000000000000000000000004', ('blobs/200/2000000000000000000000000000000000000004.meta', ''),
                'blobs/300/3000000000000000000000000000000000000005', ('blobs/300/3000000000000000000000000000000000000005.meta', ''),
                )

        self.assertEqual([
            '3000000000000000000000000000000000000005',
            '2000000000000000000000000000000000000004',
            ],
            [i.digest for i in blobs.diff(in_seq2, out_seq2)])
        ctimes2 = [
            int(os.stat('blobs/200/2000000000000000000000000000000000000004').st_ctime),
            int(os.stat('blobs/300/3000000000000000000000000000000000000005').st_ctime),
            ]
        self.assertEqual(
                [[min(ctimes2), max(ctimes2)]],
                out_seq2)

        # A fresh full-range diff sees everything again
        in_seq3 = toolkit.Sequence([[0, None]])
        out_seq3 = toolkit.Sequence([])
        self.assertEqual([
            '3000000000000000000000000000000000000005',
            '2000000000000000000000000000000000000004',
            '2000000000000000000000000000000000000003',
            '1000000000000000000000000000000000000002',
            '1000000000000000000000000000000000000001',

            ],
            [i.digest for i in blobs.diff(in_seq3, out_seq3)])
        self.assertEqual(
                [[min(ctimes1 + ctimes2), max(ctimes1 + ctimes2)]],
                out_seq3)
+
+"""
+ def test_diff_WithBlobsSetByUrl(self):
+ URL = 'http://src.sugarlabs.org/robots.txt'
+ URL_content = urllib2.urlopen(URL).read()
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ directory.create({'guid': '1', 'ctime': 1, 'mtime': 1})
+ directory.update('1', {'blob': {'url': URL}})
+ self.utime('1/1', 1)
+
+ out_seq = Sequence()
+ self.assertEqual([
+ {'guid': '1', 'diff': {
+ 'guid': {'value': '1', 'mtime': 1},
+ 'ctime': {'value': 1, 'mtime': 1},
+ 'mtime': {'value': 1, 'mtime': 1},
+ 'blob': {
+ 'url': URL,
+ 'mtime': 1,
+ },
+ }},
+ ],
+ [i for i in diff(directory, [[0, None]], out_seq)])
+ self.assertEqual([[1, 2]], out_seq)
+
+ def test_merge_AvoidCalculatedBlobs(self):
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def blob(self, value):
+ return {'url': 'http://foo/bar', 'mime_type': 'image/png'}
+
+ directory1 = Directory('document1', Document, IndexWriter)
+ directory1.create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
+ for i in os.listdir('document1/gu/guid'):
+ os.utime('document1/gu/guid/%s' % i, (1, 1))
+
+ directory2 = Directory('document2', Document, IndexWriter)
+ for patch in diff(directory1, [[0, None]], Sequence()):
+ directory2.merge(**patch)
+
+ doc = directory2.get('guid')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['mtime'])
+ assert not exists('document2/gu/guid/blob')
+
+ def test_merge_Blobs(self):
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory = Directory('document', Document, IndexWriter)
+ self.touch(('blob', 'blob-1'))
+ directory.merge('1', {
+ 'guid': {'mtime': 1, 'value': '1'},
+ 'ctime': {'mtime': 2, 'value': 2},
+ 'mtime': {'mtime': 3, 'value': 3},
+ 'blob': {'mtime': 4, 'blob': 'blob'},
+ })
+
+ self.assertEqual(
+ [(2, 3, '1')],
+ [(i['ctime'], i['mtime'], i['guid']) for i in directory.find()[0]])
+
+ doc = directory.get('1')
+ self.assertEqual(1, doc.get('seqno'))
+ self.assertEqual(1, doc.meta('guid')['mtime'])
+ self.assertEqual(2, doc.meta('ctime')['mtime'])
+ self.assertEqual(3, doc.meta('mtime')['mtime'])
+ self.assertEqual(4, doc.meta('blob')['mtime'])
+ self.assertEqual('blob-1', file('document/1/1/blob.blob').read())
+
+ self.touch(('blob', 'blob-2'))
+ directory.merge('1', {
+ 'blob': {'mtime': 5, 'blob': 'blob'},
+ })
+
+ self.assertEqual(5, doc.meta('blob')['mtime'])
+ self.assertEqual('blob-2', file('document/1/1/blob.blob').read())
+
+
+ def test_DeleteOldBlobOnUpdate(self):
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def blob(self, value):
+ return value
+
+ directory = Directory(tests.tmpdir, Document, IndexWriter)
+
+ directory.create({'guid': 'guid', 'blob': 'foo'})
+ assert exists('gu/guid/blob.blob')
+ directory.update('guid', {'blob': {'url': 'foo'}})
+ assert not exists('gu/guid/blob.blob')
+
+ directory.update('guid', {'blob': 'foo'})
+ assert exists('gu/guid/blob.blob')
+ directory.update('guid', {'blob': {}})
+ assert not exists('gu/guid/blob.blob')
+
+ def test_diff_Blobs(self):
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('db', [Document])
+ cp = NodeRoutes('guid', volume)
+
+ guid = call(cp, method='POST', document='document', content={})
+ call(cp, method='PUT', document='document', guid=guid, content={'prop': 'payload'})
+ self.utime('db', 0)
+
+ patch = diff(volume, toolkit.Sequence([[1, None]]))
+ self.assertEqual(
+ {'resource': 'document'},
+ next(patch))
+ record = next(patch)
+ self.assertEqual('payload', ''.join([i for i in record.pop('blob')]))
+ self.assertEqual(
+ {'guid': guid, 'blob_size': len('payload'), 'diff': {
+ 'prop': {
+ 'digest': hashlib.sha1('payload').hexdigest(),
+ 'blob_size': len('payload'),
+ 'mime_type': 'application/octet-stream',
+ 'mtime': 0,
+ },
+ }},
+ record)
+ self.assertEqual(
+ {'guid': guid, 'diff': {
+ 'guid': {'value': guid, 'mtime': 0},
+ 'author': {'mtime': 0, 'value': {}},
+ 'layer': {'mtime': 0, 'value': []},
+ 'tags': {'mtime': 0, 'value': []},
+ 'mtime': {'value': 0, 'mtime': 0},
+ 'ctime': {'value': 0, 'mtime': 0},
+ }},
+ next(patch))
+ self.assertEqual(
+ {'commit': [[1, 2]]},
+ next(patch))
+ self.assertRaises(StopIteration, next, patch)
+
+ def test_diff_BlobUrls(self):
+ url = 'http://src.sugarlabs.org/robots.txt'
+ blob = urllib2.urlopen(url).read()
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('db', [Document])
+ cp = NodeRoutes('guid', volume)
+
+ guid = call(cp, method='POST', document='document', content={})
+ call(cp, method='PUT', document='document', guid=guid, content={'prop': {'url': url}})
+ self.utime('db', 1)
+
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': guid,
+ 'diff': {
+ 'guid': {'value': guid, 'mtime': 1},
+ 'author': {'mtime': 1, 'value': {}},
+ 'layer': {'mtime': 1, 'value': []},
+ 'tags': {'mtime': 1, 'value': []},
+ 'mtime': {'value': 0, 'mtime': 1},
+ 'ctime': {'value': 0, 'mtime': 1},
+ 'prop': {'url': url, 'mtime': 1},
+ },
+ },
+ {'commit': [[1, 2]]},
+ ],
+ [i for i in diff(volume, toolkit.Sequence([[1, None]]))])
+
+ patch = diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)
+ self.assertEqual(
+ {'resource': 'document'},
+ next(patch))
+ record = next(patch)
+ self.assertEqual(blob, ''.join([i for i in record.pop('blob')]))
+ self.assertEqual(
+ {'guid': guid, 'blob_size': len(blob), 'diff': {'prop': {'mtime': 1}}},
+ record)
+ self.assertEqual(
+ {'guid': guid, 'diff': {
+ 'guid': {'value': guid, 'mtime': 1},
+ 'author': {'mtime': 1, 'value': {}},
+ 'layer': {'mtime': 1, 'value': []},
+ 'tags': {'mtime': 1, 'value': []},
+ 'mtime': {'value': 0, 'mtime': 1},
+ 'ctime': {'value': 0, 'mtime': 1},
+ }},
+ next(patch))
+ self.assertEqual(
+ {'commit': [[1, 2]]},
+ next(patch))
+ self.assertRaises(StopIteration, next, patch)
+
+ def test_diff_SkipBrokenBlobUrls(self):
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('db', [Document])
+ cp = NodeRoutes('guid', volume)
+
+ guid1 = call(cp, method='POST', document='document', content={})
+ call(cp, method='PUT', document='document', guid=guid1, content={'prop': {'url': 'http://foo/bar'}})
+ guid2 = call(cp, method='POST', document='document', content={})
+ self.utime('db', 1)
+
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': guid1,
+ 'diff': {
+ 'guid': {'value': guid1, 'mtime': 1},
+ 'author': {'mtime': 1, 'value': {}},
+ 'layer': {'mtime': 1, 'value': []},
+ 'tags': {'mtime': 1, 'value': []},
+ 'mtime': {'value': 0, 'mtime': 1},
+ 'ctime': {'value': 0, 'mtime': 1},
+ 'prop': {'url': 'http://foo/bar', 'mtime': 1},
+ },
+ },
+ {'guid': guid2,
+ 'diff': {
+ 'guid': {'value': guid2, 'mtime': 1},
+ 'author': {'mtime': 1, 'value': {}},
+ 'layer': {'mtime': 1, 'value': []},
+ 'tags': {'mtime': 1, 'value': []},
+ 'mtime': {'value': 0, 'mtime': 1},
+ 'ctime': {'value': 0, 'mtime': 1},
+ },
+ },
+ {'commit': [[1, 3]]},
+ ],
+ [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=False)])
+
+ self.assertEqual([
+ {'resource': 'document'},
+ {'guid': guid1,
+ 'diff': {
+ 'guid': {'value': guid1, 'mtime': 1},
+ 'author': {'mtime': 1, 'value': {}},
+ 'layer': {'mtime': 1, 'value': []},
+ 'tags': {'mtime': 1, 'value': []},
+ 'mtime': {'value': 0, 'mtime': 1},
+ 'ctime': {'value': 0, 'mtime': 1},
+ },
+ },
+ {'guid': guid2,
+ 'diff': {
+ 'guid': {'value': guid2, 'mtime': 1},
+ 'author': {'mtime': 1, 'value': {}},
+ 'layer': {'mtime': 1, 'value': []},
+ 'tags': {'mtime': 1, 'value': []},
+ 'mtime': {'value': 0, 'mtime': 1},
+ 'ctime': {'value': 0, 'mtime': 1},
+ },
+ },
+ {'commit': [[1, 3]]},
+ ],
+ [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)])
+
+ def test_merge_Blobs(self):
+
+ class Document(db.Resource):
+
+ @db.blob_property()
+ def prop(self, value):
+ return value
+
+ volume = db.Volume('db', [Document])
+
+ merge(volume, [
+ {'resource': 'document'},
+ {'guid': '1', 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'ctime': {'value': 2, 'mtime': 2.0},
+ 'mtime': {'value': 3, 'mtime': 3.0},
+ 'prop': {
+ 'blob': StringIO('payload'),
+ 'blob_size': len('payload'),
+ 'digest': hashlib.sha1('payload').hexdigest(),
+ 'mime_type': 'foo/bar',
+ 'mtime': 1,
+ },
+ }},
+ {'commit': [[1, 1]]},
+ ])
+
+ assert volume['document'].exists('1')
+ blob = volume['document'].get('1')['prop']
+ self.assertEqual(1, blob['mtime'])
+ self.assertEqual('foo/bar', blob['mime_type'])
+ self.assertEqual(hashlib.sha1('payload').hexdigest(), blob['digest'])
+ self.assertEqual(tests.tmpdir + '/db/document/1/1/prop.blob', blob['blob'])
+ self.assertEqual('payload', file(blob['blob']).read())
+
+"""
+
# Allow running this test module directly
if __name__ == '__main__':
    tests.main()
diff --git a/tests/units/db/files.py b/tests/units/db/files.py
deleted file mode 100755
index 0d806df..0000000
--- a/tests/units/db/files.py
+++ /dev/null
@@ -1,320 +0,0 @@
-
- def test_diff_WithBlobsSetByUrl(self):
- URL = 'http://src.sugarlabs.org/robots.txt'
- URL_content = urllib2.urlopen(URL).read()
-
- class Document(db.Resource):
-
- @db.blob_property()
- def blob(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': '1', 'ctime': 1, 'mtime': 1})
- directory.update('1', {'blob': {'url': URL}})
- self.utime('1/1', 1)
-
- out_seq = Sequence()
- self.assertEqual([
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 1},
- 'ctime': {'value': 1, 'mtime': 1},
- 'mtime': {'value': 1, 'mtime': 1},
- 'blob': {
- 'url': URL,
- 'mtime': 1,
- },
- }},
- ],
- [i for i in diff(directory, [[0, None]], out_seq)])
- self.assertEqual([[1, 2]], out_seq)
-
- def test_merge_AvoidCalculatedBlobs(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def blob(self, value):
- return {'url': 'http://foo/bar', 'mime_type': 'image/png'}
-
- directory1 = Directory('document1', Document, IndexWriter)
- directory1.create({'guid': 'guid', 'ctime': 1, 'mtime': 1})
- for i in os.listdir('document1/gu/guid'):
- os.utime('document1/gu/guid/%s' % i, (1, 1))
-
- directory2 = Directory('document2', Document, IndexWriter)
- for patch in diff(directory1, [[0, None]], Sequence()):
- directory2.merge(**patch)
-
- doc = directory2.get('guid')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(1, doc.meta('guid')['mtime'])
- assert not exists('document2/gu/guid/blob')
-
- def test_merge_Blobs(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def blob(self, value):
- return value
-
- directory = Directory('document', Document, IndexWriter)
- self.touch(('blob', 'blob-1'))
- directory.merge('1', {
- 'guid': {'mtime': 1, 'value': '1'},
- 'ctime': {'mtime': 2, 'value': 2},
- 'mtime': {'mtime': 3, 'value': 3},
- 'blob': {'mtime': 4, 'blob': 'blob'},
- })
-
- self.assertEqual(
- [(2, 3, '1')],
- [(i['ctime'], i['mtime'], i['guid']) for i in directory.find()[0]])
-
- doc = directory.get('1')
- self.assertEqual(1, doc.get('seqno'))
- self.assertEqual(1, doc.meta('guid')['mtime'])
- self.assertEqual(2, doc.meta('ctime')['mtime'])
- self.assertEqual(3, doc.meta('mtime')['mtime'])
- self.assertEqual(4, doc.meta('blob')['mtime'])
- self.assertEqual('blob-1', file('document/1/1/blob.blob').read())
-
- self.touch(('blob', 'blob-2'))
- directory.merge('1', {
- 'blob': {'mtime': 5, 'blob': 'blob'},
- })
-
- self.assertEqual(5, doc.meta('blob')['mtime'])
- self.assertEqual('blob-2', file('document/1/1/blob.blob').read())
-
-
- def test_DeleteOldBlobOnUpdate(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def blob(self, value):
- return value
-
- directory = Directory(tests.tmpdir, Document, IndexWriter)
-
- directory.create({'guid': 'guid', 'blob': 'foo'})
- assert exists('gu/guid/blob.blob')
- directory.update('guid', {'blob': {'url': 'foo'}})
- assert not exists('gu/guid/blob.blob')
-
- directory.update('guid', {'blob': 'foo'})
- assert exists('gu/guid/blob.blob')
- directory.update('guid', {'blob': {}})
- assert not exists('gu/guid/blob.blob')
-
- def test_diff_Blobs(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def prop(self, value):
- return value
-
- volume = db.Volume('db', [Document])
- cp = NodeRoutes('guid', volume)
-
- guid = call(cp, method='POST', document='document', content={})
- call(cp, method='PUT', document='document', guid=guid, content={'prop': 'payload'})
- self.utime('db', 0)
-
- patch = diff(volume, toolkit.Sequence([[1, None]]))
- self.assertEqual(
- {'resource': 'document'},
- next(patch))
- record = next(patch)
- self.assertEqual('payload', ''.join([i for i in record.pop('blob')]))
- self.assertEqual(
- {'guid': guid, 'blob_size': len('payload'), 'diff': {
- 'prop': {
- 'digest': hashlib.sha1('payload').hexdigest(),
- 'blob_size': len('payload'),
- 'mime_type': 'application/octet-stream',
- 'mtime': 0,
- },
- }},
- record)
- self.assertEqual(
- {'guid': guid, 'diff': {
- 'guid': {'value': guid, 'mtime': 0},
- 'author': {'mtime': 0, 'value': {}},
- 'layer': {'mtime': 0, 'value': []},
- 'tags': {'mtime': 0, 'value': []},
- 'mtime': {'value': 0, 'mtime': 0},
- 'ctime': {'value': 0, 'mtime': 0},
- }},
- next(patch))
- self.assertEqual(
- {'commit': [[1, 2]]},
- next(patch))
- self.assertRaises(StopIteration, next, patch)
-
- def test_diff_BlobUrls(self):
- url = 'http://src.sugarlabs.org/robots.txt'
- blob = urllib2.urlopen(url).read()
-
- class Document(db.Resource):
-
- @db.blob_property()
- def prop(self, value):
- return value
-
- volume = db.Volume('db', [Document])
- cp = NodeRoutes('guid', volume)
-
- guid = call(cp, method='POST', document='document', content={})
- call(cp, method='PUT', document='document', guid=guid, content={'prop': {'url': url}})
- self.utime('db', 1)
-
- self.assertEqual([
- {'resource': 'document'},
- {'guid': guid,
- 'diff': {
- 'guid': {'value': guid, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- 'prop': {'url': url, 'mtime': 1},
- },
- },
- {'commit': [[1, 2]]},
- ],
- [i for i in diff(volume, toolkit.Sequence([[1, None]]))])
-
- patch = diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)
- self.assertEqual(
- {'resource': 'document'},
- next(patch))
- record = next(patch)
- self.assertEqual(blob, ''.join([i for i in record.pop('blob')]))
- self.assertEqual(
- {'guid': guid, 'blob_size': len(blob), 'diff': {'prop': {'mtime': 1}}},
- record)
- self.assertEqual(
- {'guid': guid, 'diff': {
- 'guid': {'value': guid, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- }},
- next(patch))
- self.assertEqual(
- {'commit': [[1, 2]]},
- next(patch))
- self.assertRaises(StopIteration, next, patch)
-
- def test_diff_SkipBrokenBlobUrls(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def prop(self, value):
- return value
-
- volume = db.Volume('db', [Document])
- cp = NodeRoutes('guid', volume)
-
- guid1 = call(cp, method='POST', document='document', content={})
- call(cp, method='PUT', document='document', guid=guid1, content={'prop': {'url': 'http://foo/bar'}})
- guid2 = call(cp, method='POST', document='document', content={})
- self.utime('db', 1)
-
- self.assertEqual([
- {'resource': 'document'},
- {'guid': guid1,
- 'diff': {
- 'guid': {'value': guid1, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- 'prop': {'url': 'http://foo/bar', 'mtime': 1},
- },
- },
- {'guid': guid2,
- 'diff': {
- 'guid': {'value': guid2, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- },
- },
- {'commit': [[1, 3]]},
- ],
- [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=False)])
-
- self.assertEqual([
- {'resource': 'document'},
- {'guid': guid1,
- 'diff': {
- 'guid': {'value': guid1, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- },
- },
- {'guid': guid2,
- 'diff': {
- 'guid': {'value': guid2, 'mtime': 1},
- 'author': {'mtime': 1, 'value': {}},
- 'layer': {'mtime': 1, 'value': []},
- 'tags': {'mtime': 1, 'value': []},
- 'mtime': {'value': 0, 'mtime': 1},
- 'ctime': {'value': 0, 'mtime': 1},
- },
- },
- {'commit': [[1, 3]]},
- ],
- [i for i in diff(volume, toolkit.Sequence([[1, None]]), fetch_blobs=True)])
-
- def test_merge_Blobs(self):
-
- class Document(db.Resource):
-
- @db.blob_property()
- def prop(self, value):
- return value
-
- volume = db.Volume('db', [Document])
-
- merge(volume, [
- {'resource': 'document'},
- {'guid': '1', 'diff': {
- 'guid': {'value': '1', 'mtime': 1.0},
- 'ctime': {'value': 2, 'mtime': 2.0},
- 'mtime': {'value': 3, 'mtime': 3.0},
- 'prop': {
- 'blob': StringIO('payload'),
- 'blob_size': len('payload'),
- 'digest': hashlib.sha1('payload').hexdigest(),
- 'mime_type': 'foo/bar',
- 'mtime': 1,
- },
- }},
- {'commit': [[1, 1]]},
- ])
-
- assert volume['document'].exists('1')
- blob = volume['document'].get('1')['prop']
- self.assertEqual(1, blob['mtime'])
- self.assertEqual('foo/bar', blob['mime_type'])
- self.assertEqual(hashlib.sha1('payload').hexdigest(), blob['digest'])
- self.assertEqual(tests.tmpdir + '/db/document/1/1/prop.blob', blob['blob'])
- self.assertEqual('payload', file(blob['blob']).read())
-
diff --git a/tests/units/db/routes.py b/tests/units/db/routes.py
index 7da2f75..5786760 100755
--- a/tests/units/db/routes.py
+++ b/tests/units/db/routes.py
@@ -18,13 +18,50 @@ from __init__ import tests
from sugar_network import db, toolkit
from sugar_network.db import blobs
from sugar_network.model.user import User
-from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL
+from sugar_network.toolkit.router import Router, Request, Response, fallbackroute, ACL, File
from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import coroutine, http, i18n
class RoutesTest(tests.Test):
+ def setUp(self):
+ tests.Test.setUp(self)
+ self.blobs = {}
+
+ def files_post(content, mime_type=None, digest_to_assert=None):
+ if hasattr(content, 'read'):
+ content = content.read()
+ digest = File.Digest(hash(content))
+ if digest_to_assert:
+ assert digest == digest_to_assert
+ path = join('blobs', digest)
+ with file(path, 'w') as f:
+ f.write(content)
+ self.blobs[digest] = {'content-type': mime_type or 'application/octet-stream'}
+ return File(path, digest, self.blobs[digest].items())
+
+ def files_update(digest, meta):
+ self.blobs.setdefault(digest, {}).update(meta)
+
+ def files_get(digest):
+ if digest not in self.blobs:
+ return None
+ path = join('blobs', digest)
+ return File(path, digest, self.blobs[digest].items())
+
+ def files_delete(digest):
+ path = join('blobs', digest)
+ if exists(path):
+ os.unlink(path)
+ if digest in self.blobs:
+ del self.blobs[digest]
+
+ self.override(blobs, 'post', files_post)
+ self.override(blobs, 'update', files_update)
+ self.override(blobs, 'get', files_get)
+ self.override(blobs, 'delete', files_delete)
+
def test_PostDefaults(self):
class Document(db.Resource):
diff --git a/tests/units/model/context.py b/tests/units/model/context.py
index 0990f15..bd6ffaf 100755
--- a/tests/units/model/context.py
+++ b/tests/units/model/context.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
# sugar-lint: disable
+import hashlib
from cStringIO import StringIO
from os.path import exists
@@ -69,7 +70,7 @@ class ContextTest(tests.Test):
])
bundle1 = self.zips(('topdir/activity/activity.info', activity_info1))
release1 = conn.upload(['context', context, 'releases'], StringIO(bundle1))
- assert release1 == str(hash(bundle1))
+ assert release1 == str(hashlib.sha1(bundle1).hexdigest())
self.assertEqual({
release1: {
'seqno': 5,
@@ -80,12 +81,12 @@ class ContextTest(tests.Test):
'version': [[1], 0],
'requires': {},
'command': 'true',
- 'bundles': {'*-*': {'blob': str(hash(bundle1)), 'unpack_size': len(activity_info1)}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle1).hexdigest()), 'unpack_size': len(activity_info1)}},
'stability': 'stable',
},
},
}, conn.get(['context', context, 'releases']))
- assert blobs.get(str(hash(bundle1)))
+ assert blobs.get(str(hashlib.sha1(bundle1).hexdigest()))
activity_info2 = '\n'.join([
'[Activity]',
@@ -98,7 +99,7 @@ class ContextTest(tests.Test):
])
bundle2 = self.zips(('topdir/activity/activity.info', activity_info2))
release2 = conn.upload(['context', context, 'releases'], StringIO(bundle2))
- assert release2 == str(hash(bundle2))
+ assert release2 == str(hashlib.sha1(bundle2).hexdigest())
self.assertEqual({
release1: {
'seqno': 5,
@@ -109,7 +110,7 @@ class ContextTest(tests.Test):
'version': [[1], 0],
'requires': {},
'command': 'true',
- 'bundles': {'*-*': {'blob': str(hash(bundle1)), 'unpack_size': len(activity_info1)}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle1).hexdigest()), 'unpack_size': len(activity_info1)}},
'stability': 'stable',
},
},
@@ -122,13 +123,13 @@ class ContextTest(tests.Test):
'version': [[2], 0],
'requires': {},
'command': 'true',
- 'bundles': {'*-*': {'blob': str(hash(bundle2)), 'unpack_size': len(activity_info2)}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle2).hexdigest()), 'unpack_size': len(activity_info2)}},
'stability': 'stable',
},
},
}, conn.get(['context', context, 'releases']))
- assert blobs.get(str(hash(bundle1)))
- assert blobs.get(str(hash(bundle2)))
+ assert blobs.get(str(hashlib.sha1(bundle1).hexdigest()))
+ assert blobs.get(str(hashlib.sha1(bundle2).hexdigest()))
conn.delete(['context', context, 'releases', release1])
self.assertEqual({
@@ -145,13 +146,13 @@ class ContextTest(tests.Test):
'version': [[2], 0],
'requires': {},
'command': 'true',
- 'bundles': {'*-*': {'blob': str(hash(bundle2)), 'unpack_size': len(activity_info2)}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle2).hexdigest()), 'unpack_size': len(activity_info2)}},
'stability': 'stable',
},
},
}, conn.get(['context', context, 'releases']))
- assert blobs.get(str(hash(bundle1))) is None
- assert blobs.get(str(hash(bundle2)))
+ assert blobs.get(str(hashlib.sha1(bundle1).hexdigest())) is None
+ assert blobs.get(str(hashlib.sha1(bundle2).hexdigest()))
conn.delete(['context', context, 'releases', release2])
self.assertEqual({
@@ -164,8 +165,8 @@ class ContextTest(tests.Test):
'author': {tests.UID: {'name': tests.UID, 'order': 0, 'role': 3}},
},
}, conn.get(['context', context, 'releases']))
- assert blobs.get(str(hash(bundle1))) is None
- assert blobs.get(str(hash(bundle2))) is None
+ assert blobs.get(str(hashlib.sha1(bundle1).hexdigest())) is None
+ assert blobs.get(str(hashlib.sha1(bundle2).hexdigest())) is None
def test_IncrementReleasesSeqnoOnNewReleases(self):
events = []
diff --git a/tests/units/model/routes.py b/tests/units/model/routes.py
index 3c21483..be5ecdf 100755
--- a/tests/units/model/routes.py
+++ b/tests/units/model/routes.py
@@ -10,7 +10,6 @@ from os.path import exists
from __init__ import tests, src_root
from sugar_network import db, model
-from sugar_network.db import files
from sugar_network.model.user import User
from sugar_network.toolkit.router import Router, Request
from sugar_network.toolkit.coroutine import this
diff --git a/tests/units/node/files.py b/tests/units/node/files.py
index 111b7a8..cfacc30 100755
--- a/tests/units/node/files.py
+++ b/tests/units/node/files.py
@@ -26,8 +26,8 @@ class FilesTest(tests.Test):
return str(self.uuid)
def test_Index_Populate(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
os.utime('files', (1, 1))
assert seeder.sync()
@@ -85,8 +85,8 @@ class FilesTest(tests.Test):
self.assertEqual(3, seqno.value)
def test_Index_SelectiveDiff(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
@@ -105,8 +105,8 @@ class FilesTest(tests.Test):
sorted(files_diff(seeder, in_seq)))
def test_Index_PartialDiff(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
@@ -132,8 +132,8 @@ class FilesTest(tests.Test):
self.assertRaises(StopIteration, diff.next)
def test_Index_diff_Stretch(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
@@ -152,8 +152,8 @@ class FilesTest(tests.Test):
self.assertRaises(StopIteration, diff.next)
def test_Index_diff_DoNotStretchContinuesPacket(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
@@ -172,8 +172,8 @@ class FilesTest(tests.Test):
self.assertRaises(StopIteration, diff.next)
def test_Index_DiffUpdatedFiles(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
@@ -223,8 +223,8 @@ class FilesTest(tests.Test):
self.assertEqual(6, seqno.value)
def test_Index_DiffCreatedFiles(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
@@ -270,8 +270,8 @@ class FilesTest(tests.Test):
self.assertEqual(6, seqno.value)
def test_Index_DiffDeletedFiles(self):
- seqno = toolkit.Seqno('seqno')
- seeder = files.Index('files', 'index', seqno)
+ seqno = toolkit.Seqno(tests.tmpdir + '/seqno')
+ seeder = files.Index(tests.tmpdir + '/files', tests.tmpdir + '/index', seqno)
self.touch(('files/1', '1'))
self.touch(('files/2', '2'))
diff --git a/tests/units/node/model.py b/tests/units/node/model.py
index 795b124..12b5a21 100755
--- a/tests/units/node/model.py
+++ b/tests/units/node/model.py
@@ -7,7 +7,6 @@ import time
from __init__ import tests
from sugar_network import db, toolkit
-from sugar_network.db import files
from sugar_network.client import Connection, keyfile, api_url
from sugar_network.model.user import User
from sugar_network.model.post import Post
diff --git a/tests/units/node/node.py b/tests/units/node/node.py
index 2c8ab8a..82d4e43 100755
--- a/tests/units/node/node.py
+++ b/tests/units/node/node.py
@@ -16,7 +16,6 @@ from os.path import exists, join
from __init__ import tests
from sugar_network import db, node, model, client
-from sugar_network.db import files
from sugar_network.client import Connection, keyfile, api_url
from sugar_network.toolkit import http, coroutine
from sugar_network.toolkit.rrd import Rrd
@@ -560,7 +559,7 @@ class NodeTest(tests.Test):
'announce': announce,
'version': [[1], 0],
'requires': {},
- 'bundles': {'*-*': {'blob': str(hash(bundle)), 'unpack_size': len(activity_info) + len(changelog)}},
+ 'bundles': {'*-*': {'blob': str(hashlib.sha1(bundle).hexdigest()), 'unpack_size': len(activity_info) + len(changelog)}},
'command': 'true',
'stability': 'developer',
},
diff --git a/tests/units/toolkit/toolkit.py b/tests/units/toolkit/toolkit.py
index 87f0c2f..883aac3 100755
--- a/tests/units/toolkit/toolkit.py
+++ b/tests/units/toolkit/toolkit.py
@@ -11,10 +11,10 @@ from sugar_network import toolkit
from sugar_network.toolkit import Seqno, Sequence
-class UtilTest(tests.Test):
+class ToolkitTest(tests.Test):
def test_Seqno_commit(self):
- seqno = Seqno('seqno')
+ seqno = Seqno(tests.tmpdir + '/seqno')
self.assertEqual(False, seqno.commit())
assert not exists('seqno')
@@ -25,7 +25,7 @@ class UtilTest(tests.Test):
self.assertEqual(False, seqno.commit())
seqno.next()
- seqno = Seqno('seqno')
+ seqno = Seqno(tests.tmpdir + '/seqno')
self.assertEqual(1, seqno.value)
self.assertEqual(False, seqno.commit())