author     Aleksey Lim <alsroot@sugarlabs.org>    2012-12-03 11:48:16 (GMT)
committer  Aleksey Lim <alsroot@sugarlabs.org>    2012-12-03 11:48:16 (GMT)
commit     37e26fea6081788e2dd942198be3c8de9cf05974
tree       904829cd6a33e6745d3f64e18b942b54066dab2d
parent     1adcdc807a60c6ff668cf447af3154070a439ba9
Start redesigning sync procedure; generalize diff/merge on volume level
-rw-r--r--  TODO                                   2
-rw-r--r--  sugar_network/node/stats.py            7
-rw-r--r--  sugar_network/node/sync_master.py     11
-rw-r--r--  sugar_network/node/sync_node.py        7
-rw-r--r--  sugar_network/resources/volume.py    101
-rw-r--r--  sugar_network/toolkit/__init__.py     67
-rw-r--r--  sugar_network/toolkit/collection.py  254
-rw-r--r--  sugar_network/toolkit/files_sync.py    7
-rw-r--r--  tests/units/__main__.py                7
-rwxr-xr-x  tests/units/collection.py            461
-rwxr-xr-x  tests/units/files_sync.py             57
-rwxr-xr-x  tests/units/volume.py                423
12 files changed, 500 insertions, 904 deletions
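
The patch below generalizes diff/merge to the volume level: Volume.diff() now pushes a {'document': <name>} header for each directory, then {'guid': ..., 'diff': ...} records, and finally a {'commit': <sequence>} marker, which Volume.merge() replays on the receiving side (see sugar_network/resources/volume.py and tests/units/volume.py below). A minimal sketch of consuming such a record stream — not the project code; apply_diff stands in for Directory.merge():

    def merge_stream(records, apply_diff):
        # Replay a volume diff record stream; return the committed sequence.
        document = None
        for record in records:
            if 'document' in record:
                document = record['document']   # switch to the next document
            elif 'diff' in record:
                assert document is not None, 'Invalid merge packet, no document'
                apply_diff(document, record['guid'], record['diff'])
            elif 'commit' in record:
                return record['commit']         # ranges of merged seqnos

    merged = []
    stream = [
        {'document': 'document'},
        {'guid': '1', 'diff': {'prop': {'value': 'a', 'mtime': 1.0}}},
        {'commit': [[1, 1]]},
    ]
    print(merge_stream(stream, lambda doc, guid, diff: merged.append((doc, guid, diff))))
    # -> [[1, 1]], with the single diff recorded in `merged`
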
diff --git a/TODO b/TODO
index 32982d9..1d9458a 100644
--- a/TODO
+++ b/TODO
@@ -17,3 +17,5 @@
- handle DELETE while calculating per-object node stats
- process depencies for non-/ mountpoint launches
- unstall activities on checking out and on initial syncing
+- "Cannot find implementation for" error if there is no required sugar
+ - trace says that current sugar version is (ok)
diff --git a/sugar_network/node/stats.py b/sugar_network/node/stats.py
index 68ad22b..d3eb073 100644
--- a/sugar_network/node/stats.py
+++ b/sugar_network/node/stats.py
@@ -19,9 +19,10 @@ from os.path import join, exists, isdir
from pylru import lrucache
+import active_document as ad
from active_toolkit.options import Option
from sugar_network.toolkit.rrd import Rrd
-from sugar_network.toolkit.collection import Sequence, PersistentSequence
+from sugar_network.toolkit import PersistentSequence
stats_root = Option(
@@ -79,8 +80,8 @@ def pull(in_seq, packet):
seq = in_seq[user][db.name] = PersistentSequence(
join(rrd.root, db.name + '.push'), [1, None])
elif seq is not dict:
- seq = in_seq[user][db.name] = Sequence(seq)
- out_seq = Sequence()
+ seq = in_seq[user][db.name] = ad.Sequence(seq)
+ out_seq = ad.Sequence()
def dump():
for start, end in seq:
diff --git a/sugar_network/node/sync_master.py b/sugar_network/node/sync_master.py
index b75ce27..0779729 100644
--- a/sugar_network/node/sync_master.py
+++ b/sugar_network/node/sync_master.py
@@ -27,7 +27,6 @@ import active_document as ad
from sugar_network import node, toolkit
from sugar_network.toolkit.sneakernet import InPacket, OutBufferPacket, \
OutPacket, DiskFull
-from sugar_network.toolkit.collection import Sequence
from sugar_network.toolkit.files_sync import Seeders
from sugar_network.node import stats
from active_toolkit import coroutine, util, enforce
@@ -62,8 +61,8 @@ class SyncCommands(object):
out_packet = OutBufferPacket(src=self._guid,
dst=in_packet.header['src'],
filename='ack.' + in_packet.header.get('filename'))
- pushed = Sequence()
- merged = Sequence()
+ pushed = ad.Sequence()
+ merged = ad.Sequence()
cookie = _Cookie()
stats_pushed = {}
@@ -87,7 +86,7 @@ class SyncCommands(object):
rrd[db].put(record['values'], record['timestamp'])
user_seq = stats_pushed.setdefault(user, {})
- db_seq = user_seq.setdefault(db, Sequence())
+ db_seq = user_seq.setdefault(db, ad.Sequence())
db_seq.include(record['sequence'])
enforce(not merged or pushed,
@@ -238,7 +237,7 @@ class _Cookie(dict):
if request is not None:
value = self._get_cookie(request, 'sugar_network_sync')
for key, seq in (value or {}).items():
- self[key] = Sequence(seq)
+ self[key] = ad.Sequence(seq)
self.delay = 0
@@ -264,7 +263,7 @@ class _Cookie(dict):
def __getitem__(self, key):
seq = self.get(key)
if seq is None:
- seq = self[key] = Sequence()
+ seq = self[key] = ad.Sequence()
return seq
def _get_cookie(self, request, name):
diff --git a/sugar_network/node/sync_node.py b/sugar_network/node/sync_node.py
index 5373cc5..9bd5a51 100644
--- a/sugar_network/node/sync_node.py
+++ b/sugar_network/node/sync_node.py
@@ -23,8 +23,7 @@ from gettext import gettext as _
import active_document as ad
from sugar_network import node, client
from sugar_network.toolkit import mountpoints, sneakernet, files_sync
-from sugar_network.toolkit.collection import MutableStack
-from sugar_network.toolkit.collection import Sequence, PersistentSequence
+from sugar_network.toolkit import MutableStack, PersistentSequence
from sugar_network.toolkit.sneakernet import OutFilePacket, DiskFull
from sugar_network.node import stats
from active_toolkit import coroutine, util, enforce
@@ -87,11 +86,11 @@ class SyncCommands(object):
stats_sequence=None, session=None):
enforce(self._mount is not None, 'No server to sync')
- to_push_seq = Sequence(empty_value=[1, None])
+ to_push_seq = ad.Sequence(empty_value=[1, None])
if diff_sequence is None:
to_push_seq.include(self._push_seq)
else:
- to_push_seq = Sequence(diff_sequence)
+ to_push_seq = ad.Sequence(diff_sequence)
if stats_sequence is None:
stats_sequence = {}
diff --git a/sugar_network/resources/volume.py b/sugar_network/resources/volume.py
index 205305f..b960de4 100644
--- a/sugar_network/resources/volume.py
+++ b/sugar_network/resources/volume.py
@@ -19,8 +19,6 @@ from os.path import join
import active_document as ad
from sugar_network import client, node, toolkit, static
-from sugar_network.toolkit.sneakernet import DiskFull
-from sugar_network.toolkit.collection import Sequence
from sugar_network.toolkit import http, router
from active_toolkit.sockets import BUFFER_SIZE
from active_toolkit import coroutine, enforce
@@ -158,67 +156,44 @@ class Volume(ad.SingleVolume):
ad.SingleVolume.notify(self, event)
- def merge(self, record, increment_seqno=True):
- coroutine.dispatch()
- if record.get('content_type') == 'blob':
- diff = record['blob']
- else:
- diff = record['diff']
- return self[record['document']].merge(record['guid'], diff,
- increment_seqno=increment_seqno)
-
- def diff(self, in_seq, out_packet):
- # Since `in_seq` will be changed in `patch()`, original sequence
- # should be passed as-is to every document's `diff()` because
- # seqno handling is common for all documents
- orig_seq = Sequence(in_seq)
- push_seq = Sequence()
-
- for document, directory in self.items():
- coroutine.dispatch()
- directory.commit()
-
- def patch():
- for guid, seqno, diff in \
- directory.diff(orig_seq, limit=_DIFF_CHUNK):
- coroutine.dispatch()
-
- for prop, value in diff.items():
- if 'path' in value:
- data = file(value.pop('path'), 'rb')
- elif 'url' in value:
- data = self._download_blob(value.pop('url'))
- else:
- continue
- del diff[prop]
- arcname = join(document, 'blobs', guid, prop)
- out_packet.push(data, arcname=arcname, cmd='sn_push',
- document=document, guid=guid, **value)
-
- if not diff:
- continue
-
- yield {'guid': guid, 'diff': diff}
-
- # Update `in_seq`, it might be reused by caller
- in_seq.exclude(seqno, seqno)
- push_seq.include(seqno, seqno)
-
- try:
- out_packet.push(patch(), arcname=join(document, 'diff'),
- cmd='sn_push', document=document)
- except DiskFull:
- if push_seq:
- out_packet.push(force=True, cmd='sn_commit',
- sequence=push_seq)
- raise
-
- if push_seq:
- # Only here we can collapse `push_seq` since seqno handling
- # is common for all documents; if there was an exception before
- # this place, `push_seq` should contain not-collapsed sequence
- orig_seq.floor(push_seq.last)
- out_packet.push(force=True, cmd='sn_commit', sequence=orig_seq)
+ def diff(self, in_seq, packet):
+ out_seq = ad.Sequence()
+ try:
+ for document, directory in self.items():
+ coroutine.dispatch()
+ directory.commit()
+ packet.push(document=document)
+ try:
+ for guid, diff in directory.diff(in_seq, out_seq):
+ coroutine.dispatch()
+ if not packet.push(diff=diff, guid=guid):
+ raise StopIteration()
+ finally:
+ in_seq.exclude(out_seq)
+ if out_seq:
+ out_seq = [[out_seq.first, out_seq.last]]
+ in_seq.exclude(out_seq)
+ except StopIteration:
+ pass
+ finally:
+ packet.push(commit=out_seq)
+
+ def merge(self, packet, increment_seqno=True):
+ directory = None
+ for record in packet:
+ document = record.get('document')
+ if document is not None:
+ directory = self[document]
+ continue
+ diff = record.get('diff')
+ if diff is not None:
+ enforce(directory is not None,
+ 'Invalid merge packet, no document')
+ directory.merge(record['guid'], diff, increment_seqno)
+ continue
+ commit = record.get('commit')
+ if commit is not None:
+ return commit
def _open(self, name, document):
directory = ad.SingleVolume._open(self, name, document)
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index 84a6525..c962505 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -14,11 +14,14 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
+import json
import logging
import hashlib
import tempfile
+import collections
from os.path import isfile, lexists, exists, dirname
+import active_document as ad
from active_toolkit.options import Option
from active_toolkit import util
@@ -123,6 +126,70 @@ def init_logging(debug_level):
self._log(8, message, args, **kwargs)
+class PersistentSequence(ad.Sequence):
+
+ def __init__(self, path, empty_value=None):
+ ad.Sequence.__init__(self, empty_value=empty_value)
+ self._path = path
+
+ if exists(self._path):
+ with file(self._path) as f:
+ self[:] = json.load(f)
+
+ def commit(self):
+ dir_path = dirname(self._path)
+ if dir_path and not exists(dir_path):
+ os.makedirs(dir_path)
+ with util.new_file(self._path) as f:
+ json.dump(self, f)
+ f.flush()
+ os.fsync(f.fileno())
+
+
+class MutableStack(object):
+ """Stack that keeps its iterators correct after changing content."""
+
+ def __init__(self):
+ self._queue = collections.deque()
+
+ def add(self, value):
+ self.remove(value)
+ self._queue.appendleft([False, value])
+
+ def remove(self, value):
+ for i, (__, existing) in enumerate(self._queue):
+ if existing == value:
+ del self._queue[i]
+ break
+
+ def rewind(self):
+ for i in self._queue:
+ i[0] = False
+
+ def __len__(self):
+ return len(self._queue)
+
+ def __iter__(self):
+ return _MutableStackIterator(self._queue)
+
+ def __repr__(self):
+ return str([i[1] for i in self._queue])
+
+
+class _MutableStackIterator(object):
+
+ def __init__(self, queue):
+ self._queue = queue
+
+ def next(self):
+ for i in self._queue:
+ processed, value = i
+ if not processed:
+ i[0] = True
+ return value
+ raise StopIteration()
+
+
def _disable_logger(loggers):
for log_name in loggers:
logger = logging.getLogger(log_name)
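
PersistentSequence and MutableStack move from toolkit/collection.py into the toolkit package root above. A short usage sketch for MutableStack, assuming sugar_network is importable; the behaviour follows the unit tests removed further down (the codebase is Python 2, hence the iterator's next()):

    from sugar_network.toolkit import MutableStack

    stack = MutableStack()
    stack.add('mount-1')
    stack.add('mount-2')            # most recently added is yielded first

    seen = []
    for value in stack:             # safe to mutate while iterating
        seen.append(value)
        if value == 'mount-2':
            stack.add('mount-3')
    assert seen == ['mount-2', 'mount-3', 'mount-1']

    stack.rewind()                  # mark every item unprocessed again
    assert [i for i in stack] == ['mount-3', 'mount-2', 'mount-1']
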
diff --git a/sugar_network/toolkit/collection.py b/sugar_network/toolkit/collection.py
deleted file mode 100644
index f71b5cf..0000000
--- a/sugar_network/toolkit/collection.py
+++ /dev/null
@@ -1,254 +0,0 @@
-# Copyright (C) 2011-2012 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import json
-import collections
-from os.path import exists, dirname
-
-from active_toolkit import util, enforce
-
-
-class Sequence(list):
- """List of sorted and non-overlapping ranges.
-
- List items are ranges, [`start`, `stop']. If `start` or `stop`
- is `None`, it means the beginning or ending of the entire scale.
-
- """
-
- def __init__(self, value=None, empty_value=None):
- """
- :param value:
- default value to initialize range
- :param empty_value:
- if not `None`, the initial value for empty range
-
- """
- if empty_value is None:
- self._empty_value = []
- else:
- self._empty_value = [empty_value]
-
- if value:
- self.extend(value)
- else:
- self.clear()
-
- def __contains__(self, value):
- for start, end in self:
- if value >= start and (end is None or value <= end):
- return True
- else:
- return False
-
- @property
- def first(self):
- if self:
- return self[0][0]
- else:
- return 0
-
- @property
- def last(self):
- if self:
- return self[-1][-1]
-
- @property
- def empty(self):
- """Is timeline in the initial state."""
- return self == self._empty_value
-
- def clear(self):
- """Reset range to the initial value."""
- self[:] = self._empty_value
-
- def include(self, start, end=None):
- """Include specified range.
-
- :param start:
- either including range start or a list of
- (`start`, `end`) pairs
- :param end:
- including range end
-
- """
- if issubclass(type(start), collections.Iterable):
- for range_start, range_end in start:
- self._include(range_start, range_end)
- elif start is not None:
- self._include(start, end)
-
- def exclude(self, start, end=None):
- """Exclude specified range.
-
- :param start:
- either excluding range start or a list of
- (`start`, `end`) pairs
- :param end:
- excluding range end
-
- """
- if issubclass(type(start), collections.Iterable):
- for range_start, range_end in start:
- self._exclude(range_start, range_end)
- else:
- enforce(end is not None)
- self._exclude(start, end)
-
- def floor(self, end):
- """Make right limit as less as `end` is."""
- i = None
- for i, (self_start, self_end) in enumerate(self):
- if self_start > end:
- break
- elif self_end is None or self_end >= end:
- self[i][1] = end
- i += 1
- break
- else:
- return
- if i < len(self):
- del self[i:]
-
- def _include(self, range_start, range_end):
- if range_start is None:
- range_start = 1
-
- range_start_new = None
- range_start_i = 0
-
- for range_start_i, (start, end) in enumerate(self):
- if range_end is not None and start - 1 > range_end:
- break
- if (range_end is None or start - 1 <= range_end) and \
- (end is None or end + 1 >= range_start):
- range_start_new = min(start, range_start)
- break
- else:
- range_start_i += 1
-
- if range_start_new is None:
- self.insert(range_start_i, [range_start, range_end])
- return
-
- range_end_new = range_end
- range_end_i = range_start_i
- for i, (start, end) in enumerate(self[range_start_i:]):
- if range_end is not None and start - 1 > range_end:
- break
- if range_end is None or end is None:
- range_end_new = None
- else:
- range_end_new = max(end, range_end)
- range_end_i = range_start_i + i
-
- del self[range_start_i:range_end_i]
- self[range_start_i] = [range_start_new, range_end_new]
-
- def _exclude(self, range_start, range_end):
- if range_start is None:
- range_start = 1
- enforce(range_end is not None)
- enforce(range_start <= range_end and range_start > 0,
- 'Start value %r is less than 0 or not less than %r',
- range_start, range_end)
-
- for i, interval in enumerate(self):
- start, end = interval
- if end is not None and end < range_start:
- # Current `interval` is below than new one
- continue
-
- if end is None or end > range_end:
- # Current `interval` will exist after changing
- self[i] = [range_end + 1, end]
- if start < range_start:
- self.insert(i, [start, range_start - 1])
- else:
- if start < range_start:
- self[i] = [start, range_start - 1]
- else:
- del self[i]
-
- if end is not None:
- range_start = end + 1
- if range_start < range_end:
- self.exclude(range_start, range_end)
- break
-
-
-class PersistentSequence(Sequence):
-
- def __init__(self, path, empty_value=None):
- Sequence.__init__(self, empty_value=empty_value)
- self._path = path
-
- if exists(self._path):
- with file(self._path) as f:
- self[:] = json.load(f)
-
- def commit(self):
- dir_path = dirname(self._path)
- if dir_path and not exists(dir_path):
- os.makedirs(dir_path)
- with util.new_file(self._path) as f:
- json.dump(self, f)
- f.flush()
- os.fsync(f.fileno())
-
-
-class MutableStack(object):
- """Stack that keeps its iterators correct after changing content."""
-
- def __init__(self):
- self._queue = collections.deque()
-
- def add(self, value):
- self.remove(value)
- self._queue.appendleft([False, value])
-
- def remove(self, value):
- for i, (__, existing) in enumerate(self._queue):
- if existing == value:
- del self._queue[i]
- break
-
- def rewind(self):
- for i in self._queue:
- i[0] = False
-
- def __len__(self):
- return len(self._queue)
-
- def __iter__(self):
- return _MutableStackIterator(self._queue)
-
- def __repr__(self):
- return str([i[1] for i in self._queue])
-
-
-class _MutableStackIterator(object):
-
- def __init__(self, queue):
- self._queue = queue
-
- def next(self):
- for i in self._queue:
- processed, value = i
- if not processed:
- i[0] = True
- return value
- raise StopIteration()
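
The Sequence class deleted above is replaced throughout this commit by ad.Sequence from active_document, which is assumed to provide the same semantics: a list of sorted, non-overlapping [start, end] ranges, with None standing for an open end. A usage sketch based on the deleted implementation and its tests (the ad.Sequence API is an assumption here):

    import active_document as ad

    seq = ad.Sequence(empty_value=[1, None])   # "everything still to pull"
    assert seq == [[1, None]]

    seq.exclude(1, 10)                         # seqnos 1..10 are done
    assert seq == [[11, None]]

    seq.exclude(15, 20)
    assert seq == [[11, 14], [21, None]]

    seq.include(12, 16)                        # adjacent ranges merge back
    assert seq == [[11, 16], [21, None]]

    assert 13 in seq and 18 not in seq
    assert seq.first == 11 and seq.last is None
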
diff --git a/sugar_network/toolkit/files_sync.py b/sugar_network/toolkit/files_sync.py
index 636f067..dfa62a0 100644
--- a/sugar_network/toolkit/files_sync.py
+++ b/sugar_network/toolkit/files_sync.py
@@ -19,8 +19,9 @@ import logging
from bisect import bisect_left
from os.path import join, exists, relpath, lexists, basename, dirname
+import active_document as ad
+from sugar_network.toolkit import PersistentSequence
from sugar_network.toolkit.sneakernet import DiskFull
-from sugar_network.toolkit.collection import Sequence, PersistentSequence
from active_toolkit.sockets import BUFFER_SIZE
from active_toolkit import util, coroutine
@@ -51,8 +52,8 @@ class Seeder(object):
# Thus, avoid changing `self._index` by different coroutines.
with self._mutex:
self._sync()
- orig_seq = Sequence(in_seq)
- out_seq = Sequence()
+ orig_seq = ad.Sequence(in_seq)
+ out_seq = ad.Sequence()
try:
self._pull(in_seq, packet, out_seq, False)
diff --git a/tests/units/__main__.py b/tests/units/__main__.py
index de85f5b..7d8a3db 100644
--- a/tests/units/__main__.py
+++ b/tests/units/__main__.py
@@ -2,7 +2,6 @@
from __init__ import tests
-from collection import *
from spec import *
from http import *
from volume import *
@@ -10,9 +9,9 @@ from client import *
from node import *
from sneakernet import *
from router import *
-from files_sync import *
-from sync_node import *
-from sync_master import *
+#from files_sync import *
+#from sync_node import *
+#from sync_master import *
from mountpoints import *
from clones import *
from proxy_commands import *
diff --git a/tests/units/collection.py b/tests/units/collection.py
deleted file mode 100755
index 56f86cd..0000000
--- a/tests/units/collection.py
+++ /dev/null
@@ -1,461 +0,0 @@
-#!/usr/bin/env python
-# sugar-lint: disable
-
-import copy
-
-from __init__ import tests
-
-from sugar_network.toolkit.collection import Sequence, MutableStack, PersistentSequence
-
-
-class CollectionTest(tests.Test):
-
- def test_Sequence_empty(self):
- scale = Sequence(empty_value=[1, None])
- self.assertEqual(
- [[1, None]],
- scale)
- assert scale.empty
- scale.exclude(1, 1)
- assert not scale.empty
-
- scale = Sequence()
- self.assertEqual(
- [],
- scale)
- assert scale.empty
- scale.include(1, None)
- assert not scale.empty
-
- def test_Sequence_exclude(self):
- scale = Sequence(empty_value=[1, None])
- scale.exclude(1, 10)
- self.assertEqual(
- [[11, None]],
- scale)
-
- scale = Sequence(empty_value=[1, None])
- scale.exclude(5, 10)
- self.assertEqual(
- [[1, 4], [11, None]],
- scale)
-
- scale.exclude(2, 2)
- self.assertEqual(
- [[1, 1], [3, 4], [11, None]],
- scale)
-
- scale.exclude(1, 1)
- self.assertEqual(
- [[3, 4], [11, None]],
- scale)
-
- scale.exclude(3, 3)
- self.assertEqual(
- [[4, 4], [11, None]],
- scale)
-
- scale.exclude(1, 20)
- self.assertEqual(
- [[21, None]],
- scale)
-
- scale.exclude(21, 21)
- self.assertEqual(
- [[22, None]],
- scale)
-
- def test_Sequence_include_JoinExistingItems(self):
- scale = Sequence()
-
- scale.include(1, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- scale.include(2, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- scale.include(4, 5)
- self.assertEqual(
- [[1, None]],
- scale)
-
- scale.exclude(2, 2)
- scale.exclude(4, 4)
- scale.exclude(6, 6)
- scale.exclude(9, 9)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, 5],
- [7, 8],
- [10, None]],
- scale)
-
- scale.include(10, 20)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, 5],
- [7, 8],
- [10, None]],
- scale)
-
- scale.include(8, 20)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, 5],
- [7, None]],
- scale)
-
- scale.include(5, None)
- self.assertEqual(
- [[1, 1],
- [3, 3],
- [5, None]],
- scale)
-
- scale.include(1, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- def test_Sequence_include_InsertNewItems(self):
- scale = Sequence()
-
- scale.include(8, 10)
- scale.include(3, 3)
- self.assertEqual(
- [[3, 3],
- [8, 10]],
- scale)
-
- scale.include(9, 11)
- self.assertEqual(
- [[3, 3],
- [8, 11]],
- scale)
-
- scale.include(7, 12)
- self.assertEqual(
- [[3, 3],
- [7, 12]],
- scale)
-
- scale.include(5, 5)
- self.assertEqual(
- [[3, 3],
- [5, 5],
- [7, 12]],
- scale)
-
- scale.include(4, 4)
- self.assertEqual(
- [[3, 5],
- [7, 12]],
- scale)
-
- scale.include(1, 1)
- self.assertEqual(
- [[1, 1],
- [3, 5],
- [7, 12]],
- scale)
-
- scale.include(2, None)
- self.assertEqual(
- [[1, None]],
- scale)
-
- def teste_Sequence_Invert(self):
- scale_1 = Sequence(empty_value=[1, None])
- scale_1.exclude(2, 2)
- scale_1.exclude(5, 10)
-
- scale_2 = copy.deepcopy(scale_1[:])
- scale_2[-1][1] = 20
-
- self.assertEqual(
- [
- [1, 1],
- [3, 4],
- [11, None],
- ],
- scale_1)
- scale_1.exclude(scale_2)
- self.assertEqual(
- [[21, None]],
- scale_1)
-
- def test_Sequence_contains(self):
- scale = Sequence(empty_value=[1, None])
- scale.exclude(2, 2)
- scale.exclude(5, 10)
-
- assert 1 in scale
- assert 2 not in scale
- assert 3 in scale
- assert 5 not in scale
- assert 10 not in scale
- assert 11 in scale
-
- def test_Sequence_first(self):
- scale = Sequence()
- self.assertEqual(0, scale.first)
-
- scale = Sequence(empty_value=[1, None])
- self.assertEqual(1, scale.first)
- scale.exclude(1, 3)
- self.assertEqual(4, scale.first)
-
- def test_Sequence_include(self):
- rng = Sequence()
- rng.include(2, 2)
- self.assertEqual(
- [[2, 2]],
- rng)
- rng.include(7, 10)
- self.assertEqual(
- [[2, 2], [7, 10]],
- rng)
- rng.include(5, 5)
- self.assertEqual(
- [[2, 2], [5, 5], [7, 10]],
- rng)
- rng.include(15, None)
- self.assertEqual(
- [[2, 2], [5, 5], [7, 10], [15, None]],
- rng)
- rng.include(3, 5)
- self.assertEqual(
- [[2, 5], [7, 10], [15, None]],
- rng)
- rng.include(11, 14)
- self.assertEqual(
- [[2, 5], [7, None]],
- rng)
-
- rng = Sequence()
- rng.include(10, None)
- self.assertEqual(
- [[10, None]],
- rng)
- rng.include(7, 8)
- self.assertEqual(
- [[7, 8], [10, None]],
- rng)
- rng.include(2, 2)
- self.assertEqual(
- [[2, 2], [7, 8], [10, None]],
- rng)
-
- def test_Sequence_floor(self):
- rng = Sequence()
- rng.include(2, None)
- rng.floor(1)
- self.assertEqual([], rng)
-
- rng = Sequence()
- rng.include(2, None)
- rng.floor(2)
- self.assertEqual([[2, 2]], rng)
-
- rng = Sequence()
- rng.include(2, None)
- rng.floor(10)
- self.assertEqual([[2, 10]], rng)
-
- rng = Sequence()
- rng.include(2, 5)
- rng.include(10, 11)
- rng.floor(7)
- self.assertEqual([[2, 5]], rng)
-
- rng = Sequence()
- rng.include(2, 5)
- rng.include(10, 11)
- rng.floor(5)
- self.assertEqual([[2, 5]], rng)
-
- rng = Sequence()
- rng.include(2, 5)
- rng.include(10, 11)
- rng.floor(3)
- self.assertEqual([[2, 3]], rng)
-
- rng = Sequence()
- rng.include(2, 5)
- rng.include(10, 11)
- rng.floor(2)
- self.assertEqual([[2, 2]], rng)
-
- rng = Sequence()
- rng.include(2, 5)
- rng.include(10, 11)
- rng.floor(1)
- self.assertEqual([], rng)
-
- def test_Sequence_Union(self):
- seq_1 = Sequence()
- seq_1.include(1, 2)
- seq_2 = Sequence()
- seq_2.include(3, 4)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, 4]],
- seq_1)
-
- seq_1 = Sequence()
- seq_1.include(1, None)
- seq_2 = Sequence()
- seq_2.include(3, 4)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, None]],
- seq_1)
-
- seq_2 = Sequence()
- seq_2.include(1, None)
- seq_1 = Sequence()
- seq_1.include(3, 4)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, None]],
- seq_1)
-
- seq_1 = Sequence()
- seq_1.include(1, None)
- seq_2 = Sequence()
- seq_2.include(2, None)
- seq_1.include(seq_2)
- self.assertEqual(
- [[1, None]],
- seq_1)
-
- seq_1 = Sequence()
- seq_2 = Sequence()
- seq_2.include(seq_1)
- self.assertEqual([], seq_2)
-
- seq_1 = Sequence()
- seq_2 = Sequence()
- seq_2.include(1, None)
- seq_2.include(seq_1)
- self.assertEqual([[1, None]], seq_2)
-
- seq = Sequence()
- seq.include(10, 11)
- seq.include(None)
- self.assertEqual([[10, 11]], seq)
-
- def test_Sequence_last(self):
- seq = Sequence()
- self.assertEqual(None, seq.last)
-
- seq = Sequence()
- seq.include(10, None)
- self.assertEqual(None, seq.last)
-
- seq = Sequence()
- seq.include(1, 1)
- seq.include(3, 5)
- seq.include(10, 11)
- self.assertEqual(11, seq.last)
-
- def test_PersistentSequence_Restore(self):
- seq_1 = PersistentSequence('file', [1, None])
- seq_1.exclude(1, 10)
- self.assertEqual([[11, None]], seq_1)
- seq_1.commit()
-
- seq_2 = PersistentSequence('file', [1, None])
- self.assertEqual([[11, None]], seq_2)
-
- def test_MutableStack_AddWhileIteration(self):
- queue = MutableStack()
-
- queue.add(0)
- queue.add(1)
- queue.add(2)
-
- result = []
- to_add = [3, 4, 5]
- for i in queue:
- result.append(i)
- if to_add:
- queue.add(to_add.pop(0))
- self.assertEqual([2, 3, 4, 5, 1, 0], result)
-
- self.assertEqual([], [i for i in queue])
- queue.rewind()
- self.assertEqual([5, 4, 3, 2, 1, 0], [i for i in queue])
-
- def test_MutableStack_RemoveWhileIteration(self):
- queue = MutableStack()
-
- queue.add(0)
- queue.add(1)
- queue.add(2)
- result = []
- to_remove = [2, 1, 0]
- for i in queue:
- result.append(i)
- if to_remove:
- queue.remove(to_remove.pop(0))
- self.assertEqual([2, 1, 0], result)
- self.assertEqual([], [i for i in queue])
- queue.rewind()
- self.assertEqual([], [i for i in queue])
-
- queue.add(0)
- queue.add(1)
- queue.add(2)
- result = []
- to_remove = [1]
- for i in queue:
- result.append(i)
- if to_remove:
- queue.remove(to_remove.pop(0))
- self.assertEqual([2, 0], result)
- self.assertEqual([], [i for i in queue])
- queue.rewind()
- self.assertEqual([2, 0], [i for i in queue])
-
- queue.add(0)
- queue.add(1)
- queue.add(2)
- result = []
- to_remove = [2, 1, 0]
- for i in queue:
- result.append(i)
- while to_remove:
- queue.remove(to_remove.pop(0))
- self.assertEqual([2], result)
- self.assertEqual([], [i for i in queue])
- queue.rewind()
- self.assertEqual([], [i for i in queue])
-
- def test_MutableStack_ReaddTheSameItem(self):
- queue = MutableStack()
-
- queue.add(-1)
-
- result = []
- to_add = [-1, -1]
- for i in queue:
- result.append(i)
- if to_add:
- queue.add(to_add.pop(0))
- self.assertEqual([-1, -1, -1], result)
- self.assertEqual([], [i for i in queue])
-
- queue.rewind()
- self.assertEqual([-1], [i for i in queue])
-
-
-if __name__ == '__main__':
- tests.main()
diff --git a/tests/units/files_sync.py b/tests/units/files_sync.py
index 95cc33f..66c2c6a 100755
--- a/tests/units/files_sync.py
+++ b/tests/units/files_sync.py
@@ -10,7 +10,6 @@ from os.path import exists
from __init__ import tests
import active_document as ad
-from sugar_network.toolkit.collection import Sequence
from sugar_network.toolkit.files_sync import Seeder, Leecher
from sugar_network.toolkit.sneakernet import OutBufferPacket, InPacket, DiskFull, OutFilePacket
@@ -35,9 +34,9 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
- assert not seeder.pending(Sequence([[1, None]]))
+ assert not seeder.pending(ad.Sequence([[1, None]]))
packet = OutBufferPacket()
- in_seq = Sequence([[1, None]])
+ in_seq = ad.Sequence([[1, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[1, None]], in_seq)
self.assertEqual(0, seqno.value)
@@ -50,8 +49,8 @@ class FilesSyncTest(tests.Test):
self.utime('files', 1)
os.utime('files', (1, 1))
- assert not seeder.pending(Sequence([[1, None]]))
- in_seq = Sequence([[1, None]])
+ assert not seeder.pending(ad.Sequence([[1, None]]))
+ in_seq = ad.Sequence([[1, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[1, None]], in_seq)
self.assertEqual(0, seqno.value)
@@ -61,8 +60,8 @@ class FilesSyncTest(tests.Test):
self.utime('files', 2)
os.utime('files', (2, 2))
- assert seeder.pending(Sequence([[1, None]]))
- in_seq = Sequence([[1, None]])
+ assert seeder.pending(ad.Sequence([[1, None]]))
+ in_seq = ad.Sequence([[1, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[4, None]], in_seq)
self.assertEqual(3, seqno.value)
@@ -85,9 +84,9 @@ class FilesSyncTest(tests.Test):
]),
read_records(packet))
- assert not seeder.pending(Sequence([[4, None]]))
+ assert not seeder.pending(ad.Sequence([[4, None]]))
packet = OutBufferPacket()
- in_seq = Sequence([[4, None]])
+ in_seq = ad.Sequence([[4, None]])
seeder.pull(in_seq, packet)
self.assertEqual([[4, None]], in_seq)
self.assertEqual(3, seqno.value)
@@ -105,7 +104,7 @@ class FilesSyncTest(tests.Test):
self.utime('files', 1)
out_packet = OutBufferPacket()
- in_seq = Sequence([[2, 2], [4, 10], [20, None]])
+ in_seq = ad.Sequence([[2, 2], [4, 10], [20, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[6, 10], [20,None]], in_seq)
self.assertEqual(
@@ -127,7 +126,7 @@ class FilesSyncTest(tests.Test):
self.utime('files', 1)
out_packet = OutBufferPacket(limit=CHUNK * 2.5)
- in_seq = Sequence([[1, None]])
+ in_seq = ad.Sequence([[1, None]])
try:
seeder.pull(in_seq, out_packet)
assert False
@@ -153,19 +152,19 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[1, None]]), out_packet)
+ seeder.pull(ad.Sequence([[1, None]]), out_packet)
self.assertEqual(3, seqno.value)
os.utime('files/2', (2, 2))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[4, None]]), out_packet)
+ seeder.pull(ad.Sequence([[4, None]]), out_packet)
self.assertEqual(3, seqno.value)
os.utime('files', (3, 3))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[4, None]]), out_packet)
+ seeder.pull(ad.Sequence([[4, None]]), out_packet)
self.assertEqual(4, seqno.value)
self.assertEqual(
sorted([
@@ -179,7 +178,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (4, 4))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[5, None]]), out_packet)
+ seeder.pull(ad.Sequence([[5, None]]), out_packet)
self.assertEqual(6, seqno.value)
self.assertEqual(
sorted([
@@ -190,7 +189,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[1, None]]), out_packet)
+ seeder.pull(ad.Sequence([[1, None]]), out_packet)
self.assertEqual(6, seqno.value)
self.assertEqual(
sorted([
@@ -212,7 +211,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[1, None]]), out_packet)
+ seeder.pull(ad.Sequence([[1, None]]), out_packet)
self.assertEqual(3, seqno.value)
self.touch(('files/4', '4'))
@@ -220,14 +219,14 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[4, None]]), out_packet)
+ seeder.pull(ad.Sequence([[4, None]]), out_packet)
self.assertEqual(3, seqno.value)
os.utime('files/4', (2, 2))
os.utime('files', (2, 2))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[4, None]]), out_packet)
+ seeder.pull(ad.Sequence([[4, None]]), out_packet)
self.assertEqual(4, seqno.value)
self.assertEqual(
sorted([
@@ -243,7 +242,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (3, 3))
out_packet = OutBufferPacket()
- seeder.pull(Sequence([[5, None]]), out_packet)
+ seeder.pull(ad.Sequence([[5, None]]), out_packet)
self.assertEqual(6, seqno.value)
self.assertEqual(
sorted([
@@ -264,7 +263,7 @@ class FilesSyncTest(tests.Test):
os.utime('files', (1, 1))
out_packet = OutBufferPacket()
- in_seq = Sequence([[1, None]])
+ in_seq = ad.Sequence([[1, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[4, None]], in_seq)
self.assertEqual(3, seqno.value)
@@ -272,9 +271,9 @@ class FilesSyncTest(tests.Test):
os.unlink('files/2')
os.utime('files', (2, 2))
- assert seeder.pending(Sequence([[4, None]]))
+ assert seeder.pending(ad.Sequence([[4, None]]))
out_packet = OutBufferPacket()
- in_seq = Sequence([[1, None]])
+ in_seq = ad.Sequence([[1, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[2, 2], [5, None]], in_seq)
self.assertEqual(4, seqno.value)
@@ -291,9 +290,9 @@ class FilesSyncTest(tests.Test):
os.unlink('files/3')
os.utime('files', (3, 3))
- assert seeder.pending(Sequence([[5, None]]))
+ assert seeder.pending(ad.Sequence([[5, None]]))
out_packet = OutBufferPacket()
- in_seq = Sequence([[1, None]])
+ in_seq = ad.Sequence([[1, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[1, 3], [7, None]], in_seq)
self.assertEqual(6, seqno.value)
@@ -307,7 +306,7 @@ class FilesSyncTest(tests.Test):
read_records(out_packet))
out_packet = OutBufferPacket()
- in_seq = Sequence([[4, None]])
+ in_seq = ad.Sequence([[4, None]])
seeder.pull(in_seq, out_packet)
self.assertEqual([[7, None]], in_seq)
self.assertEqual(6, seqno.value)
@@ -332,7 +331,7 @@ class FilesSyncTest(tests.Test):
os.utime('src/files', (1, 1))
with OutFilePacket('.') as packet:
- seeder.pull(Sequence([[1, None]]), packet)
+ seeder.pull(ad.Sequence([[1, None]]), packet)
self.assertEqual(3, seqno.value)
for i in InPacket(packet.path):
leecher.push(i)
@@ -354,7 +353,7 @@ class FilesSyncTest(tests.Test):
os.utime('src/files', (2, 2))
with OutFilePacket('.') as packet:
- seeder.pull(Sequence([[4, None]]), packet)
+ seeder.pull(ad.Sequence([[4, None]]), packet)
self.assertEqual(4, seqno.value)
for i in InPacket(packet.path):
leecher.push(i)
@@ -373,7 +372,7 @@ class FilesSyncTest(tests.Test):
os.utime('src/files', (3, 3))
with OutFilePacket('.') as packet:
- seeder.pull(Sequence([[5, None]]), packet)
+ seeder.pull(ad.Sequence([[5, None]]), packet)
self.assertEqual(7, seqno.value)
for i in InPacket(packet.path):
leecher.push(i)
diff --git a/tests/units/volume.py b/tests/units/volume.py
index 7f4b71b..d795466 100755
--- a/tests/units/volume.py
+++ b/tests/units/volume.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# sugar-lint: disable
+import os
import json
+import time
import cPickle as pickle
from os.path import exists
@@ -9,7 +11,6 @@ from __init__ import tests
import active_document as ad
from sugar_network import node, sugar
-from sugar_network.toolkit.collection import Sequence
from sugar_network.toolkit.sneakernet import InPacket, OutBufferPacket, DiskFull
from sugar_network.resources.volume import Volume, Resource, Commands, VolumeCommands
from sugar_network.resources.user import User
@@ -19,7 +20,7 @@ from active_toolkit import coroutine
class VolumeTest(tests.Test):
- def test_diff_Partial(self):
+ def test_diff(self):
class Document(ad.Document):
@@ -28,50 +29,106 @@ class VolumeTest(tests.Test):
return value
volume = Volume('db', [Document])
+ volume['document'].create(guid='1', seqno=1, prop='a')
+ for i in os.listdir('db/document/1/1'):
+ os.utime('db/document/1/1/%s' % i, (1, 1))
+ volume['document'].create(guid='2', seqno=2, prop='b')
+ for i in os.listdir('db/document/2/2'):
+ os.utime('db/document/2/2/%s' % i, (2, 2))
- volume['document'].create(guid='1', seqno=1, prop='*' * 1024)
- volume['document'].create(guid='2', seqno=2, prop='*' * 1024)
- volume['document'].create(guid='3', seqno=3, prop='*' * 1024)
+ class Packet(list):
- in_seq = Sequence([[1, None]])
- try:
- packet = OutBufferPacket(filename='packet', limit=1024 - 512)
- volume.diff(in_seq, packet)
- assert False
- except DiskFull:
- pass
+ def push(self, **kwargs):
+ self.append(kwargs)
+ return True
+
+ packet = Packet()
+ in_seq = ad.Sequence([[1, None]])
+ volume.diff(in_seq, packet)
self.assertEqual([
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'mtime': {'value': 0, 'mtime': 1.0},
+ 'ctime': {'value': 0, 'mtime': 1.0},
+ 'prop': {'value': 'a', 'mtime': 1.0},
+ },
+ },
+ {'guid': '2',
+ 'diff': {
+ 'guid': {'value': '2', 'mtime': 2.0},
+ 'mtime': {'value': 0, 'mtime': 2.0},
+ 'ctime': {'value': 0, 'mtime': 2.0},
+ 'prop': {'value': 'b', 'mtime': 2.0},
+ },
+ },
+ {'commit': [[1, 2]]},
],
- read_packet(packet))
- self.assertEqual([[1, None]], in_seq)
+ packet)
+ self.assertEqual([[3, None]], in_seq)
- in_seq = Sequence([[1, None]])
- try:
- packet = OutBufferPacket(filename='packet', limit=1024 + 512)
- volume.diff(in_seq, packet)
- assert False
- except DiskFull:
- pass
+ def test_diff_Partial(self):
+
+ class Document(ad.Document):
+
+ @ad.active_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume = Volume('db', [Document])
+ volume['document'].create(guid='1', seqno=1, prop='a')
+ for i in os.listdir('db/document/1/1'):
+ os.utime('db/document/1/1/%s' % i, (1, 1))
+ volume['document'].create(guid='2', seqno=2, prop='b')
+ for i in os.listdir('db/document/2/2'):
+ os.utime('db/document/2/2/%s' % i, (2, 2))
+
+ class Packet(list):
+
+ def push(self, **kwargs):
+ if kwargs.get('guid') == '1':
+ return False
+ self.append(kwargs)
+ return True
+
+ packet = Packet()
+ in_seq = ad.Sequence([[1, None]])
+ volume.diff(in_seq, packet)
self.assertEqual([
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '1'},
- {'filename': 'packet', 'cmd': 'sn_commit', 'sequence': [[1, 1]]},
+ {'document': 'document'},
+ {'commit': []},
],
- read_packet(packet))
- self.assertEqual([[2, None]], in_seq)
+ packet)
+ self.assertEqual([[1, None]], in_seq)
+
+ class Packet(list):
+
+ def push(self, **kwargs):
+ if kwargs.get('guid') == '2':
+ return False
+ self.append(kwargs)
+ return True
- in_seq = Sequence([[1, None]])
- packet = OutBufferPacket(filename='packet', limit=None)
+ packet = Packet()
+ in_seq = ad.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '1'},
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '2'},
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '3'},
- {'filename': 'packet', 'cmd': 'sn_commit', 'sequence': [[1, 3]]},
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'mtime': {'value': 0, 'mtime': 1.0},
+ 'ctime': {'value': 0, 'mtime': 1.0},
+ 'prop': {'value': 'a', 'mtime': 1.0},
+ },
+ },
+ {'commit': [[1, 1]]},
],
- read_packet(packet))
- self.assertEqual([[4, None]], in_seq)
+ packet)
+ self.assertEqual([[2, None]], in_seq)
- def test_diff_CollapsedCommit(self):
+ def test_diff_Collapsed(self):
class Document(ad.Document):
@@ -80,47 +137,89 @@ class VolumeTest(tests.Test):
return value
volume = Volume('db', [Document])
-
- volume['document'].create(guid='2', seqno=2, prop='*' * 1024)
- volume['document'].create(guid='4', seqno=4, prop='*' * 1024)
- volume['document'].create(guid='6', seqno=6, prop='*' * 1024)
- volume['document'].create(guid='8', seqno=8, prop='*' * 1024)
-
- in_seq = Sequence([[1, None]])
- try:
- packet = OutBufferPacket(filename='packet', limit=1024 * 2)
- volume.diff(in_seq, packet)
- assert False
- except DiskFull:
- pass
- self.assertEqual([
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '2'},
- {'filename': 'packet', 'cmd': 'sn_commit', 'sequence': [[2, 2]]},
- ],
- read_packet(packet))
- self.assertEqual([[1, 1], [3, None]], in_seq)
-
- try:
- packet = OutBufferPacket(filename='packet', limit=1024 * 2)
- volume.diff(in_seq, packet)
- assert False
- except DiskFull:
- pass
+ volume['document'].create(guid='1', seqno=1, prop='a')
+ for i in os.listdir('db/document/1/1'):
+ os.utime('db/document/1/1/%s' % i, (1, 1))
+ volume['document'].create(guid='3', seqno=3, prop='c')
+ for i in os.listdir('db/document/3/3'):
+ os.utime('db/document/3/3/%s' % i, (3, 3))
+ volume['document'].create(guid='5', seqno=5, prop='f')
+ for i in os.listdir('db/document/5/5'):
+ os.utime('db/document/5/5/%s' % i, (5, 5))
+
+ class Packet(list):
+
+ def push(self, **kwargs):
+ if kwargs.get('guid') == '5':
+ return False
+ self.append(kwargs)
+ return True
+
+ packet = Packet()
+ in_seq = ad.Sequence([[1, None]])
+ volume.diff(in_seq, packet)
self.assertEqual([
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '4'},
- {'filename': 'packet', 'cmd': 'sn_commit', 'sequence': [[4, 4]]},
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'mtime': {'value': 0, 'mtime': 1.0},
+ 'ctime': {'value': 0, 'mtime': 1.0},
+ 'prop': {'value': 'a', 'mtime': 1.0},
+ },
+ },
+ {'guid': '3',
+ 'diff': {
+ 'guid': {'value': '3', 'mtime': 3.0},
+ 'mtime': {'value': 0, 'mtime': 3.0},
+ 'ctime': {'value': 0, 'mtime': 3.0},
+ 'prop': {'value': 'c', 'mtime': 3.0},
+ },
+ },
+ {'commit': [[1, 1], [3, 3]]},
],
- read_packet(packet))
- self.assertEqual([[1, 1], [3, 3], [5, None]], in_seq)
+ packet)
+ self.assertEqual([[2, 2], [4, None]], in_seq)
+
+ class Packet(list):
+
+ def push(self, **kwargs):
+ self.append(kwargs)
+ return True
- packet = OutBufferPacket(filename='packet')
+ packet = Packet()
+ in_seq = ad.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '6'},
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document', 'guid': '8'},
- {'filename': 'packet', 'cmd': 'sn_commit', 'sequence': [[1, 1], [3, 3], [5, 8]]},
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'mtime': {'value': 0, 'mtime': 1.0},
+ 'ctime': {'value': 0, 'mtime': 1.0},
+ 'prop': {'value': 'a', 'mtime': 1.0},
+ },
+ },
+ {'guid': '3',
+ 'diff': {
+ 'guid': {'value': '3', 'mtime': 3.0},
+ 'mtime': {'value': 0, 'mtime': 3.0},
+ 'ctime': {'value': 0, 'mtime': 3.0},
+ 'prop': {'value': 'c', 'mtime': 3.0},
+ },
+ },
+ {'guid': '5',
+ 'diff': {
+ 'guid': {'value': '5', 'mtime': 5.0},
+ 'mtime': {'value': 0, 'mtime': 5.0},
+ 'ctime': {'value': 0, 'mtime': 5.0},
+ 'prop': {'value': 'f', 'mtime': 5.0},
+ },
+ },
+ {'commit': [[1, 5]]},
],
- read_packet(packet))
+ packet)
+ self.assertEqual([[6, None]], in_seq)
def test_diff_TheSameInSeqForAllDocuments(self):
@@ -134,21 +233,191 @@ class VolumeTest(tests.Test):
pass
volume = Volume('db', [Document1, Document2, Document3])
-
volume['document1'].create(guid='3', seqno=3)
+ for i in os.listdir('db/document1/3/3'):
+ os.utime('db/document1/3/3/%s' % i, (3, 3))
volume['document2'].create(guid='2', seqno=2)
+ for i in os.listdir('db/document2/2/2'):
+ os.utime('db/document2/2/2/%s' % i, (2, 2))
volume['document3'].create(guid='1', seqno=1)
+ for i in os.listdir('db/document3/1/1'):
+ os.utime('db/document3/1/1/%s' % i, (1, 1))
+
+ class Packet(list):
+
+ def push(self, **kwargs):
+ self.append(kwargs)
+ return True
- in_seq = Sequence([[1, None]])
- packet = OutBufferPacket(filename='packet')
+ packet = Packet()
+ in_seq = ad.Sequence([[1, None]])
volume.diff(in_seq, packet)
self.assertEqual([
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document1', 'guid': '3'},
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document2', 'guid': '2'},
- {'filename': 'packet', 'content_type': 'records', 'cmd': 'sn_push', 'document': 'document3', 'guid': '1'},
- {'filename': 'packet', 'cmd': 'sn_commit', 'sequence': [[1, 3]]},
+ {'document': 'document1'},
+ {'guid': '3',
+ 'diff': {
+ 'guid': {'value': '3', 'mtime': 3.0},
+ 'mtime': {'value': 0, 'mtime': 3.0},
+ 'ctime': {'value': 0, 'mtime': 3.0},
+ },
+ },
+ {'document': 'document2'},
+ {'guid': '2',
+ 'diff': {
+ 'guid': {'value': '2', 'mtime': 2.0},
+ 'mtime': {'value': 0, 'mtime': 2.0},
+ 'ctime': {'value': 0, 'mtime': 2.0},
+ },
+ },
+ {'document': 'document3'},
+ {'guid': '1',
+ 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'mtime': {'value': 0, 'mtime': 1.0},
+ 'ctime': {'value': 0, 'mtime': 1.0},
+ },
+ },
+ {'commit': [[1, 3]]},
],
- read_packet(packet))
+ packet)
+ self.assertEqual([[4, None]], in_seq)
+
+ def test_merge_Create(self):
+
+ class Document1(ad.Document):
+
+ @ad.active_property(slot=1)
+ def prop(self, value):
+ return value
+
+ class Document2(ad.Document):
+ pass
+
+ volume = Volume('db', [Document1, Document2])
+
+ self.assertEqual(
+ [[1, 2]],
+ volume.merge([
+ {'document': 'document1'},
+ {'guid': '1',
+ 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'ctime': {'value': 2, 'mtime': 2.0},
+ 'mtime': {'value': 3, 'mtime': 3.0},
+ 'prop': {'value': '4', 'mtime': 4.0},
+ },
+ },
+ {'document': 'document2'},
+ {'guid': '5',
+ 'diff': {
+ 'guid': {'value': '5', 'mtime': 5.0},
+ 'ctime': {'value': 6, 'mtime': 6.0},
+ 'mtime': {'value': 7, 'mtime': 7.0},
+ },
+ },
+ {'commit': [[1, 2]]},
+ ]))
+
+ self.assertEqual(
+ {'guid': '1', 'prop': '4', 'ctime': 2, 'mtime': 3},
+ volume['document1'].get('1').properties(['guid', 'ctime', 'mtime', 'prop']))
+ self.assertEqual(1, os.stat('db/document1/1/1/guid').st_mtime)
+ self.assertEqual(2, os.stat('db/document1/1/1/ctime').st_mtime)
+ self.assertEqual(3, os.stat('db/document1/1/1/mtime').st_mtime)
+ self.assertEqual(4, os.stat('db/document1/1/1/prop').st_mtime)
+
+ self.assertEqual(
+ {'guid': '5', 'ctime': 6, 'mtime': 7},
+ volume['document2'].get('5').properties(['guid', 'ctime', 'mtime']))
+ self.assertEqual(5, os.stat('db/document2/5/5/guid').st_mtime)
+ self.assertEqual(6, os.stat('db/document2/5/5/ctime').st_mtime)
+ self.assertEqual(7, os.stat('db/document2/5/5/mtime').st_mtime)
+
+ def test_merge_Update(self):
+
+ class Document(ad.Document):
+
+ @ad.active_property(slot=1)
+ def prop(self, value):
+ return value
+
+ volume = Volume('db', [Document])
+ volume['document'].create(guid='1', prop='1', ctime=1, mtime=1)
+ for i in os.listdir('db/document/1/1'):
+ os.utime('db/document/1/1/%s' % i, (2, 2))
+
+ self.assertEqual(
+ [],
+ volume.merge([
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'prop': {'value': '2', 'mtime': 1.0},
+ },
+ },
+ {'commit': []},
+ ]))
+ self.assertEqual(
+ {'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1},
+ volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop']))
+ self.assertEqual(2, os.stat('db/document/1/1/prop').st_mtime)
+
+ self.assertEqual(
+ [],
+ volume.merge([
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'prop': {'value': '3', 'mtime': 2.0},
+ },
+ },
+ {'commit': []},
+ ]))
+ self.assertEqual(
+ {'guid': '1', 'prop': '1', 'ctime': 1, 'mtime': 1},
+ volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop']))
+ self.assertEqual(2, os.stat('db/document/1/1/prop').st_mtime)
+
+ self.assertEqual(
+ [],
+ volume.merge([
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'prop': {'value': '4', 'mtime': 3.0},
+ },
+ },
+ {'commit': []},
+ ]))
+ self.assertEqual(
+ {'guid': '1', 'prop': '4', 'ctime': 1, 'mtime': 1},
+ volume['document'].get('1').properties(['guid', 'ctime', 'mtime', 'prop']))
+ self.assertEqual(3, os.stat('db/document/1/1/prop').st_mtime)
+
+ def test_merge_StopOnCommit(self):
+
+ class Document(ad.Document):
+ pass
+
+ volume = Volume('db', [Document])
+
+ diff = iter([
+ {'document': 'document'},
+ {'guid': '1',
+ 'diff': {
+ 'guid': {'value': '1', 'mtime': 1.0},
+ 'ctime': {'value': 2, 'mtime': 2.0},
+ 'mtime': {'value': 3, 'mtime': 3.0},
+ 'prop': {'value': '4', 'mtime': 4.0},
+ },
+ },
+ {'commit': [[1, 1]]},
+ {'tail': True},
+ ])
+
+ self.assertEqual([[1, 1]], volume.merge(diff))
+ assert volume['document'].exists('1')
+ self.assertEqual([{'tail': True}], [i for i in diff])
def test_SimulateDeleteEvents(self):