author     Aleksey Lim <alsroot@sugarlabs.org>    2014-04-29 07:53:52 (GMT)
committer  Aleksey Lim <alsroot@sugarlabs.org>    2014-04-29 16:40:28 (GMT)
commit     7b650854958d03386ae54e87cbbb2bc82053661e (patch)
tree       486f54e50d58e21aedda86fcbc19237d921f401a
parent     fe07c5f9f1da79059f8fcd5e33e0f043b7cc6814 (diff)
Fix aslo-sync script
-rwxr-xr-x  misc/aslo-sync                          433
-rw-r--r--  sugar_network/client/routes.py           14
-rw-r--r--  sugar_network/db/blobs.py                 5
-rw-r--r--  sugar_network/model/context.py           59
-rw-r--r--  sugar_network/node/__init__.py            2
-rw-r--r--  sugar_network/node/model.py              51
-rw-r--r--  sugar_network/node/slave.py               4
-rw-r--r--  sugar_network/toolkit/application.py      4
-rw-r--r--  sugar_network/toolkit/i18n.py            14
-rwxr-xr-x  tests/units/client/client_routes.py      22
-rwxr-xr-x  tests/units/node/slave.py                72
11 files changed, 352 insertions, 328 deletions
diff --git a/misc/aslo-sync b/misc/aslo-sync
index 147ac04..141883e 100755
--- a/misc/aslo-sync
+++ b/misc/aslo-sync
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -21,16 +21,20 @@ import time
import getpass
import hashlib
import traceback
-from cStringIO import StringIO
from os.path import join
import MySQLdb as mdb
-from sugar_network import db, client, model, toolkit
-from sugar_network.node import data_root
+from sugar_network import db, toolkit
+from sugar_network.node import data_root, master_api
+from sugar_network.node.auth import Principal
+from sugar_network.node.master import RESOURCES
from sugar_network.node.slave import SlaveRoutes
-from sugar_network.node.routes import load_bundle
-from sugar_network.toolkit import spec, licenses, application, Option
+from sugar_network.node.model import load_bundle
+from sugar_network.toolkit.spec import parse_version
+from sugar_network.toolkit.router import Request, Router
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import licenses, application, Option
DOWNLOAD_URL = 'http://download.sugarlabs.org/activities'
@@ -42,6 +46,15 @@ SUGAR_GUID = 'sugar'
SN_GUID = 'sugar-network'
PACKAGES_GUID = 'packages'
+SUGAR_API_COMPATIBILITY = {
+ '0.94': [
+ parse_version('0.86'),
+ parse_version('0.88'),
+ parse_version('0.90'),
+ parse_version('0.92'),
+ ],
+ }
+
CATEGIORIES_TO_TAGS = {
'Search & Discovery': 'discovery',
'Documents': 'productivity',
@@ -94,6 +107,12 @@ IGNORE_VERSIONS = frozenset([
30703, # Bad license
31164, # Bad bundle_id
31512, # Bad license
+ 30749, # Changed bundle_id
+ 31238, # Changed bundle_id
+ 31418, # Changed bundle_id
+ 31369, # Malformed version
+ 31557, # Malformed version
+ 31454, # Malformed version
])
IGNORE_PREVIEWS = frozenset([
@@ -138,19 +157,22 @@ LICENSES_MAP = {
class Application(application.Application):
_my_connection = None
- _volume = None
_client = None
-
- @property
- def volume(self):
- if self._volume is None:
- self._volume = db.Volume(data_root.value, model.RESOURCES)
- self._volume.populate()
- return self._volume
+ _router = None
+
+ def prolog(self):
+ this.volume = db.Volume(data_root.value, RESOURCES)
+ this.volume.populate()
+ this.broadcast = lambda event: None
+ this.localcast = lambda event: None
+ this.request = Request({'HTTP_HOST': master_api.value})
+ auth = _Auth()
+ routes = SlaveRoutes(master_api.value, this.volume, auth=auth)
+ self._router = Router(routes)
+ this.principal = auth.logon()
def epilog(self):
- if self._volume is not None:
- self._volume.close()
+ this.volume.close()
@application.command(
'consecutively launch pull and push commands')
@@ -161,27 +183,25 @@ class Application(application.Application):
@application.command(
'pull activities.sugarlabs.org content to local db')
def pull(self):
- if not self.volume['context'].exists(SN_GUID):
- self.volume['context'].create({
+ if not this.volume['context'][SN_GUID].exists:
+ this.volume['context'].create({
'guid': SN_GUID,
- 'implement': SN_GUID,
- 'type': 'project',
- 'title': 'Sugar Network',
- 'summary': 'Sugar Network',
- 'description': 'Sugar Network',
- 'ctime': time.time(),
- 'mtime': time.time(),
+ 'type': ['group'],
+ 'title': {'en': 'Sugar Network'},
+ 'summary': {'en': 'Sugar Network'},
+ 'description': {'en': 'Sugar Network'},
+ 'ctime': int(time.time()),
+ 'mtime': int(time.time()),
'author': ASLO_AUTHOR,
})
- if not self.volume['context'].exists(SUGAR_GUID):
- self.volume['context'].create({
+ if not this.volume['context'][SUGAR_GUID].exists:
+ this.volume['context'].create({
'guid': SUGAR_GUID,
- 'implement': SUGAR_GUID,
- 'type': 'package',
- 'title': 'sugar',
- 'summary': 'Constructionist learning platform',
- 'description':
+ 'type': ['package'],
+ 'title': {'en': 'sugar'},
+ 'summary': {'en': 'Constructionist learning platform'},
+ 'description': {'en':
'Sugar provides simple yet powerful means of engaging '
'young children in the world of learning that is '
'opened up by computers and the Internet. With Sugar, '
@@ -190,26 +210,28 @@ class Application(application.Application):
'in authentic problem-solving. Sugar promotes '
'sharing, collaborative learning, and reflection, '
'developing skills that help them in all aspects '
- 'of life.',
- 'ctime': time.time(),
- 'mtime': time.time(),
+ 'of life.'},
+ 'ctime': int(time.time()),
+ 'mtime': int(time.time()),
'author': ASLO_AUTHOR,
})
- if not self.volume['context'].exists(PACKAGES_GUID):
- self.volume['context'].create({
+ if not this.volume['context'][PACKAGES_GUID].exists:
+ this.volume['context'].create({
'guid': PACKAGES_GUID,
- 'implement': PACKAGES_GUID,
- 'type': 'project',
- 'title': 'Packages',
- 'summary': 'Collection of GNU/Linux packages metadata',
- 'description': 'Collection of GNU/Linux packages metadata',
- 'ctime': time.time(),
- 'mtime': time.time(),
- 'author': ASLO_AUTHOR,
- 'icon': {
- 'url': '/static/images/package.png',
+ 'type': ['group'],
+ 'title': {'en': 'Packages'},
+ 'summary': {
+ 'en': 'Collection of GNU/Linux packages metadata',
},
+ 'description': {
+ 'en': 'Collection of GNU/Linux packages metadata',
+ },
+ 'ctime': int(time.time()),
+ 'mtime': int(time.time()),
+ 'author': ASLO_AUTHOR,
+ 'icon': 'assets/package.png',
+ 'logo': 'assets/package-logo.png',
})
if self.args:
@@ -222,13 +244,12 @@ class Application(application.Application):
'submit pulled activities.sugarlabs.org content to '
'Sugar Network server')
def push(self):
- node = SlaveRoutes(join(data_root.value, 'key'), self.volume)
- node.online_sync(no_pull=True)
+ this.call(method='POST', cmd='online_sync', no_pull=True)
def sync_activities(self, addon_id=None):
- directory = self.volume['context']
+ directory = this.volume['context']
items, __ = directory.find(type='activity', guid=addon_id,
- not_layer='deleted')
+ not_state='deleted')
existing_activities = set([i.guid for i in items])
sql = """
@@ -251,7 +272,6 @@ class Application(application.Application):
self.sync_versions(addon_id, bundle_id)
self.sync_reviews(addon_id, bundle_id)
self.sync_previews(addon_id, bundle_id, authors)
- self.sync_comments(addon_id, bundle_id)
except Exception:
print '-- Failed to sync %s addon' % addon_id
traceback.print_exception(*sys.exc_info())
@@ -260,116 +280,52 @@ class Application(application.Application):
for guid in existing_activities:
print '-- Hide %r deleted activity' % guid
- directory.update(guid, {'layer': ['deleted']})
+ directory.update(guid, {'state': 'deleted'})
def sync_previews(self, addon_id, bundle_id, authors):
- directory = self.volume['artifact']
- items, __ = directory.find(context=bundle_id, type='preview',
- not_layer='deleted')
- existing = set([i.guid for i in items])
+ existing = this.volume['context'][bundle_id]['previews']
+ updates = {}
sql = """
SELECT
id,
- created,
modified,
- caption,
filedata
FROM
previews
WHERE
addon_id = %s
""" % addon_id
- for guid, created, modified, caption, data in self.sqlexec(sql):
+ for guid, modified, data in self.sqlexec(sql):
if guid in IGNORE_PREVIEWS:
continue
guid = str(guid)
- if directory.exists(guid):
- existing.remove(guid)
+ if guid in existing:
+ del existing[guid]
continue
try:
preview = scale_png(data, 200, 200)
except Exception:
print '-- Failed to load %s preview for %s' % (guid, bundle_id)
continue
- directory.create({
- 'guid': guid,
- 'ctime': int(time.mktime(created.timetuple())),
- 'mtime': int(time.mktime(modified.timetuple())),
- 'context': bundle_id,
- 'type': 'preview',
- 'title': self.get_i18n_field(caption),
- 'description': self.get_i18n_field(caption),
+ preview = this.volume.blobs.post(preview, 'image/png')
+ updates[guid] = {
'author': authors,
- 'preview': {
- 'blob': StringIO(preview),
- 'mime_type': 'image/png',
- 'digest': hashlib.sha1(preview).hexdigest(),
- },
- 'data': {
- 'blob': StringIO(data),
- 'mime_type': 'image/png',
- 'digest': hashlib.sha1(data).hexdigest(),
- },
- })
+ 'value': preview.digest,
+ 'ctime': int(time.mktime(modified.timetuple())),
+ }
for guid in existing:
print '-- Hide %s %s deleted preview' % (bundle_id, guid)
- directory.update(guid, {'layer': ['deleted']})
-
- def sync_comments(self, addon_id, bundle_id):
- directory = self.volume['comment']
- items, __ = directory.find(context=bundle_id, not_layer='deleted')
- existing = set([i.guid for i in items])
-
- sql = """
- SELECT
- reviews.id,
- reviews.created,
- reviews.modified,
- reviews.body,
- users.email,
- users.nickname,
- CONCAT_WS(' ', users.firstname, users.lastname),
- reviews.reply_to
- FROM
- reviews
- INNER JOIN versions ON versions.id = reviews.version_id
- INNER JOIN users ON users.id=reviews.user_id
- WHERE
- reply_to IS NOT NULL AND versions.addon_id = %s
- """ % addon_id
- for guid, created, modified, content, email, nickname, fullname, \
- reply_to in self.sqlexec(sql):
- guid = str(guid)
- if directory.exists(guid):
- existing.remove(guid)
- continue
- if not nickname:
- nickname = email.split('@')[0]
- fullname = fullname.strip()
- if not fullname:
- fullname = nickname
- directory.create({
- 'guid': guid,
- 'ctime': int(time.mktime(created.timetuple())),
- 'mtime': int(time.mktime(modified.timetuple())),
- 'context': bundle_id,
- 'review': str(reply_to),
- 'message': self.get_i18n_field(content),
- 'author': {nickname: {
- 'order': 0, 'role': 3, 'name': fullname,
- }},
- })
+ updates[guid] = {}
- for guid in existing:
- print '-- Hide %s %s deleted comment' % (bundle_id, guid)
- directory.update(guid, {'layer': ['deleted']})
+ this.volume['context'].update(bundle_id, {'previews': updates})
def sync_reviews(self, addon_id, bundle_id):
- directory = self.volume['review']
- items, __ = directory.find(context=bundle_id, not_layer='deleted')
- existing = set([i.guid for i in items])
+ directory = this.volume['post']
+ items, __ = directory.find(context=bundle_id, type='review',
+ not_state='deleted')
+ existing_topics = set([i.guid for i in items])
sql = """
SELECT
@@ -389,38 +345,81 @@ class Application(application.Application):
WHERE
reply_to IS NULL AND versions.addon_id = %s
""" % addon_id
- for guid, created, modified, title, content, rating, email, nickname, \
+ for topic, created, modified, title, content, vote, email, nickname, \
fullname in self.sqlexec(sql):
- guid = str(guid)
- if directory.exists(guid):
- existing.remove(guid)
- continue
- if not nickname:
- nickname = email.split('@')[0]
- fullname = fullname.strip()
- if not fullname:
- fullname = nickname
- directory.create({
- 'guid': guid,
- 'ctime': int(time.mktime(created.timetuple())),
- 'mtime': int(time.mktime(modified.timetuple())),
- 'context': bundle_id,
- 'title': self.get_i18n_field(title),
- 'content': self.get_i18n_field(content),
- 'rating': rating,
- 'author': {nickname: {
- 'order': 0, 'role': 3, 'name': fullname,
- }},
- })
-
- for guid in existing:
+ topic = str(topic)
+ if topic in existing_topics:
+ existing_topics.remove(topic)
+ else:
+ if not nickname:
+ nickname = email.split('@')[0]
+ fullname = fullname.strip()
+ if not fullname:
+ fullname = nickname
+ directory.create({
+ 'guid': topic,
+ 'ctime': int(time.mktime(created.timetuple())),
+ 'mtime': int(time.mktime(modified.timetuple())),
+ 'context': bundle_id,
+ 'type': 'review',
+ 'title': self.get_i18n_field(title),
+ 'message': self.get_i18n_field(content),
+ 'vote': vote,
+ 'author': {nickname: {
+ 'order': 0, 'role': 3, 'name': fullname,
+ }},
+ })
+
+ existing_comments = directory[topic]['comments']
+ updates = {}
+ sql = """
+ SELECT
+ reviews.id,
+ reviews.modified,
+ reviews.body,
+ users.email,
+ users.nickname,
+ CONCAT_WS(' ', users.firstname, users.lastname)
+ FROM
+ reviews
+ INNER JOIN versions ON versions.id = reviews.version_id
+ INNER JOIN users ON users.id=reviews.user_id
+ WHERE
+ reply_to = %s
+ ORDER BY
+ reviews.created
+ """ % topic
+ for guid, modified, content, email, nickname, fullname, \
+ in self.sqlexec(sql):
+ guid = str(guid)
+ if guid in existing_comments:
+ del existing_comments[guid]
+ continue
+ if not nickname:
+ nickname = email.split('@')[0]
+ fullname = fullname.strip()
+ if not fullname:
+ fullname = nickname
+ updates[guid] = {
+ 'author': {nickname: {
+ 'order': 0, 'role': 3, 'name': fullname,
+ }},
+ 'value': self.get_i18n_field(content),
+ 'ctime': int(time.mktime(modified.timetuple())),
+ }
+ for guid in existing_comments:
+ print '-- Hide %s %s deleted comment' % (bundle_id, guid)
+ updates[guid] = {}
+ directory.update(topic, {'comments': updates})
+
+ for guid in existing_topics:
print '-- Hide %s %s deleted review' % (bundle_id, guid)
- directory.update(guid, {'layer': ['deleted']})
+ directory.update(guid, {'state': 'deleted'})
def sync_versions(self, addon_id, bundle_id):
- directory = self.volume['release']
- items, __ = directory.find(context=bundle_id, not_layer='deleted')
- existing = set([i.guid for i in items])
+ existing = this.volume['context'][bundle_id]['releases']
+ updates = {}
+ most_recent = True
sql = """
SELECT
@@ -438,8 +437,7 @@ class Application(application.Application):
id=applications_versions.max),
users.email,
users.nickname,
- CONCAT_WS(' ', users.firstname, users.lastname),
- addons.status
+ CONCAT_WS(' ', users.firstname, users.lastname)
FROM addons
INNER JOIN versions ON versions.addon_id=addons.id
LEFT JOIN licenses ON licenses.id=versions.license_id
@@ -450,15 +448,19 @@ class Application(application.Application):
WHERE
addons.status > 0 AND addons.status < 5 AND addons.id = %s
ORDER BY
- versions.id ASC
+ versions.id DESC
""" % addon_id
for version_id, version, license_id, alicense, release_date, \
releasenotes, filename, sugar_min, sugar_max, \
- email, nickname, fullname, status in self.sqlexec(sql):
+ email, nickname, fullname in self.sqlexec(sql):
if version_id in IGNORE_VERSIONS:
continue
+
version_id = str(version_id)
+ if version_id in existing:
+ del existing[version_id]
+ continue
if filename.endswith('.xol'):
print '-- Ignore %r[%s] library bundle' % \
@@ -466,7 +468,7 @@ class Application(application.Application):
continue
try:
- spec.parse_version(version)
+ parse_version(version)
except Exception, error:
print '-- Cannot parse %r version for %r[%s]: %s' % \
(version, filename, version_id, error)
@@ -501,46 +503,49 @@ class Application(application.Application):
if not alicense and bundle_id in LICENSES_MAP:
alicense = LICENSES_MAP[bundle_id]
- layers = ['origin']
- if status == 4:
- layers.append('public')
-
if not nickname:
nickname = email.split('@')[0]
fullname = fullname.strip()
if not fullname:
fullname = nickname
- if directory.exists(version_id):
- if set(directory.get(version_id).layer) != set(layers) or \
- version_id not in existing:
- directory.update(version_id, {'layer': layers})
- if version_id in existing:
- existing.remove(version_id)
- continue
+ for max_version, sub_versions in SUGAR_API_COMPATIBILITY.items():
+ if parse_version(sugar_min) in sub_versions:
+ if parse_version(sugar_max) < parse_version(max_version):
+ sugar_max = max_version
+ elif parse_version(sugar_max) in sub_versions:
+ sugar_max = max_version
bundle_path = join(ACTIVITIES_PATH, str(addon_id), filename)
+ digest = hashlib.sha1()
+ with file(bundle_path, 'rb') as f:
+ while True:
+ chunk = f.read(toolkit.BUFFER_SIZE)
+ if not chunk:
+ break
+ digest.update(chunk)
+ blob = this.volume.blobs.post({
+ 'digest': digest.hexdigest(),
+ 'location': '/'.join([DOWNLOAD_URL, str(addon_id), filename]),
+ 'content-length': str(os.stat(bundle_path).st_size),
+ })
+ blob.path = bundle_path
+
try:
- with load_bundle(
- self.volume, Request(self.volume, {
- 'requires': 'sugar>=%s<=%s' %
- (sugar_min, sugar_max),
- 'license': alicense,
- }),
- file(bundle_path, 'rb')) as (impl, data):
- impl['guid'] = version_id
- if 'notes' not in impl:
- impl['notes'] = self.get_i18n_field(releasenotes)
- impl['stability'] = 'stable'
- impl['ctime'] = int(time.mktime(release_date.timetuple()))
- impl['mtime'] = time.time()
- impl['author'] = {nickname: {
- 'order': 0, 'role': 3, 'name': fullname,
- }}
- impl['layer'] = layers
- data['url'] = \
- '/'.join([DOWNLOAD_URL, str(addon_id), filename])
- data['size'] = os.stat(bundle_path).st_size
+ __, release = load_bundle(blob, license=alicense,
+ extra_deps='sugar>=%s<=%s' % (sugar_min, sugar_max),
+ release_notes=self.get_i18n_field(releasenotes),
+ update_context=most_recent)
+ updates[version_id] = {
+ 'author': {
+ nickname: {
+ 'order': 0, 'role': 3, 'name': fullname,
+ },
+ },
+ 'value': release,
+ 'ctime': int(time.mktime(release_date.timetuple())),
+ }
+ most_recent = False
except Exception, error:
print '-- Failed to sync %r[%s]' % (filename, version_id)
traceback.print_exception(*sys.exc_info())
@@ -549,10 +554,12 @@ class Application(application.Application):
for guid in existing:
print '-- Hide %s %s deleted version' % (bundle_id, guid)
- directory.update(guid, {'layer': ['deleted']})
+ updates[guid] = {}
+
+ this.volume['context'].update(bundle_id, {'releases': updates})
def sync_context(self, addon_id, bundle_id):
- directory = self.volume['context']
+ directory = this.volume['context']
created, modified, title, summary, description, homepage, \
featured = self.sqlexec("""
@@ -573,11 +580,11 @@ class Application(application.Application):
""" % addon_id)[0]
created = int(time.mktime(created.timetuple()))
modified = int(time.mktime(modified.timetuple()))
- layers = ['featured'] if featured else []
+ pins = ['featured'] if featured else []
- if directory.exists(bundle_id) and \
+ if directory[bundle_id].exists and \
directory.get(bundle_id)['mtime'] >= modified and \
- directory.get(bundle_id)['layer'] == layers:
+ directory.get(bundle_id)['pins'] == pins:
return
tags = set()
@@ -633,7 +640,7 @@ class Application(application.Application):
directory.update(bundle_id, {
'guid': bundle_id,
- 'type': 'activity',
+ 'type': ['activity'],
'title': self.get_i18n_field(title),
'summary': self.get_i18n_field(summary),
'description': self.get_i18n_field(description),
@@ -642,7 +649,7 @@ class Application(application.Application):
'author': authors,
'ctime': created,
'mtime': modified,
- 'layer': layers,
+ 'pins': pins,
})
print '-- Sync %r activity' % bundle_id
@@ -695,19 +702,10 @@ class Application(application.Application):
return cursor.fetchall()
-class Request(dict):
-
- def __init__(self, volume, props):
- dict.__init__(self, props)
- self._volume = volume
+class _Auth(object):
- def call(self, method, path, content):
- if method == 'POST':
- resource, = path
- return self._volume[resource].create(content)
- elif method == 'PUT':
- resource, guid = path
- self._volume[resource].update(guid, content)
+ def logon(self, request=None):
+ return Principal(ASLO_AUTHOR.keys()[0], 0xF)
def scale_png(data, w, h):
@@ -741,8 +739,7 @@ mysql_password = Option(
Option.seek('main', [application.debug, toolkit.cachedir])
Option.seek('aslo', [mysql_server, mysql_user, mysql_password, mysql_database])
-Option.seek('node', [data_root])
-Option.seek('client', [client.api_url])
+Option.seek('node', [data_root, master_api])
db.index_write_queue.value = 1024 * 10
db.index_flush_threshold.value = 0
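
The SUGAR_API_COMPATIBILITY table added above lets the sync script widen an activity's upper Sugar bound when the bundle only declares the older, API-compatible 0.86-0.92 releases. A minimal standalone sketch of that check follows; parse_version here is a simplified stand-in (the real one in sugar_network.toolkit.spec also handles non-numeric version parts), and widen_sugar_max is a hypothetical helper name; in the script the loop runs inline inside sync_versions().

# Simplified stand-in for sugar_network.toolkit.spec.parse_version.
def parse_version(version):
    return tuple(int(part) for part in version.split('.'))

SUGAR_API_COMPATIBILITY = {
    '0.94': [
        parse_version('0.86'),
        parse_version('0.88'),
        parse_version('0.90'),
        parse_version('0.92'),
    ],
}

def widen_sugar_max(sugar_min, sugar_max):
    # If the declared range falls inside an API-compatible family,
    # lift the upper bound to the newest release of that family.
    for max_version, sub_versions in SUGAR_API_COMPATIBILITY.items():
        if parse_version(sugar_min) in sub_versions:
            if parse_version(sugar_max) < parse_version(max_version):
                sugar_max = max_version
        elif parse_version(sugar_max) in sub_versions:
            sugar_max = max_version
    return sugar_max

print(widen_sugar_max('0.86', '0.88'))   # 0.94
print(widen_sugar_max('0.96', '0.98'))   # 0.98, left untouched
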
diff --git a/sugar_network/client/routes.py b/sugar_network/client/routes.py
index 8a037ee..4bdd10f 100644
--- a/sugar_network/client/routes.py
+++ b/sugar_network/client/routes.py
@@ -53,8 +53,8 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
self._connect_jobs = coroutine.Pool()
self._sync_jobs = coroutine.Pool()
self._no_subscription = no_subscription
- self._pull_r = toolkit.Bin(
- join(home_volume.root, 'var', 'pull'), [[1, None]])
+ self._refresh_r = toolkit.Bin(
+ join(home_volume.root, 'var', 'refresh'), [[1, None]])
def connect(self, api=None):
if self._connect_jobs:
@@ -71,7 +71,7 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
self._connect_jobs.kill()
self._got_offline()
self._local.volume.close()
- self._pull_r.commit()
+ self._refresh_r.commit()
@fallbackroute('GET', ['hub'])
def hub(self):
@@ -370,14 +370,14 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
contexts.delete(local_context.guid)
def _pull_checkin(self, request, response, header_key):
- request.headers[header_key] = self._pull_r.value
+ request.headers[header_key] = self._refresh_r.value
packet = packets.decode(self.fallback(request, response))
volume = self._local.volume
volume[request.resource].patch(request.guid, packet['patch'])
for blob in packet:
volume.blobs.patch(blob)
- ranges.exclude(self._pull_r.value, packet['ranges'])
+ ranges.exclude(self._refresh_r.value, packet['ranges'])
def _pull(self):
_logger.debug('Start pulling checkin updates')
@@ -389,7 +389,7 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
request = Request(method='GET',
path=[directory.metadata.name], cmd='diff')
while True:
- request.headers['ranges'] = self._pull_r.value
+ request.headers['ranges'] = self._refresh_r.value
diff = self.fallback(request, response)
if not diff:
break
@@ -397,7 +397,7 @@ class ClientRoutes(FrontRoutes, JournalRoutes):
checkin = Request(method='GET',
path=[request.resource, guid], cmd='diff')
self._pull_checkin(checkin, response, 'ranges')
- ranges.exclude(self._pull_r.value, r)
+ ranges.exclude(self._refresh_r.value, r)
def _push(self):
volume = self._local.volume
diff --git a/sugar_network/db/blobs.py b/sugar_network/db/blobs.py
index 6426341..2faedb0 100644
--- a/sugar_network/db/blobs.py
+++ b/sugar_network/db/blobs.py
@@ -247,7 +247,8 @@ class Blobs(object):
return
if not exists(dirname(path)):
os.makedirs(dirname(path))
- os.rename(patch.path, path)
+ if patch.path:
+ os.rename(patch.path, path)
if exists(path + _META_SUFFIX):
meta = _read_meta(path)
meta.update(patch.meta)
@@ -282,7 +283,7 @@ def _write_meta(path, meta, seqno):
for key, value in meta.items() if isinstance(meta, dict) else meta:
if seqno is None and key == 'x-seqno':
seqno = int(value)
- f.write('%s: %s\n' % (key, toolkit.ascii(value)))
+ f.write(toolkit.ascii(key) + ': ' + toolkit.ascii(value) + '\n')
os.utime(path, (seqno, seqno))
diff --git a/sugar_network/model/context.py b/sugar_network/model/context.py
index cf24650..457281f 100644
--- a/sugar_network/model/context.py
+++ b/sugar_network/model/context.py
@@ -32,30 +32,9 @@ class Context(db.Resource):
self.post('icon', 'assets/package.png')
self.post('logo', 'assets/package-logo.png')
self.post('artefact_icon', 'assets/package.svg')
- return value
-
- svg = None
- blobs = this.volume.blobs
- if not self['artefact_icon']:
- for type_ in ('activity', 'book', 'group'):
- if type_ in value:
- with file(blobs.get('assets/%s.svg' % type_).path) as f:
- svg = f.read()
- from sugar_network.toolkit.sugar import color_svg
- svg = color_svg(svg, self['guid'])
- self.post('artefact_icon',
- blobs.post(svg, 'image/svg+xml').digest)
- break
- for prop, png, size in (
- ('icon', 'assets/missing.png', model.ICON_SIZE),
- ('logo', 'assets/missing-logo.svg', model.LOGO_SIZE),
- ):
- if self[prop]:
- continue
- if svg is not None:
- png = blobs.post(svg_to_png(svg, size), 'image/png').digest
- self.post(prop, png)
-
+ elif 'activity' not in value:
+ if not self['artefact_icon']:
+ self._generate_default_icons(value)
return value
@db.indexed_property(db.Localized, slot=1, prefix='S', full_text=True)
@@ -78,15 +57,18 @@ class Context(db.Resource):
def mime_types(self, value):
return value
- @db.stored_property(db.Blob, mime_type='image/png')
+ @db.stored_property(db.Blob, mime_type='image/png',
+ default='assets/missing.png')
def icon(self, value):
return value
- @db.stored_property(db.Blob, mime_type='image/svg+xml')
+ @db.stored_property(db.Blob, mime_type='image/svg+xml',
+ default='assets/missing.svg')
def artefact_icon(self, value):
return value
- @db.stored_property(db.Blob, mime_type='image/png')
+ @db.stored_property(db.Blob, mime_type='image/png',
+ default='assets/missing-logo.png')
def logo(self, value):
return value
@@ -118,3 +100,26 @@ class Context(db.Resource):
"""
return value
+
+ def _generate_default_icons(self, types):
+ blobs = this.volume.blobs
+ svg = None
+ for type_ in ('activity', 'book', 'group'):
+ if type_ in types:
+ with file(blobs.get('assets/%s.svg' % type_).path) as f:
+ svg = f.read()
+ from sugar_network.toolkit.sugar import color_svg
+ svg = color_svg(svg, self['guid'])
+ self.post('artefact_icon',
+ blobs.post(svg, 'image/svg+xml').digest)
+ break
+ else:
+ return
+ for prop, size in (
+ ('icon', model.ICON_SIZE),
+ ('logo', model.LOGO_SIZE),
+ ):
+ if self[prop]:
+ continue
+ png = blobs.post(svg_to_png(svg, size), 'image/png').digest
+ self.post(prop, png)
diff --git a/sugar_network/node/__init__.py b/sugar_network/node/__init__.py
index 66bd37e..82cb8a9 100644
--- a/sugar_network/node/__init__.py
+++ b/sugar_network/node/__init__.py
@@ -47,4 +47,4 @@ mode = Option(
master_api = Option(
'master API url either to connect to (for slave or proxy nodes), or, '
- 'to provide from (for master nodes)')
+ 'to provide from (for master nodes)', name='master-api')
diff --git a/sugar_network/node/model.py b/sugar_network/node/model.py
index d2a6475..310d2db 100644
--- a/sugar_network/node/model.py
+++ b/sugar_network/node/model.py
@@ -84,7 +84,7 @@ class _Release(object):
class Context(_context.Context):
@db.stored_property(db.Aggregated, subtype=_Release(),
- acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE | ACL.LOCAL)
+ acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE)
def releases(self, value):
return value
@@ -423,9 +423,11 @@ def presolve(presolve_path):
def load_bundle(blob, context=None, initial=False, extra_deps=None,
- license=None, release_notes=None):
+ license=None, release_notes=None, update_context=True):
context_type = None
context_meta = None
+ context_icon = None
+ context_updated = False
version = None
release = _ReleaseValue()
release.guid = blob.digest
@@ -456,7 +458,7 @@ def load_bundle(blob, context=None, initial=False, extra_deps=None,
changelog = None
unpack_size += bundle.getmember(arcname).size
spec = bundle.get_spec()
- context_meta = _load_context_metadata(bundle, spec)
+ context_meta, context_icon = _load_context_metadata(bundle, spec)
if not context:
context = spec['context']
@@ -489,10 +491,13 @@ def load_bundle(blob, context=None, initial=False, extra_deps=None,
enforce(context_meta, http.BadRequest, 'No way to initate context')
context_meta['guid'] = context
context_meta['type'] = [context_type]
+ if context_icon:
+ _generate_icons(context_icon, context_meta)
with this.principal as principal:
principal.cap_create_with_guid = True
this.call(method='POST', path=['context'], content=context_meta,
principal=principal)
+ context_updated = True
else:
enforce(doc.available, http.NotFound, 'No context')
enforce(context_type in doc['type'],
@@ -513,12 +518,15 @@ def load_bundle(blob, context=None, initial=False, extra_deps=None,
_logger.debug('Load %r release: %r', context, release)
- if this.principal in doc['author']:
- patch = doc.format_patch(context_meta)
- if patch:
- this.call(method='PUT', path=['context', context], content=patch,
- principal=this.principal)
- doc.posts.update(patch)
+ if this.principal in doc['author'] or this.principal.cap_author_override:
+ if not context_updated and update_context:
+ patch = doc.format_patch(context_meta) or {}
+ if context_icon and doc['artefact_icon'] == 'assets/missing.svg':
+ _generate_icons(context_icon, patch)
+ if patch:
+ this.call(method='PUT', path=['context', context],
+ content=patch, principal=this.principal)
+ doc.posts.update(patch)
# TRANS: Release notes title
title = i18n._('%(name)s %(version)s release')
else:
@@ -571,21 +579,12 @@ def _load_context_metadata(bundle, spec):
if spec[prop]:
result[prop] = spec[prop]
result['guid'] = spec['context']
+ icon_svg = None
try:
from sugar_network.toolkit.sugar import color_svg
-
icon_file = bundle.extractfile(join(bundle.rootdir, spec['icon']))
- svg = color_svg(icon_file.read(), result['guid'])
- blobs = this.volume.blobs
-
- result['artefact_icon'] = \
- blobs.post(svg, 'image/svg+xml').digest
- result['icon'] = \
- blobs.post(svg_to_png(svg, ICON_SIZE), 'image/png').digest
- result['logo'] = \
- blobs.post(svg_to_png(svg, LOGO_SIZE), 'image/png').digest
-
+ icon_svg = color_svg(icon_file.read(), result['guid'])
icon_file.close()
except Exception:
_logger.exception('Failed to load icon')
@@ -618,7 +617,7 @@ def _load_context_metadata(bundle, spec):
except Exception:
_logger.exception('Gettext failed to read %r', mo_path[-1])
- return result
+ return result, icon_svg
def _resolve_package_alias(doc, value):
@@ -669,6 +668,16 @@ def _resolve_package_alias(doc, value):
doc.post('releases', {'resolves': resolves})
+def _generate_icons(svg, props):
+ blobs = this.volume.blobs
+ props['artefact_icon'] = \
+ blobs.post(svg, 'image/svg+xml').digest
+ props['icon'] = \
+ blobs.post(svg_to_png(svg, ICON_SIZE), 'image/png').digest
+ props['logo'] = \
+ blobs.post(svg_to_png(svg, LOGO_SIZE), 'image/png').digest
+
+
_STABILITY_RATES = {
'insecure': 0,
'buggy': 1,
diff --git a/sugar_network/node/slave.py b/sugar_network/node/slave.py
index babf1f0..a1195ab 100644
--- a/sugar_network/node/slave.py
+++ b/sugar_network/node/slave.py
@@ -52,8 +52,8 @@ class SlaveRoutes(NodeRoutes):
f.write(guid)
NodeRoutes.__init__(self, guid, volume=volume, **kwargs)
vardir = join(volume.root, 'var')
- self._push_r = toolkit.Bin(join(vardir, 'push.ranges'), [[1, None]])
- self._pull_r = toolkit.Bin(join(vardir, 'pull.ranges'), [[1, None]])
+ self._push_r = toolkit.Bin(join(vardir, 'push'), [[1, None]])
+ self._pull_r = toolkit.Bin(join(vardir, 'pull'), [[1, None]])
self._master_guid = urlsplit(master_api).netloc
self._master_api = master_api
diff --git a/sugar_network/toolkit/application.py b/sugar_network/toolkit/application.py
index 06d55a6..bc6b99c 100644
--- a/sugar_network/toolkit/application.py
+++ b/sugar_network/toolkit/application.py
@@ -168,6 +168,9 @@ class Application(object):
init_logging(debug.value)
+ def prolog(self):
+ pass
+
def epilog(self):
pass
@@ -186,6 +189,7 @@ class Application(object):
if cmd.options.get('keep_stdout') and not foreground.value:
self._keep_stdout()
+ self.prolog()
exit(cmd() or 0)
except Exception:
printf.exception('%s %s', _('Aborted'), self.name)
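
The hunk above adds prolog() as the counterpart of the existing epilog() hook, invoked right before the selected command runs; aslo-sync overrides it to open the volume, build the router, and log on. A minimal sketch of that template-method pattern, with hypothetical names and a simplified run() (the real Application also parses options, daemonizes, and reports errors):

class Application(object):

    def prolog(self):
        # Overridden by subclasses to set up shared state before a command runs.
        pass

    def epilog(self):
        # Overridden by subclasses to tear that state down afterwards.
        pass

    def run(self, cmd):
        self.prolog()
        try:
            return cmd() or 0
        finally:
            self.epilog()


class SyncApp(Application):

    def prolog(self):
        print('open volume, build router, log on')

    def epilog(self):
        print('close volume')


def sync():
    print('pull and push')

print(SyncApp().run(sync))   # the three messages above, then 0
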
diff --git a/sugar_network/toolkit/i18n.py b/sugar_network/toolkit/i18n.py
index 86d3cae..57f242f 100644
--- a/sugar_network/toolkit/i18n.py
+++ b/sugar_network/toolkit/i18n.py
@@ -23,7 +23,7 @@ from gettext import translation
_ = lambda x: x
_logger = logging.getLogger('i18n')
-_i18n = {}
+_i18n = None
def default_lang():
@@ -107,10 +107,18 @@ def decode(value, accept_language=None):
def encode(msgid, *args, **kwargs):
- if not _i18n:
+ global _i18n
+
+ if _i18n is None:
from sugar_network.toolkit.languages import LANGUAGES
+
+ _i18n = {}
for lang in LANGUAGES:
- _i18n[lang] = translation('sugar-network', languages=[lang])
+ try:
+ _i18n[lang] = translation('sugar-network', languages=[lang])
+ except IOError, error:
+ _logger.error('Failed to open %r locale: %s', lang, error)
+
result = {}
for lang, trans in _i18n.items():
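
The i18n change above moves catalog loading from module import time into encode(), so translations are built on first use and a language whose .mo file is missing is logged and skipped instead of breaking every caller. A standalone sketch of that lazy-loading pattern; the 'myapp' domain and the LANGUAGES list are placeholders, not the real sugar-network setup:

import logging
from gettext import translation

_logger = logging.getLogger('i18n')

LANGUAGES = ['en', 'es', 'fr']   # placeholder list
_catalogs = None


def catalogs():
    global _catalogs
    if _catalogs is None:
        _catalogs = {}
        for lang in LANGUAGES:
            try:
                # translation() raises IOError/OSError when no .mo catalog
                # exists for lang and no fallback is requested.
                _catalogs[lang] = translation('myapp', languages=[lang])
            except IOError as error:
                _logger.error('Failed to open %r locale: %s', lang, error)
    return _catalogs
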
diff --git a/tests/units/client/client_routes.py b/tests/units/client/client_routes.py
index bc7572f..e65a79d 100755
--- a/tests/units/client/client_routes.py
+++ b/tests/units/client/client_routes.py
@@ -1078,7 +1078,7 @@ class ClientRoutesTest(tests.Test):
local = IPCConnection()
remote = Connection()
- self.assertEqual([[1, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
guid = remote.post(['context'], {
@@ -1092,14 +1092,14 @@ class ClientRoutesTest(tests.Test):
self.assertEqual('1', local.get(['context', guid])['title'])
coroutine.sleep(1.1)
- self.assertEqual([[1, 1], [6, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, 1], [6, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
remote.put(['context', guid, 'title'], '2')
self.assertEqual('2', remote.get(['context', guid, 'title']))
self.assertEqual('1', local.get(['context', guid])['title'])
- self.assertEqual([[1, 1], [6, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, 1], [6, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
self.assertEqual('2', local.get(['context'], reply='title')['result'][0]['title'])
@@ -1107,7 +1107,7 @@ class ClientRoutesTest(tests.Test):
self.assertEqual('2', remote.get(['context', guid, 'title']))
self.assertEqual('2', local.get(['context', guid])['title'])
- self.assertEqual([[1, 1], [7, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, 1], [7, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
def test_PullCheckinsOnGettingOnline(self):
@@ -1118,7 +1118,7 @@ class ClientRoutesTest(tests.Test):
local = IPCConnection()
remote = Connection()
- self.assertEqual([[1, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
guid = remote.post(['context'], {
@@ -1135,7 +1135,7 @@ class ClientRoutesTest(tests.Test):
remote.put(['context', guid, 'title'], '2')
self.assertEqual('2', remote.get(['context', guid, 'title']))
self.assertEqual('1', local.get(['context', guid])['title'])
- self.assertEqual([[1, 1], [6, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, 1], [6, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
self.stop_master()
@@ -1144,7 +1144,7 @@ class ClientRoutesTest(tests.Test):
self.wait_for_events(event='sync', state='done').wait()
self.assertEqual('2', local.get(['context', guid])['title'])
- self.assertEqual([[1, 1], [7, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, 1], [7, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
def test_PullCheckinsOnUpdates(self):
@@ -1152,7 +1152,7 @@ class ClientRoutesTest(tests.Test):
local = IPCConnection()
remote = Connection()
- self.assertEqual([[1, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
guid = remote.post(['context'], {
@@ -1171,7 +1171,7 @@ class ClientRoutesTest(tests.Test):
self.assertEqual('1', remote.get(['context', guid, 'summary']))
self.assertEqual('1', local.get(['context', guid])['title'])
self.assertEqual('1', local.get(['context', guid])['summary'])
- self.assertEqual([[1, 1], [6, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, 1], [6, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
local.put(['context', guid, 'summary'], '2')
@@ -1179,7 +1179,7 @@ class ClientRoutesTest(tests.Test):
self.assertEqual('2', remote.get(['context', guid, 'summary']))
self.assertEqual('2', local.get(['context', guid])['title'])
self.assertEqual('2', local.get(['context', guid])['summary'])
- self.assertEqual([[1, 1], [8, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, 1], [8, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
def test_PushOfflineChanges(self):
@@ -1223,7 +1223,7 @@ class ClientRoutesTest(tests.Test):
local = IPCConnection()
remote = Connection()
- self.assertEqual([[1, None]], self.client_routes._pull_r.value)
+ self.assertEqual([[1, None]], self.client_routes._refresh_r.value)
self.assertEqual(0, local_volume.seqno.value)
guid = remote.post(['context'], {
diff --git a/tests/units/node/slave.py b/tests/units/node/slave.py
index 67a9d1c..f7c149d 100755
--- a/tests/units/node/slave.py
+++ b/tests/units/node/slave.py
@@ -61,8 +61,8 @@ class SlaveTest(tests.Test):
slave = Connection('http://127.0.0.1:8888')
slave.post(cmd='online_sync')
- self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[1, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/push')))
guid1 = slave.post(['document'], {'message': '1', 'title': ''})
guid2 = slave.post(['document'], {'message': '2', 'title': ''})
@@ -73,8 +73,8 @@ class SlaveTest(tests.Test):
{'guid': guid2, 'message': '2'},
],
master.get(['document'], reply=['guid', 'message'])['result'])
- self.assertEqual([[2, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[4, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[2, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[4, None]], json.load(file('slave/var/push')))
guid3 = slave.post(['document'], {'message': '3', 'title': ''})
slave.post(cmd='online_sync')
@@ -84,15 +84,15 @@ class SlaveTest(tests.Test):
{'guid': guid3, 'message': '3'},
],
master.get(['document'], reply=['guid', 'message'])['result'])
- self.assertEqual([[3, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[5, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[3, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[5, None]], json.load(file('slave/var/push')))
coroutine.sleep(1)
slave.put(['document', guid2], {'message': '22'})
slave.post(cmd='online_sync')
self.assertEqual('22', master.get(['document', guid2, 'message']))
- self.assertEqual([[4, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[6, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[4, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[6, None]], json.load(file('slave/var/push')))
coroutine.sleep(1)
slave.delete(['document', guid1])
@@ -102,8 +102,8 @@ class SlaveTest(tests.Test):
{'guid': guid3, 'message': '3'},
],
master.get(['document'], reply=['guid', 'message'])['result'])
- self.assertEqual([[5, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[7, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[5, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[7, None]], json.load(file('slave/var/push')))
coroutine.sleep(1)
slave.put(['document', guid2], {'message': 'b'})
@@ -116,8 +116,8 @@ class SlaveTest(tests.Test):
{'guid': guid4, 'message': 'd'},
]),
sorted(master.get(['document'], reply=['guid', 'message'])['result']))
- self.assertEqual([[6, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[11, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[6, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[11, None]], json.load(file('slave/var/push')))
def test_online_sync_Pull(self):
self.fork_master([User, self.Document])
@@ -126,8 +126,8 @@ class SlaveTest(tests.Test):
coroutine.sleep(1)
slave.post(cmd='online_sync')
- self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[1, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/push')))
guid1 = master.post(['document'], {'message': '1', 'title': ''})
guid2 = master.post(['document'], {'message': '2', 'title': ''})
@@ -138,8 +138,8 @@ class SlaveTest(tests.Test):
{'guid': guid2, 'message': '2'},
],
slave.get(['document'], reply=['guid', 'message'])['result'])
- self.assertEqual([[4, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[2, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[4, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[2, None]], json.load(file('slave/var/push')))
guid3 = master.post(['document'], {'message': '3', 'title': ''})
slave.post(cmd='online_sync')
@@ -149,15 +149,15 @@ class SlaveTest(tests.Test):
{'guid': guid3, 'message': '3'},
],
slave.get(['document'], reply=['guid', 'message'])['result'])
- self.assertEqual([[5, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[3, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[5, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[3, None]], json.load(file('slave/var/push')))
coroutine.sleep(1)
master.put(['document', guid2], {'message': '22'})
slave.post(cmd='online_sync')
self.assertEqual('22', slave.get(['document', guid2, 'message']))
- self.assertEqual([[6, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[4, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[6, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[4, None]], json.load(file('slave/var/push')))
coroutine.sleep(1)
master.delete(['document', guid1])
@@ -167,8 +167,8 @@ class SlaveTest(tests.Test):
{'guid': guid3, 'message': '3'},
],
slave.get(['document'], reply=['guid', 'message'])['result'])
- self.assertEqual([[7, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[5, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[7, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[5, None]], json.load(file('slave/var/push')))
coroutine.sleep(1)
master.put(['document', guid2], {'message': 'b'})
@@ -181,8 +181,8 @@ class SlaveTest(tests.Test):
{'guid': guid4, 'message': 'd'},
],
slave.get(['document'], reply=['guid', 'message'])['result'])
- self.assertEqual([[11, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[6, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[11, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[6, None]], json.load(file('slave/var/push')))
def test_online_sync_PullBlobs(self):
self.fork_master([User, self.Document])
@@ -191,8 +191,8 @@ class SlaveTest(tests.Test):
coroutine.sleep(1)
slave.post(cmd='online_sync')
- self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[1, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/push')))
guid = master.post(['document'], {'message': '1', 'title': ''})
master.put(['document', guid, 'blob'], 'blob')
@@ -208,8 +208,8 @@ class SlaveTest(tests.Test):
slave = Connection('http://127.0.0.1:8888')
slave.post(cmd='online_sync')
- self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
- self.assertEqual([[1, None]], json.load(file('slave/var/push.ranges')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/pull')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/push')))
guid = slave.post(['document'], {'message': '1', 'title': '1'})
slave.post(cmd='online_sync')
@@ -251,8 +251,8 @@ class SlaveTest(tests.Test):
self.assertEqual(1, slave.get(['document', '1', 'ctime']))
self.assertEqual('a', file(self.slave_volume.blobs.get(hashlib.sha1('a').hexdigest()).path).read())
self.assertEqual('bb', file(self.slave_volume.blobs.get('foo/bar').path).read())
- self.assertEqual([[4, None]], json.load(file('slave/var/push.ranges')))
- self.assertEqual([[3, 100], [104, None]], json.load(file('slave/var/pull.ranges')))
+ self.assertEqual([[4, None]], json.load(file('slave/var/push')))
+ self.assertEqual([[3, 100], [104, None]], json.load(file('slave/var/pull')))
self.assertEqual(
sorted([
@@ -305,8 +305,8 @@ class SlaveTest(tests.Test):
self.assertEqual(1, slave.get(['document', '1', 'ctime']))
self.assertEqual('a', file(self.slave_volume.blobs.get(hashlib.sha1('a').hexdigest()).path).read())
self.assertEqual('bb', file(self.slave_volume.blobs.get('foo/bar').path).read())
- self.assertEqual([[2, None]], json.load(file('slave/var/push.ranges')))
- self.assertEqual([[3, None]], json.load(file('slave/var/pull.ranges')))
+ self.assertEqual([[2, None]], json.load(file('slave/var/push')))
+ self.assertEqual([[3, None]], json.load(file('slave/var/pull')))
self.assertEqual(
sorted([
@@ -340,8 +340,8 @@ class SlaveTest(tests.Test):
root='sync', limit=99999999)
slave.post(cmd='offline_sync', path=tests.tmpdir + '/sync')
- self.assertEqual([[4, None]], json.load(file('slave/var/push.ranges')))
- self.assertEqual([[1, 100], [104, None]], json.load(file('slave/var/pull.ranges')))
+ self.assertEqual([[4, None]], json.load(file('slave/var/push')))
+ self.assertEqual([[1, 100], [104, None]], json.load(file('slave/var/pull')))
self.assertEqual(
sorted([
@@ -374,8 +374,8 @@ class SlaveTest(tests.Test):
slave.post(cmd='offline_sync', path=tests.tmpdir + '/sync')
self.assertEqual(1, slave.get(['document', '1', 'ctime']))
- self.assertEqual([[2, None]], json.load(file('slave/var/push.ranges')))
- self.assertEqual([[1, None]], json.load(file('slave/var/pull.ranges')))
+ self.assertEqual([[2, None]], json.load(file('slave/var/push')))
+ self.assertEqual([[1, None]], json.load(file('slave/var/pull')))
self.assertEqual(
sorted([