Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
path: root/sugar_network
diff options
context:
space:
mode:
authorAleksey Lim <alsroot@sugarlabs.org>2014-02-04 12:19:15 (GMT)
committer Aleksey Lim <alsroot@sugarlabs.org>2014-02-18 21:20:34 (GMT)
commit6447b7951a66021f288f5b2b0c8cc301257d36ee (patch)
tree2c7d269a0dba29a6466996889a1c9980971a0e06 /sugar_network
parentb68085299cee6e5488e8301042cde13d040494ca (diff)
Polish design
The major points: * minimize number of resource types * use aggregated properties instead of resources * separate blob storage from resources db
Diffstat (limited to 'sugar_network')
-rw-r--r--sugar_network/client/cache.py46
-rw-r--r--sugar_network/client/journal.py10
-rw-r--r--sugar_network/client/releases.py167
-rw-r--r--sugar_network/client/routes.py119
-rw-r--r--sugar_network/client/solver.py33
-rw-r--r--sugar_network/db/__init__.py4
-rw-r--r--sugar_network/db/directory.py263
-rw-r--r--sugar_network/db/files.py146
-rw-r--r--sugar_network/db/index.py60
-rw-r--r--sugar_network/db/metadata.py421
-rw-r--r--sugar_network/db/resource.py211
-rw-r--r--sugar_network/db/routes.py452
-rw-r--r--sugar_network/db/storage.py36
-rw-r--r--sugar_network/db/volume.py46
-rw-r--r--sugar_network/model/__init__.py275
-rw-r--r--sugar_network/model/context.py156
-rw-r--r--sugar_network/model/post.py69
-rw-r--r--sugar_network/model/release.py83
-rw-r--r--sugar_network/model/report.py42
-rw-r--r--sugar_network/model/routes.py154
-rw-r--r--sugar_network/model/user.py6
-rw-r--r--sugar_network/node/master.py73
-rw-r--r--sugar_network/node/model.py177
-rw-r--r--sugar_network/node/obs.py116
-rw-r--r--sugar_network/node/routes.py466
-rw-r--r--sugar_network/node/slave.py13
-rw-r--r--sugar_network/node/stats_node.py311
-rw-r--r--sugar_network/node/sync.py2
-rw-r--r--sugar_network/node/volume.py142
-rw-r--r--sugar_network/static/httpdocs/favicon.icobin1150 -> 0 bytes
-rw-r--r--sugar_network/static/httpdocs/images/activity.svg68
-rw-r--r--sugar_network/static/httpdocs/images/book.svg68
-rw-r--r--sugar_network/static/httpdocs/images/group.svg68
-rw-r--r--sugar_network/static/httpdocs/images/missing-logo.pngbin4073 -> 0 bytes
-rw-r--r--sugar_network/static/httpdocs/images/missing.pngbin1566 -> 0 bytes
-rw-r--r--sugar_network/static/httpdocs/images/missing.svg75
-rw-r--r--sugar_network/static/httpdocs/images/package-logo.pngbin2874 -> 0 bytes
-rw-r--r--sugar_network/static/httpdocs/images/package.pngbin1199 -> 0 bytes
-rw-r--r--sugar_network/static/httpdocs/images/package.svg71
-rw-r--r--sugar_network/toolkit/__init__.py154
-rw-r--r--sugar_network/toolkit/coroutine.py109
-rw-r--r--sugar_network/toolkit/http.py4
-rw-r--r--sugar_network/toolkit/i18n.py134
-rw-r--r--sugar_network/toolkit/languages.py.in (renamed from sugar_network/static/__init__.py)10
-rw-r--r--sugar_network/toolkit/router.py351
45 files changed, 2183 insertions, 3028 deletions
diff --git a/sugar_network/client/cache.py b/sugar_network/client/cache.py
index e13ccb6..df76a29 100644
--- a/sugar_network/client/cache.py
+++ b/sugar_network/client/cache.py
@@ -17,9 +17,10 @@ import os
import sys
import time
import logging
-from os.path import exists, basename
+from os.path import exists
from sugar_network import client
+from sugar_network.db import files
from sugar_network.toolkit import pylru, enforce
@@ -30,8 +31,7 @@ _logger = logging.getLogger('cache')
class Cache(object):
- def __init__(self, volume):
- self._volume = volume
+ def __init__(self):
self._pool = None
self._du = 0
self._acquired = {}
@@ -71,14 +71,18 @@ class Cache(object):
self.checkin(guid, acquired[1])
del self._acquired[guid]
- def checkin(self, guid, size):
+ def checkin(self, digest, size):
self._ensure_open()
- if guid in self._pool:
- self._pool.__getitem__(guid)
+ if digest in self._pool:
+ self._pool.__getitem__(digest)
return
+
+
+
_logger.debug('Checkin %r %d bytes long', guid, size)
- mtime = os.stat(self._volume['release'].path(guid)).st_mtime
- self._pool[guid] = (size, mtime)
+
+ mtime = os.stat(files.get(digest).path).st_mtime
+ self._pool[digest] = (size, mtime)
self._du += size
def checkout(self, guid, *args):
@@ -112,17 +116,25 @@ class Cache(object):
_logger.debug('Open releases pool')
pool = []
- impls = self._volume['release']
- for res in impls.find(not_layer=['local'])[0]:
- meta = res.meta('data')
- if not meta or 'blob_size' not in meta:
- continue
- clone = self._volume['context'].path(res['context'], '.clone')
- if exists(clone) and basename(os.readlink(clone)) == res.guid:
+ for release in self._volume['release'].find(not_layer=['local'])[0]:
+ meta = files.get(release['data'])
+ if not meta:
continue
+
+ """
+ TODO
+
+ solution_path = client.path('solutions', release['context'])
+ if exists(solution_path):
+ with file(path) as f:
+ cached_api_url, cached_stability, solution = json.load(f)
+ if solution[0]['guid'] == release['guid']:
+ continue
+
+ """
pool.append((
- os.stat(impls.path(res.guid)).st_mtime,
- res.guid,
+ os.stat(meta.path).st_mtime,
+ release.guid,
meta.get('unpack_size') or meta['blob_size'],
))
diff --git a/sugar_network/client/journal.py b/sugar_network/client/journal.py
index ee2a2f3..0dcae12 100644
--- a/sugar_network/client/journal.py
+++ b/sugar_network/client/journal.py
@@ -19,8 +19,8 @@ import logging
from shutil import copyfileobj
from tempfile import NamedTemporaryFile
-from sugar_network import client
-from sugar_network.toolkit.router import Blob, route, Request
+from sugar_network import client, toolkit
+from sugar_network.toolkit.router import route, Request
from sugar_network.toolkit import enforce
@@ -105,15 +105,13 @@ class Routes(object):
@route('GET', ['journal', None, 'preview'])
def journal_get_preview(self, request, response):
- return Blob({
- 'blob': _prop_path(request.guid, 'preview'),
+ return toolkit.File(_prop_path(request.guid, 'preview'), {
'mime_type': 'image/png',
})
@route('GET', ['journal', None, 'data'])
def journal_get_data(self, request, response):
- return Blob({
- 'blob': _ds_path(request.guid, 'data'),
+ return toolkit.File(_ds_path(request.guid, 'data'), {
'mime_type': get(request.guid, 'mime_type') or 'application/octet',
})
diff --git a/sugar_network/client/releases.py b/sugar_network/client/releases.py
index ff35d16..c93a91a 100644
--- a/sugar_network/client/releases.py
+++ b/sugar_network/client/releases.py
@@ -32,7 +32,8 @@ from sugar_network.client.cache import Cache
from sugar_network.client import journal, packagekit
from sugar_network.toolkit.router import Request, Response, route
from sugar_network.toolkit.bundle import Bundle
-from sugar_network.toolkit import http, coroutine, enforce
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import i18n, http, coroutine, enforce
_MIMETYPE_DEFAULTS_KEY = '/desktop/sugar/journal/defaults'
@@ -43,22 +44,20 @@ _logger = logging.getLogger('releases')
class Routes(object):
- def __init__(self, local_volume):
- self._volume = local_volume
+ def __init__(self):
self._node_mtime = None
self._call = lambda **kwargs: \
self._map_exceptions(self.fallback, **kwargs)
- self._cache = Cache(local_volume)
+ self._cache = Cache()
def invalidate_solutions(self, mtime):
self._node_mtime = mtime
@route('GET', ['context', None], cmd='path')
def path(self, request):
- clone_path = self._volume['context'].path(request.guid, '.clone')
- enforce(exists(clone_path), http.NotFound)
- clone_impl = basename(os.readlink(clone_path))
- return self._volume['release'].path(clone_impl, 'data')
+ clone = self._solve(request)
+ enforce(clone is not None, http.NotFound, 'No clones')
+ return clone['path']
@route('GET', ['context', None], cmd='launch', arguments={'args': list},
mime_type='text/event-stream')
@@ -75,18 +74,18 @@ class Routes(object):
acquired = []
try:
- impl = self._solve_impl(context, request)
+ impl = self._solve(request, context['type'])
if 'activity' not in context['type']:
app = request.get('context') or \
_mimetype_context(impl['data']['mime_type'])
enforce(app, 'Cannot find proper application')
- acquired += self._checkin_impl(
+ acquired += self._checkin(
context, request, self._cache.acquire)
request = Request(path=['context', app],
object_id=impl['path'], session=request.session)
for context in self._checkin_context(request):
- impl = self._solve_impl(context, request)
- acquired += self._checkin_impl(
+ impl = self._solve(request, context['type'])
+ acquired += self._checkin(
context, request, self._cache.acquire)
child = _exec(context, request, impl)
@@ -105,19 +104,15 @@ class Routes(object):
enforce(not request.content or self.inline(), http.ServiceUnavailable,
'Not available in offline')
for context in self._checkin_context(request, 'clone'):
- cloned_path = context.path('.clone')
if request.content:
- impl = self._solve_impl(context, request)
- self._checkin_impl(context, request, self._cache.checkout)
- impl_path = relpath(dirname(impl['path']), context.path())
- os.symlink(impl_path, cloned_path)
+ impl = self._solve(request, context['type'])
+ self._checkin(context, request, self._cache.checkout)
yield {'event': 'ready'}
else:
- cloned_impl = basename(os.readlink(cloned_path))
- meta = self._volume['release'].get(cloned_impl).meta('data')
+ clone = self._solve(request)
+ meta = this.volume['release'].get(clone['guid']).meta('data')
size = meta.get('unpack_size') or meta['blob_size']
- self._cache.checkin(cloned_impl, size)
- os.unlink(cloned_path)
+ self._cache.checkin(clone['guid'], size)
@route('GET', ['context', None], cmd='clone',
arguments={'requires': list})
@@ -147,18 +142,14 @@ class Routes(object):
raise http.ServiceUnavailable, error, sys.exc_info()[2]
def _checkin_context(self, request, layer=None):
- contexts = self._volume['context']
+ contexts = this.volume['context']
guid = request.guid
if layer and not request.content and not contexts.exists(guid):
return
if not contexts.exists(guid):
- context = self._call(method='GET', path=['context', guid])
- contexts.create(context, setters=True)
- for prop in ('icon', 'artifact_icon', 'logo'):
- blob = self._call(method='GET', path=['context', guid, prop])
- if blob is not None:
- contexts.update(guid, {prop: {'blob': blob}})
+ patch = self._call(method='GET', path=['context', guid], cmd='diff')
+ contexts.merge(guid, patch)
context = contexts.get(guid)
if layer and bool(request.content) == (layer in context['layer']):
return
@@ -171,14 +162,9 @@ class Routes(object):
else:
layer_value = set(context['layer']) - set([layer])
contexts.update(guid, {'layer': list(layer_value)})
- self.broadcast({
- 'event': 'update',
- 'resource': 'context',
- 'guid': guid,
- })
_logger.debug('Checked %r in: %r', guid, layer_value)
- def _solve_impl(self, context, request):
+ def _solve(self, request, force_type=None):
stability = request.get('stability') or \
client.stability(request.guid)
@@ -193,9 +179,11 @@ class Routes(object):
solution, stale = self._cache_solution_get(request.guid, stability)
if stale is False:
_logger.debug('Reuse cached %r solution', request.guid)
- elif solution is not None and not self.inline():
- _logger.debug('Reuse stale %r in offline', request.guid)
- elif 'activity' in context['type']:
+ elif solution is not None and (not force_type or not self.inline()):
+ _logger.debug('Reuse stale %r solution', request.guid)
+ elif not force_type:
+ return None
+ elif 'activity' in force_type:
from sugar_network.client import solver
solution = self._map_exceptions(solver.solve,
self.fallback, request.guid, stability)
@@ -203,16 +191,18 @@ class Routes(object):
response = Response()
blob = self._call(method='GET', path=['context', request.guid],
cmd='clone', stability=stability, response=response)
- response.meta['data']['blob'] = blob
- solution = [response.meta]
+ release = response.meta
+ release['mime_type'] = response.content_type
+ release['size'] = response.content_length
+ files.post(blob, digest=release['spec']['*-*']['bundle'])
+ solution = [release]
request.session['solution'] = solution
return solution[0]
- def _checkin_impl(self, context, request, cache_call):
+ def _checkin(self, context, request, cache_call):
if 'clone' in context['layer']:
cache_call = self._cache.checkout
- impls = self._volume['release']
if 'activity' in context['type']:
to_install = []
@@ -226,49 +216,42 @@ class Routes(object):
def cache_impl(sel):
guid = sel['guid']
- data = sel['data']
- sel['path'] = impls.path(guid, 'data')
- size = data.get('unpack_size') or data['blob_size']
-
- blob = None
- if 'blob' in data:
- blob = data.pop('blob')
-
- if impls.exists(guid):
- return cache_call(guid, size)
-
- if blob is None:
- blob = self._call(method='GET', path=['release', guid, 'data'])
-
- blob_dir = dirname(sel['path'])
- if not exists(blob_dir):
- os.makedirs(blob_dir)
-
- with toolkit.mkdtemp(dir=blob_dir) as blob_dir:
- if 'activity' in context['type']:
- self._cache.ensure(size, data['blob_size'])
- with toolkit.TemporaryFile() as tmp_file:
- shutil.copyfileobj(blob, tmp_file)
- tmp_file.seek(0)
- with Bundle(tmp_file, 'application/zip') as bundle:
- bundle.extractall(blob_dir, prefix=bundle.rootdir)
- for exec_dir in ('bin', 'activity'):
- bin_path = join(blob_dir, exec_dir)
- if not exists(bin_path):
- continue
- for filename in os.listdir(bin_path):
- os.chmod(join(bin_path, filename), 0755)
- blob = blob_dir
- else:
- self._cache.ensure(size)
- with file(join(blob_dir, 'data'), 'wb') as f:
- shutil.copyfileobj(blob, f)
- blob = f.name
- impl = deepcopy(sel)
- impl['mtime'] = impl['ctime']
- impl['data']['blob'] = blob
- impls.create(impl)
- return cache_call(guid, size)
+
+
+
+
+ data = files.get(guid)
+
+ if data is not None:
+ return cache_call(guid, data['unpack_size'])
+
+ response = Response()
+ blob = self._call(method='GET', path=['release', guid, 'data'],
+ response=response)
+
+ if 'activity' not in context['type']:
+ self._cache.ensure(response.content_length)
+ files.post(blob, response.meta, sel['data'])
+ return cache_call(guid, response.content_length)
+
+ with toolkit.mkdtemp(dir=files.path(sel['data'])) as blob_dir:
+ self._cache.ensure(
+ response.meta['unpack_size'],
+ response.content_length)
+ with toolkit.TemporaryFile() as tmp_file:
+ shutil.copyfileobj(blob, tmp_file)
+ tmp_file.seek(0)
+ with Bundle(tmp_file, 'application/zip') as bundle:
+ bundle.extractall(blob_dir, prefix=bundle.rootdir)
+ for exec_dir in ('bin', 'activity'):
+ bin_path = join(blob_dir, exec_dir)
+ if not exists(bin_path):
+ continue
+ for filename in os.listdir(bin_path):
+ os.chmod(join(bin_path, filename), 0755)
+
+ files.update(sel['data'], response.meta)
+ return cache_call(guid, response.meta['unpack_size'])
result = []
for sel in request.session['solution']:
@@ -278,11 +261,8 @@ class Routes(object):
request.session['stability'], request.session['solution'])
return result
- def _cache_solution_path(self, guid):
- return client.path('solutions', guid[:2], guid)
-
def _cache_solution_get(self, guid, stability):
- path = self._cache_solution_path(guid)
+ path = client.path('solutions', guid)
solution = None
if exists(path):
try:
@@ -305,7 +285,7 @@ class Routes(object):
def _cache_solution_set(self, guid, stability, solution):
if isinstance(solution, _CachedSolution):
return
- path = self._cache_solution_path(guid)
+ path = client.path('solutions', guid)
if not exists(dirname(path)):
os.makedirs(dirname(path))
with file(path, 'w') as f:
@@ -315,13 +295,12 @@ class Routes(object):
for context in self._checkin_context(request):
if 'clone' not in context['layer']:
return self._map_exceptions(self.fallback, request, response)
- guid = basename(os.readlink(context.path('.clone')))
- impl = self._volume['release'].get(guid)
- response.meta = impl.properties([
+ release = this.volume['release'].get(self._solve(request)['guid'])
+ response.meta = release.properties([
'guid', 'ctime', 'layer', 'author', 'tags',
'context', 'version', 'stability', 'license', 'notes', 'data',
])
- return impl.meta('data')
+ return release.meta('data')
def _activity_id_new():
@@ -397,7 +376,7 @@ def _exec(context, request, sel):
environ['SUGAR_BUNDLE_PATH'] = impl_path
environ['SUGAR_BUNDLE_ID'] = context.guid
environ['SUGAR_BUNDLE_NAME'] = \
- toolkit.gettext(context['title']).encode('utf8')
+ i18n.decode(context['title']).encode('utf8')
environ['SUGAR_BUNDLE_VERSION'] = sel['version']
environ['SUGAR_ACTIVITY_ROOT'] = datadir
environ['SUGAR_LOCALEDIR'] = join(impl_path, 'locale')
diff --git a/sugar_network/client/routes.py b/sugar_network/client/routes.py
index c6ea6d2..50d8632 100644
--- a/sugar_network/client/routes.py
+++ b/sugar_network/client/routes.py
@@ -24,6 +24,7 @@ from sugar_network import db, client, node, toolkit, model
from sugar_network.client import journal, releases
from sugar_network.node.slave import SlaveRoutes
from sugar_network.toolkit import netlink, mountpoints
+from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit.router import ACL, Request, Response, Router
from sugar_network.toolkit.router import route, fallbackroute
from sugar_network.toolkit import zeroconf, coroutine, http, exception, enforce
@@ -189,44 +190,38 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes):
yield {'event': 'done', 'guid': guid}
@fallbackroute()
- def fallback(self, request=None, response=None, method=None, path=None,
- cmd=None, content=None, content_stream=None, content_type=None,
- **kwargs):
+ def fallback(self, request=None, response=None, **kwargs):
if request is None:
- request = Request(method=method, path=path, cmd=cmd,
- content=content, content_stream=content_stream,
- content_type=content_type)
+ request = Request(**kwargs)
if response is None:
response = Response()
- request.update(kwargs)
- if self._inline.is_set():
- if client.layers.value and \
- request.resource in ('context', 'release'):
- request.add('layer', *client.layers.value)
- request.principal = self._auth.login
- try:
- reply = self._node.call(request, response)
- if hasattr(reply, 'read'):
- if response.relocations:
- return reply
- else:
- return _ResponseStream(reply, self._restart_online)
- else:
- return reply
- except (http.ConnectionError, IncompleteRead):
+
+ if not self._inline.is_set():
+ return self._local.call(request, response)
+
+ if client.layers.value and request.resource in ('context', 'release'):
+ request.add('layer', *client.layers.value)
+ request.principal = self._auth.login
+ try:
+ reply = self._node.call(request, response)
+ if hasattr(reply, 'read'):
if response.relocations:
- raise
- self._restart_online()
- return self._local.call(request, response)
- else:
+ return reply
+ else:
+ return _ResponseStream(reply, self._restart_online)
+ else:
+ return reply
+ except (http.ConnectionError, IncompleteRead):
+ if response.relocations:
+ raise
+ self._restart_online()
return self._local.call(request, response)
def _got_online(self):
enforce(not self._inline.is_set())
_logger.debug('Got online on %r', self._node)
self._inline.set()
- self.broadcast({'event': 'inline', 'state': 'online'})
- self._local.volume.broadcast = None
+ this.localcast({'event': 'inline', 'state': 'online'})
def _got_offline(self, force=False):
if not force and not self._inline.is_set():
@@ -235,9 +230,8 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes):
self._node.close()
if self._inline.is_set():
_logger.debug('Got offline on %r', self._node)
- self.broadcast({'event': 'inline', 'state': 'offline'})
+ this.localcast({'event': 'inline', 'state': 'offline'})
self._inline.clear()
- self._local.volume.broadcast = self.broadcast
def _restart_online(self):
_logger.debug('Lost %r connection, try to reconnect in %s seconds',
@@ -266,16 +260,19 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes):
mtime = event.get('mtime')
if mtime:
self.invalidate_solutions(mtime)
- self.broadcast(event)
+ this.broadcast(event)
def handshake(url):
_logger.debug('Connecting to %r node', url)
self._node = client.Connection(url, auth=self._auth)
status = self._node.get(cmd='status')
self._auth.allow_basic_auth = (status.get('level') == 'master')
+ """
+ TODO switch to seqno
impl_info = status['resources'].get('release')
if impl_info:
self.invalidate_solutions(impl_info['mtime'])
+ """
if self._inline.is_set():
_logger.info('Reconnected to %r node', url)
else:
@@ -284,7 +281,7 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes):
def connect():
timeout = _RECONNECT_TIMEOUT
while True:
- self.broadcast({'event': 'inline', 'state': 'connecting'})
+ this.localcast({'event': 'inline', 'state': 'connecting'})
for url in self._remote_urls:
while True:
try:
@@ -329,8 +326,7 @@ class ClientRoutes(model.FrontRoutes, releases.Routes, journal.Routes):
profile['guid'] = self._auth.login
volume['user'].create(profile)
- self._node = _NodeRoutes(join(db_path, 'node'), volume,
- self.broadcast)
+ self._node = _NodeRoutes(join(db_path, 'node'), volume)
self._jobs.spawn(volume.populate)
logging.info('Start %r node on %s port', volume.root, node.port.value)
@@ -364,6 +360,11 @@ class CachedClientRoutes(ClientRoutes):
ClientRoutes._got_offline(self, force)
def _push(self):
+ # TODO should work using regular pull/push
+ return
+
+
+
pushed_seq = toolkit.Sequence()
skiped_seq = toolkit.Sequence()
volume = self._local.volume
@@ -388,24 +389,11 @@ class CachedClientRoutes(ClientRoutes):
diff_seq = toolkit.Sequence()
post_requests = []
for prop, meta, seqno in patch:
- if 'blob' in meta:
- request = Request(method='PUT', path=[res, guid, prop])
- request.content_type = meta['mime_type']
- request.content_length = os.stat(meta['blob']).st_size
- request.content_stream = \
- toolkit.iter_file(meta['blob'])
- post_requests.append((request, seqno))
- elif 'url' in meta:
- request = Request(method='PUT', path=[res, guid, prop])
- request.content_type = 'application/json'
- request.content = meta
- post_requests.append((request, seqno))
- else:
- value = meta['value']
- if prop == 'layer':
- value = list(set(value) - _LOCAL_LAYERS)
- diff[prop] = value
- diff_seq.include(seqno, seqno)
+ value = meta['value']
+ if prop == 'layer':
+ value = list(set(value) - _LOCAL_LAYERS)
+ diff[prop] = value
+ diff_seq.include(seqno, seqno)
if not diff:
continue
if 'guid' in diff:
@@ -426,7 +414,6 @@ class CachedClientRoutes(ClientRoutes):
if not pushed_seq:
if not self._push_seq.mtime:
self._push_seq.commit()
- self.broadcast({'event': 'push'})
return
_logger.info('Pushed %r local cache', pushed_seq)
@@ -441,38 +428,32 @@ class CachedClientRoutes(ClientRoutes):
volume['report'].wipe()
self._push_seq.commit()
- self.broadcast({'event': 'push'})
-class _LocalRoutes(model.VolumeRoutes, Router):
+class _LocalRoutes(db.Routes, Router):
def __init__(self, volume):
- model.VolumeRoutes.__init__(self, volume)
+ db.Routes.__init__(self, volume)
Router.__init__(self, self)
- def on_create(self, request, props, event):
+ def on_create(self, request, props):
props['layer'] = tuple(props['layer']) + ('local',)
- model.VolumeRoutes.on_create(self, request, props, event)
+ db.Routes.on_create(self, request, props)
class _NodeRoutes(SlaveRoutes, Router):
- def __init__(self, key_path, volume, localcast):
+ def __init__(self, key_path, volume):
SlaveRoutes.__init__(self, key_path, volume)
Router.__init__(self, self)
self.api_url = 'http://127.0.0.1:%s' % node.port.value
- self._localcast = localcast
self._mounts = toolkit.Pool()
self._jobs = coroutine.Pool()
mountpoints.connect(_SYNC_DIRNAME,
self.__found_mountcb, self.__lost_mount_cb)
- def broadcast(self, event=None, request=None):
- SlaveRoutes.broadcast(self, event, request)
- self._localcast(event)
-
def close(self):
self.volume.close()
@@ -481,27 +462,27 @@ class _NodeRoutes(SlaveRoutes, Router):
(self.volume.root, self.api_url)
def _sync_mounts(self):
- self._localcast({'event': 'sync_start'})
+ this.localcast({'event': 'sync_start'})
for mountpoint in self._mounts:
- self._localcast({'event': 'sync_next', 'path': mountpoint})
+ this.localcast({'event': 'sync_next', 'path': mountpoint})
try:
self._offline_session = self._offline_sync(
join(mountpoint, _SYNC_DIRNAME),
**(self._offline_session or {}))
except Exception, error:
_logger.exception('Failed to complete synchronization')
- self._localcast({'event': 'sync_abort', 'error': str(error)})
+ this.localcast({'event': 'sync_abort', 'error': str(error)})
self._offline_session = None
raise
if self._offline_session is None:
_logger.debug('Synchronization completed')
- self._localcast({'event': 'sync_complete'})
+ this.localcast({'event': 'sync_complete'})
else:
_logger.debug('Postpone synchronization with %r session',
self._offline_session)
- self._localcast({'event': 'sync_paused'})
+ this.localcast({'event': 'sync_paused'})
def __found_mountcb(self, path):
self._mounts.add(path)
diff --git a/sugar_network/client/solver.py b/sugar_network/client/solver.py
index 67350b6..84eb9cf 100644
--- a/sugar_network/client/solver.py
+++ b/sugar_network/client/solver.py
@@ -20,6 +20,7 @@ import logging
from os.path import isabs, join, dirname
from sugar_network.client import packagekit
+from sugar_network.toolkit.router import ACL
from sugar_network.toolkit.spec import parse_version
from sugar_network.toolkit import http, lsb_release
@@ -191,12 +192,10 @@ def _load_feed(context):
feed.name = context
return feed
- feed_content = None
+ releases = None
try:
- feed_content = _call(method='GET', path=['context', context],
- cmd='feed', layer='origin', stability=_stability,
- distro=lsb_release.distributor_id())
- _logger.trace('[%s] Found feed: %r', context, feed_content)
+ releases = _call(method='GET', path=['context', context, 'releases'])
+ _logger.trace('[%s] Found feed: %r', context, releases)
except http.ServiceUnavailable:
_logger.trace('[%s] Failed to fetch the feed', context)
raise
@@ -204,13 +203,33 @@ def _load_feed(context):
_logger.exception('[%s] Failed to fetch the feed', context)
return None
+ """
+ for digest, release in releases:
+ if [i for i in release['author'].values()
+ if i['role'] & ACL.ORIGINAL] and \
+ release['stability'] == _stability and \
+ f
+
+
+
+
+
+ stability=_stability,
+ distro=lsb_release.distributor_id())
+ """
+
+ for impl in feed_content['releases']:
+ feed.implement(impl)
+
+
+
# XXX 0install fails on non-ascii `name` values
feed.name = context
feed.to_resolve = feed_content.get('packages')
if not feed.to_resolve:
_logger.trace('[%s] No compatible packages', context)
- for impl in feed_content['releases']:
- feed.implement(impl)
+
+
if not feed.to_resolve and not feed.implementations:
_logger.trace('[%s] No releases', context)
diff --git a/sugar_network/db/__init__.py b/sugar_network/db/__init__.py
index 2f22a36..b2ceb67 100644
--- a/sugar_network/db/__init__.py
+++ b/sugar_network/db/__init__.py
@@ -350,8 +350,8 @@ Volume
"""
from sugar_network.db.metadata import \
- indexed_property, stored_property, blob_property, \
- Property, StoredProperty, BlobProperty, IndexedProperty, AggregatedType
+ stored_property, indexed_property, Property, Numeric, Boolean, Dict, \
+ Enum, List, Aggregated, Blob, Localized
from sugar_network.db.index import index_flush_timeout, \
index_flush_threshold, index_write_queue
from sugar_network.db.resource import Resource
diff --git a/sugar_network/db/directory.py b/sugar_network/db/directory.py
index 944f73a..c6957d7 100644
--- a/sugar_network/db/directory.py
+++ b/sugar_network/db/directory.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2013 Aleksey Lim
+# Copyright (C) 2011-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -16,15 +16,12 @@
import os
import shutil
import logging
-from cStringIO import StringIO
from os.path import exists, join
from sugar_network import toolkit
-from sugar_network.toolkit.router import ACL
from sugar_network.db.storage import Storage
-from sugar_network.db.metadata import BlobProperty, Metadata, GUID_PREFIX
-from sugar_network.db.metadata import IndexedProperty, StoredProperty
-from sugar_network.db.metadata import AggregatedType
+from sugar_network.db.metadata import Metadata, Guid
+from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import http, exception, enforce
@@ -36,8 +33,7 @@ _logger = logging.getLogger('db.directory')
class Directory(object):
- def __init__(self, root, resource_class, index_class,
- broadcast=None, seqno=None):
+ def __init__(self, root, resource_class, index_class, seqno=None):
"""
:param index_class:
what class to use to access to indexes, for regular casses
@@ -51,12 +47,10 @@ class Directory(object):
if resource_class.metadata is None:
# Metadata cannot be recreated
resource_class.metadata = Metadata(resource_class)
- resource_class.metadata['guid'] = IndexedProperty('guid',
- slot=0, prefix=GUID_PREFIX, acl=ACL.CREATE | ACL.READ)
+ resource_class.metadata['guid'] = Guid()
self.metadata = resource_class.metadata
self.resource_class = resource_class
- self.broadcast = broadcast or (lambda event: None)
self._index_class = index_class
self._root = root
self._seqno = _SessionSeqno() if seqno is None else seqno
@@ -65,25 +59,6 @@ class Directory(object):
self._open()
- @property
- def mtime(self):
- return self._index.mtime
-
- def checkpoint(self):
- ts = self._index.checkpoint()
- self.broadcast({'event': 'populate', 'mtime': ts})
-
- def path(self, guid, *args):
- record = self._storage.get(guid)
- if not args:
- return record.path()
- prop = args[0]
- if prop in self.metadata and \
- isinstance(self.metadata[prop], BlobProperty):
- return record.blob_path(*args)
- else:
- return record.path(*args)
-
def wipe(self):
self.close()
_logger.debug('Wipe %r directory', self.metadata.name)
@@ -102,7 +77,7 @@ class Directory(object):
"""Flush pending chnages to disk."""
self._index.commit()
- def create(self, props, event=None, setters=False):
+ def create(self, props):
"""Create new document.
If `guid` property is not specified, it will be auto set.
@@ -116,24 +91,12 @@ class Directory(object):
guid = props.get('guid')
if not guid:
guid = props['guid'] = toolkit.uuid()
- if setters:
- # XXX Setters are being proccessed on routes level, but,
- # while creating resources gotten from routes, it is important
- # to call setters as well, e.g., `author` property
- doc = self.resource_class(guid, None, props)
- for key, value in props.items():
- prop = self.metadata.get(key)
- if prop is not None and prop.on_set is not None:
- props[key] = prop.on_set(doc, value)
_logger.debug('Create %s[%s]: %r', self.metadata.name, guid, props)
- post_event = {'event': 'create', 'guid': guid}
- if event:
- post_event.update(event)
- self._index.store(guid, props, self._pre_store, self._post_store,
- post_event)
+ event = {'event': 'create', 'guid': guid}
+ self._index.store(guid, props, self._prestore, self._broadcast, event)
return guid
- def update(self, guid, props, event=None):
+ def update(self, guid, props):
"""Update properties for an existing document.
:param guid:
@@ -143,11 +106,8 @@ class Directory(object):
"""
_logger.debug('Update %s[%s]: %r', self.metadata.name, guid, props)
- post_event = {'event': 'update', 'guid': guid}
- if event:
- post_event.update(event)
- self._index.store(guid, props, self._pre_store, self._post_store,
- post_event)
+ event = {'event': 'update', 'guid': guid}
+ self._index.store(guid, props, self._prestore, self._broadcast, event)
def delete(self, guid):
"""Delete document.
@@ -158,7 +118,7 @@ class Directory(object):
"""
_logger.debug('Delete %s[%s]', self.metadata.name, guid)
event = {'event': 'delete', 'guid': guid}
- self._index.delete(guid, self._post_delete, event)
+ self._index.delete(guid, self._postdelete, guid, event)
def exists(self, guid):
return self._storage.get(guid).consistent
@@ -171,6 +131,9 @@ class Directory(object):
guid, self.metadata.name)
return self.resource_class(guid, record, cached_props)
+ def __getitem__(self, guid):
+ return self.get(guid)
+
def find(self, **kwargs):
mset = self._index.find(**kwargs)
@@ -195,9 +158,9 @@ class Directory(object):
"""
found = False
- migrate = (self.mtime == 0)
+ migrate = (self._index.mtime == 0)
- for guid in self._storage.walk(self.mtime):
+ for guid in self._storage.walk(self._index.mtime):
if not found:
_logger.info('Start populating %r index', self.metadata.name)
found = True
@@ -208,9 +171,7 @@ class Directory(object):
record = self._storage.get(guid)
try:
props = {}
- for name, prop in self.metadata.items():
- if not isinstance(prop, StoredProperty):
- continue
+ for name in self.metadata:
meta = record.get(name)
if meta is not None:
props[name] = meta['value']
@@ -224,33 +185,11 @@ class Directory(object):
if found:
self._save_layout()
self.commit()
- self.checkpoint()
-
- def patch(self, guid, props, accept_language=None):
- if not accept_language:
- accept_language = toolkit.default_lang()
- orig = self.get(guid)
- patch = {}
- for prop, value in (props or {}).items():
- if orig[prop] == value:
- continue
- if isinstance(self.metadata[prop], StoredProperty) and \
- self.metadata[prop].localized:
- if isinstance(value, dict):
- if value == dict([(i, orig[prop].get(i)) for i in value]):
- continue
- elif orig.get(prop, accept_language) == value:
- continue
- elif isinstance(self.metadata[prop], BlobProperty) and \
- isinstance(value, dict) and \
- value.get('digest') == orig[prop].get('digest'):
- continue
- patch[prop] = value
- return patch
def diff(self, seq, exclude_seq=None, **params):
- if exclude_seq is None:
- exclude_seq = []
+ if exclude_seq is not None:
+ for start, end in exclude_seq:
+ seq.exclude(start, end)
if 'group_by' in params:
# Pickup only most recent change
params['order_by'] = '-seqno'
@@ -263,82 +202,30 @@ class Directory(object):
if end:
query += str(end)
documents, __ = self.find(query=query, **params)
-
for doc in documents:
+ yield doc.guid, doc.diff(seq)
- def patch():
- for name, prop in self.metadata.items():
- if name == 'seqno' or prop.acl & ACL.CALC:
- continue
- meta = doc.meta(name)
- if meta is None:
- continue
- seqno = meta.get('seqno')
- if seqno not in seq or seqno in exclude_seq:
- continue
- if isinstance(prop, BlobProperty):
- del meta['seqno']
- else:
- value = meta.get('value')
- if prop.typecast is AggregatedType:
- value_ = {}
- for key, agg in value.items():
- aggseqno = agg.pop('seqno')
- if aggseqno >= start and \
- (not end or aggseqno <= end):
- value_[key] = agg
- value = value_
- meta = {'mtime': meta['mtime'], 'value': value}
- yield name, meta, seqno
-
- yield doc.guid, patch()
-
- def merge(self, guid, diff, shift_seqno=True, op=None, **kwargs):
+ def merge(self, guid, diff):
"""Apply changes for documents."""
- record = self._storage.get(guid)
- seqno = None
- merge = {}
- patch = {}
+ doc = self.resource_class(guid, self._storage.get(guid))
for prop, meta in diff.items():
- orig_meta = record.get(prop)
- if orig_meta is not None and orig_meta['mtime'] >= meta['mtime']:
+ orig_meta = doc.meta(prop)
+ if orig_meta and orig_meta['mtime'] >= meta['mtime']:
continue
- if shift_seqno:
- if not seqno:
- seqno = self._seqno.next()
- meta['seqno'] = seqno
- else:
- meta['seqno'] = (orig_meta or {}).get('seqno') or 0
- meta.update(kwargs)
- if self.metadata.get(prop).typecast is AggregatedType:
- for agg in meta['value'].values():
- agg['seqno'] = meta['seqno']
- if orig_meta:
- orig_meta['value'].update(meta['value'])
- meta['value'] = orig_meta['value']
- merge[prop] = meta
- if op is not None:
- patch[prop] = meta.get('value')
-
- if not merge:
- return seqno, False
-
- if op is not None:
- op(patch)
- for prop, meta in merge.items():
- is_blob = isinstance(self.metadata.get(prop), BlobProperty)
- record.set(prop, cleanup_blob=is_blob, **meta)
-
- if record.consistent:
- props = {}
- if seqno:
- props['seqno'] = seqno
+ if doc.post_seqno is None:
+ doc.post_seqno = self._seqno.next()
+ doc.post(prop, **meta)
+
+ if doc.post_seqno is None:
+ return None, False
+
+ if doc.exists:
# No need in after-merge event, further commit event
- # is enough to avoid events flow on nodes synchronization
- self._index.store(guid, props, self._pre_store, self._post_store)
+ # is enough to avoid increasing events flow
+ self._index.store(guid, doc.props, self._preindex)
- return seqno, True
+ return doc.post_seqno, True
def _open(self):
if not exists(self._root):
@@ -352,63 +239,37 @@ class Directory(object):
self._save_layout()
self._storage = Storage(self._root, self.metadata)
self._index = self._index_class(index_path, self.metadata,
- self._post_commit)
+ self._postcommit)
_logger.debug('Open %r resource', self.resource_class)
- def _pre_store(self, guid, changes, event=None):
- seqno = changes.get('seqno')
- if event is not None and not seqno:
- seqno = changes['seqno'] = self._seqno.next()
+ def _broadcast(self, event):
+ event['resource'] = self.metadata.name
+ this.broadcast(event)
+
+ def _preindex(self, guid, changes):
+ doc = self.resource_class(guid, self._storage.get(guid), changes)
+ for prop in self.metadata:
+ enforce(doc[prop] is not None, 'Empty %r property', prop)
+ return doc.props
+
+ def _prestore(self, guid, changes, event):
+ doc = self.resource_class(guid, self._storage.get(guid))
+ doc.post_seqno = self._seqno.next()
+ for prop in self.metadata.keys():
+ value = changes.get(prop)
+ if value is None:
+ enforce(doc[prop] is not None, 'Empty %r property', prop)
+ else:
+ doc.post(prop, value)
+ return doc.props
- record = self._storage.get(guid)
- existed = record.exists
-
- for name, prop in self.metadata.items():
- value = changes.get(name)
- if isinstance(prop, BlobProperty):
- if isinstance(value, dict):
- record.set(name, seqno=seqno, cleanup_blob=True, **value)
- elif isinstance(value, basestring):
- record.set(name, seqno=seqno, blob=StringIO(value))
- elif isinstance(prop, StoredProperty):
- if value is None:
- enforce(existed or prop.default is not None,
- 'Value is not specified for %r property', name)
- meta = record.get(name)
- if meta is not None:
- value = meta['value']
- changes[name] = prop.default if value is None else value
- else:
- if prop.typecast is AggregatedType:
- for aggvalue in value.values():
- aggvalue['seqno'] = seqno
- if existed:
- value_ = record.get(name)['value']
- value_.update(value)
- value = value_
- elif prop.localized:
- if not isinstance(value, dict):
- value = {toolkit.default_lang(): value}
- if existed and \
- type(value) is dict: # TODO To reset `value`
- meta = record.get(name)
- if meta is not None:
- meta['value'].update(value)
- value = meta['value']
- changes[name] = value
- record.set(name, value=value, seqno=seqno)
-
- def _post_store(self, guid, changes, event=None):
- if event is not None:
- self.broadcast(event)
-
- def _post_delete(self, guid, event):
+ def _postdelete(self, guid, event):
self._storage.delete(guid)
- self.broadcast(event)
+ self._broadcast(event)
- def _post_commit(self):
+ def _postcommit(self):
self._seqno.commit()
- self.broadcast({'event': 'commit', 'mtime': self.mtime})
+ self._broadcast({'event': 'commit', 'mtime': self._index.mtime})
def _save_layout(self):
path = join(self._root, 'layout')
diff --git a/sugar_network/db/files.py b/sugar_network/db/files.py
new file mode 100644
index 0000000..a675ea3
--- /dev/null
+++ b/sugar_network/db/files.py
@@ -0,0 +1,146 @@
+# Copyright (C) 2014 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from sugar_network import toolkit
+from sugar_network.toolkit import http, enforce
+
+
+class Digest(str):
+ pass
+
+
+def post(content, meta=None):
+ # if fileobj is tmp then move files
+ pass
+
+
+def update(digest, meta):
+ pass
+
+
+def get(digest):
+ pass
+
+
+def delete(digest):
+ pass
+
+
+def path(digest):
+ pass
+
+
+
+
+
+
+"""
+
+def diff(volume, in_seq, out_seq=None, exclude_seq=None, layer=None,
+ fetch_blobs=False, ignore_documents=None, **kwargs):
+
+ if 'blob' in meta:
+ blob_path = meta.pop('blob')
+ yield {'guid': guid,
+ 'diff': {prop: meta},
+ 'blob_size': meta['blob_size'],
+ 'blob': toolkit.iter_file(blob_path),
+ }
+ elif fetch_blobs and 'url' in meta:
+ url = meta.pop('url')
+ try:
+ blob = connection.request('GET', url,
+ allow_redirects=True,
+ # We need uncompressed size
+ headers={'Accept-Encoding': ''})
+ except Exception:
+ _logger.exception('Cannot fetch %r for %s:%s:%s',
+ url, resource, guid, prop)
+ is_the_only_seq = False
+ continue
+ yield {'guid': guid,
+ 'diff': {prop: meta},
+ 'blob_size':
+ int(blob.headers['Content-Length']),
+ 'blob': blob.iter_content(toolkit.BUFFER_SIZE),
+ }
+ else:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 'digest': hashlib.sha1(png.getvalue()).hexdigest(),
+
+
+
+
+ if value is None:
+ value = {'blob': None}
+ elif isinstance(value, basestring) or hasattr(value, 'read'):
+ value = _read_blob(request, prop, value)
+ blobs.append(value['blob'])
+ elif isinstance(value, dict):
+ enforce('url' in value or 'blob' in value, 'No bundle')
+ else:
+ raise RuntimeError('Incorrect BLOB value')
+
+def _read_blob(request, prop, value):
+ digest = hashlib.sha1()
+ dst = toolkit.NamedTemporaryFile(delete=False)
+
+ try:
+ if isinstance(value, basestring):
+ digest.update(value)
+ dst.write(value)
+ else:
+ size = request.content_length or sys.maxint
+ while size > 0:
+ chunk = value.read(min(size, toolkit.BUFFER_SIZE))
+ if not chunk:
+ break
+ dst.write(chunk)
+ size -= len(chunk)
+ digest.update(chunk)
+ except Exception:
+ os.unlink(dst.name)
+ raise
+ finally:
+ dst.close()
+
+ if request.prop and request.content_type:
+ mime_type = request.content_type
+ else:
+ mime_type = prop.mime_type
+
+ return {'blob': dst.name,
+ 'digest': digest.hexdigest(),
+ 'mime_type': mime_type,
+ }
+
+)
+"""
diff --git a/sugar_network/db/index.py b/sugar_network/db/index.py
index 7ff43bb..b44bdfb 100644
--- a/sugar_network/db/index.py
+++ b/sugar_network/db/index.py
@@ -22,8 +22,7 @@ from os.path import exists, join
import xapian
-from sugar_network import toolkit
-from sugar_network.db.metadata import IndexedProperty, GUID_PREFIX, LIST_TYPES
+from sugar_network.db.metadata import GUID_PREFIX
from sugar_network.toolkit import Option, coroutine, exception, enforce
@@ -65,7 +64,7 @@ class IndexReader(object):
self._commit_cb = commit_cb
for name, prop in self.metadata.items():
- if isinstance(prop, IndexedProperty):
+ if prop.indexed:
self._props[name] = prop
@property
@@ -73,11 +72,6 @@ class IndexReader(object):
"""UNIX seconds of the last `commit()` call."""
return int(os.stat(self._mtime_path).st_mtime)
- def checkpoint(self):
- ts = time.time()
- os.utime(self._mtime_path, (ts, ts))
- return int(ts)
-
def ensure_open(self):
if not exists(self._mtime_path):
with file(self._mtime_path, 'w'):
@@ -200,8 +194,7 @@ class IndexReader(object):
else:
parser.add_prefix(name, prop.prefix)
parser.add_prefix('', prop.prefix)
- if prop.slot is not None and \
- prop.sortable_serialise is not None:
+ if prop.slot is not None:
value_range = xapian.NumberValueRangeProcessor(
prop.slot, name + ':')
parser.add_valuerangeprocessor(value_range)
@@ -230,9 +223,7 @@ class IndexReader(object):
for needle in value if type(value) in (tuple, list) else [value]:
if needle is None:
continue
- if prop.parse is not None:
- needle = prop.parse(needle)
- needle = next(_fmt_prop_value(prop, needle))
+ needle = prop.decode(needle)
queries.append(xapian.Query(_term(prop.prefix, needle)))
if len(sub_queries) == 1:
all_queries.append(sub_queries[0])
@@ -313,7 +304,7 @@ class IndexReader(object):
query = query[:exact_term.start()] + query[exact_term.end():]
term, __, value = exact_term.groups()
prop = self.metadata.get(term)
- if isinstance(prop, IndexedProperty) and prop.prefix:
+ if prop.indexed and prop.prefix:
props[term] = value
return query
@@ -345,7 +336,7 @@ class IndexWriter(IndexReader):
self.ensure_open()
if pre_cb is not None:
- pre_cb(guid, properties, *args)
+ properties = pre_cb(guid, properties, *args)
_logger.debug('Index %r object: %r', self.metadata.name, properties)
@@ -359,17 +350,10 @@ class IndexWriter(IndexReader):
else properties.get(name, prop.default)
if prop.slot is not None:
- if prop.sortable_serialise is not None:
- slotted_value = xapian.sortable_serialise(
- prop.sortable_serialise(value))
- elif prop.localized:
- slotted_value = toolkit.gettext(value) or ''
- else:
- slotted_value = next(_fmt_prop_value(prop, value))
- doc.add_value(prop.slot, slotted_value)
+ doc.add_value(prop.slot, prop.slotting(value))
if prop.prefix or prop.full_text:
- for value_ in _fmt_prop_value(prop, value):
+ for value_ in prop.encode(value):
if prop.prefix:
if prop.boolean:
doc.add_boolean_term(_term(prop.prefix, value_))
@@ -383,7 +367,7 @@ class IndexWriter(IndexReader):
self._pending_updates += 1
if post_cb is not None:
- post_cb(guid, properties, *args)
+ post_cb(*args)
self._check_for_commit()
@@ -397,7 +381,7 @@ class IndexWriter(IndexReader):
self._pending_updates += 1
if post_cb is not None:
- post_cb(guid, *args)
+ post_cb(*args)
self._check_for_commit()
@@ -433,10 +417,13 @@ class IndexWriter(IndexReader):
self._db.commit()
else:
self._db.flush()
- ts = self.checkpoint() - ts
+
+ checkpoint = time.time()
+ os.utime(self._mtime_path, (checkpoint, checkpoint))
self._pending_updates = 0
- _logger.debug('Commit to %r took %s seconds', self.metadata.name, ts)
+ _logger.debug('Commit to %r took %s seconds',
+ self.metadata.name, checkpoint - ts)
if self._commit_cb is not None:
self._commit_cb()
@@ -461,20 +448,3 @@ class IndexWriter(IndexReader):
def _term(prefix, value):
return _EXACT_PREFIX + prefix + str(value).split('\n')[0][:243]
-
-
-def _fmt_prop_value(prop, value):
-
- def fmt(value):
- if type(value) is unicode:
- yield value.encode('utf8')
- elif isinstance(value, basestring):
- yield value
- elif type(value) in LIST_TYPES:
- for i in value:
- for j in fmt(i):
- yield j
- elif value is not None:
- yield str(value)
-
- return fmt(value if prop.fmt is None else prop.fmt(value))
diff --git a/sugar_network/db/metadata.py b/sugar_network/db/metadata.py
index 55942a7..5282fd1 100644
--- a/sugar_network/db/metadata.py
+++ b/sugar_network/db/metadata.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2013 Aleksey Lim
+# Copyright (C) 2011-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,20 +13,20 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import types
+import xapian
from sugar_network import toolkit
+from sugar_network.db import files
from sugar_network.toolkit.router import ACL
-from sugar_network.toolkit import http, enforce
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import i18n, http, enforce
#: Xapian term prefix for GUID value
GUID_PREFIX = 'I'
-LIST_TYPES = (list, tuple, frozenset, types.GeneratorType)
-
-def indexed_property(property_class=None, *args, **kwargs):
+def stored_property(klass=None, *args, **kwargs):
def getter(func, self):
value = self[func.__name__]
@@ -34,7 +34,7 @@ def indexed_property(property_class=None, *args, **kwargs):
def decorate_setter(func, attr):
attr.prop.setter = lambda self, value: \
- self.set(attr.name, func(self, value))
+ self._set(attr.name, func(self, value))
attr.prop.on_set = func
return attr
@@ -46,20 +46,18 @@ def indexed_property(property_class=None, *args, **kwargs):
# pylint: disable-msg=W0212
attr._is_db_property = True
attr.name = func.__name__
- attr.prop = (property_class or IndexedProperty)(
- attr.name, *args, **kwargs)
+ attr.prop = (klass or Property)(*args, name=attr.name, **kwargs)
attr.prop.on_get = func
return attr
return decorate_getter
-stored_property = lambda ** kwargs: indexed_property(StoredProperty, **kwargs)
-blob_property = lambda ** kwargs: indexed_property(BlobProperty, **kwargs)
-
-
-class AggregatedType(dict):
- pass
+def indexed_property(klass=None, *args, **kwargs):
+ enforce('slot' in kwargs or 'prefix' in kwargs or 'full_text' in kwargs,
+ "None of 'slot', 'prefix' or 'full_text' was specified "
+ 'for indexed property')
+ return stored_property(klass, *args, **kwargs)
class Metadata(dict):
@@ -83,7 +81,6 @@ class Metadata(dict):
for attr in [getattr(cls, i) for i in dir(cls)]:
if not hasattr(attr, '_is_db_property'):
continue
-
prop = attr.prop
if hasattr(prop, 'slot'):
@@ -117,52 +114,73 @@ class Metadata(dict):
class Property(object):
- """Basic class to collect information about document property."""
+ """Collect information about document properties."""
- def __init__(self, name, acl=ACL.PUBLIC, typecast=None,
- parse=None, fmt=None, default=None, sortable_serialise=None):
+ def __init__(self, name=None,
+ slot=None, prefix=None, full_text=False, boolean=False,
+ acl=ACL.PUBLIC, default=None):
"""
:param name:
property name;
:param acl:
access to the property,
might be an ORed composition of `db.ACCESS_*` constants;
- :param typecast:
- cast property value before storing in the system;
- supported values are `None` (strings), `int` (intergers),
- `float` (floats), `bool` (booleans repesented by symbols
- `0` and `1`), a sequence of strings (property value should
- confirm one of values from the sequencei);
- :param parse:
- parse property value from a string;
- :param fmt:
- format property value to a string or a list of strings;
:param default:
- default property value or None;
- :param sortable_serialise:
- cast property value before storing as a srotable value.
+ default property value;
+ :param slot:
+ Xapian document's slot number to add property value to;
+ :param prefix:
+ Xapian search term prefix, if `None`, property is not a term;
+ :param full_text:
+ the property takes part in full-text search;
+ :param boolean:
+ Xapian will use boolean search for this property;
"""
- if typecast is bool:
- if fmt is None:
- fmt = lambda x: '1' if x else '0'
- if parse is None:
- parse = lambda x: str(x).lower() in ('true', '1', 'on', 'yes')
- if sortable_serialise is None and typecast in [int, float, bool]:
- sortable_serialise = typecast
+ enforce(name == 'guid' or slot != 0,
+ "Slot '0' is reserved for internal needs in %r",
+ name)
+ enforce(name == 'guid' or prefix != GUID_PREFIX,
+ 'Prefix %r is reserved for internal needs in %r',
+ GUID_PREFIX, name)
self.setter = None
self.on_get = lambda self, x: x
self.on_set = None
self.name = name
self.acl = acl
- self.typecast = typecast
- self.parse = parse
- self.fmt = fmt
self.default = default
- self.sortable_serialise = sortable_serialise
+ self.indexed = slot is not None or prefix is not None or full_text
+ self.slot = slot
+ self.prefix = prefix
+ self.full_text = full_text
+ self.boolean = boolean
- def assert_access(self, mode):
+ def typecast(self, value):
+ """Convert input values to types stored in the system."""
+ return value
+
+ def reprcast(self, value):
+ """Convert output values before returning out of the system."""
+ return self.default if value is None else value
+
+ def encode(self, value):
+ """Convert stored value to strings capable for indexing."""
+ yield toolkit.ascii(value)
+
+ def decode(self, value):
+ """Make input string capable for indexing."""
+ return toolkit.ascii(value)
+
+ def slotting(self, value):
+ """Convert stored value to xapian.NumberValueRangeProcessor values."""
+ return next(self.encode(value))
+
+ def teardown(self, value):
+ """Cleanup property value on resetting."""
+ pass
+
+ def assert_access(self, mode, value=None):
"""Is access to the property permitted.
If there are no permissions, function should raise
@@ -178,106 +196,255 @@ class Property(object):
ACL.NAMES[mode], self.name)
-class StoredProperty(Property):
- """Property to save only in persistent storage, no index."""
+class Boolean(Property):
- def __init__(self, name, localized=False, typecast=None, fmt=None,
- **kwargs):
- """
- :param: localized:
- property value will be stored per locale;
- :param: **kwargs
- :class:`.Property` arguments
+ def typecast(self, value):
+ if isinstance(value, basestring):
+ return value.lower() in ('true', '1', 'on', 'yes')
+ return bool(value)
- """
- self.localized = localized
+ def encode(self, value):
+ yield '1' if value else '0'
- if localized:
- enforce(typecast is None,
- 'typecast should be None for localized properties')
- enforce(fmt is None,
- 'fmt should be None for localized properties')
- typecast = _localized_typecast
- fmt = _localized_fmt
+ def decode(self, value):
+ return '1' if self.typecast(value) else '0'
- Property.__init__(self, name, typecast=typecast, fmt=fmt, **kwargs)
+ def slotting(self, value):
+ return xapian.sortable_serialise(value)
-class IndexedProperty(StoredProperty):
- """Property which needs to be indexed."""
+class Numeric(Property):
- def __init__(self, name, slot=None, prefix=None, full_text=False,
- boolean=False, **kwargs):
- """
- :param slot:
- Xapian document's slot number to add property value to;
- :param prefix:
- Xapian serach term prefix, if `None`, property is not a term;
- :param full_text:
- property takes part in full-text search;
- :param boolean:
- Xapian will use boolean search for this property;
- :param: **kwargs
- :class:`.StoredProperty` arguments
+ def typecast(self, value):
+ return int(value)
- """
- enforce(name == 'guid' or slot != 0,
- "For %r property, slot '0' is reserved for internal needs",
- name)
- enforce(name == 'guid' or prefix != GUID_PREFIX,
- 'For %r property, prefix %r is reserved for internal needs',
- name, GUID_PREFIX)
- enforce(slot is not None or prefix or full_text,
- 'For %r property, either slot, prefix or full_text '
- 'need to be set',
- name)
- enforce(slot is None or _is_sloted_prop(kwargs.get('typecast')) or
- kwargs.get('sortable_serialise'),
- 'Slot can be set only for properties for str, int, float, '
- 'bool types, or, for list of these types')
+ def encode(self, value):
+ yield str(value)
- StoredProperty.__init__(self, name, **kwargs)
- self.slot = slot
- self.prefix = prefix
- self.full_text = full_text
- self.boolean = boolean
+ def decode(self, value):
+ return str(int(value))
+ def slotting(self, value):
+ return xapian.sortable_serialise(value)
-class BlobProperty(Property):
- """Binary large objects which needs to be fetched alone, no index."""
- def __init__(self, name, acl=ACL.PUBLIC,
- mime_type='application/octet-stream'):
- """
- :param mime_type:
- MIME type for BLOB content;
- by default, MIME type is application/octet-stream;
- :param: **kwargs
- :class:`.Property` arguments
+class List(Property):
- """
- Property.__init__(self, name, acl=acl)
- self.mime_type = mime_type
+ def __init__(self, subtype=None, **kwargs):
+ Property.__init__(self, **kwargs)
+ self._subtype = subtype or Property()
+
+ def typecast(self, value):
+ if value is None:
+ return []
+ if type(value) not in (list, tuple):
+ return [self._subtype.typecast(value)]
+ return [self._subtype.typecast(i) for i in value]
+
+ def encode(self, value):
+ for i in value:
+ for j in self._subtype.encode(i):
+ yield j
+ def decode(self, value):
+ return self._subtype.decode(value)
-def _is_sloted_prop(typecast):
- if typecast in [None, int, float, bool, str]:
- return True
- if type(typecast) in LIST_TYPES:
- if typecast and [i for i in typecast
- if type(i) in [None, int, float, bool, str]]:
- return True
+class Dict(Property):
-def _localized_typecast(value):
- if isinstance(value, dict):
+ def __init__(self, subtype=None, **kwargs):
+ Property.__init__(self, **kwargs)
+ self._subtype = subtype or Property()
+
+ def typecast(self, value):
+ for key, value_ in value.items():
+ value[key] = self._subtype.typecast(value_)
return value
- else:
- return {toolkit.default_lang(): value}
+ def encode(self, items):
+ for i in items.values():
+ for j in self._subtype.encode(i):
+ yield j
+
+
+class Enum(Property):
+
+ def __init__(self, items, **kwargs):
+ enforce(items, 'Enum should not be empty')
+ Property.__init__(self, **kwargs)
+ self._items = items
+ if type(next(iter(items))) in (int, long):
+ self._subtype = Numeric()
+ else:
+ self._subtype = Property()
+
+ def typecast(self, value):
+ value = self._subtype.typecast(value)
+ enforce(value in self._items, ValueError,
+ "Value %r is not in '%s' enum",
+ value, ', '.join([str(i) for i in self._items]))
+ return value
-def _localized_fmt(value):
- if isinstance(value, dict):
- return value.values()
- else:
- return [value]
+ def slotting(self, value):
+ return self._subtype.slotting(value)
+
+
+class Blob(Property):
+
+ def __init__(self, mime_type='application/octet-stream', default='',
+ **kwargs):
+ Property.__init__(self, default=default, **kwargs)
+ self.mime_type = mime_type
+
+ def typecast(self, value):
+ if isinstance(value, toolkit.File):
+ return value.digest
+ if isinstance(value, files.Digest):
+ return value
+
+ enforce(value is None or isinstance(value, basestring) or \
+ isinstance(value, dict) and value or hasattr(value, 'read'),
+ 'Inappropriate blob value')
+
+ if not value:
+ return ''
+
+ if not isinstance(value, dict):
+ return files.post(value, {
+ 'mime_type': this.request.content_type or self.mime_type,
+ }).digest
+
+ digest = this.resource[self.name] if self.name else None
+ if digest:
+ meta = files.get(digest)
+ enforce('digest' not in value or value.pop('digest') == digest,
+ "Inappropriate 'digest' value")
+ enforce(meta.path or 'url' in meta or 'url' in value,
+ 'Blob points to nothing')
+ if 'url' in value and meta.path:
+ files.delete(digest)
+ meta.update(value)
+ value = meta
+ else:
+ enforce('url' in value, 'Blob points to nothing')
+ enforce('digest' in value, "Missed 'digest' value")
+ if 'mime_type' not in value:
+ value['mime_type'] = self.mime_type
+ digest = value.pop('digest')
+
+ files.update(digest, value)
+ return digest
+
+ def reprcast(self, value):
+ if not value:
+ return toolkit.File.AWAY
+ meta = files.get(value)
+ if 'url' not in meta:
+ meta['url'] = '%s/blobs/%s' % (this.request.static_prefix, value)
+ meta['size'] = meta.size
+ meta['mtime'] = meta.mtime
+ meta['digest'] = value
+ return meta
+
+ def teardown(self, value):
+ if value:
+ files.delete(value)
+
+ def assert_access(self, mode, value=None):
+ if mode == ACL.WRITE and not value:
+ mode = ACL.CREATE
+ Property.assert_access(self, mode, value)
+
+
+class Composite(Property):
+ pass
+
+
+class Localized(Composite):
+
+ def typecast(self, value):
+ if isinstance(value, dict):
+ return value
+ return {this.request.accept_language[0]: value}
+
+ def reprcast(self, value):
+ if value is None:
+ return self.default
+ return i18n.decode(value, this.request.accept_language)
+
+ def encode(self, value):
+ for i in value.values():
+ yield toolkit.ascii(i)
+
+ def slotting(self, value):
+ # TODO Multilingual sorting
+ return i18n.decode(value) or ''
+
+
+class Aggregated(Composite):
+
+ def __init__(self, subtype=None, acl=ACL.READ | ACL.INSERT | ACL.REMOVE,
+ **kwargs):
+ enforce(not (acl & (ACL.CREATE | ACL.WRITE)),
+ 'ACL.CREATE|ACL.WRITE not allowed for aggregated properties')
+ Property.__init__(self, acl=acl, default={}, **kwargs)
+ self._subtype = subtype or Property()
+
+ def subtypecast(self, value):
+ return self._subtype.typecast(value)
+
+ def subteardown(self, value):
+ self._subtype.teardown(value)
+
+ def typecast(self, value):
+ return dict(value)
+
+ def encode(self, items):
+ for agg in items.values():
+ if 'value' in agg:
+ for j in self._subtype.encode(agg['value']):
+ yield j
+
+
+class Guid(Property):
+
+ def __init__(self):
+ Property.__init__(self, name='guid', slot=0, prefix=GUID_PREFIX,
+ acl=ACL.CREATE | ACL.READ)
+
+
+class Authors(Dict):
+
+ def typecast(self, value):
+ if type(value) not in (list, tuple):
+ return dict(value)
+ result = {}
+ for order, author in enumerate(value):
+ user = author.pop('guid')
+ author['order'] = order
+ result[user] = author
+ return result
+
+ def reprcast(self, value):
+ result = []
+ for guid, props in sorted(value.items(),
+ cmp=lambda x, y: cmp(x[1]['order'], y[1]['order'])):
+ if 'name' in props:
+ result.append({
+ 'guid': guid,
+ 'name': props['name'],
+ 'role': props['role'],
+ })
+ else:
+ result.append({
+ 'name': guid,
+ 'role': props['role'],
+ })
+ return result
+
+ def encode(self, value):
+ for guid, props in value.items():
+ if 'name' in props:
+ yield props['name']
+ if not (props['role'] & ACL.INSYSTEM):
+ yield guid
diff --git a/sugar_network/db/resource.py b/sugar_network/db/resource.py
index 207824e..2636dca 100644
--- a/sugar_network/db/resource.py
+++ b/sugar_network/db/resource.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2012 Aleksey Lim
+# Copyright (C) 2011-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,10 +13,11 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from sugar_network import toolkit
-from sugar_network.db.metadata import indexed_property
-from sugar_network.db.metadata import StoredProperty, BlobProperty
-from sugar_network.toolkit.router import Blob, ACL
+from sugar_network.db.metadata import indexed_property, Localized
+from sugar_network.db.metadata import Numeric, List, Authors
+from sugar_network.db.metadata import Composite, Aggregated
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit.router import ACL
class Resource(object):
@@ -25,85 +26,69 @@ class Resource(object):
#: `Metadata` object that describes the document
metadata = None
- def __init__(self, guid, record, cached_props=None, request=None):
+ def __init__(self, guid, record, cached_props=None):
self.props = cached_props or {}
self.guid = guid
self.is_new = not bool(guid)
- self._record = record
- self.request = request
- self._modifies = set()
+ self.record = record
+ self._post_seqno = None
@property
- def volume(self):
- return self.request.routes.volume
+ def post_seqno(self):
+ return self._post_seqno
- @property
- def directory(self):
- return self.volume[self.metadata.name]
+ @post_seqno.setter
+ def post_seqno(self, value):
+ if self._post_seqno is None:
+ self._post_seqno = value
+ self.post('seqno', value)
- @indexed_property(slot=1000, prefix='RC', typecast=int, default=0,
- acl=ACL.READ)
- def ctime(self, value):
+ @indexed_property(Numeric, slot=1000, prefix='RS', acl=0)
+ def seqno(self, value):
return value
- @indexed_property(slot=1001, prefix='RM', typecast=int, default=0,
- acl=ACL.READ)
- def mtime(self, value):
+ @indexed_property(Numeric, slot=1001, prefix='RC', default=0, acl=ACL.READ)
+ def ctime(self, value):
return value
- @indexed_property(slot=1002, prefix='RS', typecast=int, default=0, acl=0)
- def seqno(self, value):
+ @indexed_property(Numeric, slot=1002, prefix='RM', default=0, acl=ACL.READ)
+ def mtime(self, value):
return value
- @indexed_property(prefix='RA', typecast=dict, full_text=True, default={},
- fmt=lambda x: _fmt_authors(x), acl=ACL.READ)
+ @indexed_property(Authors, prefix='RA', default={}, full_text=True,
+ acl=ACL.READ)
def author(self, value):
- result = []
- for guid, props in sorted(value.items(),
- cmp=lambda x, y: cmp(x[1]['order'], y[1]['order'])):
- if 'name' in props:
- result.append({
- 'guid': guid,
- 'name': props['name'],
- 'role': props['role'],
- })
- else:
- result.append({
- 'name': guid,
- 'role': props['role'],
- })
- return result
+ return value
- @author.setter
- def author(self, value):
- if type(value) not in (list, tuple):
- return value
- result = {}
- for order, author in enumerate(value):
- user = author.pop('guid')
- author['order'] = order
- result[user] = author
- return result
+ @indexed_property(List, prefix='RL', default=[])
+ def layer(self, value):
+ return value
- @indexed_property(prefix='RL', typecast=[], default=[])
+ @layer.setter
def layer(self, value):
+ orig = self['layer']
+ if 'deleted' in value:
+ if this.request.method != 'POST' and 'deleted' not in orig:
+ self.deleted()
+ elif this.request.method != 'POST' and 'deleted' in orig:
+ self.restored()
return value
- @indexed_property(prefix='RT', full_text=True, default=[], typecast=[])
+ @indexed_property(List, prefix='RT', full_text=True, default=[])
def tags(self, value):
return value
- def path(self, *args):
- if not args:
- return self._record.path()
- prop = args[0]
- if prop in self.metadata and \
- isinstance(self.metadata[prop], BlobProperty):
- return self._record.blob_path(*args)
- else:
- return self._record.path(*args)
-
- def get(self, prop, accept_language=None):
+ @property
+ def exists(self):
+ return self.record is not None and self.record.consistent
+
+ def deleted(self):
+ pass
+
+ def restored(self):
+ pass
+
+ def get(self, prop):
"""Get document's property value.
:param prop:
@@ -113,57 +98,83 @@ class Resource(object):
"""
prop = self.metadata[prop]
-
value = self.props.get(prop.name)
- if value is None and self._record is not None:
- meta = self._record.get(prop.name)
- if isinstance(prop, StoredProperty):
- if meta is not None:
- value = meta.get('value')
- else:
- value = prop.default
+ if value is None and self.record is not None:
+ meta = self.record.get(prop.name)
+ if meta is not None:
+ value = meta.get('value')
else:
- value = meta or Blob()
+ value = prop.default
self.props[prop.name] = value
-
- if value is not None and accept_language:
- if isinstance(prop, StoredProperty) and prop.localized:
- value = toolkit.gettext(value, accept_language)
-
return value
- def properties(self, props, accept_language=None):
+ def properties(self, props):
result = {}
for i in props:
- result[i] = self.get(i, accept_language)
+ result[i] = self.get(i)
return result
def meta(self, prop):
- return self._record.get(prop)
+ if self.record is not None:
+ return self.record.get(prop)
+
+ def diff(self, seq):
+ for name, prop in self.metadata.items():
+ if name == 'seqno' or prop.acl & ACL.CALC:
+ continue
+ meta = self.meta(name)
+ if meta is None:
+ continue
+ seqno = meta.get('seqno')
+ if seqno not in seq:
+ continue
+ value = meta.get('value')
+ if isinstance(prop, Aggregated):
+ value_ = {}
+ for key, agg in value.items():
+ if agg.pop('seqno') in seq:
+ value_[key] = agg
+ value = value_
+ meta = {'mtime': meta['mtime'], 'value': value}
+ yield name, meta, seqno
+
+ def patch(self, props):
+ if not props:
+ return {}
+ patch = {}
+ for prop, value in props.items():
+ if self[prop] == value:
+ continue
+ orig_value = self[prop]
+ if orig_value and isinstance(self.metadata[prop], Localized):
+ for lang, subvalue in value.items():
+ if orig_value.get(lang) != subvalue:
+ break
+ else:
+ continue
+ patch[prop] = value
+ return patch
- def modified(self, prop):
- return prop in self._modifies
+ def post(self, prop, value, **meta):
+ prop = self.metadata[prop]
+ if prop.on_set is not None:
+ value = prop.on_set(self, value)
+ if isinstance(prop, Aggregated):
+ for agg in value.values():
+ agg['seqno'] = self.post_seqno
+ if isinstance(prop, Composite):
+ old_value = self[prop.name]
+ if old_value:
+ old_value.update(value)
+ value = old_value
+ self.record.set(prop.name, value=value, seqno=self.post_seqno, **meta)
+ self.props[prop.name] = value
+
+ def _set(self, prop, value):
+ self.props[prop] = value
def __contains__(self, prop):
- return self.get(prop)
+ return prop in self.props
def __getitem__(self, prop):
return self.get(prop)
-
- def __setitem__(self, prop, value):
- self.props[prop] = value
- self._modifies.add(prop)
-
-
-def _fmt_authors(value):
- if isinstance(value, dict):
- for guid, props in value.items():
- if not isinstance(props, dict):
- yield guid
- else:
- if 'name' in props:
- yield props['name']
- if not (props['role'] & ACL.INSYSTEM):
- yield guid
- else:
- yield value
diff --git a/sugar_network/db/routes.py b/sugar_network/db/routes.py
index 19ad26c..2f8fc69 100644
--- a/sugar_network/db/routes.py
+++ b/sugar_network/db/routes.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2013 Aleksey Lim
+# Copyright (C) 2011-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,21 +13,17 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import os
import re
-import sys
import time
import json
-import types
-import hashlib
import logging
from contextlib import contextmanager
-from os.path import exists
from sugar_network import toolkit
-from sugar_network.db.metadata import AggregatedType
-from sugar_network.db.metadata import BlobProperty, StoredProperty, LIST_TYPES
-from sugar_network.toolkit.router import Blob, ACL, route
+from sugar_network.db import files
+from sugar_network.db.metadata import Aggregated
+from sugar_network.toolkit.router import ACL, route, preroute, fallbackroute
+from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import http, enforce
@@ -38,62 +34,86 @@ _logger = logging.getLogger('db.routes')
class Routes(object):
- def __init__(self, volume):
+ def __init__(self, volume, find_limit=None):
self.volume = volume
+ self._find_limit = find_limit
+ this.volume = self.volume
- @route('POST', [None],
- acl=ACL.AUTH, mime_type='application/json')
+ @preroute
+ def __preroute__(self, op, request, response):
+ this.request = request
+ this.response = response
+
+ @route('POST', [None], acl=ACL.AUTH, mime_type='application/json')
def create(self, request):
- with self._post(request, ACL.CREATE) as (directory, doc):
- event = {}
- self.on_create(request, doc.props, event)
- if 'guid' not in doc.props:
- doc.props['guid'] = toolkit.uuid()
- doc.guid = doc.props['guid']
- directory.create(doc.props, event)
- return doc.guid
+ with self._post(request, ACL.CREATE) as doc:
+ self.on_create(request, doc.props)
+ self.volume[request.resource].create(doc.props)
+ self.after_post(doc)
+ return doc['guid']
@route('GET', [None],
- arguments={'offset': int, 'limit': int, 'reply': ('guid',)},
+ arguments={
+ 'offset': int,
+ 'limit': int,
+ 'layer': [],
+ 'reply': ('guid',),
+ },
mime_type='application/json')
- def find(self, request, reply):
+ def find(self, request, reply, limit, layer):
self._preget(request)
- documents, total = self.volume[request.resource].find(**request)
- result = [self._get_props(i, request, reply) for i in documents]
+ if self._find_limit:
+ if limit <= 0:
+ request['limit'] = self._find_limit
+ elif limit > self._find_limit:
+ _logger.warning('The find limit is restricted to %s',
+ self._find_limit)
+ request['limit'] = self._find_limit
+ if 'deleted' in layer:
+ _logger.warning('Requesting "deleted" layer, will ignore')
+ layer.remove('deleted')
+ documents, total = self.volume[request.resource].find(
+ not_layer='deleted', **request)
+ result = [self._postget(request, i, reply) for i in documents]
return {'total': total, 'result': result}
- @route('GET', [None, None], cmd='exists',
- mime_type='application/json')
+ @route('GET', [None, None], cmd='exists', mime_type='application/json')
def exists(self, request):
directory = self.volume[request.resource]
return directory.exists(request.guid)
- @route('PUT', [None, None],
- acl=ACL.AUTH | ACL.AUTHOR)
+ @route('PUT', [None, None], acl=ACL.AUTH | ACL.AUTHOR)
def update(self, request):
- with self._post(request, ACL.WRITE) as (directory, doc):
+ with self._post(request, ACL.WRITE) as doc:
if not doc.props:
return
- event = {}
- self.on_update(request, doc.props, event)
- directory.update(doc.guid, doc.props, event)
-
- @route('PUT', [None, None, None],
- acl=ACL.AUTH | ACL.AUTHOR)
- def update_prop(self, request, url=None):
- if url:
- value = Blob({'url': url})
- elif request.content is None:
+ self.on_update(request, doc.props)
+ self.volume[request.resource].update(doc.guid, doc.props)
+ self.after_post(doc)
+
+ @route('GET', [None, None], cmd='diff', mime_type='application/json')
+ def diff(self, request):
+ result = {}
+ res = self.volume[request.resource][request.guid]
+ for prop, meta, __ in res.diff(toolkit.Sequence([[0, None]])):
+ result[prop] = meta
+ return result
+
+ @route('PUT', [None, None, None], acl=ACL.AUTH | ACL.AUTHOR)
+ def update_prop(self, request):
+ if request.content is None:
value = request.content_stream
else:
value = request.content
request.content = {request.prop: value}
self.update(request)
- @route('DELETE', [None, None],
- acl=ACL.AUTH | ACL.AUTHOR)
+ @route('DELETE', [None, None], acl=ACL.AUTH | ACL.AUTHOR)
def delete(self, request):
- self.volume[request.resource].delete(request.guid)
+ # Node data should not be deleted immediately
+ # to make master-slave synchronization possible
+ request.content = {'layer': 'deleted'}
+ self.update(request)
@route('GET', [None, None], arguments={'reply': list},
mime_type='application/json')
@@ -101,65 +121,56 @@ class Routes(object):
if not reply:
reply = []
for prop in self.volume[request.resource].metadata.values():
- if prop.acl & ACL.READ and not (prop.acl & ACL.LOCAL):
+ if prop.acl & ACL.READ and not (prop.acl & ACL.LOCAL) and \
+ not isinstance(prop, Aggregated):
reply.append(prop.name)
self._preget(request)
doc = self.volume[request.resource].get(request.guid)
- return self._get_props(doc, request, reply)
+ enforce('deleted' not in doc['layer'], http.NotFound, 'Deleted')
+ return self._postget(request, doc, reply)
@route('GET', [None, None, None], mime_type='application/json')
def get_prop(self, request, response):
- return self._prop_meta(request, response)
+ directory = self.volume[request.resource]
+ doc = directory.get(request.guid)
+
+ prop = directory.metadata[request.prop]
+ prop.assert_access(ACL.READ)
+
+ meta = doc.meta(prop.name) or {}
+ if 'value' in meta:
+ value = _get_prop(doc, prop, meta.pop('value'))
+ enforce(value is not toolkit.File.AWAY, http.NotFound, 'No blob')
+ else:
+ value = prop.default
+
+ response.meta = meta
+ response.last_modified = meta.get('mtime')
+ if isinstance(value, toolkit.File):
+ response.content_length = value.get('size') or 0
+ else:
+ response.content_length = len(json.dumps(value))
+
+ return value
@route('HEAD', [None, None, None])
def get_prop_meta(self, request, response):
- self._prop_meta(request, response)
+ self.get_prop(request, response)
@route('POST', [None, None, None],
acl=ACL.AUTH, mime_type='application/json')
def insert_to_aggprop(self, request):
- content = request.content or {}
- enforce(isinstance(content, dict), http.BadRequest, 'Invalid value')
-
- directory = self.volume[request.resource]
- prop = directory.metadata[request.prop]
+ return self._aggpost(request, ACL.INSERT)
- enforce(prop.typecast is AggregatedType, http.BadRequest,
- 'Property is not aggregated')
- prop.assert_access(ACL.INSERT)
- self.on_aggprop_update(request, prop, None)
-
- if request.principal:
- authors = content['author'] = {}
- self._useradd(authors, request.principal, ACL.ORIGINAL)
- guid = content.pop('guid') if 'guid' in content else toolkit.uuid()
- props = {request.prop: {guid: content}}
- event = {}
- self.on_update(request, props, event)
- directory.update(request.guid, props, event)
-
- return guid
+ @route('PUT', [None, None, None, None],
+ acl=ACL.AUTH, mime_type='application/json')
+ def update_aggprop(self, request):
+ self._aggpost(request, ACL.REPLACE, request.key)
@route('DELETE', [None, None, None, None],
acl=ACL.AUTH, mime_type='application/json')
def remove_from_aggprop(self, request):
- directory = self.volume[request.resource]
- doc = directory.get(request.guid)
- prop = directory.metadata[request.prop]
-
- enforce(prop.typecast is AggregatedType, http.BadRequest,
- 'Property is not aggregated')
- prop.assert_access(ACL.REMOVE)
-
- guid = request.path[3]
- enforce(guid in doc[request.prop], http.NotFound,
- 'No such aggregated item')
- self.on_aggprop_update(request, prop, doc[request.prop][guid])
-
- props = {request.prop: {guid: {}}}
- event = {}
- self.on_update(request, props, event)
- directory.update(request.guid, props, event)
+ self._aggpost(request, ACL.REMOVE, request.key)
@route('PUT', [None, None], cmd='useradd',
arguments={'role': 0}, acl=ACL.AUTH | ACL.AUTHOR)
@@ -180,7 +191,11 @@ class Routes(object):
del authors[user]
directory.update(request.guid, {'author': authors})
- def on_create(self, request, props, event):
+ @fallbackroute('GET', ['blobs'])
+ def blobs(self, request):
+ return files.get(request.guid)
+
+ def on_create(self, request, props):
ts = int(time.time())
props['ctime'] = ts
props['mtime'] = ts
@@ -189,7 +204,7 @@ class Routes(object):
authors = props['author'] = {}
self._useradd(authors, request.principal, ACL.ORIGINAL)
- def on_update(self, request, props, event):
+ def on_update(self, request, props):
props['mtime'] = int(time.time())
def on_aggprop_update(self, request, prop, value):
@@ -200,103 +215,57 @@ class Routes(object):
@contextmanager
def _post(self, request, access):
- content = request.content or {}
- enforce(isinstance(content, dict), 'Invalid value')
-
+ content = request.content
+ enforce(isinstance(content, dict), http.BadRequest, 'Invalid value')
directory = self.volume[request.resource]
- if request.guid:
- doc = directory.get(request.guid)
- else:
- doc = directory.resource_class(None, {})
- doc.request = request
- blobs = []
-
- for name, value in content.items():
- prop = directory.metadata[name]
- if isinstance(prop, BlobProperty):
- prop.assert_access(ACL.CREATE if
- access == ACL.WRITE and doc.meta(name) is None
- else access)
- if value is None:
- value = {'blob': None}
- elif isinstance(value, basestring) or hasattr(value, 'read'):
- value = _read_blob(request, prop, value)
- blobs.append(value['blob'])
- elif isinstance(value, dict):
- enforce('url' in value or 'blob' in value, 'No bundle')
- else:
- raise RuntimeError('Incorrect BLOB value')
- else:
- prop.assert_access(access)
- if prop.localized and isinstance(value, basestring):
- value = {request.accept_language[0]: value}
- try:
- value = _typecast_prop_value(prop.typecast, value)
- except Exception, error:
- error = 'Value %r for %r property is invalid: %s' % \
- (value, prop.name, error)
- toolkit.exception(error)
- raise RuntimeError(error)
- doc[name] = value
if access == ACL.CREATE:
- for name, prop in directory.metadata.items():
- if not isinstance(prop, BlobProperty) and \
- content.get(name) is None and \
- (prop.default is not None or prop.on_set is not None):
- doc[name] = prop.default
- if doc['guid']:
+ doc = directory.resource_class(None, None)
+ if 'guid' in content:
# TODO Temporal security hole, see TODO
- enforce(not self.volume[request.resource].exists(doc['guid']),
- '%s already exists', doc['guid'])
- enforce(_GUID_RE.match(doc['guid']) is not None,
- 'Malformed %s GUID', doc['guid'])
+ guid = content['guid']
+ enforce(not directory.exists(guid),
+ http.BadRequest, '%s already exists', guid)
+ enforce(_GUID_RE.match(guid) is not None,
+ http.BadRequest, 'Malformed %s GUID', guid)
else:
- doc['guid'] = toolkit.uuid()
+ doc.props['guid'] = toolkit.uuid()
+ for name, prop in directory.metadata.items():
+ if name not in content and prop.default is not None:
+ doc.props[name] = prop.default
+ orig = None
+ this.resource = doc
+ else:
+ doc = directory.get(request.guid)
+ orig = directory.get(request.guid)
+ this.resource = orig
- try:
- for name, value in doc.props.items():
+ def teardown(new):
+ if orig is None:
+ return
+ for name, orig_value in orig.props.items():
+ if doc[name] == orig_value:
+ continue
prop = directory.metadata[name]
- if prop.on_set is not None:
- doc.props[name] = prop.on_set(doc, value)
- yield directory, doc
- finally:
- for path in blobs:
- if exists(path):
- os.unlink(path)
+ prop.teardown(doc[name] if new else orig_value)
- self.after_post(doc)
-
- def _prop_meta(self, request, response):
- directory = self.volume[request.resource]
- prop = directory.metadata[request.prop]
- doc = directory.get(request.guid)
- doc.request = request
-
- prop.assert_access(ACL.READ)
-
- if isinstance(prop, StoredProperty):
- meta = doc.meta(prop.name) or {}
- if 'value' in meta:
- del meta['value']
- value = doc.get(prop.name, request.accept_language)
- value = prop.on_get(doc, value)
- response.content_length = len(json.dumps(value))
+ try:
+ for name, value in content.items():
+ prop = directory.metadata[name]
+ prop.assert_access(access, orig[name] if orig else None)
+ try:
+ doc.props[name] = prop.typecast(value)
+ except Exception, error:
+ error = 'Value %r for %r property is invalid: %s' % \
+ (value, prop.name, error)
+ toolkit.exception(error)
+ raise http.BadRequest(error)
+ yield doc
+ except Exception:
+ teardown(True)
+ raise
else:
- value = prop.on_get(doc, doc.meta(prop.name))
- enforce(value is not None and ('blob' in value or 'url' in value),
- http.NotFound, 'BLOB does not exist')
- if 'blob' in value:
- meta = value.copy()
- meta.pop('blob')
- else:
- meta = value
-
- response.meta = meta
- response.last_modified = meta.get('mtime')
- response.content_length = meta.get('blob_size') or 0
-
- return value
+ teardown(False)
def _preget(self, request):
reply = request.get('reply')
@@ -307,27 +276,11 @@ class Routes(object):
for prop in reply:
directory.metadata[prop].assert_access(ACL.READ)
- def _get_props(self, doc, request, props):
+ def _postget(self, request, doc, props):
result = {}
- metadata = doc.metadata
- doc.request = request
for name in props:
- prop = metadata[name]
- value = prop.on_get(doc, doc.get(name, request.accept_language))
- if value is None:
- value = prop.default
- elif isinstance(value, Blob):
- for key in ('mtime', 'seqno', 'blob'):
- if key in value:
- del value[key]
- url = value.get('url')
- if url is None:
- value['url'] = '/'.join([
- request.static_prefix, metadata.name, doc.guid, name,
- ])
- elif url.startswith('/'):
- value['url'] = request.static_prefix + url
- result[name] = value
+ prop = doc.metadata[name]
+ result[name] = _get_prop(doc, prop, doc.get(name))
return result
def _useradd(self, authors, user, role):
@@ -351,83 +304,48 @@ class Routes(object):
props['order'] = 0
authors[user] = props
+ def _aggpost(self, request, acl, aggid=None):
+ doc = this.resource = self.volume[request.resource][request.guid]
+ prop = doc.metadata[request.prop]
+ enforce(isinstance(prop, Aggregated), http.BadRequest,
+ 'Property is not aggregated')
+ prop.assert_access(acl)
-def _read_blob(request, prop, value):
- digest = hashlib.sha1()
- dst = toolkit.NamedTemporaryFile(delete=False)
-
- try:
- if isinstance(value, basestring):
- digest.update(value)
- dst.write(value)
- else:
- size = request.content_length or sys.maxint
- while size > 0:
- chunk = value.read(min(size, toolkit.BUFFER_SIZE))
- if not chunk:
- break
- dst.write(chunk)
- size -= len(chunk)
- digest.update(chunk)
- except Exception:
- os.unlink(dst.name)
- raise
- finally:
- dst.close()
-
- if request.prop and request.content_type:
- mime_type = request.content_type
- else:
- mime_type = prop.mime_type
-
- return {'blob': dst.name,
- 'digest': digest.hexdigest(),
- 'mime_type': mime_type,
- }
-
-
-def _typecast_prop_value(typecast, value):
- if typecast is None:
- return value
- enforce(value is not None, ValueError, 'Property value cannot be None')
-
- def cast(typecast, value):
- if isinstance(typecast, types.FunctionType):
- return typecast(value)
- elif typecast is unicode:
- return value.encode('utf-8')
- elif typecast is str:
- return str(value)
- elif typecast is int:
- return int(value)
- elif typecast is float:
- return float(value)
- elif typecast is bool:
- return bool(value)
- elif typecast is dict:
- return dict(value)
+ if aggid and aggid in doc[request.prop]:
+ aggvalue = doc[request.prop][aggid]
+ self.on_aggprop_update(request, prop, aggvalue)
+ prop.subteardown(aggvalue['value'])
else:
- raise ValueError('Unknown typecast')
+ enforce(acl != ACL.REMOVE, http.NotFound, 'No aggregated item')
+ self.on_aggprop_update(request, prop, None)
+
+ aggvalue = {}
+ if acl != ACL.REMOVE:
+ value = prop.subtypecast(
+ request.content_stream if request.content is None
+ else request.content)
+ if type(value) is tuple:
+ aggid_, value = value
+ enforce(not aggid or aggid == aggid_, http.BadRequest,
+ 'Wrong aggregated id')
+ aggid = aggid_
+ elif not aggid:
+ aggid = toolkit.uuid()
+ aggvalue['value'] = value
+
+ if request.principal:
+ authors = aggvalue['author'] = {}
+ role = ACL.ORIGINAL if request.principal in doc['author'] else 0
+ self._useradd(authors, request.principal, role)
+ props = {request.prop: {aggid: aggvalue}}
+ self.on_update(request, props)
+ self.volume[request.resource].update(request.guid, props)
+
+ return aggid
- if type(typecast) in LIST_TYPES:
- if typecast:
- first = iter(typecast).next()
- else:
- first = None
- if first is not None and type(first) is not type and \
- type(first) not in LIST_TYPES:
- value = cast(type(first), value)
- enforce(value in typecast, ValueError,
- "Value %r is not in '%s' list",
- value, ', '.join([str(i) for i in typecast]))
- else:
- enforce(len(typecast) <= 1, ValueError,
- 'List values should contain values of the same type')
- if type(value) not in LIST_TYPES:
- value = (value,)
- typecast, = typecast or [str]
- value = tuple([_typecast_prop_value(typecast, i) for i in value])
- else:
- value = cast(typecast, value)
+def _get_prop(doc, prop, value):
+ value = prop.reprcast(value)
+ if prop.on_get is not None:
+ value = prop.on_get(doc, value)
return value
diff --git a/sugar_network/db/storage.py b/sugar_network/db/storage.py
index a280a13..72cbcf7 100644
--- a/sugar_network/db/storage.py
+++ b/sugar_network/db/storage.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -20,10 +20,6 @@ import shutil
from os.path import exists, join, isdir, basename
from sugar_network import toolkit
-from sugar_network.toolkit.router import Blob
-
-
-_BLOB_SUFFIX = '.blob'
class Storage(object):
@@ -67,9 +63,8 @@ class Storage(object):
:param mtime:
return entities that were modified after `mtime`
:returns:
- generator returns (guid, properties) typle for all found
- documents; the properties dictionary will contain only
- `StoredProperty` properties
+ generator returns (guid, properties) tuple for all found
+ documents
"""
if not exists(self._root):
@@ -113,9 +108,6 @@ class Record(object):
def path(self, *args):
return join(self._root, *args)
- def blob_path(self, prop, *args):
- return join(self._root, prop + _BLOB_SUFFIX, *args)
-
def invalidate(self):
guid_path = join(self._root, 'guid')
if exists(guid_path):
@@ -126,32 +118,14 @@ class Record(object):
if not exists(path):
return None
with file(path) as f:
- meta = Blob(json.load(f))
- blob_path = path + _BLOB_SUFFIX
- if exists(blob_path):
- meta['blob'] = blob_path
- if 'blob_size' not in meta:
- meta['blob_size'] = os.stat(blob_path).st_size
+ meta = json.load(f)
meta['mtime'] = int(os.stat(path).st_mtime)
return meta
- def set(self, prop, mtime=None, cleanup_blob=False, blob=None, **meta):
+ def set(self, prop, mtime=None, **meta):
if not exists(self._root):
os.makedirs(self._root)
meta_path = join(self._root, prop)
- dst_blob_path = meta_path + _BLOB_SUFFIX
-
- if (cleanup_blob or blob is not None) and exists(dst_blob_path):
- os.unlink(dst_blob_path)
-
- if blob is not None:
- if hasattr(blob, 'read'):
- with toolkit.new_file(dst_blob_path) as f:
- shutil.copyfileobj(blob, f)
- elif blob is not None:
- os.rename(blob, dst_blob_path)
- elif exists(dst_blob_path):
- os.unlink(dst_blob_path)
with toolkit.new_file(meta_path) as f:
json.dump(meta, f)
diff --git a/sugar_network/db/volume.py b/sugar_network/db/volume.py
index 3080eb8..6457b93 100644
--- a/sugar_network/db/volume.py
+++ b/sugar_network/db/volume.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2013 Aleksey Lim
+# Copyright (C) 2011-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -30,11 +30,9 @@ class Volume(dict):
_flush_pool = []
- def __init__(self, root, documents, broadcast=None, index_class=None,
- lazy_open=False):
+ def __init__(self, root, documents, index_class=None):
Volume._flush_pool.append(self)
self.resources = {}
- self.broadcast = broadcast or (lambda event: None)
self._populators = coroutine.Pool()
if index_class is None:
@@ -46,7 +44,8 @@ class Volume(dict):
if not exists(root):
os.makedirs(root)
self._index_class = index_class
- self.seqno = toolkit.Seqno(join(self._root, 'seqno'))
+ self.seqno = toolkit.Seqno(join(self._root, 'db.seqno'))
+ self.releases_seqno = toolkit.Seqno(join(self._root, 'releases.seqno'))
for document in documents:
if isinstance(document, basestring):
@@ -54,20 +53,11 @@ class Volume(dict):
else:
name = document.__name__.lower()
self.resources[name] = document
- if not lazy_open:
- self[name] = self._open(name, document)
@property
def root(self):
return self._root
- def mtime(self, name):
- path = join(self._root, name, 'index', 'mtime')
- if exists(path):
- return int(os.stat(path).st_mtime)
- else:
- return 0
-
def close(self):
"""Close operations with the server."""
_logger.info('Closing documents in %r', self._root)
@@ -75,6 +65,7 @@ class Volume(dict):
while self:
__, cls = self.popitem()
cls.close()
+ self.releases_seqno.commit()
def populate(self):
for cls in self.values():
@@ -92,25 +83,18 @@ class Volume(dict):
if directory is None:
enforce(name in self.resources, http.BadRequest,
'Unknown %r resource', name)
- directory = self[name] = self._open(name, self.resources[name])
- return directory
-
- def _open(self, name, resource):
- if isinstance(resource, basestring):
- mod = __import__(resource, fromlist=[name])
- cls = getattr(mod, name.capitalize())
- else:
- cls = resource
- directory = Directory(join(self._root, name), cls, self._index_class,
- lambda event: self._broadcast(name, event), self.seqno)
- self._populators.spawn(self._populate, directory)
+ resource = self.resources[name]
+ if isinstance(resource, basestring):
+ mod = __import__(resource, fromlist=[name])
+ cls = getattr(mod, name.capitalize())
+ else:
+ cls = resource
+ directory = Directory(join(self._root, name), cls,
+ self._index_class, self.seqno)
+ self._populators.spawn(self._populate, directory)
+ self[name] = directory
return directory
def _populate(self, directory):
for __ in directory.populate():
coroutine.dispatch()
-
- def _broadcast(self, resource, event):
- if self.broadcast is not None:
- event['resource'] = resource
- self.broadcast(event)
diff --git a/sugar_network/model/__init__.py b/sugar_network/model/__init__.py
index 167eb30..7278d10 100644
--- a/sugar_network/model/__init__.py
+++ b/sugar_network/model/__init__.py
@@ -13,34 +13,283 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from sugar_network.model.routes import VolumeRoutes, FrontRoutes
+import os
+import gettext
+import logging
+from os.path import join
+
+import xapian
+
+from sugar_network import toolkit, db
+from sugar_network.db import files
+from sugar_network.model.routes import FrontRoutes
+from sugar_network.toolkit.spec import parse_version, parse_requires
+from sugar_network.toolkit.spec import EMPTY_LICENSE
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit.bundle import Bundle
+from sugar_network.toolkit.router import ACL
+from sugar_network.toolkit import i18n, http, exception, enforce
CONTEXT_TYPES = [
'activity', 'group', 'package', 'book',
]
+
POST_TYPES = [
- 'review', # Review the Context
- 'object', # Object generated by Context application
- 'question', # Q&A request
- 'answer', # Q&A response
- 'issue', # Propblem with the Context
- 'announce', # General announcement
- 'update', # Auto-generated Post for updates within the Context
- 'feedback', # Review parent Post
- 'comment', # Dependent Post
+ 'review', # Review the Context
+ 'object', # Object generated by Context application
+ 'question', # Q&A request
+ 'answer', # Q&A response
+    'issue',         # Problem with the Context
+ 'announce', # General announcement
+ 'notification', # Auto-generated Post for updates within the Context
+ 'feedback', # Review parent Post
+ 'post', # General purpose dependent Post
]
STABILITIES = [
'insecure', 'buggy', 'developer', 'testing', 'stable',
]
-RATINGS = [0, 1, 2, 3, 4, 5]
-
RESOURCES = (
'sugar_network.model.context',
'sugar_network.model.post',
- 'sugar_network.model.release',
'sugar_network.model.report',
'sugar_network.model.user',
)
+
+_logger = logging.getLogger('model')
+
+
+class Rating(db.List):
+
+ def __init__(self, **kwargs):
+ db.List.__init__(self, db.Numeric(), default=[0, 0], **kwargs)
+
+ def slotting(self, value):
+ rating = float(value[1]) / value[0] if value[0] else 0
+ return xapian.sortable_serialise(rating)
+
+
+class Release(object):
+
+ def typecast(self, rel):
+ if this.resource.exists and \
+ 'activity' not in this.resource['type'] and \
+ 'book' not in this.resource['type']:
+ return rel
+ if not isinstance(rel, dict):
+ __, rel = load_bundle(files.post(rel), context=this.request.guid)
+ return rel['spec']['*-*']['bundle'], rel
+
+ def teardown(self, rel):
+ if this.resource.exists and \
+ 'activity' not in this.resource['type'] and \
+ 'book' not in this.resource['type']:
+ return
+ for spec in rel['spec'].values():
+ files.delete(spec['bundle'])
+
+ def encode(self, value):
+ return []
+
+
+def generate_node_stats(volume):
+
+ def calc_rating(**kwargs):
+ rating = [0, 0]
+ alldocs, __ = volume['post'].find(**kwargs)
+ for post in alldocs:
+ if post['vote']:
+ rating[0] += 1
+ rating[1] += post['vote']
+ return rating
+
+ alldocs, __ = volume['context'].find()
+ for context in alldocs:
+ rating = calc_rating(type='review', context=context.guid)
+ volume['context'].update(context.guid, {'rating': rating})
+
+ alldocs, __ = volume['post'].find(topic='')
+ for topic in alldocs:
+ rating = calc_rating(type='feedback', topic=topic.guid)
+ volume['post'].update(topic.guid, {'rating': rating})
+
+
+def populate_context_images(props, svg):
+ if 'guid' in props:
+ from sugar_network.toolkit.sugar import color_svg
+ svg = color_svg(svg, props['guid'])
+ props['artifact_icon'] = files.post(
+ svg,
+ {'mime_type': 'image/svg+xml'},
+ ).digest
+ props['icon'] = files.post(
+ toolkit.svg_to_png(svg, 55, 55),
+ {'mime_type': 'image/png'},
+ ).digest
+ props['logo'] = files.post(
+ toolkit.svg_to_png(svg, 140, 140),
+ {'mime_type': 'image/png'},
+ ).digest
+
+
+def load_bundle(blob, context=None, initial=False, extra_deps=None):
+ contexts = this.volume['context']
+ context_type = None
+ context_meta = None
+ release_notes = None
+ release = {}
+ blob_meta = {}
+
+ try:
+ bundle = Bundle(blob.path, mime_type='application/zip')
+ except Exception:
+ context_type = 'book'
+ if not context:
+ context = this.request['context']
+ release['version'] = this.request['version']
+ if 'license' in this.request:
+ release['license'] = this.request['license']
+ if isinstance(release['license'], basestring):
+ release['license'] = [release['license']]
+ release['spec'] = {'*-*': {
+ 'bundle': blob.digest,
+ }}
+ blob_meta['mime_type'] = this.request.content_type
+ else:
+ context_type = 'activity'
+ unpack_size = 0
+
+ with bundle:
+ changelog = join(bundle.rootdir, 'CHANGELOG')
+ for arcname in bundle.get_names():
+ if changelog and arcname == changelog:
+ with bundle.extractfile(changelog) as f:
+ release_notes = f.read()
+ changelog = None
+ unpack_size += bundle.getmember(arcname).size
+ spec = bundle.get_spec()
+ context_meta = _load_context_metadata(bundle, spec)
+
+ if not context:
+ context = spec['context']
+ else:
+ enforce(context == spec['context'],
+ http.BadRequest, 'Wrong context')
+ if extra_deps:
+ spec.requires.update(parse_requires(extra_deps))
+
+ release['version'] = spec['version']
+ release['stability'] = spec['stability']
+ if spec['license'] is not EMPTY_LICENSE:
+ release['license'] = spec['license']
+ release['requires'] = requires = []
+ for dep_name, dep in spec.requires.items():
+ found = False
+ for version in dep.versions_range():
+ requires.append('%s-%s' % (dep_name, version))
+ found = True
+ if not found:
+ requires.append(dep_name)
+ release['spec'] = {'*-*': {
+ 'bundle': blob.digest,
+ 'commands': spec.commands,
+ 'requires': spec.requires,
+ }}
+ release['unpack_size'] = unpack_size
+ blob_meta['mime_type'] = 'application/vnd.olpc-sugar'
+
+ enforce(context, http.BadRequest, 'Context is not specified')
+ enforce(release['version'], http.BadRequest, 'Version is not specified')
+ release['release'] = parse_version(release['version'])
+ if initial and not contexts.exists(context):
+        enforce(context_meta, http.BadRequest, 'No way to initiate context')
+ context_meta['guid'] = context
+ context_meta['type'] = [context_type]
+ this.call(method='POST', path=['context'], content=context_meta)
+ else:
+ enforce(context_type in contexts[context]['type'],
+ http.BadRequest, 'Inappropriate bundle type')
+ context_obj = contexts[context]
+
+ releases = context_obj['releases']
+ if 'license' not in release:
+ enforce(releases, http.BadRequest, 'License is not specified')
+ recent = max(releases, key=lambda x: releases[x]['release'])
+ release['license'] = releases[recent]['license']
+
+ _logger.debug('Load %r release: %r', context, release)
+
+ if this.request.principal in context_obj['author']:
+ diff = context_obj.patch(context_meta)
+ if diff:
+ this.call(method='PUT', path=['context', context], content=diff)
+ context_obj.props.update(diff)
+ # TRANS: Release notes title
+ title = i18n._('%(name)s %(version)s release')
+ else:
+ # TRANS: 3rd party release notes title
+ title = i18n._('%(name)s %(version)s third-party release')
+ release['announce'] = this.call(method='POST', path=['post'],
+ content={
+ 'context': context,
+ 'type': 'notification',
+ 'title': i18n.encode(title,
+ name=context_obj['title'],
+ version=release['version'],
+ ),
+ 'message': release_notes or '',
+ },
+ content_type='application/json')
+
+ filename = ''.join(i18n.decode(context_obj['title']).split())
+ blob_meta['name'] = '%s-%s' % (filename, release['version'])
+ files.update(blob.digest, blob_meta)
+
+ return context, release
+
+
+def _load_context_metadata(bundle, spec):
+ result = {}
+ for prop in ('homepage', 'mime_types'):
+ if spec[prop]:
+ result[prop] = spec[prop]
+ result['guid'] = spec['context']
+
+ try:
+ icon_file = bundle.extractfile(join(bundle.rootdir, spec['icon']))
+ populate_context_images(result, icon_file.read())
+ icon_file.close()
+ except Exception:
+ exception(_logger, 'Failed to load icon')
+
+ msgids = {}
+ for prop, confname in [
+ ('title', 'name'),
+ ('summary', 'summary'),
+ ('description', 'description'),
+ ]:
+ if spec[confname]:
+ msgids[prop] = spec[confname]
+ result[prop] = {'en': spec[confname]}
+ with toolkit.mkdtemp() as tmpdir:
+ for path in bundle.get_names():
+ if not path.endswith('.mo'):
+ continue
+ mo_path = path.strip(os.sep).split(os.sep)
+ if len(mo_path) != 5 or mo_path[1] != 'locale':
+ continue
+ lang = mo_path[2]
+ bundle.extract(path, tmpdir)
+ try:
+ translation = gettext.translation(spec['context'],
+ join(tmpdir, *mo_path[:2]), [lang])
+ for prop, value in msgids.items():
+ msgstr = translation.gettext(value).decode('utf8')
+ if lang == 'en' or msgstr != value:
+ result[prop][lang] = msgstr
+ except Exception:
+ exception(_logger, 'Gettext failed to read %r', mo_path[-1])
+
+ return result
diff --git a/sugar_network/model/context.py b/sugar_network/model/context.py
index 1763d65..6bac120 100644
--- a/sugar_network/model/context.py
+++ b/sugar_network/model/context.py
@@ -13,42 +13,33 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import hashlib
-from cStringIO import StringIO
-
-from sugar_network import db, model, static, toolkit
-from sugar_network.toolkit.router import Blob, ACL
+from sugar_network import db, model
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit.router import ACL
class Context(db.Resource):
- @db.indexed_property(prefix='T', full_text=True,
- typecast=[model.CONTEXT_TYPES])
+ @db.indexed_property(db.List, prefix='T', full_text=True,
+ subtype=db.Enum(model.CONTEXT_TYPES))
def type(self, value):
return value
@type.setter
def type(self, value):
- if value and 'package' in value and 'common' not in self['layer']:
- self['layer'] = tuple(self['layer']) + ('common',)
- if 'artifact_icon' not in self:
- for name in ('activity', 'book', 'group'):
- if name not in self.type:
- continue
- with file(static.path('images', name + '.svg')) as f:
- Context.populate_images(self, f.read())
- break
- return value
-
- @db.indexed_property(slot=1, prefix='S', full_text=True, localized=True)
+ if 'package' in value and 'common' not in self['layer']:
+ self.post('layer', self['layer'] + ['common'])
+ return value
+
+ @db.indexed_property(db.Localized, slot=1, prefix='S', full_text=True)
def title(self, value):
return value
- @db.indexed_property(prefix='R', full_text=True, localized=True)
+ @db.indexed_property(db.Localized, prefix='R', full_text=True)
def summary(self, value):
return value
- @db.indexed_property(prefix='D', full_text=True, localized=True)
+ @db.indexed_property(db.Localized, prefix='D', full_text=True)
def description(self, value):
return value
@@ -56,72 +47,49 @@ class Context(db.Resource):
def homepage(self, value):
return value
- @db.indexed_property(prefix='Y', default=[], typecast=[], full_text=True)
+ @db.indexed_property(db.List, prefix='Y', default=[], full_text=True)
def mime_types(self, value):
return value
- @db.blob_property(mime_type='image/png')
+ @db.stored_property(db.Blob, mime_type='image/png', default='missing.png')
def icon(self, value):
- if value:
- return value
- if 'package' in self['type']:
- return Blob({
- 'url': '/static/images/package.png',
- 'blob': static.path('images', 'package.png'),
- 'mime_type': 'image/png',
- })
- else:
- return Blob({
- 'url': '/static/images/missing.png',
- 'blob': static.path('images', 'missing.png'),
- 'mime_type': 'image/png',
- })
-
- @db.blob_property(mime_type='image/svg+xml')
+ return value
+
+ @db.stored_property(db.Blob, mime_type='image/svg+xml',
+ default='missing.svg')
def artifact_icon(self, value):
- if value:
- return value
- if 'package' in self['type']:
- return Blob({
- 'url': '/static/images/package.svg',
- 'blob': static.path('images', 'package.svg'),
- 'mime_type': 'image/png',
- })
- else:
- return Blob({
- 'url': '/static/images/missing.svg',
- 'blob': static.path('images', 'missing.svg'),
- 'mime_type': 'image/svg+xml',
- })
-
- @db.blob_property(mime_type='image/png')
+ return value
+
+ @db.stored_property(db.Blob, mime_type='image/png',
+ default='missing-logo.png')
def logo(self, value):
- if value:
- return value
- if 'package' in self['type']:
- return Blob({
- 'url': '/static/images/package-logo.png',
- 'blob': static.path('images', 'package-logo.png'),
- 'mime_type': 'image/png',
- })
- else:
- return Blob({
- 'url': '/static/images/missing-logo.png',
- 'blob': static.path('images', 'missing-.png'),
- 'mime_type': 'image/png',
- })
-
- @db.indexed_property(slot=2, default=0, acl=ACL.READ | ACL.CALC)
- def downloads(self, value):
return value
- @db.indexed_property(slot=3, typecast=[], default=[0, 0],
- sortable_serialise=lambda x: float(x[1]) / x[0] if x[0] else 0,
+ @db.stored_property(db.Aggregated, subtype=db.Blob())
+ def previews(self, value):
+ return value
+
+ @db.stored_property(db.Aggregated, subtype=model.Release(),
+ acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE)
+ def releases(self, value):
+ return value
+
+ @releases.setter
+ def releases(self, value):
+ if value or this.request.method != 'POST':
+ self.invalidate_solutions()
+ return value
+
+ @db.indexed_property(db.Numeric, slot=2, default=0,
acl=ACL.READ | ACL.CALC)
+ def downloads(self, value):
+ return value
+
+ @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.CALC)
def rating(self, value):
return value
- @db.stored_property(typecast=[], default=[], acl=ACL.PUBLIC | ACL.LOCAL)
+ @db.stored_property(db.List, default=[], acl=ACL.PUBLIC | ACL.LOCAL)
def dependencies(self, value):
"""Software dependencies.
@@ -131,32 +99,20 @@ class Context(db.Resource):
"""
return value
- @db.stored_property(typecast=dict, default={},
- acl=ACL.PUBLIC | ACL.LOCAL)
- def aliases(self, value):
- return value
-
- @db.stored_property(typecast=dict, default={}, acl=ACL.PUBLIC | ACL.LOCAL)
- def packages(self, value):
+ @dependencies.setter
+ def dependencies(self, value):
+ if value or this.request.method != 'POST':
+ self.invalidate_solutions()
return value
- @staticmethod
- def populate_images(props, svg):
- if 'guid' in props:
- from sugar_network.toolkit.sugar import color_svg
- svg = color_svg(svg, props['guid'])
+ def deleted(self):
+ self.invalidate_solutions()
- def convert(w, h):
- png = toolkit.svg_to_png(svg, w, h)
- return {'blob': png,
- 'mime_type': 'image/png',
- 'digest': hashlib.sha1(png.getvalue()).hexdigest(),
- }
+ def restored(self):
+ self.invalidate_solutions()
- props['artifact_icon'] = {
- 'blob': StringIO(svg),
- 'mime_type': 'image/svg+xml',
- 'digest': hashlib.sha1(svg).hexdigest(),
- }
- props['icon'] = convert(55, 55)
- props['logo'] = convert(140, 140)
+ def invalidate_solutions(self):
+ this.broadcast({
+ 'event': 'release',
+ 'seqno': this.volume.releases_seqno.next(),
+ })
diff --git a/sugar_network/model/post.py b/sugar_network/model/post.py
index 88c6956..107f354 100644
--- a/sugar_network/model/post.py
+++ b/sugar_network/model/post.py
@@ -13,39 +13,31 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from sugar_network import db, model, static
-from sugar_network.toolkit.router import Blob, ACL
+from sugar_network import db, model
+from sugar_network.toolkit.router import ACL
+from sugar_network.toolkit.coroutine import this
class Post(db.Resource):
- @db.indexed_property(prefix='C',
- acl=ACL.CREATE | ACL.READ)
+ @db.indexed_property(prefix='C', acl=ACL.CREATE | ACL.READ)
def context(self, value):
return value
- @db.indexed_property(prefix='A', default='',
- acl=ACL.CREATE | ACL.READ)
+ @db.indexed_property(prefix='A', default='', acl=ACL.CREATE | ACL.READ)
def topic(self, value):
return value
- @topic.setter
- def topic(self, value):
- if value and not self['context']:
- post = self.volume['post'].get(value)
- self['context'] = post['context']
- return value
-
- @db.indexed_property(prefix='T', typecast=model.POST_TYPES)
+ @db.indexed_property(db.Enum, prefix='T', items=model.POST_TYPES)
def type(self, value):
return value
- @db.indexed_property(slot=1, prefix='N', full_text=True, localized=True,
+ @db.indexed_property(db.Localized, slot=1, prefix='N', full_text=True,
acl=ACL.CREATE | ACL.READ)
def title(self, value):
return value
- @db.indexed_property(prefix='M', full_text=True, localized=True,
+ @db.indexed_property(db.Localized, prefix='M', full_text=True,
acl=ACL.CREATE | ACL.READ)
def message(self, value):
return value
@@ -54,40 +46,45 @@ class Post(db.Resource):
def solution(self, value):
return value
- @db.indexed_property(prefix='V', typecast=model.RATINGS, default=0,
+ @db.indexed_property(db.Enum, prefix='V', items=range(5), default=0,
acl=ACL.CREATE | ACL.READ)
def vote(self, value):
return value
- @db.indexed_property(prefix='D', typecast=db.AggregatedType,
- full_text=True, default=db.AggregatedType(),
- fmt=lambda x: [i.get('message') for i in x.values()],
- acl=ACL.READ | ACL.INSERT | ACL.REMOVE)
+ @vote.setter
+ def vote(self, value):
+ if value:
+ if self['topic']:
+ resource = this.volume['post']
+ guid = self['topic']
+ else:
+ resource = this.volume['context']
+ guid = self['context']
+ orig = resource[guid]['rating']
+ resource.update(guid, {'rating': [orig[0] + 1, orig[1] + value]})
+ return value
+
+ @db.indexed_property(db.Aggregated, prefix='D', full_text=True,
+ subtype=db.Localized())
def comments(self, value):
return value
- @db.blob_property(mime_type='image/png')
+ @db.stored_property(db.Blob, mime_type='image/png',
+ default='missing-logo.png')
def preview(self, value):
- if value:
- return value
- return Blob({
- 'url': '/static/images/missing-logo.png',
- 'blob': static.path('images', 'missing-logo.png'),
- 'mime_type': 'image/png',
- })
-
- @db.blob_property()
- def data(self, value):
+ return value
+
+ @db.stored_property(db.Aggregated, subtype=db.Blob())
+ def attachments(self, value):
if value:
value['name'] = self['title']
return value
- @db.indexed_property(slot=2, default=0, acl=ACL.READ | ACL.CALC)
+ @db.indexed_property(db.Numeric, slot=2, default=0,
+ acl=ACL.READ | ACL.CALC)
def downloads(self, value):
return value
- @db.indexed_property(slot=3, typecast=[], default=[0, 0],
- sortable_serialise=lambda x: float(x[1]) / x[0] if x[0] else 0,
- acl=ACL.READ | ACL.CALC)
+ @db.indexed_property(model.Rating, slot=3, acl=ACL.READ | ACL.CALC)
def rating(self, value):
return value
diff --git a/sugar_network/model/release.py b/sugar_network/model/release.py
deleted file mode 100644
index 46eeaae..0000000
--- a/sugar_network/model/release.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright (C) 2012-2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import xapian
-
-from sugar_network import db, model
-from sugar_network.toolkit.router import ACL
-from sugar_network.toolkit.licenses import GOOD_LICENSES
-from sugar_network.toolkit.spec import parse_version
-
-
-class Release(db.Resource):
-
- @db.indexed_property(prefix='C',
- acl=ACL.CREATE | ACL.READ)
- def context(self, value):
- return value
-
- @context.setter
- def context(self, value):
- if self.request.principal:
- authors = self.volume['context'].get(value)['author']
- if self.request.principal in authors:
- self['layer'] = ('origin',) + tuple(self.layer)
- return value
-
- @db.indexed_property(prefix='L', full_text=True, typecast=[GOOD_LICENSES],
- acl=ACL.CREATE | ACL.READ)
- def license(self, value):
- return value
-
- @db.indexed_property(slot=1, prefix='V', fmt=lambda x: _fmt_version(x),
- acl=ACL.CREATE | ACL.READ)
- def version(self, value):
- return value
-
- @db.indexed_property(prefix='S', default='stabile',
- acl=ACL.CREATE | ACL.READ, typecast=model.STABILITIES)
- def stability(self, value):
- return value
-
- @db.indexed_property(prefix='N', full_text=True, localized=True,
- default='', acl=ACL.CREATE | ACL.READ)
- def notes(self, value):
- return value
-
- @db.indexed_property(prefix='R', typecast=[], default=[],
- acl=ACL.CREATE | ACL.READ)
- def requires(self, value):
- return value
-
- @db.blob_property()
- def data(self, value):
- return value
-
-
-def _fmt_version(version):
- version = parse_version(version)
- # Convert to [(`version`, `modifier`)]
- version = zip(*([iter(version)] * 2))
- major, modifier = version.pop(0)
-
- result = sum([(rank % 10000) * pow(10000, 3 - i)
- for i, rank in enumerate((major + [0, 0])[:3])])
- result += (5 + modifier) * 1000
- if modifier and version:
- minor, __ = version.pop(0)
- if minor:
- result += (minor[0] % 1000)
-
- return xapian.sortable_serialise(result)
diff --git a/sugar_network/model/report.py b/sugar_network/model/report.py
index 84db43a..980c3ff 100644
--- a/sugar_network/model/report.py
+++ b/sugar_network/model/report.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -17,6 +17,19 @@ from sugar_network import db
from sugar_network.toolkit.router import ACL
+class _Solution(db.Property):
+
+ def __init__(self, **kwargs):
+ db.Property.__init__(self, default=[], **kwargs)
+
+ def typecast(self, value):
+ return [] if value is None else list(value)
+
+ def encode(self, value):
+ for i in value:
+ yield i[0]
+
+
class Report(db.Resource):
@db.indexed_property(prefix='C', acl=ACL.CREATE | ACL.READ)
@@ -24,28 +37,27 @@ class Report(db.Resource):
return value
@db.indexed_property(prefix='V', default='', acl=ACL.CREATE | ACL.READ)
- def release(self, value):
+ def version(self, value):
return value
- @release.setter
- def release(self, value):
- if value and 'version' not in self.props and 'release' in value:
- version = self.volume['release'].get(value)
- self['version'] = version['version']
+ @db.indexed_property(prefix='E', full_text=True, acl=ACL.CREATE | ACL.READ)
+ def error(self, value):
return value
- @db.stored_property(default='', acl=ACL.CREATE | ACL.READ)
- def version(self, value):
+ @db.indexed_property(prefix='U', full_text=True, acl=ACL.CREATE | ACL.READ)
+ def uname(self, value):
return value
- @db.stored_property(typecast=dict, default={}, acl=ACL.CREATE | ACL.READ)
- def environ(self, value):
+ @db.indexed_property(db.Dict, prefix='L', full_text=True,
+ acl=ACL.CREATE | ACL.READ)
+ def lsb_release(self, value):
return value
- @db.indexed_property(prefix='T', acl=ACL.CREATE | ACL.READ)
- def error(self, value):
+ @db.indexed_property(_Solution, prefix='S', full_text=True,
+ acl=ACL.CREATE | ACL.READ)
+ def solution(self, value):
return value
- @db.blob_property()
- def data(self, value):
+ @db.stored_property(db.Aggregated, subtype=db.Blob())
+ def logs(self, value):
return value
diff --git a/sugar_network/model/routes.py b/sugar_network/model/routes.py
index c8f8da6..ff0377f 100644
--- a/sugar_network/model/routes.py
+++ b/sugar_network/model/routes.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 Aleksey Lim
+# Copyright (C) 2013-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -14,55 +14,21 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
-import mimetypes
-from os.path import split
-from sugar_network import static, db
-from sugar_network.toolkit.router import route, fallbackroute, Blob, ACL
+from sugar_network.db import files
+from sugar_network.toolkit.router import route
+from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import coroutine
_logger = logging.getLogger('model.routes')
-class VolumeRoutes(db.Routes):
-
- @route('GET', ['context', None], cmd='feed',
- mime_type='application/json')
- def feed(self, request, distro):
- context = self.volume['context'].get(request.guid)
- releases = self.volume['release']
- versions = []
-
- impls, __ = releases.find(context=context.guid,
- not_layer='deleted', **request)
- for impl in impls:
- version = impl.properties([
- 'guid', 'ctime', 'layer', 'author', 'tags',
- 'version', 'stability', 'license', 'notes',
- ])
- if context['dependencies']:
- requires = version.setdefault('requires', {})
- for i in context['dependencies']:
- requires.setdefault(i, {})
- version['data'] = data = impl.meta('data')
- for key in ('mtime', 'seqno', 'blob'):
- if key in data:
- del data[key]
- versions.append(version)
-
- result = {'releases': versions}
- if distro:
- aliases = context['aliases'].get(distro)
- if aliases and 'binary' in aliases:
- result['packages'] = aliases['binary']
- return result
-
-
class FrontRoutes(object):
def __init__(self):
- self._pooler = _Pooler()
+ self._spooler = coroutine.Spooler()
+ this.broadcast = self._broadcast
@route('GET', mime_type='text/html')
def hello(self):
@@ -80,34 +46,14 @@ class FrontRoutes(object):
response.content_length = 0
@route('GET', cmd='subscribe', mime_type='text/event-stream')
- def subscribe(self, request=None, response=None, ping=False, **condition):
+ def subscribe(self, request=None, response=None, **condition):
"""Subscribe to Server-Sent Events."""
if request is not None and not condition:
condition = request
if response is not None:
response.content_type = 'text/event-stream'
response['Cache-Control'] = 'no-cache'
- return self._pull_events(request, ping, condition)
-
- @route('POST', cmd='broadcast',
- mime_type='application/json', acl=ACL.LOCAL)
- def broadcast(self, event=None, request=None):
- if request is not None:
- event = request.content
- _logger.debug('Broadcast event: %r', event)
- self._pooler.notify_all(event)
-
- @fallbackroute('GET', ['static'])
- def get_static(self, request):
- path = static.path(*request.path[1:])
- if not mimetypes.inited:
- mimetypes.init()
- mime_type = mimetypes.types_map.get('.' + path.rsplit('.', 1)[-1])
- return Blob({
- 'blob': path,
- 'filename': split(path)[-1],
- 'mime_type': mime_type,
- })
+ return self._pull_events(request, condition)
@route('GET', ['robots.txt'], mime_type='text/plain')
def robots(self, request, response):
@@ -115,34 +61,29 @@ class FrontRoutes(object):
@route('GET', ['favicon.ico'])
def favicon(self, request, response):
- return Blob({
- 'blob': static.path('favicon.ico'),
- 'mime_type': 'image/x-icon',
- })
-
- def _pull_events(self, request, ping, condition):
- _logger.debug('Start subscription, total=%s', self._pooler.waiters + 1)
-
- if ping:
- # XXX The whole commands' kwargs handling should be redesigned
- if 'ping' in condition:
- condition.pop('ping')
- # If non-greenlet application needs only to initiate
- # a subscription and do not stuck in waiting for the first event,
- # it should pass `ping` argument to return fake event to unblock
- # `GET /?cmd=subscribe` call.
- yield {'event': 'pong'}
-
- rfile = None
+ return files.get('favicon.ico')
+
+ def _broadcast(self, event):
+ _logger.debug('Broadcast event: %r', event)
+ self._spooler.notify_all(event)
+
+ def _pull_events(self, request, condition):
+ _logger.debug('Start %s-nth subscription', self._spooler.waiters + 1)
+
+        # Unblock the `GET /?cmd=subscribe` call so that a non-greenlet
+        # application can initiate a subscription without getting stuck
+        # waiting for the 1st event
+ yield {'event': 'pong'}
+
+ subscription = None
if request is not None:
- rfile = request.content_stream
- if rfile is not None:
- coroutine.spawn(self._waiter_for_closing, rfile)
+ subscription = request.content_stream
+ if subscription is not None:
+ coroutine.spawn(self._wait_for_closing, subscription)
while True:
- event = self._pooler.wait()
+ event = self._spooler.wait()
if not isinstance(event, dict):
- if event is rfile:
+ if event is subscription:
break
else:
continue
@@ -155,48 +96,13 @@ class FrontRoutes(object):
else:
yield event
- _logger.debug('Stop subscription, total=%s', self._pooler.waiters)
+ _logger.debug('Stop %s-nth subscription', self._spooler.waiters)
- def _waiter_for_closing(self, rfile):
+ def _wait_for_closing(self, rfile):
try:
coroutine.select([rfile.fileno()], [], [])
finally:
- self._pooler.notify_all(rfile)
-
-
-class _Pooler(object):
- """One-producer-to-many-consumers events delivery."""
-
- def __init__(self):
- self._value = None
- self._waiters = 0
- self._ready = coroutine.Event()
- self._open = coroutine.Event()
- self._open.set()
-
- @property
- def waiters(self):
- return self._waiters
-
- def wait(self):
- self._open.wait()
- self._waiters += 1
- try:
- self._ready.wait()
- finally:
- self._waiters -= 1
- if self._waiters == 0:
- self._ready.clear()
- self._open.set()
- return self._value
-
- def notify_all(self, value=None):
- self._open.wait()
- if not self._waiters:
- return
- self._open.clear()
- self._value = value
- self._ready.set()
+ self._spooler.notify_all(rfile)
_HELLO_HTML = """\
diff --git a/sugar_network/model/user.py b/sugar_network/model/user.py
index 69d0d42..b44093e 100644
--- a/sugar_network/model/user.py
+++ b/sugar_network/model/user.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -27,10 +27,10 @@ class User(db.Resource):
def location(self, value):
return value
- @db.indexed_property(slot=2, prefix='B', default=0, typecast=int)
+ @db.indexed_property(db.Numeric, slot=2, prefix='B', default=0)
def birthday(self, value):
return value
- @db.blob_property(acl=ACL.CREATE, mime_type='text/plain')
+ @db.stored_property(db.Blob, acl=ACL.CREATE, mime_type='text/plain')
def pubkey(self, value):
return value
diff --git a/sugar_network/node/master.py b/sugar_network/node/master.py
index 19a8cf1..c7c22e0 100644
--- a/sugar_network/node/master.py
+++ b/sugar_network/node/master.py
@@ -20,12 +20,19 @@ from Cookie import SimpleCookie
from os.path import join
from sugar_network import node, toolkit
-from sugar_network.node import sync, stats_user, files, volume, downloads, obs
+from sugar_network.node import sync, stats_user, files, model, downloads, obs
from sugar_network.node.routes import NodeRoutes
from sugar_network.toolkit.router import route, ACL
-from sugar_network.toolkit import http, coroutine, enforce
+from sugar_network.toolkit import http, enforce
+RESOURCES = (
+ 'sugar_network.node.model',
+ 'sugar_network.model.post',
+ 'sugar_network.model.report',
+ 'sugar_network.model.user',
+ )
+
_ONE_WAY_DOCUMENTS = ['report']
_logger = logging.getLogger('node.master')
@@ -33,12 +40,12 @@ _logger = logging.getLogger('node.master')
class MasterRoutes(NodeRoutes):
- def __init__(self, guid, volume_):
- NodeRoutes.__init__(self, guid, volume_)
+ def __init__(self, guid, volume, **kwargs):
+ NodeRoutes.__init__(self, guid, volume=volume, **kwargs)
self._pulls = {
'pull': lambda **kwargs:
- ('diff', None, volume.diff(self.volume,
+ ('diff', None, model.diff(self.volume,
ignore_documents=_ONE_WAY_DOCUMENTS, **kwargs)),
'files_pull': lambda **kwargs:
('files_diff', None, self._files.diff(**kwargs)),
@@ -50,7 +57,7 @@ class MasterRoutes(NodeRoutes):
if node.files_root.value:
self._files = files.Index(node.files_root.value,
- join(volume_.root, 'files.index'), volume_.seqno)
+ join(volume.root, 'files.index'), volume.seqno)
@route('POST', cmd='sync',
acl=ACL.AUTH)
@@ -137,25 +144,13 @@ class MasterRoutes(NodeRoutes):
enforce(node.files_root.value, http.BadRequest, 'Disabled')
aliases = self.volume['context'].get(request.guid)['aliases']
enforce(aliases, http.BadRequest, 'Nothing to presolve')
- return obs.presolve(aliases, node.files_root.value)
+ return obs.presolve(None, aliases, node.files_root.value)
def status(self):
result = NodeRoutes.status(self)
result['level'] = 'master'
return result
- def after_post(self, doc):
- if doc.metadata.name == 'context':
- shift_releases = doc.modified('dependencies')
- if doc.modified('aliases'):
- # TODO Already launched job should be killed
- coroutine.spawn(self._resolve_aliases, doc)
- shift_releases = True
- if shift_releases and not doc.is_new:
- # Shift checkpoint to invalidate solutions
- self.volume['release'].checkpoint()
- NodeRoutes.after_post(self, doc)
-
def _push(self, stream):
reply = []
cookie = _Cookie()
@@ -172,8 +167,7 @@ class MasterRoutes(NodeRoutes):
if self._files is not None:
cookie['files_pull'].include(packet['sequence'])
elif packet.name == 'diff':
- seq, ack_seq = volume.merge(self.volume, packet,
- stats=self._stats)
+ seq, ack_seq = model.merge(self.volume, packet)
reply.append(('ack', {
'ack': ack_seq,
'sequence': seq,
@@ -189,43 +183,6 @@ class MasterRoutes(NodeRoutes):
return reply, cookie
- def _resolve_aliases(self, doc):
- packages = {}
- for repo in obs.get_repos():
- alias = doc['aliases'].get(repo['distributor_id'])
- if not alias:
- continue
- package = packages[repo['name']] = {}
- for kind in ('binary', 'devel'):
- obs_fails = []
- for to_resolve in alias.get(kind) or []:
- if not to_resolve:
- continue
- try:
- for arch in repo['arches']:
- obs.resolve(repo['name'], arch, to_resolve)
- except Exception, error:
- _logger.warning('Failed to resolve %r on %s',
- to_resolve, repo['name'])
- obs_fails.append(str(error))
- continue
- package[kind] = to_resolve
- break
- else:
- package['status'] = '; '.join(obs_fails)
- break
- else:
- if 'binary' in package:
- package['status'] = 'success'
- else:
- package['status'] = 'no packages to resolve'
-
- if packages != doc['packages']:
- self.volume['context'].update(doc.guid, {'packages': packages})
-
- if node.files_root.value:
- obs.presolve(doc['aliases'], node.files_root.value)
-
class _Cookie(list):
diff --git a/sugar_network/node/model.py b/sugar_network/node/model.py
new file mode 100644
index 0000000..2681b2d
--- /dev/null
+++ b/sugar_network/node/model.py
@@ -0,0 +1,177 @@
+# Copyright (C) 2012-2014 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+
+from sugar_network import db, toolkit
+from sugar_network.model import Release, context
+from sugar_network.node import obs
+from sugar_network.toolkit.router import ACL
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import http, coroutine, enforce
+
+
+_logger = logging.getLogger('node.model')
+_presolve_queue = None
+
+
+class _Release(Release):
+
+ _package_cast = db.Dict(db.List())
+
+ def typecast(self, value):
+ if not this.resource.exists or 'package' not in this.resource['type']:
+ return Release.typecast(self, value)
+
+ value = self._package_cast.typecast(value)
+ enforce(value.get('binary'), http.BadRequest, 'No binary aliases')
+
+ distro = this.request.key
+ if distro == '*':
+ lsb_id = None
+ lsb_release = None
+ elif '-' in this.request.key:
+ lsb_id, lsb_release = distro.split('-', 1)
+ else:
+ lsb_id = distro
+ lsb_release = None
+ releases = this.resource.record.get('releases')
+ statuses = releases['value'].setdefault('status', {})
+ to_presolve = []
+
+ for repo in obs.get_repos():
+ if lsb_id and lsb_id != repo['lsb_id'] or \
+ lsb_release and lsb_release != repo['lsb_release']:
+ continue
+ # Make sure there are no alias overrides
+ if not lsb_id and repo['lsb_id'] in releases['value'] or \
+ not lsb_release and repo['name'] in releases['value']:
+ continue
+ pkgs = sum([value.get(i, []) for i in ('binary', 'devel')], [])
+ try:
+ for arch in repo['arches']:
+ obs.resolve(repo['name'], arch, pkgs)
+ except Exception, error:
+ _logger.warning('Failed to resolve %r on %s',
+ pkgs, repo['name'])
+ status = str(error)
+ else:
+ to_presolve.append((repo['name'], pkgs))
+ status = 'success'
+ statuses[repo['name']] = status
+
+ if to_presolve and _presolve_queue is not None:
+ _presolve_queue.put(to_presolve)
+ if statuses:
+ this.resource.record.set('releases', **releases)
+
+ return value
+
+ def teardown(self, value):
+ if 'package' not in this.resource['type']:
+ return Release.typecast(self, value)
+ # TODO Delete presolved files
+
+
+class Context(context.Context):
+
+ @db.stored_property(db.Aggregated, subtype=_Release(),
+ acl=ACL.READ | ACL.INSERT | ACL.REMOVE | ACL.REPLACE)
+ def releases(self, value):
+ return value
+
+ @releases.setter
+ def releases(self, value):
+ if value or this.request.method != 'POST':
+ self.invalidate_solutions()
+ return value
+
+
+def diff(volume, in_seq, out_seq=None, exclude_seq=None, layer=None,
+ ignore_documents=None, **kwargs):
+ if out_seq is None:
+ out_seq = toolkit.Sequence([])
+ is_the_only_seq = not out_seq
+ if layer:
+ if isinstance(layer, basestring):
+ layer = [layer]
+ layer.append('common')
+ try:
+ for resource, directory in volume.items():
+ if ignore_documents and resource in ignore_documents:
+ continue
+ coroutine.dispatch()
+ directory.commit()
+ yield {'resource': resource}
+ for guid, patch in directory.diff(in_seq, exclude_seq,
+ layer=layer if resource == 'context' else None):
+ adiff = {}
+ adiff_seq = toolkit.Sequence()
+ for prop, meta, seqno in patch:
+ adiff[prop] = meta
+ adiff_seq.include(seqno, seqno)
+ if adiff:
+ yield {'guid': guid, 'diff': adiff}
+ out_seq.include(adiff_seq)
+ if is_the_only_seq:
+ # There is only one diff, so, we can stretch it to remove all holes
+ out_seq.stretch()
+ except StopIteration:
+ pass
+
+ yield {'commit': out_seq}
+
+
+def merge(volume, records):
+ directory = None
+ commit_seq = toolkit.Sequence()
+ merged_seq = toolkit.Sequence()
+ synced = False
+
+ for record in records:
+ resource_ = record.get('resource')
+ if resource_:
+ resource = resource_
+ directory = volume[resource_]
+ continue
+
+ if 'guid' in record:
+ guid = record['guid']
+ existed = directory.exists(guid)
+ if existed:
+ layer = directory.get(guid)['layer']
+ seqno, merged = directory.merge(**record)
+ synced = synced or merged
+ if seqno is not None:
+ merged_seq.include(seqno, seqno)
+ continue
+
+ commit = record.get('commit')
+ if commit is not None:
+ commit_seq.include(commit)
+ continue
+
+ if synced:
+ this.broadcast({'event': 'sync'})
+
+ return commit_seq, merged_seq
+
+
+def presolve(presolve_path):
+ global _presolve_queue
+ _presolve_queue = coroutine.Queue()
+
+ for repo_name, pkgs in _presolve_queue:
+ obs.presolve(repo_name, pkgs, presolve_path)
diff --git a/sugar_network/node/obs.py b/sugar_network/node/obs.py
index 1d8a547..6ef9e55 100644
--- a/sugar_network/node/obs.py
+++ b/sugar_network/node/obs.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -37,7 +37,7 @@ obs_presolve_project = Option(
default='presolve')
_logger = logging.getLogger('node.obs')
-_client = None
+_conn = None
_repos = {}
@@ -45,82 +45,68 @@ def get_repos():
return _get_repos(obs_project.value)
-def resolve(repo, arch, names):
- for package in names:
- _request('GET', ['resolve'], params={
- 'project': obs_project.value,
- 'repository': repo,
- 'arch': arch,
- 'package': package,
- })
+def resolve(repo, arch, packages):
+ _request('GET', ['resolve'], params={
+ 'project': obs_project.value,
+ 'repository': repo,
+ 'arch': arch,
+ 'package': packages,
+ })
-def presolve(aliases, dst_path):
+def presolve(repo_name, packages, dst_path):
for repo in _get_repos(obs_presolve_project.value):
- # Presolves make sense only for XO, thus, for Fedora
- alias = aliases.get('Fedora')
- if not alias:
- continue
-
- name_variants = alias['binary']
- while name_variants:
- names = name_variants.pop()
- presolves = []
+ dst_dir = join(dst_path, 'packages',
+ obs_presolve_project.value, repo['name'])
+ result = {}
+ to_download = []
+
+ for package in packages:
+ files = result.setdefault(package, {})
try:
- for arch in repo['arches']:
- for package in names:
- response = _request('GET', ['resolve'], params={
- 'project': obs_presolve_project.value,
- 'repository': repo['name'],
- 'arch': arch,
- 'package': package,
- 'withdeps': '1',
- 'exclude': 'sweets-sugar',
- })
- binaries = []
- for pkg in response.findall('binary'):
- binaries.append(dict(pkg.items()))
- presolves.append((package, binaries))
+ for repo_arch in repo['arches']:
+ response = _request('GET', ['resolve'], params={
+ 'project': obs_presolve_project.value,
+ 'repository': '%(lsb_id)s-%(lsb_release)s' % repo,
+ 'arch': repo_arch,
+ 'package': package,
+ 'withdeps': '1',
+ 'exclude': 'sweets-sugar',
+ })
+ for binary in response.findall('binary'):
+ binary = dict(binary.items())
+ arch = binary.pop('arch')
+ url = binary.pop('url')
+ filename = binary['path'] = basename(url)
+ path = join(dst_dir, filename)
+ if not exists(path):
+ to_download.append((url, path))
+ files.setdefault(arch, []).append(binary)
except Exception:
toolkit.exception(_logger, 'Failed to presolve %r on %s',
- names, repo['name'])
+ packages, repo['name'])
continue
- _logger.debug('Presolve %r on %s', names, repo['name'])
-
- dst_dir = join(dst_path, 'packages',
- obs_presolve_project.value, repo['name'])
- if not exists(dst_dir):
- os.makedirs(dst_dir)
- result = {}
-
- for package, binaries in presolves:
- files = []
- for binary in binaries:
- arch = binary.pop('arch')
- if not files:
- result.setdefault(package, {})[arch] = files
- url = binary.pop('url')
- filename = binary['path'] = basename(url)
- path = join(dst_dir, filename)
- if not exists(path):
- _client.download(url, path)
- files.append(binary)
+ _logger.debug('Presolve %r on %s', packages, repo['name'])
- for package, info in result.items():
- with toolkit.new_file(join(dst_dir, package)) as f:
- json.dump(info, f)
+ if not exists(dst_dir):
+ os.makedirs(dst_dir)
+ for url, path in to_download:
+ _conn.download(url, path)
+ for package, info in result.items():
+ with toolkit.new_file(join(dst_dir, package)) as f:
+ json.dump(info, f)
- return {'repo': repo['name'], 'packages': result}
+ return {'repo': repo['name'], 'packages': result}
def _request(*args, **kwargs):
- global _client
+ global _conn
- if _client is None:
- _client = http.Connection(obs_url.value)
+ if _conn is None:
+ _conn = http.Connection(obs_url.value)
- response = _client.request(*args, allowed=(400, 404), **kwargs)
+ response = _conn.request(*args, allowed=(400, 404), **kwargs)
enforce(response.headers.get('Content-Type') == 'text/xml',
'Irregular OBS response')
reply = ElementTree.fromstring(response.content)
@@ -144,8 +130,10 @@ def _get_repos(project):
for repo in _request('GET', ['build', project]).findall('entry'):
repo = repo.get('name')
arches = _request('GET', ['build', project, repo])
+ lsb_id, lsb_release = repo.split('-', 1)
repos.append({
- 'distributor_id': repo.split('-', 1)[0],
+ 'lsb_id': lsb_id,
+ 'lsb_release': lsb_release,
'name': repo,
'arches': [i.get('name') for i in arches.findall('entry')],
})
diff --git a/sugar_network/node/routes.py b/sugar_network/node/routes.py
index eb48c70..6323cbc 100644
--- a/sugar_network/node/routes.py
+++ b/sugar_network/node/routes.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -15,23 +15,21 @@
import os
import time
-import shutil
-import gettext
import logging
import hashlib
-from contextlib import contextmanager
from ConfigParser import ConfigParser
from os.path import join, isdir, exists
-from sugar_network import node, toolkit, model
-from sugar_network.node import stats_node, stats_user
-from sugar_network.model.context import Context
+from sugar_network import db, node, toolkit, model
+from sugar_network.db import files
+from sugar_network.node import stats_user
# pylint: disable-msg=W0611
from sugar_network.toolkit.router import route, preroute, postroute, ACL
from sugar_network.toolkit.router import Unauthorized, Request, fallbackroute
-from sugar_network.toolkit.spec import EMPTY_LICENSE
from sugar_network.toolkit.spec import parse_requires, ensure_requires
+from sugar_network.toolkit.spec import parse_version
from sugar_network.toolkit.bundle import Bundle
+from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import pylru, http, coroutine, exception, enforce
@@ -41,28 +39,16 @@ _AUTH_POOL_SIZE = 1024
_logger = logging.getLogger('node.routes')
-class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
+class NodeRoutes(db.Routes, model.FrontRoutes):
- def __init__(self, guid, volume):
- model.VolumeRoutes.__init__(self, volume)
+ def __init__(self, guid, **kwargs):
+ db.Routes.__init__(self, **kwargs)
model.FrontRoutes.__init__(self)
- volume.broadcast = self.broadcast
-
self._guid = guid
- self._stats = None
self._auth_pool = pylru.lrucache(_AUTH_POOL_SIZE)
self._auth_config = None
self._auth_config_mtime = 0
- if stats_node.stats_node.value:
- stats_path = join(node.stats_root.value, 'node')
- self._stats = stats_node.Sniffer(volume, stats_path)
- coroutine.spawn(self._commit_stats)
-
- def close(self):
- if self._stats is not None:
- self._stats.suspend()
-
@property
def guid(self):
return self._guid
@@ -80,33 +66,12 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
@route('GET', cmd='status', mime_type='application/json')
def status(self):
- documents = {}
- for name, directory in self.volume.items():
- documents[name] = {'mtime': directory.mtime}
- return {'guid': self._guid, 'resources': documents}
-
- @route('GET', cmd='stats', arguments={
- 'start': int, 'end': int, 'records': int, 'source': list},
- mime_type='application/json')
- def stats(self, start, end, records, source):
- enforce(self._stats is not None, 'Node stats is disabled')
- if not source:
- return {}
-
- if records > _MAX_STAT_RECORDS:
- _logger.debug('Decrease %d stats records number to %d',
- records, _MAX_STAT_RECORDS)
- records = _MAX_STAT_RECORDS
- elif records <= 0:
- records = _MAX_STAT_RECORDS / 10
-
- stats = {}
- for i in source:
- enforce('.' in i, 'Misnamed source')
- db_name, ds_name = i.split('.', 1)
- stats.setdefault(db_name, []).append(ds_name)
-
- return self._stats.report(stats, start, end, records)
+ return {'guid': self._guid,
+ 'seqno': {
+ 'db': self.volume.seqno.value,
+ 'releases': self.volume.releases_seqno.value,
+ },
+ }
@route('POST', ['user'], mime_type='application/json')
def register(self, request):
@@ -149,23 +114,19 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
response.content_type = 'application/json'
return result
- @route('POST', ['release'], cmd='submit',
+ @route('POST', ['context'], cmd='submit',
arguments={'initial': False},
mime_type='application/json', acl=ACL.AUTH)
- def submit_release(self, request, document):
- with toolkit.NamedTemporaryFile() as blob:
- shutil.copyfileobj(request.content_stream, blob)
- blob.flush()
- with load_bundle(self.volume, request, blob.name) as impl:
- impl['data']['blob'] = blob.name
- return impl['guid']
-
- @route('DELETE', [None, None], acl=ACL.AUTH | ACL.AUTHOR)
- def delete(self, request):
- # Servers data should not be deleted immediately
- # to let master-slave synchronization possible
- request.call(method='PUT', path=request.path,
- content={'layer': ['deleted']})
+ def submit_release(self, request, initial):
+ blob = files.post(request.content_stream)
+ try:
+ context, release = model.load_bundle(blob, initial=initial)
+ except Exception:
+ files.delete(blob.digest)
+ raise
+ this.call(method='POST', path=['context', context, 'releases'],
+ content_type='application/json', content=release)
+ return blob.digest
@route('PUT', [None, None], cmd='attach', acl=ACL.AUTH | ACL.SUPERUSER)
def attach(self, request):
@@ -186,43 +147,37 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
@route('GET', ['context', None], cmd='clone',
arguments={'requires': list})
def get_clone(self, request, response):
- return self._get_clone(request, response)
+ deps = {}
+ if 'requires' in request:
+ for i in request['requires']:
+ deps.update(parse_requires(i))
+ version = request.get('version')
+ if version:
+ version = parse_version(version)[0]
+ stability = request.get('stability') or 'stable'
+
+ recent = None
+ context = self.volume['context'][request.guid]
+ for release in context['releases'].values():
+ release = release.get('value')
+ if not release:
+ continue
+ spec = release['spec']['*-*']
+ if version and version != release['release'][0] or \
+ stability and stability != release['stability'] or \
+ deps and not ensure_requires(spec['requires'], deps):
+ continue
+ if recent is None or release['release'] > recent['release']:
+ recent = release
+ enforce(recent, http.NotFound, 'No releases found')
+
+ response.meta = recent
+ return files.get(recent['spec']['*-*']['bundle'])
@route('HEAD', ['context', None], cmd='clone',
arguments={'requires': list})
def head_clone(self, request, response):
- self._get_clone(request, response)
-
- @route('GET', ['context', None], cmd='deplist',
- mime_type='application/json', arguments={'requires': list})
- def deplist(self, request, repo):
- """List of native packages context is dependening on.
-
- Command return only GNU/Linux package names and ignores
- Sugar Network dependencies.
-
- :param repo:
- OBS repository name to get package names for, e.g.,
- Fedora-14
- :returns:
- list of package names
-
- """
- enforce(repo, 'Argument %r should be set', 'repo')
-
- spec = self._solve(request).meta('data')['spec']['*-*']
- common_deps = self.volume['context'].get(request.guid)['dependencies']
- result = []
-
- for package in set(spec.get('requires') or []) | set(common_deps):
- if package == 'sugar':
- continue
- dep = self.volume['context'].get(package)
- enforce(repo in dep['packages'],
- 'No packages for %r on %r', package, repo)
- result.extend(dep['packages'][repo].get('binary') or [])
-
- return result
+ self.get_clone(request, response)
@route('GET', ['user', None], cmd='stats-info',
mime_type='application/json', acl=ACL.AUTH)
@@ -246,15 +201,6 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
for timestamp, values in values:
rrd[name].put(values, timestamp)
- @route('GET', ['report', None], cmd='log', mime_type='text/html')
- def log(self, request):
- # In further implementations, `data` might be a tarball
- data = self.volume[request.resource].get(request.guid).meta('data')
- if data and 'blob' in data:
- return file(data['blob'], 'rb')
- else:
- return ''
-
@preroute
def preroute(self, op, request, response):
if op.acl & ACL.AUTH and request.principal is None:
@@ -277,22 +223,11 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
enforce(self.authorize(request.principal, 'root'), http.Forbidden,
'Operation is permitted only for superusers')
- @postroute
- def postroute(self, request, response, result, error):
- if error is None or isinstance(error, http.StatusPass):
- if self._stats is not None:
- self._stats.log(request)
-
- def on_create(self, request, props, event):
+ def on_create(self, request, props):
if request.resource == 'user':
- with file(props['pubkey']['blob']) as f:
+ with file(files.get(props['pubkey']).path) as f:
props['guid'] = str(hashlib.sha1(f.read()).hexdigest())
- model.VolumeRoutes.on_create(self, request, props, event)
-
- def on_update(self, request, props, event):
- model.VolumeRoutes.on_update(self, request, props, event)
- if 'deleted' in props.get('layer', []):
- event['event'] = 'delete'
+ db.Routes.on_create(self, request, props)
def on_aggprop_update(self, request, prop, value):
if prop.acl & ACL.AUTHOR:
@@ -300,27 +235,6 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
elif value is not None:
self._enforce_authority(request, value.get('author'))
- def find(self, request, reply):
- limit = request.get('limit')
- if limit is None or limit < 0:
- request['limit'] = node.find_limit.value
- elif limit > node.find_limit.value:
- _logger.warning('The find limit is restricted to %s',
- node.find_limit.value)
- request['limit'] = node.find_limit.value
- layer = request.setdefault('layer', [])
- if 'deleted' in layer:
- _logger.warning('Requesting "deleted" layer')
- layer.remove('deleted')
- request.add('not_layer', 'deleted')
- return model.VolumeRoutes.find(self, request, reply)
-
- def get(self, request, reply):
- doc = self.volume[request.resource].get(request.guid)
- enforce('deleted' not in doc['layer'], http.NotFound,
- 'Resource deleted')
- return model.VolumeRoutes.get(self, request, reply)
-
def authenticate(self, auth):
enforce(auth.scheme == 'sugar', http.BadRequest,
'Unknown authentication scheme')
@@ -329,8 +243,9 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
from M2Crypto import RSA
+ pubkey = self.volume['user'][auth.login]['pubkey']
+ key = RSA.load_pub_key(files.get(pubkey).path)
data = hashlib.sha1('%s:%s' % (auth.login, auth.nonce)).digest()
- key = RSA.load_pub_key(self.volume['user'].path(auth.login, 'pubkey'))
enforce(key.verify(data, auth.signature.decode('hex')),
http.Forbidden, 'Bad credentials')
@@ -356,52 +271,6 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
return self._auth_config.get(user, role).strip().lower() in \
('true', 'on', '1', 'allow')
- def _commit_stats(self):
- while True:
- coroutine.sleep(stats_node.stats_node_step.value)
- self._stats.commit()
-
- def _solve(self, request):
- requires = {}
- if 'requires' in request:
- for i in request['requires']:
- requires.update(parse_requires(i))
- request.pop('requires')
- else:
- request['limit'] = 1
-
- if 'stability' not in request:
- request['stability'] = 'stable'
-
- impls, __ = self.volume['release'].find(
- context=request.guid, order_by='-version', not_layer='deleted',
- **request)
- impl = None
- for impl in impls:
- if requires:
- impl_deps = impl.meta('data')['spec']['*-*']['requires']
- if not ensure_requires(impl_deps, requires):
- continue
- break
- else:
- raise http.NotFound('No releases found')
- return impl
-
- def _get_clone(self, request, response):
- impl = self._solve(request)
- result = request.call(method=request.method,
- path=['release', impl['guid'], 'data'],
- response=response)
- response.meta = impl.properties([
- 'guid', 'ctime', 'layer', 'author', 'tags',
- 'context', 'version', 'stability', 'license', 'notes',
- ])
- response.meta['data'] = data = impl.meta('data')
- for key in ('mtime', 'seqno', 'blob'):
- if key in data:
- del data[key]
- return result
-
def _enforce_authority(self, request, author=None):
if request.resource == 'user':
allowed = (request.principal == request.guid)
@@ -412,222 +281,3 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
allowed = request.principal in author
enforce(allowed or self.authorize(request.principal, 'root'),
http.Forbidden, 'Operation is permitted only for authors')
-
-
-def generate_node_stats(volume, path):
- tmp_path = toolkit.mkdtemp()
- new_stats = stats_node.Sniffer(volume, tmp_path, True)
- old_stats = stats_node.Sniffer(volume, path)
-
- def timeline(ts):
- ts = long(ts)
- end = long(time.time())
- step = None
-
- archives = {}
- for rra in stats_node.stats_node_rras.value:
- a_step, a_size = [long(i) for i in rra.split(':')[-2:]]
- a_step *= stats_node.stats_node_step.value
- a_start = end - min(end, a_step * a_size)
- if archives.setdefault(a_start, a_step) > a_step:
- archives[a_start] = a_step
- archives = list(sorted(archives.items()))
-
- try:
- while ts <= end:
- while not step or archives and ts >= archives[0][0]:
- archive_start, step = archives.pop(0)
- ts = max(ts / step * step, archive_start)
- yield ts, ts + step - 1, step
- ts += step
- except GeneratorExit:
- shutil.rmtree(tmp_path, ignore_errors=True)
-
- start = next(volume['context'].find(limit=1, order_by='ctime')[0])['ctime']
- for left, right, step in timeline(start):
- for resource, props in [
- ('user', []),
- ('context', []),
- ('release', ['context']),
- ('report', ['context', 'release']),
- ('post', ['context', 'topic', 'type', 'vote']),
- ]:
- objs, __ = volume[resource].find(
- query='ctime:%s..%s' % (left, right))
- for obj in objs:
- request = Request(method='POST', path=[resource],
- content=obj.properties(props))
- new_stats.log(request)
- for resource, props in [
- ('user', ['layer']),
- ('context', ['layer']),
- ('release', ['layer']),
- ('report', ['layer']),
- ('post', ['layer']),
- ]:
- objs, __ = volume[resource].find(
- query='mtime:%s..%s' % (left, right))
- for obj in objs:
- if 'deleted' in obj['layer']:
- request = Request(method='DELETE',
- path=[resource, obj.guid])
- else:
- request = Request(method='PUT', path=[resource, obj.guid],
- content=obj.properties(props))
- new_stats.log(request)
- downloaded = {}
- for resource in ('context', 'post'):
- stats = old_stats.report(
- {resource: ['downloaded']}, left - step, right, 1)
- if not stats.get(resource):
- continue
- stats = stats[resource][-1][1].get('downloaded')
- if stats:
- downloaded[resource] = {'downloaded': stats}
- new_stats.commit(left + (right - left) / 2, downloaded)
-
- new_stats.commit_objects(True)
- shutil.rmtree(path)
- shutil.move(tmp_path, path)
-
-
-@contextmanager
-def load_bundle(volume, request, bundle_path):
- impl = request.copy()
- initial = False
- if 'initial' in impl:
- initial = impl.pop('initial')
- data = impl.setdefault('data', {})
- contexts = volume['context']
- context = impl.get('context')
- context_meta = None
- impls = volume['release']
-
- try:
- bundle = Bundle(bundle_path, mime_type='application/zip')
- except Exception:
- _logger.debug('Load unrecognized bundle from %r', bundle_path)
- context_type = 'book'
- else:
- _logger.debug('Load Sugar Activity bundle from %r', bundle_path)
- context_type = 'activity'
- unpack_size = 0
-
- with bundle:
- changelog = join(bundle.rootdir, 'CHANGELOG')
- for arcname in bundle.get_names():
- if changelog and arcname == changelog:
- with bundle.extractfile(changelog) as f:
- impl['notes'] = f.read()
- changelog = None
- unpack_size += bundle.getmember(arcname).size
- spec = bundle.get_spec()
- context_meta = _load_context_metadata(bundle, spec)
- if 'requires' in impl:
- spec.requires.update(parse_requires(impl.pop('requires')))
-
- context = impl['context'] = spec['context']
- impl['version'] = spec['version']
- impl['stability'] = spec['stability']
- if spec['license'] is not EMPTY_LICENSE:
- impl['license'] = spec['license']
- requires = impl['requires'] = []
- for dep_name, dep in spec.requires.items():
- found = False
- for version in dep.versions_range():
- requires.append('%s-%s' % (dep_name, version))
- found = True
- if not found:
- requires.append(dep_name)
-
- data['spec'] = {'*-*': {
- 'commands': spec.commands,
- 'requires': spec.requires,
- }}
- data['unpack_size'] = unpack_size
- data['mime_type'] = 'application/vnd.olpc-sugar'
-
- if initial and not contexts.exists(context):
- context_meta['type'] = 'activity'
- request.call(method='POST', path=['context'], content=context_meta)
- context_meta = None
-
- enforce(context, 'Context is not specified')
- enforce('version' in impl, 'Version is not specified')
- enforce(context_type in contexts.get(context)['type'],
- http.BadRequest, 'Inappropriate bundle type')
- if 'license' not in impl:
- existing, total = impls.find(
- context=context, order_by='-version', not_layer='deleted')
- enforce(total, 'License is not specified')
- impl['license'] = next(existing)['license']
-
- digest = hashlib.sha1()
- with file(bundle_path, 'rb') as f:
- while True:
- chunk = f.read(toolkit.BUFFER_SIZE)
- if not chunk:
- break
- digest.update(chunk)
- data['digest'] = digest.hexdigest()
-
- yield impl
-
- existing, __ = impls.find(
- context=context, version=impl['version'], not_layer='deleted')
- if 'url' not in data:
- data['blob'] = bundle_path
- impl['guid'] = request.call(method='POST', path=['release'], content=impl)
- for i in existing:
- layer = i['layer'] + ['deleted']
- impls.update(i.guid, {'layer': layer})
-
- if 'origin' in impls.get(impl['guid']).layer:
- diff = contexts.patch(context, context_meta)
- if diff:
- request.call(method='PUT', path=['context', context], content=diff)
-
-
-def _load_context_metadata(bundle, spec):
- result = {}
- for prop in ('homepage', 'mime_types'):
- if spec[prop]:
- result[prop] = spec[prop]
- result['guid'] = spec['context']
-
- try:
- icon_file = bundle.extractfile(join(bundle.rootdir, spec['icon']))
- Context.populate_images(result, icon_file.read())
- icon_file.close()
- except Exception:
- exception(_logger, 'Failed to load icon')
-
- msgids = {}
- for prop, confname in [
- ('title', 'name'),
- ('summary', 'summary'),
- ('description', 'description'),
- ]:
- if spec[confname]:
- msgids[prop] = spec[confname]
- result[prop] = {'en': spec[confname]}
- with toolkit.mkdtemp() as tmpdir:
- for path in bundle.get_names():
- if not path.endswith('.mo'):
- continue
- mo_path = path.strip(os.sep).split(os.sep)
- if len(mo_path) != 5 or mo_path[1] != 'locale':
- continue
- lang = mo_path[2]
- bundle.extract(path, tmpdir)
- try:
- i18n = gettext.translation(spec['context'],
- join(tmpdir, *mo_path[:2]), [lang])
- for prop, value in msgids.items():
- msgstr = i18n.gettext(value).decode('utf8')
- if lang == 'en' or msgstr != value:
- result[prop][lang] = msgstr
- except Exception:
- exception(_logger, 'Gettext failed to read %r', mo_path[-1])
-
- return result
diff --git a/sugar_network/node/slave.py b/sugar_network/node/slave.py
index 69584be..2d60ea8 100644
--- a/sugar_network/node/slave.py
+++ b/sugar_network/node/slave.py
@@ -23,9 +23,10 @@ from gettext import gettext as _
from sugar_network import node, toolkit
from sugar_network.client import api_url
-from sugar_network.node import sync, stats_user, files, volume
+from sugar_network.node import sync, stats_user, files, model
from sugar_network.node.routes import NodeRoutes
from sugar_network.toolkit.router import route, ACL
+from sugar_network.toolkit.coroutine import this
from sugar_network.toolkit import http, enforce
@@ -55,7 +56,7 @@ class SlaveRoutes(NodeRoutes):
# loosing payload after authentication
conn.get(cmd='logon')
- push = [('diff', None, volume.diff(self.volume, self._push_seq))]
+ push = [('diff', None, model.diff(self.volume, self._push_seq))]
if not no_pull:
push.extend([
('pull', {
@@ -119,7 +120,7 @@ class SlaveRoutes(NodeRoutes):
}, None))
push.append(('files_pull', {'sequence': self._files_seq}, None))
- self.broadcast({
+ this.broadcast({
'event': 'sync_progress',
'progress': _('Reading sneakernet packages'),
})
@@ -129,14 +130,14 @@ class SlaveRoutes(NodeRoutes):
if exists(offline_script):
shutil.copy(offline_script, path)
- self.broadcast({
+ this.broadcast({
'event': 'sync_progress',
'progress': _('Generating new sneakernet package'),
})
diff_seq = toolkit.Sequence([])
push.append(('diff', None,
- volume.diff(self.volume, push_seq, diff_seq)))
+ model.diff(self.volume, push_seq, diff_seq)))
if stats_user.stats_user.value:
push.append(('stats_diff', None, stats_user.diff(stats_seq)))
complete = sync.sneakernet_encode(push, root=path,
@@ -156,7 +157,7 @@ class SlaveRoutes(NodeRoutes):
if packet.name == 'diff':
_logger.debug('Processing %r', packet)
- seq, __ = volume.merge(self.volume, packet, shift_seqno=False)
+ seq, __ = model.merge(self.volume, packet, shift_seqno=False)
if from_master and seq:
self._pull_seq.exclude(seq)
self._pull_seq.commit()
diff --git a/sugar_network/node/stats_node.py b/sugar_network/node/stats_node.py
deleted file mode 100644
index d37819b..0000000
--- a/sugar_network/node/stats_node.py
+++ /dev/null
@@ -1,311 +0,0 @@
-# Copyright (C) 2012-2014 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import time
-import json
-import logging
-from os.path import exists, join
-
-from sugar_network.toolkit.rrd import Rrd
-from sugar_network.toolkit import Option
-
-
-stats_node = Option(
- 'collect unpersonalized node statistics',
- default=False, type_cast=Option.bool_cast, action='store_true')
-
-stats_node_step = Option(
- 'step interval in seconds for node RRD databases',
- default=60 * 5, type_cast=int)
-
-stats_node_rras = Option(
- 'comma separated list of RRAs for node RRD databases',
- default=[
- 'RRA:AVERAGE:0.5:1:864', # 3d with 5min step
- 'RRA:AVERAGE:0.5:288:3660', # 10y with 1d step
- 'RRA:AVERAGE:0.5:2880:366', # 10y with 10d step
- 'RRA:AVERAGE:0.5:8640:122', # 10y with 30d step
- 'RRA:AVERAGE:0.5:105408:10', # 10y with 1y step
- ],
- type_cast=Option.list_cast, type_repr=Option.list_repr)
-
-_HEARTBEAT = 60 * 60 * 24 * 365
-
-_logger = logging.getLogger('node.stats_node')
-
-
-class Sniffer(object):
-
- def __init__(self, volume, path, reset=False):
- _logger.info('Collect node stats in %r', path)
-
- self._volume = volume
- self._rrd = Rrd(path, stats_node_step.value, stats_node_rras.value)
- self._stats = {}
- self._suspend_path = join(path, '.suspend')
- self._last = int(time.time())
-
- for name, cls in _STATS.items():
- stats = self._stats[name] = cls(self._stats, volume)
- fields = {}
- for field in stats:
- fields[field] = 'DS:%s:GAUGE:%s:U:U' % (field, _HEARTBEAT)
- if fields:
- if not reset:
- stats.update(self._rrd[name].last_ds)
- stats['total'] = volume[name].find(limit=0)[1]
- self._rrd[name].fields = fields
-
- if exists(self._suspend_path):
- with file(self._suspend_path) as f:
- suspend = json.load(f)
- for name, stats in self._stats.items():
- if name not in suspend['state']:
- continue
- total_stats, stats.objects = suspend['state'][name]
- stats.update(total_stats)
- if suspend['timestamp'] < int(time.time()):
- self.commit(suspend['timestamp'])
- self.commit_objects()
- os.unlink(self._suspend_path)
-
- def __getitem__(self, name):
- return self._rrd[name]
-
- def suspend(self):
- state = dict([(i, (j, j.objects)) for i, j in self._stats.items()])
- with file(self._suspend_path, 'w') as f:
- json.dump({
- 'timestamp': self._last + stats_node_step.value,
- 'state': state,
- }, f)
-
- def log(self, request):
- if request.cmd or request.resource not in _STATS:
- return
- self._stats[request.resource].log(request)
-
- def commit(self, timestamp=None, extra_values=None):
- _logger.trace('Commit node stats')
-
- for resource, stats in self._stats.items():
- if resource not in self._rrd:
- continue
- values = stats.copy()
- if extra_values and resource in extra_values:
- values.update(extra_values[resource])
- if values:
- self._rrd[resource].put(values, timestamp=timestamp)
-
- self._last = timestamp or int(time.time())
-
- def commit_objects(self, reset=False):
- _logger.trace('Commit object stats')
-
- for resource, stats in self._stats.items():
- old = {
- 'downloads': 0,
- 'rating': (0, 0),
- }
- directory = self._volume[resource]
- for guid, new in stats.objects.items():
- if not directory.exists(guid):
- _logger.warning('Ignore stats for missed %r %s',
- guid, resource)
- continue
- if not reset:
- old = directory.get(guid)
- patch = {}
- if 'downloads' in new:
- patch['downloads'] = new['downloads'] + old['downloads']
- if 'votes' in new:
- votes, rating = old['rating']
- votes += new['votes']
- rating += new['rating']
- patch['rating'] = [votes, rating]
- directory.update(guid, patch)
- stats.objects.clear()
-
- def report(self, dbs, start, end, records):
- result = {}
-
- rdbs = [self._rrd[i] for i in dbs if i in self._rrd]
- if not rdbs:
- return result
-
- if not start:
- start = min([i.first for i in rdbs]) or 0
- if not end:
- end = max([i.last for i in rdbs]) or 0
- resolution = max(1, (end - start) / records)
-
- _logger.debug('Report start=%s end=%s resolution=%s dbs=%r',
- start, end, resolution, dbs)
-
- for rdb in rdbs:
- info = result[rdb.name] = []
- for ts, ds_values in rdb.get(start, end, resolution):
- values = {}
- for name in dbs[rdb.name]:
- values[name] = ds_values.get(name)
- info.append((ts, values))
-
- return result
-
-
-class _Stats(dict):
-
- RESOURCE = None
- PARENTS = []
-
- def __init__(self, stats, volume):
- self.objects = {}
- self._stats = stats
- self._volume = volume
-
- def inc(self, guid, prop, value=1):
- obj = self.objects.setdefault(guid, {})
- if prop not in obj:
- obj[prop] = value
- else:
- obj[prop] += value
-
- def log(self, request):
- pass
-
-
-class _ResourceStats(_Stats):
-
- def __init__(self, stats, volume):
- _Stats.__init__(self, stats, volume)
- self['total'] = 0
-
- def log(self, request):
- if request.method == 'POST':
- self['total'] += 1
- elif request.method == 'DELETE':
- self['total'] -= 1
-
- def parse_context(self, request):
- context = None
- directory = self._volume[self.RESOURCE]
-
- def parse_context(props):
- for prop, resource in self.PARENTS:
- guid = props.get(prop)
- if not guid:
- continue
- if resource == 'context':
- return guid
- else:
- return self._volume[resource].get(guid)['context']
-
- if request.method == 'GET':
- if not request.guid:
- context = parse_context(request)
- elif self.RESOURCE == 'context':
- context = request.guid
- elif self.RESOURCE != 'user':
- context = directory.get(request.guid)['context']
- elif request.method == 'PUT':
- if self.RESOURCE == 'context':
- context = request.guid
- else:
- context = request.content.get('context')
- if not context:
- context = directory.get(request.guid)['context']
- elif request.method == 'POST':
- context = parse_context(request.content)
-
- return context
-
-
-class _UserStats(_ResourceStats):
-
- RESOURCE = 'user'
-
-
-class _ContextStats(_ResourceStats):
-
- RESOURCE = 'context'
-
- def __init__(self, stats, volume):
- _ResourceStats.__init__(self, stats, volume)
- self['released'] = 0
- self['failed'] = 0
- self['downloaded'] = 0
-
-
-class _ReleaseStats(_Stats):
-
- RESOURCE = 'release'
- PARENTS = [('context', 'context')]
-
- def log(self, request):
- if request.method == 'GET':
- if request.prop == 'data':
- context = self._volume[self.RESOURCE].get(request.guid)
- self._stats['context'].inc(context.context, 'downloads')
- self._stats['context']['downloaded'] += 1
- elif request.method == 'POST':
- self._stats['context']['released'] += 1
-
-
-class _ReportStats(_Stats):
-
- RESOURCE = 'report'
- PARENTS = [('context', 'context'), ('release', 'release')]
-
- def log(self, request):
- if request.method == 'POST':
- self._stats['context']['failed'] += 1
-
-
-class _PostStats(_ResourceStats):
-
- RESOURCE = 'post'
- PARENTS = [('context', 'context'), ('topic', 'post')]
-
- def __init__(self, stats, volume):
- _ResourceStats.__init__(self, stats, volume)
- self['downloaded'] = 0
-
- def log(self, request):
- _ResourceStats.log(self, request)
-
- if request.method == 'POST':
- stats = None
- if request.content['type'] == 'review':
- stats = self._stats['context']
- guid = request.content['context']
- elif request.content['type'] == 'feedback':
- stats = self._stats['post']
- guid = request.content['topic']
- if stats:
- stats.inc(guid, 'votes')
- stats.inc(guid, 'rating', request.content.get('vote') or 0)
-
- elif request.method == 'GET' and request.prop == 'data':
- self.inc(request.guid, 'downloads')
- self['downloaded'] += 1
-
-
-_STATS = {_UserStats.RESOURCE: _UserStats,
- _ContextStats.RESOURCE: _ContextStats,
- _ReleaseStats.RESOURCE: _ReleaseStats,
- _ReportStats.RESOURCE: _ReportStats,
- _PostStats.RESOURCE: _PostStats,
- }
diff --git a/sugar_network/node/sync.py b/sugar_network/node/sync.py
index b0a20bf..f5b946c 100644
--- a/sugar_network/node/sync.py
+++ b/sugar_network/node/sync.py
@@ -199,7 +199,7 @@ def _encode(limit, packets, download_blobs, header, status):
pos = (yield chunk) or 0
blob_size -= len(chunk)
enforce(blob_size == 0, EOFError,
- 'Blob size is not the same as declared')
+ 'File size is not the same as declared')
record = next(content)
except StopIteration:
diff --git a/sugar_network/node/volume.py b/sugar_network/node/volume.py
deleted file mode 100644
index 0c254f7..0000000
--- a/sugar_network/node/volume.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# Copyright (C) 2012-2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import logging
-
-from sugar_network import toolkit
-from sugar_network.toolkit.router import Request
-from sugar_network.toolkit import http, coroutine, enforce
-
-
-# Apply node level layer for these documents
-_LIMITED_RESOURCES = ('context', 'release')
-
-_logger = logging.getLogger('node.volume')
-
-
-def diff(volume, in_seq, out_seq=None, exclude_seq=None, layer=None,
- fetch_blobs=False, ignore_documents=None, **kwargs):
- connection = http.Connection()
- if out_seq is None:
- out_seq = toolkit.Sequence([])
- is_the_only_seq = not out_seq
- if layer:
- if isinstance(layer, basestring):
- layer = [layer]
- layer.append('common')
- try:
- for resource, directory in volume.items():
- if ignore_documents and resource in ignore_documents:
- continue
- coroutine.dispatch()
- directory.commit()
- yield {'resource': resource}
- for guid, patch in directory.diff(in_seq, exclude_seq,
- layer=layer if resource in _LIMITED_RESOURCES else None):
- adiff = {}
- adiff_seq = toolkit.Sequence()
- for prop, meta, seqno in patch:
- if 'blob' in meta:
- blob_path = meta.pop('blob')
- yield {'guid': guid,
- 'diff': {prop: meta},
- 'blob_size': meta['blob_size'],
- 'blob': toolkit.iter_file(blob_path),
- }
- elif fetch_blobs and 'url' in meta:
- url = meta.pop('url')
- try:
- blob = connection.request('GET', url,
- allow_redirects=True,
- # We need uncompressed size
- headers={'Accept-Encoding': ''})
- except Exception:
- _logger.exception('Cannot fetch %r for %s:%s:%s',
- url, resource, guid, prop)
- is_the_only_seq = False
- continue
- yield {'guid': guid,
- 'diff': {prop: meta},
- 'blob_size':
- int(blob.headers['Content-Length']),
- 'blob': blob.iter_content(toolkit.BUFFER_SIZE),
- }
- else:
- adiff[prop] = meta
- adiff_seq.include(seqno, seqno)
- if adiff:
- yield {'guid': guid, 'diff': adiff}
- out_seq.include(adiff_seq)
- if is_the_only_seq:
- # There is only one diff, so, we can stretch it to remove all holes
- out_seq.stretch()
- except StopIteration:
- pass
-
- yield {'commit': out_seq}
-
-
-def merge(volume, records, shift_seqno=True, stats=None):
- resource = None
- directory = None
- commit_seq = toolkit.Sequence()
- merged_seq = toolkit.Sequence()
- synced = False
-
- for record in records:
- resource_ = record.get('resource')
- if resource_:
- resource = resource_
- directory = volume[resource_]
- continue
-
- if 'guid' in record:
- guid = record['guid']
- layer = []
- existed = directory.exists(guid)
- if existed:
- layer = directory.get(guid)['layer']
-
- def update_stats(upd):
- method = 'PUT' if existed else 'POST'
- if ('deleted' in layer) != ('deleted' in upd.get('layer', [])):
- if 'deleted' in layer:
- # TODO
- enforce(not 'supported yet')
- else:
- method = 'DELETE'
- stats.log(Request(
- method=method,
- path=[resource, guid],
- content=upd,
- ))
-
- if stats is not None:
- record['op'] = update_stats
- seqno, merged = directory.merge(shift_seqno=shift_seqno, **record)
- synced = synced or merged
- if seqno is not None:
- merged_seq.include(seqno, seqno)
- continue
-
- commit = record.get('commit')
- if commit is not None:
- commit_seq.include(commit)
- continue
-
- if synced:
- volume.broadcast({'event': 'sync'})
-
- return commit_seq, merged_seq
diff --git a/sugar_network/static/httpdocs/favicon.ico b/sugar_network/static/httpdocs/favicon.ico
deleted file mode 100644
index 80e42ba..0000000
--- a/sugar_network/static/httpdocs/favicon.ico
+++ /dev/null
Binary files differ
diff --git a/sugar_network/static/httpdocs/images/activity.svg b/sugar_network/static/httpdocs/images/activity.svg
deleted file mode 100644
index c5302fd..0000000
--- a/sugar_network/static/httpdocs/images/activity.svg
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE svg PUBLIC '-//W3C//DTD SVG 1.1//EN' 'http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd' [
- <!ENTITY stroke_color "#ffffff">
- <!ENTITY fill_color "#000000">
-]>
-<svg
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:cc="http://creativecommons.org/ns#"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
- xmlns:svg="http://www.w3.org/2000/svg"
- xmlns="http://www.w3.org/2000/svg"
- xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
- xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
- enable-background="new 0 0 55.125 55"
- height="55px"
- id="Layer_1"
- version="1.1"
- viewBox="0 0 55.125 55"
- width="55.125px"
- x="0px"
- xml:space="preserve"
- y="0px"
- inkscape:version="0.48.3.1 r9886"
- sodipodi:docname="activities.svg"><metadata
- id="metadata3043"><rdf:RDF><cc:Work
- rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
- rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
- id="defs3041" /><sodipodi:namedview
- pagecolor="#ffff5f"
- bordercolor="#666666"
- borderopacity="1"
- objecttolerance="10"
- gridtolerance="10"
- guidetolerance="10"
- inkscape:pageopacity="1"
- inkscape:pageshadow="2"
- inkscape:window-width="1364"
- inkscape:window-height="725"
- id="namedview3039"
- showgrid="true"
- inkscape:zoom="8.165204"
- inkscape:cx="39.036816"
- inkscape:cy="27.452329"
- inkscape:window-x="0"
- inkscape:window-y="0"
- inkscape:window-maximized="0"
- inkscape:current-layer="g3049"><inkscape:grid
- type="xygrid"
- id="grid3045"
- empspacing="5"
- visible="true"
- enabled="true"
- snapvisiblegridlinesonly="true" /></sodipodi:namedview><g
- style="display:inline"
- inkscape:label="orig"
- id="g3049"
- inkscape:groupmode="layer"><rect
- style="fill:&stroke_color;"
- id="rect3006"
- width="51"
- height="51"
- x="2"
- y="2"
- ry="6.1807232" /><path
- style="fill:&fill_color;"
- inkscape:connector-curvature="0"
- d="m 40.198922,16.862795 q 0,-0.85783 -0.600131,-1.458322 -0.600147,-0.600492 -1.457476,-0.600492 -0.85733,0 -1.457463,0.600492 -0.600136,0.600492 -0.600136,1.458322 0,0.85784 0.600136,1.458331 0.600133,0.600487 1.457463,0.600487 0.857329,0 1.457476,-0.600487 0.600131,-0.600491 0.600131,-1.458331 z M 45,10.686356 q 0,5.340075 -1.618229,9.232518 -1.61823,3.89245 -5.433358,7.731302 -1.736116,1.715676 -4.179526,3.774485 l -0.428689,8.128035 q -0.04283,0.343147 -0.342934,0.557592 l -8.23044,4.803918 Q 24.616786,45 24.423888,45 24.166686,45 23.930913,44.806981 l -1.37172,-1.37253 q -0.278657,-0.300246 -0.171452,-0.686288 l 1.82182,-5.919094 -6.022764,-6.026326 -5.915608,1.822902 q -0.06426,0.02143 -0.192906,0.02143 -0.300069,0 -0.492975,-0.193021 l -1.37172,-1.37253 Q 9.8492312,30.674052 10.106426,30.245118 l 4.801082,-8.235297 q 0.214326,-0.300247 0.557261,-0.34314 l 8.12324,-0.428936 Q 25.645611,18.79289 27.36027,17.055752 31.389762,13.045347 35.033409,11.522656 38.677045,9.999965 44.271206,10 q 0.30007,0 0.514389,0.203757 0.214328,0.203751 0.214328,0.482529 z"
- id="path2995-9" /></g></svg>
diff --git a/sugar_network/static/httpdocs/images/book.svg b/sugar_network/static/httpdocs/images/book.svg
deleted file mode 100644
index 92fb811..0000000
--- a/sugar_network/static/httpdocs/images/book.svg
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE svg PUBLIC '-//W3C//DTD SVG 1.1//EN' 'http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd' [
- <!ENTITY stroke_color "#ffffff">
- <!ENTITY fill_color "#000000">
-]>
-<svg
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:cc="http://creativecommons.org/ns#"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
- xmlns:svg="http://www.w3.org/2000/svg"
- xmlns="http://www.w3.org/2000/svg"
- xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
- xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
- enable-background="new 0 0 55.125 55"
- height="55px"
- id="Layer_1"
- version="1.1"
- viewBox="0 0 55.125 55"
- width="55.125px"
- x="0px"
- xml:space="preserve"
- y="0px"
- inkscape:version="0.48.3.1 r9886"
- sodipodi:docname="books.svg"><metadata
- id="metadata3043"><rdf:RDF><cc:Work
- rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
- rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
- id="defs3041" /><sodipodi:namedview
- pagecolor="#ffff5f"
- bordercolor="#666666"
- borderopacity="1"
- objecttolerance="10"
- gridtolerance="10"
- guidetolerance="10"
- inkscape:pageopacity="1"
- inkscape:pageshadow="2"
- inkscape:window-width="1364"
- inkscape:window-height="725"
- id="namedview3039"
- showgrid="true"
- inkscape:zoom="9.9636364"
- inkscape:cx="18.33272"
- inkscape:cy="27.485017"
- inkscape:window-x="0"
- inkscape:window-y="0"
- inkscape:window-maximized="0"
- inkscape:current-layer="layer1"><inkscape:grid
- type="xygrid"
- id="grid3045"
- empspacing="5"
- visible="true"
- enabled="true"
- snapvisiblegridlinesonly="true" /></sodipodi:namedview><g
- inkscape:groupmode="layer"
- id="layer1"
- inkscape:label="Orig"
- style="display:inline"><rect
- style="fill:&stroke_color;"
- id="rect3006"
- width="51"
- height="51"
- x="2"
- y="2"
- ry="6.1807232" /><path
- inkscape:connector-curvature="0"
- d="m 44.457628,17.975387 q 0.840668,1.298804 0.378314,2.939427 l -5.77965,20.644433 q -0.399316,1.45831 -1.607781,2.449552 Q 36.240046,45.000041 34.873943,45 H 15.475341 q -1.618299,0 -3.121003,-1.219054 -1.502701,-1.219053 -2.091174,-2.99641 -0.5043946,-1.526698 -0.04204,-2.893852 0,-0.09115 0.06304,-0.615248 0.06304,-0.524093 0.08408,-0.843102 0.021,-0.182273 -0.06304,-0.48992 -0.08404,-0.307642 -0.06304,-0.444339 0.04204,-0.25066 0.16812,-0.478512 0.12608,-0.227853 0.346794,-0.535497 0.220714,-0.307646 0.346795,-0.535498 0.483393,-0.865869 0.945749,-2.084963 0.462355,-1.219091 0.630512,-2.084963 0.06305,-0.227852 0.01052,-0.683596 -0.05252,-0.455746 -0.01052,-0.638018 0.06305,-0.250663 0.357276,-0.638018 0.294235,-0.387356 0.357276,-0.524093 0.441356,-0.820292 0.882709,-2.096329 0.441354,-1.276034 0.525434,-2.050789 0.021,-0.205083 -0.05257,-0.729175 -0.07357,-0.524093 0.01052,-0.638018 0.08409,-0.29624 0.462357,-0.695001 0.378277,-0.398762 0.462356,-0.512689 0.399315,-0.592438 0.893227,-1.925457 0.493913,-1.33302 0.577955,-2.198891 0.021,-0.182273 -0.06305,-0.581035 -0.08405,-0.398759 -0.04205,-0.603842 0.04205,-0.182276 0.189157,-0.410167 0.147118,-0.227893 0.378315,-0.524092 0.231196,-0.296199 0.357276,-0.478513 0.168119,-0.273432 0.346795,-0.695002 0.178675,-0.42157 0.315237,-0.797522 0.136562,-0.375952 0.336275,-0.820294 0.199712,-0.444339 0.409834,-0.729173 0.210122,-0.284836 0.556953,-0.535497 0.346831,-0.250661 0.756592,-0.262026 0.409761,-0.01136 0.998307,0.125331 l -0.021,0.06834 Q 22.473973,10 22.747208,10 h 15.993845 q 1.55526,0 2.39593,1.276037 0.84067,1.276035 0.378315,2.962238 l -5.758612,20.644432 q -0.756592,2.711578 -1.502702,3.497697 -0.746109,0.786117 -2.700684,0.786117 H 13.289642 q -0.567472,0 -0.798631,0.341778 -0.231195,0.364588 -0.021,0.979796 0.504396,1.595046 3.026444,1.595046 h 19.398601 q 0.609473,0 1.176945,-0.353182 0.567472,-0.353185 0.735591,-0.945624 l 6.305082,-22.490139 q 0.147118,-0.501282 0.105079,-1.298807 
0.798632,0.341779 1.239986,0.979798 z m -22.361969,0.04557 q -0.08408,0.29624 0.04205,0.512688 0.126117,0.216446 0.420354,0.216486 H 35.33634 q 0.273237,0 0.535915,-0.216486 0.262679,-0.216487 0.346795,-0.512688 l 0.441354,-1.45831 q 0.08409,-0.29624 -0.04205,-0.512687 -0.126114,-0.216449 -0.420351,-0.216489 H 23.419725 q -0.273237,0 -0.535916,0.216489 -0.262679,0.216487 -0.346794,0.512687 z m -1.74438,5.83328 q -0.08408,0.29624 0.04205,0.512689 0.126116,0.216446 0.420352,0.216486 h 12.778283 q 0.273235,0 0.535914,-0.216486 0.26268,-0.216489 0.346795,-0.512689 l 0.441354,-1.45831 q 0.08408,-0.29624 -0.04205,-0.512688 -0.126118,-0.216447 -0.420354,-0.216487 h -12.77828 q -0.273234,0 -0.535915,0.216487 -0.262679,0.216489 -0.346794,0.512688 z"
- id="path2990"
- style="fill:&fill_color;" /></g></svg>
diff --git a/sugar_network/static/httpdocs/images/group.svg b/sugar_network/static/httpdocs/images/group.svg
deleted file mode 100644
index c9a6b64..0000000
--- a/sugar_network/static/httpdocs/images/group.svg
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE svg PUBLIC '-//W3C//DTD SVG 1.1//EN' 'http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd' [
- <!ENTITY stroke_color "#ffffff">
- <!ENTITY fill_color "#000000">
-]>
-<svg
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:cc="http://creativecommons.org/ns#"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
- xmlns:svg="http://www.w3.org/2000/svg"
- xmlns="http://www.w3.org/2000/svg"
- xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
- xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
- enable-background="new 0 0 55.125 55"
- height="55px"
- id="Layer_1"
- version="1.1"
- viewBox="0 0 55.125 55"
- width="55.125px"
- x="0px"
- xml:space="preserve"
- y="0px"
- inkscape:version="0.48.3.1 r9886"
- sodipodi:docname="groups.svg"><metadata
- id="metadata3043"><rdf:RDF><cc:Work
- rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
- rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
- id="defs3041" /><sodipodi:namedview
- pagecolor="#ffff5f"
- bordercolor="#666666"
- borderopacity="1"
- objecttolerance="10"
- gridtolerance="10"
- guidetolerance="10"
- inkscape:pageopacity="1"
- inkscape:pageshadow="2"
- inkscape:window-width="1364"
- inkscape:window-height="725"
- id="namedview3039"
- showgrid="true"
- inkscape:zoom="9.9636364"
- inkscape:cx="15.364787"
- inkscape:cy="26.136611"
- inkscape:window-x="0"
- inkscape:window-y="0"
- inkscape:window-maximized="0"
- inkscape:current-layer="layer1"><inkscape:grid
- type="xygrid"
- id="grid3045"
- empspacing="5"
- visible="true"
- enabled="true"
- snapvisiblegridlinesonly="true" /></sodipodi:namedview><g
- inkscape:groupmode="layer"
- id="layer1"
- inkscape:label="Orig"
- style="display:inline"><rect
- style="fill:&stroke_color;"
- id="rect3006"
- width="51"
- height="51"
- x="2"
- y="2"
- ry="6.1807232" /><path
- inkscape:connector-curvature="0"
- d="m 20.80989,27.5 q -2.953112,0.09765 -4.830743,2.500005 h -2.442693 q -1.494787,0 -2.515621,-0.791021 Q 10,28.417965 10,26.894541 q 0,-6.894521 2.260412,-6.894521 0.109389,0 0.792964,0.410156 0.683576,0.410156 1.777334,0.830087 1.09376,0.419932 2.16927,0.419932 1.221365,0 2.424479,-0.449224 -0.09114,0.722662 -0.09114,1.289054 0,2.714858 1.476571,5.000009 z m 19.523435,12.441406 q 0,2.343736 -1.33072,3.701182 Q 37.671885,45.000034 35.466152,45 H 19.533848 q -2.205733,0 -3.536453,-1.357412 -1.330719,-1.357412 -1.330719,-3.701182 0,-1.035165 0.0638,-2.021489 0.0638,-0.986323 0.255209,-2.128916 0.191406,-1.142592 0.483077,-2.119141 0.291671,-0.976547 0.78384,-1.904287 0.492169,-0.927739 1.130221,-1.582041 0.638054,-0.654302 1.55859,-1.044907 0.920536,-0.390605 2.032543,-0.39064 0.182283,0 0.783839,0.419932 0.601559,0.419931 1.330721,0.937515 0.729162,0.517583 1.950525,0.937514 1.221363,0.419933 2.460942,0.419933 1.239578,0 2.460941,-0.419933 1.221363,-0.419931 1.950525,-0.937514 0.729163,-0.517584 1.33072,-0.937515 0.601558,-0.419932 0.783841,-0.419932 1.111974,0 2.032543,0.39064 0.920567,0.39064 1.558588,1.044907 0.638021,0.654268 1.130221,1.582041 0.492203,0.927773 0.783842,1.904287 0.291638,0.976514 0.483077,2.119141 0.191438,1.142628 0.255208,2.128916 0.06377,0.98629 0.0638,2.021489 z M 21.666657,15.00001 q 0,2.070297 -1.367184,3.535171 -1.367183,1.464873 -3.299493,1.464839 -1.932308,-3.5e-5 -3.299491,-1.464839 -1.367183,-1.464805 -1.367183,-3.535171 0,-2.070366 1.367183,-3.535171 Q 15.067672,10.000034 16.99998,10 q 1.93231,-3.4e-5 3.299493,1.464839 1.367184,1.464873 1.367184,3.535171 z m 12.833339,7.499981 q 0,3.105463 -2.05079,5.302738 -2.05079,2.197276 -4.949221,2.197276 -2.898434,0 -4.949224,-2.197276 -2.05079,-2.197275 -2.05079,-5.302738 0,-3.105464 2.05079,-5.30274 2.05079,-2.197276 4.949224,-2.197276 2.898431,0 4.949221,2.197276 2.05079,2.197276 2.05079,5.30274 z M 45,26.894541 q 0,1.523424 -1.020833,2.314443 -1.020834,0.791021 -2.515621,0.791021 H 
39.020853 Q 37.143253,27.597652 34.19011,27.5 q 1.476571,-2.285152 1.476571,-5.000009 0,-0.566393 -0.09114,-1.289053 1.203114,0.449223 2.424479,0.449223 1.07551,0 2.16927,-0.419932 1.093758,-0.419931 1.777334,-0.830087 0.683575,-0.410157 0.792964,-0.410157 Q 45,19.999985 45,26.894507 z M 42.666663,15.00001 q 0,2.070297 -1.367183,3.535171 -1.367183,1.464873 -3.299493,1.464839 -1.93231,-3.5e-5 -3.299492,-1.464839 -1.367183,-1.464805 -1.367183,-3.535171 0,-2.070366 1.367183,-3.535171 Q 36.067677,10.000034 37.999987,10 q 1.93231,-3.4e-5 3.299493,1.464839 1.367183,1.464873 1.367183,3.535171 z"
- id="path2988"
- style="fill:&fill_color;" /></g></svg>
diff --git a/sugar_network/static/httpdocs/images/missing-logo.png b/sugar_network/static/httpdocs/images/missing-logo.png
deleted file mode 100644
index 98be121..0000000
--- a/sugar_network/static/httpdocs/images/missing-logo.png
+++ /dev/null
Binary files differ
diff --git a/sugar_network/static/httpdocs/images/missing.png b/sugar_network/static/httpdocs/images/missing.png
deleted file mode 100644
index 91a65a8..0000000
--- a/sugar_network/static/httpdocs/images/missing.png
+++ /dev/null
Binary files differ
diff --git a/sugar_network/static/httpdocs/images/missing.svg b/sugar_network/static/httpdocs/images/missing.svg
deleted file mode 100644
index 7e6a568..0000000
--- a/sugar_network/static/httpdocs/images/missing.svg
+++ /dev/null
@@ -1,75 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<svg
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:cc="http://creativecommons.org/ns#"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
- xmlns:svg="http://www.w3.org/2000/svg"
- xmlns="http://www.w3.org/2000/svg"
- xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
- xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
- enable-background="new 0 0 55.125 55"
- height="55px"
- id="Layer_1"
- version="1.1"
- viewBox="0 0 55.125 55"
- width="55.125px"
- x="0px"
- xml:space="preserve"
- y="0px"
- inkscape:version="0.48.4 r9939"
- sodipodi:docname="missing.svg"><metadata
- id="metadata3043"><rdf:RDF><cc:Work
- rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
- rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
- id="defs3041" /><sodipodi:namedview
- pagecolor="#ffffff"
- bordercolor="#666666"
- borderopacity="1"
- objecttolerance="10"
- gridtolerance="10"
- guidetolerance="10"
- inkscape:pageopacity="1"
- inkscape:pageshadow="2"
- inkscape:window-width="1278"
- inkscape:window-height="703"
- id="namedview3039"
- showgrid="true"
- inkscape:zoom="11.313708"
- inkscape:cx="22.254646"
- inkscape:cy="27.69536"
- inkscape:window-x="0"
- inkscape:window-y="48"
- inkscape:window-maximized="0"
- inkscape:current-layer="layer1"
- showguides="true"
- inkscape:guide-bbox="true"
- showborder="true"><inkscape:grid
- type="xygrid"
- id="grid3045"
- empspacing="5"
- visible="true"
- enabled="true"
- snapvisiblegridlinesonly="true" /><sodipodi:guide
- orientation="0,1"
- position="21.375,63.75"
- id="guide3095" /></sodipodi:namedview><g
- inkscape:groupmode="layer"
- id="layer1"
- inkscape:label="Orig"
- style="display:inline"><g
- id="g3018"
- transform="matrix(1.0024382,0,0,1.0033449,-0.06721282,-0.09913801)"
- style="fill:#e5e5e5;fill-opacity:1"><path
- inkscape:connector-curvature="0"
- id="path3014"
- d="M 7.312668,52.88195 C 6.7099324,52.779031 5.4955186,52.338195 5.0176599,52.048855 4.2658562,51.593644 3.2489622,50.517012 2.8351314,49.738115 2.0161815,48.196716 2.062668,49.534599 2.062668,27.506609 2.062668,5.6552305 2.0232307,6.8739053 2.7786566,5.3814313 3.498485,3.9592864 4.8883161,2.803557 6.4312977,2.3440372 7.264056,2.096031 7.5895024,2.09214 27.500168,2.09214 c 19.910665,0 20.236112,0.00389 21.06887,0.2518972 1.542982,0.4595198 2.932812,1.6152492 3.652641,3.0373941 0.755426,1.492474 0.715989,0.2737992 0.715989,22.1251777 0,22.017872 0.04589,20.691205 -0.768746,22.224531 -0.427042,0.803788 -1.433087,1.870171 -2.178743,2.309416 -0.280494,0.16523 -0.842994,0.427244 -1.25,0.582251 l -0.740011,0.281833 -20.1875,0.01534 c -11.103125,0.0084 -20.328125,-0.0087 -20.5,-0.03802 z"
- style="fill:#e5e5e5;fill-opacity:1;stroke:none"
- sodipodi:nodetypes="csssssssssssscscc" /><path
- inkscape:connector-curvature="0"
- id="path3016"
- d="M 24.978969,41.781971 C 16.145314,40.179195 10.799373,30.889246 13.900132,22.52964 c 1.747089,-4.710129 5.756064,-8.166365 10.744487,-9.263088 1.348135,-0.296392 4.362963,-0.296392 5.711098,0 4.894498,1.076073 8.905941,4.469126 10.634216,8.994886 0.585261,1.532597 0.788962,2.39317 0.936335,3.955702 0.698281,7.403629 -4.583521,14.270159 -11.973724,15.566266 -1.266075,0.222047 -3.745807,0.221332 -4.973575,-0.0014 l 0,2e-6 z m -3.452783,-5.086423 c 0.104108,-0.0864 0.415792,-0.633848 0.692631,-1.216546 1.070481,-2.253171 2.963376,-3.511862 5.281351,-3.511862 2.327261,0 4.210227,1.257339 5.291596,3.53343 0.502986,1.058694 0.831835,1.421421 1.333404,1.470764 0.622337,0.06123 0.961284,-0.02897 1.322817,-0.351999 0.785237,-0.701609 0.548547,-2.027396 -0.696221,-3.899794 -1.01576,-1.527921 -2.584326,-2.716596 -4.38378,-3.32207 -3.754919,-1.263443 -7.99672,0.129084 -10.119412,3.32207 -1.189591,1.7894 -1.465414,3.149908 -0.777967,3.837354 0.517901,0.517901 1.517382,0.585317 2.055581,0.138653 l 0,0 z M 23.027949,24.27353 c 1.062891,-0.529661 1.557446,-1.360203 1.562781,-2.624492 0.0074,-1.761863 -1.368456,-3.036782 -3.139072,-2.908708 -2.473463,0.178915 -3.621562,3.064289 -1.968455,4.947074 0.828346,0.943434 2.329588,1.191665 3.544746,0.586126 l 0,0 z m 11.598291,0.08549 c 2.072782,-0.941644 2.229132,-3.980824 0.267195,-5.193796 -1.98621,-1.22798 -4.49363,0.161006 -4.483828,2.483817 0.0036,0.856027 0.137445,1.275425 0.60089,1.883033 0.730373,0.95757 2.458305,1.352759 3.615743,0.826946 l 0,0 z"
- style="fill:#e5e5e5;fill-opacity:1;stroke:none" /></g><path
- inkscape:connector-curvature="0"
- d="m 35.839852,35.087905 q 0.182274,0.569673 -0.09115,1.105131 -0.273431,0.53546 -0.843105,0.717774 -0.569673,0.182315 -1.116536,-0.09116 -0.546864,-0.273471 -0.729179,-0.865874 -0.569672,-1.822906 -2.107742,-2.950848 -1.538069,-1.127941 -3.452132,-1.127941 -1.914063,0 -3.452133,1.127941 -1.538069,1.127942 -2.107742,2.950848 -0.182274,0.592442 -0.717774,0.865874 -0.535499,0.273432 -1.105132,0.09116 -0.592442,-0.182274 -0.865874,-0.717774 -0.273432,-0.535498 -0.09116,-1.105131 0.843105,-2.757169 3.144527,-4.443379 2.301421,-1.686209 5.195324,-1.686209 2.893905,0 5.195326,1.686209 2.301421,1.68621 3.144526,4.443379 z M 24.583328,21.666654 q 0,1.207693 -0.85451,2.062163 -0.854509,0.85447 -2.062163,0.854509 -1.207654,4e-5 -2.062164,-0.854509 -0.85451,-0.854549 -0.85451,-2.062163 0,-1.207615 0.85451,-2.062165 0.85451,-0.854549 2.062164,-0.854509 1.207654,4e-5 2.062163,0.854509 0.85451,0.85447 0.85451,2.062165 z m 11.666692,0 q 0,1.207693 -0.85451,2.062163 -0.85451,0.85447 -2.062163,0.854509 -1.207654,4e-5 -2.062163,-0.854509 -0.85451,-0.854549 -0.85451,-2.062163 0,-1.207615 0.85451,-2.062165 0.854509,-0.854549 2.062163,-0.854509 1.207653,4e-5 2.062163,0.854509 0.85451,0.85447 0.85451,2.062165 z M 42.083327,27.5 q 0,-2.962252 -1.162116,-5.662437 -1.162115,-2.700184 -3.110352,-4.648422 -1.948238,-1.948237 -4.648421,-3.110352 -2.700185,-1.162116 -5.662437,-1.162116 -2.962252,0 -5.662437,1.162116 -2.700184,1.162115 -4.648422,3.110352 -1.948237,1.948238 -3.110352,4.648422 -1.162116,2.700185 -1.162116,5.662437 0,2.962252 1.162116,5.662437 1.162115,2.700184 3.110352,4.648422 1.948238,1.948237 4.648422,3.110352 2.700185,1.162115 5.662437,1.162115 2.962252,0 5.662437,-1.162115 2.700183,-1.162115 4.648421,-3.110352 1.948237,-1.948238 3.110352,-4.648422 Q 42.083327,30.462252 42.083327,27.5 z M 45,27.5 q 0,4.762388 -2.347001,8.784194 Q 40.306,40.306 36.284194,42.653 32.262389,45 27.500001,45 22.737613,45 18.715807,42.653 14.694002,40.306 12.347001,36.284194 10,32.262388 10,27.5 
10,22.737612 12.347001,18.715806 14.694002,14.694 18.715807,12.347 q 4.021806,-2.3470003 8.784194,-2.3470003 4.762388,0 8.784193,2.3470003 4.021806,2.347 6.368805,6.368806 Q 45,22.737612 45,27.5 z"
- id="path3844"
- style="fill:#ffffff;fill-opacity:1" /></g></svg> \ No newline at end of file
diff --git a/sugar_network/static/httpdocs/images/package-logo.png b/sugar_network/static/httpdocs/images/package-logo.png
deleted file mode 100644
index c6cf086..0000000
--- a/sugar_network/static/httpdocs/images/package-logo.png
+++ /dev/null
Binary files differ
diff --git a/sugar_network/static/httpdocs/images/package.png b/sugar_network/static/httpdocs/images/package.png
deleted file mode 100644
index 24bd5ac..0000000
--- a/sugar_network/static/httpdocs/images/package.png
+++ /dev/null
Binary files differ
diff --git a/sugar_network/static/httpdocs/images/package.svg b/sugar_network/static/httpdocs/images/package.svg
deleted file mode 100644
index a5fd32d..0000000
--- a/sugar_network/static/httpdocs/images/package.svg
+++ /dev/null
@@ -1,71 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<svg
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:cc="http://creativecommons.org/ns#"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
- xmlns:svg="http://www.w3.org/2000/svg"
- xmlns="http://www.w3.org/2000/svg"
- xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
- xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
- enable-background="new 0 0 55.125 55"
- height="55px"
- id="Layer_1"
- version="1.1"
- viewBox="0 0 55.125 55"
- width="55.125px"
- x="0px"
- xml:space="preserve"
- y="0px"
- inkscape:version="0.48.3.1 r9886"
- sodipodi:docname="package.svg"><metadata
- id="metadata3043"><rdf:RDF><cc:Work
- rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
- rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
- id="defs3041" /><sodipodi:namedview
- pagecolor="#ffffff"
- bordercolor="#666666"
- borderopacity="1"
- objecttolerance="10"
- gridtolerance="10"
- guidetolerance="10"
- inkscape:pageopacity="1"
- inkscape:pageshadow="2"
- inkscape:window-width="1364"
- inkscape:window-height="725"
- id="namedview3039"
- showgrid="true"
- inkscape:zoom="8"
- inkscape:cx="25.537668"
- inkscape:cy="28.88286"
- inkscape:window-x="0"
- inkscape:window-y="0"
- inkscape:window-maximized="0"
- inkscape:current-layer="layer1"
- showguides="true"
- inkscape:guide-bbox="true"
- showborder="true"><inkscape:grid
- type="xygrid"
- id="grid3045"
- empspacing="5"
- visible="true"
- enabled="true"
- snapvisiblegridlinesonly="true" /><sodipodi:guide
- orientation="0,1"
- position="21.375,63.75"
- id="guide3095" /></sodipodi:namedview><g
- inkscape:groupmode="layer"
- id="layer1"
- inkscape:label="Orig"
- style="display:inline"><rect
- style="fill:#fdd99b;fill-opacity:1"
- id="rect3006"
- width="51"
- height="51"
- x="-53"
- y="2"
- ry="6.1807232"
- transform="scale(-1,1)" /><path
- inkscape:connector-curvature="0"
- d="m 21.272799,37.045483 q 0,-1.107944 -0.809649,-1.917636 -0.80965,-0.809686 -1.917636,-0.809649 -1.107979,3.6e-5 -1.917628,0.809649 -0.80965,0.809613 -0.80965,1.917636 0,1.108022 0.80965,1.917635 0.809649,0.809614 1.917628,0.809649 1.107986,3.6e-5 1.917636,-0.809649 0.809649,-0.809686 0.809649,-1.917635 z M 13.090987,26.136378 h 8.181855 v -5.454569 h -3.366501 q -0.277004,0 -0.468734,0.191772 l -4.154854,4.154854 q -0.191766,0.191765 -0.191766,0.468734 v 0.639209 z m 27.272772,10.909105 q 0,-1.107944 -0.80965,-1.917636 -0.80965,-0.809686 -1.917635,-0.809649 -1.107986,3.6e-5 -1.917636,0.809649 -0.809649,0.809613 -0.809649,1.917636 0,1.108022 0.809649,1.917635 0.80965,0.809614 1.917636,0.809649 1.107985,3.6e-5 1.917635,-0.809649 0.80965,-0.809686 0.80965,-1.917635 z m 5.45457,-23.181866 v 21.818244 q 0,0.319583 -0.08524,0.564634 -0.08524,0.245052 -0.287631,0.394159 -0.202393,0.149114 -0.35158,0.245016 -0.149185,0.09591 -0.500729,0.12786 -0.351543,0.03198 -0.479396,0.04259 -0.12786,0.01069 -0.543315,0 -0.415448,-0.01069 -0.479397,-0.01069 0,2.258515 -1.598009,3.856561 -1.59801,1.598045 -3.85656,1.598009 -2.258552,-3.6e-5 -3.856562,-1.598009 -1.598009,-1.597973 -1.598009,-3.856561 h -8.181854 q 0,2.258515 -1.59801,3.856561 -1.598008,1.598045 -3.856562,1.598009 -2.258541,-3.2e-5 -3.856557,-1.598004 -1.59801,-1.597974 -1.59801,-3.856562 h -1.363621 q -0.06388,0 -0.479395,0.01069 -0.415493,0.01069 -0.543318,0 -0.127816,-0.01069 -0.479394,-0.04259 Q 9.8736003,36.981554 9.72445,36.885674 9.5753064,36.789764 9.372871,36.640654 9.1704416,36.491547 9.08524,36.246495 9.000042,36.001444 9,35.681861 q 0,-0.553979 0.4048281,-0.9588 0.404821,-0.404827 0.9587929,-0.404827 v -6.818191 q 0,-0.170441 -0.01069,-0.745745 -0.01069,-0.575298 0,-0.809649 0.01069,-0.234346 0.05328,-0.735075 0.04259,-0.500729 0.13848,-0.78836 0.09586,-0.28763 0.298299,-0.649872 0.20243,-0.362248 0.479397,-0.639217 l 4.218767,-4.218766 q 0.40483,-0.404821 1.075991,-0.681825 0.671169,-0.277011 
1.246473,-0.277011 h 3.40911 v -4.090906 q 0,-0.553972 0.404829,-0.958799 0.404821,-0.404822 0.9588,-0.404822 h 21.818236 q 0.553979,0 0.958801,0.404822 0.404827,0.404827 0.404827,0.958799 z"
- id="path4"
- style="fill:#816647" /></g></svg> \ No newline at end of file
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index a32d87f..4088e07 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2011-2013 Aleksey Lim
+# Copyright (C) 2011-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -114,84 +114,12 @@ def exception(*args):
logger.debug('\n'.join(tb))
-def default_lang():
- """Default language to fallback for localized strings.
-
- :returns:
- string in format of HTTP's Accept-Language
-
- """
- return default_langs()[0]
-
-
-def default_langs():
- """Default languages list, i.e., including all secondory languages.
-
- :returns:
- list of strings in format of HTTP's Accept-Language
-
- """
- global _default_langs
-
- if _default_langs is None:
- locales = os.environ.get('LANGUAGE')
- if locales:
- locales = [i for i in locales.split(':') if i.strip()]
- else:
- from locale import getdefaultlocale
- locales = [getdefaultlocale()[0]]
- if not locales:
- _default_langs = ['en']
- else:
- _default_langs = []
- for locale in locales:
- lang = locale.strip().split('.')[0].lower()
- if lang == 'c':
- lang = 'en'
- elif '_' in lang:
- lang, region = lang.split('_')
- if lang != region:
- lang = '-'.join([lang, region])
- _default_langs.append(lang)
- _logger.info('Default languages are %r', _default_langs)
-
- return _default_langs
-
-
-def gettext(value, accept_language=None):
- if not value:
- return ''
- if not isinstance(value, dict):
- return value
-
- if accept_language is None:
- accept_language = [default_lang()]
- elif isinstance(accept_language, basestring):
- accept_language = [accept_language]
- accept_language.append('en')
-
- stripped_value = None
- for lang in accept_language:
- result = value.get(lang)
- if result is not None:
- return result
-
- prime_lang = lang.split('-')[0]
- if prime_lang != lang:
- result = value.get(prime_lang)
- if result is not None:
- return result
-
- if stripped_value is None:
- stripped_value = {}
- for k, v in value.items():
- if '-' in k:
- stripped_value[k.split('-', 1)[0]] = v
- result = stripped_value.get(prime_lang)
- if result is not None:
- return result
-
- return value[min(value.keys())]
+def ascii(value):
+ if not isinstance(value, basestring):
+ return str(value)
+ if isinstance(value, unicode):
+ return value.encode('utf8')
+ return value
def uuid():
@@ -484,12 +412,12 @@ def unique_filename(root, filename):
class mkdtemp(str):
- def __new__(cls, **kwargs):
- if cachedir.value and 'dir' not in kwargs:
- if not exists(cachedir.value):
- os.makedirs(cachedir.value)
+ def __new__(cls, *args, **kwargs):
+ if 'dir' not in kwargs:
kwargs['dir'] = cachedir.value
- result = tempfile.mkdtemp(**kwargs)
+ if not exists(kwargs['dir']):
+ os.makedirs(kwargs['dir'])
+ result = tempfile.mkdtemp(*args, **kwargs)
return str.__new__(cls, result)
def __enter__(self):
@@ -522,21 +450,60 @@ def svg_to_png(data, w, h):
return result
+class File(dict):
+
+ AWAY = None
+
+ def __init__(self, path=None, meta=None, digest=None):
+ self.path = path
+ self.digest = digest
+ dict.__init__(self, meta or {})
+ self._stat = None
+ self._name = self.get('filename')
+
+ @property
+ def size(self):
+ if self._stat is None:
+ self._stat = os.stat(self.path)
+ return self._stat.st_size
+
+ @property
+ def mtime(self):
+ if self._stat is None:
+ self._stat = os.stat(self.path)
+ return int(self._stat.st_mtime)
+
+ @property
+ def name(self):
+ if self._name is None:
+ self._name = self.get('name') or self.digest or 'blob'
+ mime_type = self.get('mime_type')
+ if mime_type:
+ import mimetypes
+ if not mimetypes.inited:
+ mimetypes.init()
+ self._name += mimetypes.guess_extension(mime_type) or ''
+ return self._name
+
+ def __repr__(self):
+ return '<File path=%r digest=%r>' % (self.path, self.digest)
+
+
def TemporaryFile(*args, **kwargs):
- if cachedir.value and 'dir' not in kwargs:
- if not exists(cachedir.value):
- os.makedirs(cachedir.value)
+ if 'dir' not in kwargs:
kwargs['dir'] = cachedir.value
+ if not exists(kwargs['dir']):
+ os.makedirs(kwargs['dir'])
return tempfile.TemporaryFile(*args, **kwargs)
class NamedTemporaryFile(object):
def __init__(self, *args, **kwargs):
- if cachedir.value and 'dir' not in kwargs:
- if not exists(cachedir.value):
- os.makedirs(cachedir.value)
+ if 'dir' not in kwargs:
kwargs['dir'] = cachedir.value
+ if not exists(kwargs['dir']):
+ os.makedirs(kwargs['dir'])
self._file = tempfile.NamedTemporaryFile(*args, **kwargs)
def close(self):
@@ -567,11 +534,9 @@ class Seqno(object):
"""
self._path = path
self._value = 0
-
if exists(path):
with file(path) as f:
self._value = int(f.read().strip())
-
self._orig_value = self._value
@property
@@ -610,7 +575,7 @@ class Sequence(list):
"""List of sorted and non-overlapping ranges.
List items are ranges, [`start`, `stop']. If `start` or `stop`
- is `None`, it means the beginning or ending of the entire scale.
+ is `None`, it means the beginning or ending of the entire sequence.
"""
@@ -880,5 +845,4 @@ def _nb_read(stream):
fcntl.fcntl(fd, fcntl.F_SETFL, orig_flags)
-_default_lang = None
-_default_langs = None
+File.AWAY = File()
diff --git a/sugar_network/toolkit/coroutine.py b/sugar_network/toolkit/coroutine.py
index 170f445..1913bda 100644
--- a/sugar_network/toolkit/coroutine.py
+++ b/sugar_network/toolkit/coroutine.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2013 Aleksey Lim
+# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -23,6 +23,7 @@ import logging
import gevent
import gevent.pool
import gevent.hub
+from gevent.queue import Empty
from sugar_network.toolkit import enforce
@@ -36,27 +37,27 @@ sleep = gevent.sleep
#: Wait for the spawned events to finish.
joinall = gevent.joinall
+#: Access to greenlet-local storage
+this = None
+
gevent.hub.Hub.resolver_class = 'gevent.resolver_ares.Resolver'
-_group = gevent.pool.Group()
+_all_jobs = None
_logger = logging.getLogger('coroutine')
_wsgi_logger = logging.getLogger('wsgi')
def spawn(*args, **kwargs):
- return _group.spawn(*args, **kwargs)
+ return _all_jobs.spawn(*args, **kwargs)
def spawn_later(seconds, *args, **kwargs):
- job = _group.greenlet_class(*args, **kwargs)
- job.start_later(seconds)
- _group.add(job)
- return job
+ return _all_jobs.spawn_later(*args, **kwargs)
def shutdown():
- _group.kill()
- return _group.join()
+ _all_jobs.kill()
+ return _all_jobs.join()
def reset_resolver():
@@ -168,10 +169,6 @@ class ThreadResult(object):
return self._value
-class Empty(Exception):
- pass
-
-
class AsyncQueue(object):
def __init__(self):
@@ -216,30 +213,30 @@ class AsyncQueue(object):
self._queue.put(*args, **kwargs)
def _get(self):
- from Queue import Empty as empty
- try:
- return self._queue.get_nowait()
- except empty:
- raise Empty()
+ return self._queue.get_nowait()
class Pool(gevent.pool.Pool):
def spawn(self, *args, **kwargs):
- job = gevent.pool.Pool.spawn(self, *args, **kwargs)
- _group.add(job)
+ job = self.greenlet_class(*args, **kwargs)
+ job.local = _Local()
+ if self is not _all_jobs:
+ _all_jobs.add(job)
+ self.start(job)
return job
def spawn_later(self, seconds, *args, **kwargs):
job = self.greenlet_class(*args, **kwargs)
+ job.local = _Local()
+ if self is not _all_jobs:
+ _all_jobs.add(job)
job.start_later(seconds)
self.add(job)
- _group.add(job)
return job
# pylint: disable-msg=W0221
def kill(self, *args, **kwargs):
- from gevent.queue import Empty
try:
gevent.pool.Pool.kill(self, *args, **kwargs)
except Empty:
@@ -253,6 +250,71 @@ class Pool(gevent.pool.Pool):
self.kill()
+class Spooler(object):
+ """One-producer, many-consumers event delivery.
+
+ The delivery process supports lossless event feeding with a guarantee
+ that every consumer has processed every event the producer pushed.
+
+ """
+
+ def __init__(self):
+ self._value = None
+ self._waiters = 0
+ self._ready = Event()
+ self._notifying_done = Event()
+ self._notifying_done.set()
+
+ @property
+ def waiters(self):
+ return self._waiters
+
+ def wait(self):
+ self._notifying_done.wait()
+ self._waiters += 1
+ try:
+ self._ready.wait()
+ value = self._value
+ finally:
+ self._waiters -= 1
+ if self._waiters == 0:
+ self._ready.clear()
+ self._notifying_done.set()
+ return value
+
+ def notify_all(self, value=None):
+ while not self._notifying_done.is_set():
+ self._notifying_done.wait()
+ if not self._waiters:
+ return
+ self._notifying_done.clear()
+ self._value = value
+ self._ready.set()
+
+
+class _Local(object):
+
+ def __init__(self):
+ self.attrs = set()
+
+ if hasattr(gevent.getcurrent(), 'local'):
+ current = gevent.getcurrent().local
+ for attr in current.attrs:
+ self.attrs.add(attr)
+ setattr(self, attr, getattr(current, attr))
+
+
+class _LocalAccess(object):
+
+ def __getattr__(self, name):
+ return getattr(gevent.getcurrent().local, name)
+
+ def __setattr__(self, name, value):
+ local = gevent.getcurrent().local
+ local.attrs.add(name)
+ return setattr(local, name, value)
+
+
class _Child(object):
def __init__(self, pid):
@@ -317,4 +379,7 @@ def _print_exception(context, klass, value, tb):
_logger.error('\n'.join([error, context, tb_repr]))
+_all_jobs = Pool()
gevent.hub.get_hub().print_exception = _print_exception
+gevent.getcurrent().local = gevent.get_hub().local = _Local()
+this = _LocalAccess()
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index d1b2fe7..8d913ae 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -22,7 +22,7 @@ import logging
from os.path import join, dirname, exists, expanduser, abspath
from sugar_network import toolkit
-from sugar_network.toolkit import enforce
+from sugar_network.toolkit import i18n, enforce
_REDIRECT_CODES = frozenset([301, 302, 303, 307, 308])
@@ -316,7 +316,7 @@ class Connection(object):
self._session = Connection._Session()
self._session.headers['accept-language'] = \
- ','.join(toolkit.default_langs())
+ ','.join(i18n.default_langs())
for arg, value in self._session_args.items():
setattr(self._session, arg, value)
self._session.stream = True
diff --git a/sugar_network/toolkit/i18n.py b/sugar_network/toolkit/i18n.py
new file mode 100644
index 0000000..86d3cae
--- /dev/null
+++ b/sugar_network/toolkit/i18n.py
@@ -0,0 +1,134 @@
+# Copyright (C) 2014 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import logging
+from gettext import translation
+
+
+# To let `encode()` work properly, avoid gettext'izing msgids,
+# but still populate .po files when parsing the source code
+_ = lambda x: x
+
+_logger = logging.getLogger('i18n')
+_i18n = {}
+
+
+def default_lang():
+ """Default language to fallback for localized strings.
+
+ :returns:
+ string in format of HTTP's Accept-Language
+
+ """
+ return default_langs()[0]
+
+
+def default_langs():
+ """Default languages list, i.e., including all secondary languages.
+
+ :returns:
+ list of strings in format of HTTP's Accept-Language
+
+ """
+ global _default_langs
+
+ if _default_langs is None:
+ locales = os.environ.get('LANGUAGE')
+ if locales:
+ locales = [i for i in locales.split(':') if i.strip()]
+ else:
+ from locale import getdefaultlocale
+ locales = [getdefaultlocale()[0]]
+ if not locales:
+ _default_langs = ['en']
+ else:
+ _default_langs = []
+ for locale in locales:
+ lang = locale.strip().split('.')[0].lower()
+ if lang == 'c':
+ lang = 'en'
+ elif '_' in lang:
+ lang, region = lang.split('_')
+ if lang != region:
+ lang = '-'.join([lang, region])
+ _default_langs.append(lang)
+ _logger.info('Default languages are %r', _default_langs)
+
+ return _default_langs
+
+
+def decode(value, accept_language=None):
+ if not value:
+ return ''
+ if not isinstance(value, dict):
+ return value
+
+ if accept_language is None:
+ accept_language = default_langs()
+ elif isinstance(accept_language, basestring):
+ accept_language = [accept_language]
+ accept_language.append('en')
+
+ stripped_value = None
+ for lang in accept_language:
+ result = value.get(lang)
+ if result is not None:
+ return result
+
+ prime_lang = lang.split('-')[0]
+ if prime_lang != lang:
+ result = value.get(prime_lang)
+ if result is not None:
+ return result
+
+ if stripped_value is None:
+ stripped_value = {}
+ for k, v in value.items():
+ if '-' in k:
+ stripped_value[k.split('-', 1)[0]] = v
+ result = stripped_value.get(prime_lang)
+ if result is not None:
+ return result
+
+ return value[min(value.keys())]
+
+
+def encode(msgid, *args, **kwargs):
+ if not _i18n:
+ from sugar_network.toolkit.languages import LANGUAGES
+ for lang in LANGUAGES:
+ _i18n[lang] = translation('sugar-network', languages=[lang])
+ result = {}
+
+ for lang, trans in _i18n.items():
+ msgstr = trans.gettext(msgid)
+ if args:
+ msgargs = []
+ for arg in args:
+ msgargs.append(decode(arg, lang))
+ msgstr = msgstr % tuple(msgargs)
+ elif kwargs:
+ msgargs = {}
+ for key, value in kwargs.items():
+ msgargs[key] = decode(value, lang)
+ msgstr = msgstr % msgargs
+ result[lang] = msgstr
+
+ return result
+
+
+_default_lang = None
+_default_langs = None
diff --git a/sugar_network/static/__init__.py b/sugar_network/toolkit/languages.py.in
index 4295e38..2542821 100644
--- a/sugar_network/static/__init__.py
+++ b/sugar_network/toolkit/languages.py.in
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -13,10 +13,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from os.path import dirname, join
-
-PATH = join(dirname(__file__), 'httpdocs')
-
-
-def path(*args):
- return join(PATH, *args)
+LANGUAGES = [%LANGUAGES%]
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index df57ff3..b37eee4 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -20,16 +20,16 @@ import time
import types
import logging
import calendar
-import mimetypes
from base64 import b64decode
from bisect import bisect_left
from urllib import urlencode
from urlparse import parse_qsl, urlsplit
from email.utils import parsedate, formatdate
-from os.path import isfile, split, splitext
+from os.path import isfile
from sugar_network import toolkit
-from sugar_network.toolkit import http, coroutine, enforce
+from sugar_network.toolkit.coroutine import this
+from sugar_network.toolkit import i18n, http, coroutine, enforce
_SIGNATURE_LIFETIME = 600
@@ -84,14 +84,15 @@ class ACL(object):
DELETE = 1 << 5
INSERT = 1 << 6
REMOVE = 1 << 7
+ REPLACE = 1 << 8
PUBLIC = CREATE | WRITE | READ | DELETE | INSERT | REMOVE
- AUTH = 1 << 8
- AUTHOR = 1 << 9
- SUPERUSER = 1 << 10
+ AUTH = 1 << 10
+ AUTHOR = 1 << 11
+ SUPERUSER = 1 << 12
- LOCAL = 1 << 11
- CALC = 1 << 12
+ LOCAL = 1 << 13
+ CALC = 1 << 14
NAMES = {
CREATE: 'Create',
@@ -100,6 +101,7 @@ class ACL(object):
DELETE: 'Delete',
INSERT: 'Insert',
REMOVE: 'Remove',
+ REPLACE: 'Replace',
}
@@ -114,18 +116,16 @@ class Unauthorized(http.Unauthorized):
class Request(dict):
- principal = None
- subcall = lambda *args: enforce(False)
-
def __init__(self, environ=None, method=None, path=None, cmd=None,
content=None, content_stream=None, content_type=None, session=None,
- **kwargs):
+ principal=None, **kwargs):
dict.__init__(self)
self.path = []
self.cmd = None
self.environ = {}
self.session = session or {}
+ self.principal = principal
self._content = _NOT_SET
self._dirty_query = False
@@ -252,6 +252,11 @@ class Request(dict):
return self.path[2]
@property
+ def key(self):
+ if len(self.path) > 3:
+ return self.path[3]
+
+ @property
def static_prefix(self):
http_host = self.environ.get('HTTP_HOST')
if http_host:
@@ -326,23 +331,6 @@ class Request(dict):
else:
existing_value = self[key] = [existing_value, value]
- def call(self, response=None, **kwargs):
- environ = {}
- for key in ('HTTP_HOST',
- 'HTTP_ACCEPT_LANGUAGE',
- 'HTTP_ACCEPT_ENCODING',
- 'HTTP_IF_MODIFIED_SINCE',
- 'HTTP_AUTHORIZATION',
- ):
- if key in self.environ:
- environ[key] = self.environ[key]
- request = Request(environ, **kwargs)
- if response is None:
- response = Response()
- request.principal = self.principal
- request.subcall = self.subcall
- return self.subcall(request, response)
-
def ensure_content(self):
if self._content is not _NOT_SET:
return
@@ -400,9 +388,9 @@ class Response(dict):
for key, value in dict.items(self):
if type(value) in (list, tuple):
for i in value:
- result.append((_to_ascii(key), _to_ascii(i)))
+ result.append((toolkit.ascii(key), toolkit.ascii(i)))
else:
- result.append((_to_ascii(key), _to_ascii(value)))
+ result.append((toolkit.ascii(key), toolkit.ascii(value)))
return result
def __repr__(self):
@@ -428,10 +416,6 @@ class Response(dict):
dict.__delitem__(self, key)
-class Blob(dict):
- pass
-
-
class Router(object):
def __init__(self, routes_model, allow_spawn=False):
@@ -441,8 +425,8 @@ class Router(object):
self._invalid_origins = set()
self._host = None
self._routes = _Routes()
- self._preroutes = set()
- self._postroutes = set()
+ self._preroutes = []
+ self._postroutes = []
processed = set()
cls = type(routes_model)
@@ -452,10 +436,14 @@ class Router(object):
if name in processed:
continue
if hasattr(attr, 'is_preroute'):
- self._preroutes.add(getattr(routes_model, name))
+ route_ = getattr(routes_model, name)
+ if route_ not in self._preroutes:
+ self._preroutes.append(route_)
continue
elif hasattr(attr, 'is_postroute'):
- self._postroutes.add(getattr(routes_model, name))
+ route_ = getattr(routes_model, name)
+ if route_ not in self._postroutes:
+ self._postroutes.append(route_)
continue
elif not hasattr(attr, 'route'):
continue
@@ -481,44 +469,75 @@ class Router(object):
processed.add(name)
cls = cls.__base__
- def call(self, request, response):
- request.subcall = self.call
- result = self._call_route(request, response)
-
- if isinstance(result, Blob):
- if 'url' in result:
- raise http.Redirect(result['url'])
-
- path = result['blob']
- enforce(isfile(path), 'No such file')
-
- mtime = result.get('mtime') or int(os.stat(path).st_mtime)
- if request.if_modified_since and mtime and \
- mtime <= request.if_modified_since:
- raise http.NotModified()
- response.last_modified = mtime
-
- response.content_type = result.get('mime_type') or \
- 'application/octet-stream'
-
- filename = result.get('filename')
- if not filename:
- filename = _filename(result.get('name') or
- splitext(split(path)[-1])[0],
- response.content_type)
- response['Content-Disposition'] = \
- 'attachment; filename="%s"' % filename
-
- result = file(path, 'rb')
-
- if hasattr(result, 'read'):
- if hasattr(result, 'fileno'):
- response.content_length = os.fstat(result.fileno()).st_size
- elif hasattr(result, 'seek'):
- result.seek(0, 2)
- response.content_length = result.tell()
- result.seek(0)
- result = _stream_reader(result)
+ this.call = self.call
+
+ def call(self, request=None, response=None, environ=None, principal=None,
+ **kwargs):
+ if request is None:
+ if this.request is not None:
+ if not environ:
+ environ = {}
+ for key in ('HTTP_HOST',
+ 'HTTP_ACCEPT_LANGUAGE',
+ 'HTTP_ACCEPT_ENCODING',
+ 'HTTP_IF_MODIFIED_SINCE',
+ 'HTTP_AUTHORIZATION',
+ ):
+ if key in this.request.environ:
+ environ[key] = this.request.environ[key]
+ if not principal:
+ principal = this.request.principal
+ request = Request(environ=environ, principal=principal, **kwargs)
+ if response is None:
+ response = Response()
+
+ route_ = self._resolve_route(request)
+
+ for arg, cast in route_.arguments.items():
+ value = request.get(arg)
+ if value is None:
+ if not hasattr(cast, '__call__'):
+ request[arg] = cast
+ continue
+ if not hasattr(cast, '__call__'):
+ cast = type(cast)
+ try:
+ request[arg] = _typecast(cast, value)
+ except Exception, error:
+ raise http.BadRequest(
+ 'Cannot typecast %r argument: %s' % (arg, error))
+ kwargs = {}
+ for arg in route_.kwarg_names:
+ if arg == 'request':
+ kwargs[arg] = request
+ elif arg == 'response':
+ kwargs[arg] = response
+ elif arg not in kwargs:
+ kwargs[arg] = request.get(arg)
+
+ for i in self._preroutes:
+ i(route_, request, response)
+ result = None
+ exception = None
+ try:
+ result = route_.callback(**kwargs)
+ if route_.mime_type == 'text/event-stream' and \
+ self._allow_spawn and 'spawn' in request:
+ _logger.debug('Spawn event stream for %r', request)
+ request.ensure_content()
+ coroutine.spawn(self._event_stream, request, result)
+ result = None
+ except Exception, exception:
+ raise
+ else:
+ if not response.content_type:
+ if isinstance(result, toolkit.File):
+ response.content_type = result.get('mime_type')
+ if not response.content_type:
+ response.content_type = route_.mime_type
+ finally:
+ for i in self._postroutes:
+ i(request, response, result, exception)
return result
@@ -533,7 +552,7 @@ class Router(object):
if 'callback' in request:
js_callback = request.pop('callback')
- result = None
+ content = None
try:
if 'HTTP_ORIGIN' in request.environ:
enforce(self._assert_origin(request.environ), http.Forbidden,
@@ -541,7 +560,34 @@ class Router(object):
request.environ['HTTP_ORIGIN'])
response['Access-Control-Allow-Origin'] = \
request.environ['HTTP_ORIGIN']
+
result = self.call(request, response)
+
+ if isinstance(result, toolkit.File):
+ if 'url' in result:
+ raise http.Redirect(result['url'])
+ enforce(isfile(result.path), 'No such file')
+ if request.if_modified_since and result.mtime and \
+ result.mtime <= request.if_modified_since:
+ raise http.NotModified()
+ response.last_modified = result.mtime
+ response.content_type = result.get('mime_type') or \
+ 'application/octet-stream'
+ response['Content-Disposition'] = \
+ 'attachment; filename="%s"' % result.name
+ result = file(result.path, 'rb')
+
+ if not hasattr(result, 'read'):
+ content = result
+ else:
+ if hasattr(result, 'fileno'):
+ response.content_length = os.fstat(result.fileno()).st_size
+ elif hasattr(result, 'seek'):
+ result.seek(0, 2)
+ response.content_length = result.tell()
+ result.seek(0)
+ content = _stream_reader(result)
+
except http.StatusPass, error:
response.status = error.status
if error.headers:
@@ -557,100 +603,46 @@ class Router(object):
if request.method == 'HEAD':
response.meta['error'] = str(error)
else:
- result = {'error': str(error),
- 'request': request.url,
- }
+ content = {'error': str(error), 'request': request.url}
response.content_type = 'application/json'
- result_streamed = isinstance(result, types.GeneratorType)
+ streamed_content = isinstance(content, types.GeneratorType)
if request.method == 'HEAD':
- result_streamed = False
- result = None
+ streamed_content = False
+ content = None
elif js_callback:
- if result_streamed:
- result = ''.join(result)
- result_streamed = False
- result = '%s(%s);' % (js_callback, json.dumps(result))
- response.content_length = len(result)
- elif not result_streamed:
+ if streamed_content:
+ content = ''.join(content)
+ streamed_content = False
+ content = '%s(%s);' % (js_callback, json.dumps(content))
+ response.content_length = len(content)
+ elif not streamed_content:
if response.content_type == 'application/json':
- result = json.dumps(result)
+ content = json.dumps(content)
if 'content-length' not in response:
- response.content_length = len(result) if result else 0
+ response.content_length = len(content) if content else 0
for key, value in response.meta.items():
- response.set('X-SN-%s' % _to_ascii(key), json.dumps(value))
+ response.set('X-SN-%s' % toolkit.ascii(key), json.dumps(value))
- if request.method == 'HEAD' and result is not None:
+ if request.method == 'HEAD' and content is not None:
_logger.warning('Content from HEAD response is ignored')
- result = None
+ content = None
- _logger.trace('%s call: request=%s response=%r result=%r',
- self, request.environ, response, repr(result)[:256])
+ _logger.trace('%s call: request=%s response=%r content=%r',
+ self, request.environ, response, repr(content)[:256])
start_response(response.status, response.items())
- if result_streamed:
+ if streamed_content:
if response.content_type == 'text/event-stream':
- for event in _event_stream(request, result):
+ for event in _event_stream(request, content):
yield 'data: %s\n\n' % json.dumps(event)
else:
- for i in result:
+ for i in content:
yield i
- elif result is not None:
- yield result
-
- def _call_route(self, request, response):
- route_ = self._resolve_route(request)
- request.routes = self._routes_model
-
- for arg, cast in route_.arguments.items():
- value = request.get(arg)
- if value is None:
- if not hasattr(cast, '__call__'):
- request[arg] = cast
- continue
- if not hasattr(cast, '__call__'):
- cast = type(cast)
- try:
- request[arg] = _typecast(cast, value)
- except Exception, error:
- raise http.BadRequest(
- 'Cannot typecast %r argument: %s' % (arg, error))
- kwargs = {}
- for arg in route_.kwarg_names:
- if arg == 'request':
- kwargs[arg] = request
- elif arg == 'response':
- kwargs[arg] = response
- elif arg not in kwargs:
- kwargs[arg] = request.get(arg)
-
- for i in self._preroutes:
- i(route_, request, response)
- result = None
- exception = None
- try:
- result = route_.callback(**kwargs)
- if route_.mime_type == 'text/event-stream' and \
- self._allow_spawn and 'spawn' in request:
- _logger.debug('Spawn event stream for %r', request)
- request.ensure_content()
- coroutine.spawn(self._event_stream, request, result)
- result = None
- except Exception, exception:
- raise
- else:
- if not response.content_type:
- if isinstance(result, Blob):
- response.content_type = result.get('mime_type')
- if not response.content_type:
- response.content_type = route_.mime_type
- finally:
- for i in self._postroutes:
- i(request, response, result, exception)
-
- return result
+ elif content is not None:
+ yield content
def _resolve_route(self, request):
found_path = [False]
@@ -695,9 +687,19 @@ class Router(object):
commons['guid'] = request.guid
if request.prop:
commons['prop'] = request.prop
- for event in _event_stream(request, stream):
+ try:
+ for event in _event_stream(request, stream):
+ event.update(commons)
+ this.localcast(event)
+ except Exception, error:
+ _logger.exception('Event stream %r failed', request)
+ event = {'event': 'failure',
+ 'exception': type(error).__name__,
+ 'error': str(error),
+ }
+ event.update(request.session)
event.update(commons)
- self._routes_model.broadcast(event)
+ this.localcast(event)
def _assert_origin(self, environ):
origin = environ['HTTP_ORIGIN']
@@ -747,22 +749,6 @@ class _ContentStream(object):
return result
-def _filename(names, mime_type):
- if type(names) not in (list, tuple):
- names = [names]
- parts = []
- for name in names:
- if isinstance(name, dict):
- name = toolkit.gettext(name)
- parts.append(''.join([i.capitalize() for i in name.split()]))
- result = '-'.join(parts)
- if mime_type:
- if not mimetypes.inited:
- mimetypes.init()
- result += mimetypes.guess_extension(mime_type) or ''
- return result.replace(os.sep, '')
-
-
def _stream_reader(stream):
try:
while True:
@@ -783,15 +769,8 @@ def _event_stream(request, stream):
event[0].update(i)
event = event[0]
yield event
- except Exception, error:
- _logger.exception('Event stream %r failed', request)
- event = {'event': 'failure',
- 'exception': type(error).__name__,
- 'error': str(error),
- }
- event.update(request.session)
- yield event
- _logger.debug('Event stream %r exited', request)
+ finally:
+ _logger.debug('Event stream %r exited', request)
def _typecast(cast, value):
@@ -817,7 +796,7 @@ def _typecast(cast, value):
def _parse_accept_language(value):
if not value:
- return [toolkit.default_lang()]
+ return [i18n.default_lang()]
langs = []
qualities = []
for chunk in value.split(','):
@@ -836,14 +815,6 @@ def _parse_accept_language(value):
return langs
-def _to_ascii(value):
- if not isinstance(value, basestring):
- return str(value)
- if isinstance(value, unicode):
- return value.encode('utf8')
- return value
-
-
class _Routes(dict):
def __init__(self, parent=None):