Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAleksey Lim <alsroot@sugarlabs.org>2014-03-16 10:54:04 (GMT)
committer Aleksey Lim <alsroot@sugarlabs.org>2014-03-16 10:54:04 (GMT)
commit83d0187c5742ae539cd63554555a6f285925a149 (patch)
tree16996dbad88f206ae75bbc0537b3586650264a33
parentae31651d6ae31215db903530115bf340ae9f98f5 (diff)
Switch client code to server side resolves
-rw-r--r--sugar_network/client/__init__.py32
-rw-r--r--sugar_network/client/cache.py196
-rw-r--r--sugar_network/client/injector.py463
-rw-r--r--sugar_network/client/packagekit.py92
-rw-r--r--sugar_network/client/releases.py392
-rw-r--r--sugar_network/client/solver.py407
-rw-r--r--sugar_network/model/__init__.py3
-rw-r--r--sugar_network/node/model.py14
-rw-r--r--sugar_network/node/routes.py16
-rw-r--r--sugar_network/toolkit/__init__.py31
-rw-r--r--sugar_network/toolkit/http.py21
-rw-r--r--sugar_network/toolkit/parcel.py2
-rw-r--r--sugar_network/toolkit/router.py11
-rw-r--r--tests/__init__.py33
-rw-r--r--tests/units/client/__main__.py5
-rwxr-xr-xtests/units/client/cache.py319
-rwxr-xr-xtests/units/client/injector.py968
-rwxr-xr-xtests/units/client/packagekit.py25
-rwxr-xr-xtests/units/client/releases.py502
-rwxr-xr-xtests/units/client/solver.py277
-rwxr-xr-xtests/units/model/context.py1
-rwxr-xr-xtests/units/node/model.py90
-rwxr-xr-xtests/units/node/node.py6
-rwxr-xr-xtests/units/node/slave.py8
-rwxr-xr-xtests/units/toolkit/router.py27
25 files changed, 1675 insertions, 2266 deletions
diff --git a/sugar_network/client/__init__.py b/sugar_network/client/__init__.py
index c863e98..446795a 100644
--- a/sugar_network/client/__init__.py
+++ b/sugar_network/client/__init__.py
@@ -40,16 +40,16 @@ def profile_path(*args):
return join(root_dir, *args)
-api_url = Option(
- 'url to connect to Sugar Network server API',
+api = Option(
+ 'url to connect to Sugar Network node API',
default='http://node-devel.sugarlabs.org', short_option='-a',
- name='api-url')
+ name='api')
certfile = Option(
- 'path to SSL certificate file to connect to server via HTTPS')
+ 'path to SSL certificate file to connect to node via HTTPS')
no_check_certificate = Option(
- 'do not check the server certificate against the available '
+ 'do not check the node certificate against the available '
'certificate authorities',
default=False, type_cast=Option.bool_cast, action='store_true')
@@ -57,10 +57,10 @@ local_root = Option(
'path to the directory to keep all local data',
default=profile_path('network'), name='local_root')
-server_mode = Option(
- 'start server to share local documents',
+node_mode = Option(
+ 'start node to share local documents',
default=False, type_cast=Option.bool_cast,
- action='store_true', name='server-mode')
+ action='store_true', name='node-mode')
delayed_start = Option(
'immediate start only database and the rest on getting '
@@ -86,10 +86,10 @@ layers = Option(
default=[], type_cast=Option.list_cast, type_repr=Option.list_repr,
name='layers')
-discover_server = Option(
- 'discover servers in local network instead of using --api-url',
+discover_node = Option(
+ 'discover nodes in local network instead of using --api',
default=False, type_cast=Option.bool_cast,
- action='store_true', name='discover_server')
+ action='store_true', name='discover-node')
cache_limit = Option(
'the minimal disk free space, in bytes, to preserve while recycling '
@@ -113,19 +113,19 @@ cache_timeout = Option(
default=3600, type_cast=int, name='cache-timeout')
login = Option(
- 'Sugar Labs account to connect to Sugar Network API server; '
+ 'Sugar Labs account to connect to Sugar Network API node; '
'should be set only if either password is provided or public key '
'for Sugar Labs account was uploaded to the Sugar Network',
name='login', short_option='-l')
password = Option(
- 'Sugar Labs account password to connect to Sugar Network API server '
+ 'Sugar Labs account password to connect to Sugar Network API node '
'using Basic authentication; if omitted, keys based authentication '
'will be used',
name='password', short_option='-p')
keyfile = Option(
- 'path to RSA private key to connect to Sugar Network API server',
+ 'path to RSA private key to connect to Sugar Network API node',
name='keyfile', short_option='-k', default='~/.ssh/sugar-network')
@@ -173,13 +173,13 @@ def stability(context):
def Connection(url=None, **args):
if url is None:
- url = api_url.value
+ url = api.value
return http.Connection(url, verify=not no_check_certificate.value, **args)
def IPCConnection():
return http.Connection(
- api_url='http://127.0.0.1:%s' % ipc_port.value,
+ api='http://127.0.0.1:%s' % ipc_port.value,
# Online ipc->client->node request might fail if node connection
# is lost in client process, so, re-send ipc request immediately
# to retrieve data from client in offline mode without propagating
diff --git a/sugar_network/client/cache.py b/sugar_network/client/cache.py
deleted file mode 100644
index 8bee316..0000000
--- a/sugar_network/client/cache.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# Copyright (C) 2012-2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# sugar-lint: disable
-
-import os
-import sys
-import time
-import logging
-from os.path import exists
-
-from sugar_network import client
-from sugar_network.db import blobs
-from sugar_network.toolkit import pylru, enforce
-
-
-_POOL_SIZE = 256
-
-_logger = logging.getLogger('cache')
-
-
-class Cache(object):
-
- def __init__(self):
- self._pool = None
- self._du = 0
- self._acquired = {}
-
- def __iter__(self):
- self._ensure_open()
- return iter(self._pool)
-
- @property
- def du(self):
- return self._du
-
- def ensure(self, requested_size, temp_size=0):
- self._ensure_open()
- to_free = self._to_free(requested_size, temp_size)
- if to_free <= 0:
- return
- enforce(self._du >= to_free, 'No free disk space')
- for guid, size, mtime in self._reversed_iter():
- self._checkout(guid, (size, mtime))
- to_free -= size
- if to_free <= 0:
- break
-
- def acquire(self, guid, size):
- self.checkout(guid)
- self._acquired.setdefault(guid, [0, size])[0] += 1
- return guid
-
- def release(self, *guids):
- for guid in guids:
- acquired = self._acquired.get(guid)
- if acquired is None:
- continue
- acquired[0] -= 1
- if acquired[0] <= 0:
- self.checkin(guid, acquired[1])
- del self._acquired[guid]
-
- def checkin(self, digest, size):
- self._ensure_open()
- if digest in self._pool:
- self._pool.__getitem__(digest)
- return
-
-
-
- _logger.debug('Checkin %r %d bytes long', guid, size)
-
- mtime = os.stat(files.get(digest).path).st_mtime
- self._pool[digest] = (size, mtime)
- self._du += size
-
- def checkout(self, guid, *args):
- self._ensure_open()
- if guid not in self._pool:
- return False
- _logger.debug('Checkout %r', guid)
- size, __ = self._pool.peek(guid)
- self._du -= size
- del self._pool[guid]
- return True
-
- def recycle(self):
- self._ensure_open()
- ts = time.time()
- to_free = self._to_free(0, 0)
- for guid, size, mtime in self._reversed_iter():
- if to_free > 0:
- self._checkout(guid, (size, mtime))
- to_free -= size
- elif client.cache_lifetime.value and \
- client.cache_lifetime.value < (ts - mtime) / 86400.0:
- self._checkout(guid, (size, None))
- else:
- break
-
- def _ensure_open(self):
- if self._pool is not None:
- return
-
- _logger.debug('Open releases pool')
-
- pool = []
- for release in self._volume['release'].find(not_layer=['local'])[0]:
- meta = files.get(release['data'])
- if not meta:
- continue
-
- """
- TODO
-
- solution_path = client.path('solutions', release['context'])
- if exists(solution_path):
- with file(path) as f:
- cached_api_url, cached_stability, solution = json.load(f)
- if solution[0]['guid'] == release['guid']:
- continue
-
- """
- pool.append((
- os.stat(meta.path).st_mtime,
- release.guid,
- meta.get('unpack_size') or meta['blob_size'],
- ))
-
- self._pool = pylru.lrucache(_POOL_SIZE, self._checkout)
- for mtime, guid, size in sorted(pool):
- self._pool[guid] = (size, mtime)
- self._du += size
-
- def _to_free(self, requested_size, temp_size):
- if not client.cache_limit.value and \
- not client.cache_limit_percent.value:
- return 0
-
- stat = os.statvfs(client.local_root.value)
- if stat.f_blocks == 0:
- # TODO Sounds like a tmpfs or so
- return 0
-
- limit = sys.maxint
- free = stat.f_bfree * stat.f_frsize
- if client.cache_limit_percent.value:
- total = stat.f_blocks * stat.f_frsize
- limit = client.cache_limit_percent.value * total / 100
- if client.cache_limit.value:
- limit = min(limit, client.cache_limit.value)
- to_free = max(limit, temp_size) - (free - requested_size)
-
- if to_free > 0:
- _logger.debug(
- 'Need to recycle %d bytes, '
- 'free_size=%d requested_size=%d temp_size=%d',
- to_free, free, requested_size, temp_size)
- return to_free
-
- def _reversed_iter(self):
- i = self._pool.head.prev
- while True:
- while i.empty:
- if i is self._pool.head:
- return
- i = i.prev
- size, mtime = i.value
- yield i.key, size, mtime
- if i is self._pool.head:
- break
- i = i.next
-
- def _checkout(self, guid, value):
- size, mtime = value
- if mtime is None:
- _logger.debug('Recycle stale %r to save %s bytes', guid, size)
- else:
- _logger.debug('Recycle %r to save %s bytes', guid, size)
- self._volume['release'].delete(guid)
- self._du -= size
- if guid in self._pool:
- del self._pool[guid]
diff --git a/sugar_network/client/injector.py b/sugar_network/client/injector.py
new file mode 100644
index 0000000..12baf51
--- /dev/null
+++ b/sugar_network/client/injector.py
@@ -0,0 +1,463 @@
+# Copyright (C) 2012-2014 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import re
+import sys
+import json
+import time
+import random
+import hashlib
+import logging
+from os.path import exists, join
+
+from sugar_network import toolkit
+from sugar_network.client import packagekit, journal, profile_path
+from sugar_network.toolkit.spec import format_version
+from sugar_network.toolkit.bundle import Bundle
+from sugar_network.toolkit import lsb_release, coroutine, i18n, pylru, http
+from sugar_network.toolkit import enforce
+
+
+_PREEMPTIVE_POOL_SIZE = 256
+
+_logger = logging.getLogger('client.injector')
+
+
+class Injector(object):
+
+ seqno = 0
+
+ def __init__(self, root, lifetime=None, limit_bytes=None,
+ limit_percent=None):
+ self._root = root
+ self._pool = _PreemptivePool(join(root, 'releases'), lifetime,
+ limit_bytes, limit_percent)
+ self._api = None
+ self._checkins = toolkit.Bin(join(root, 'checkins'), {})
+
+ for dir_name in ('solutions', 'releases'):
+ dir_path = join(root, dir_name)
+ if not exists(dir_path):
+ os.makedirs(dir_path)
+
+ @property
+ def api(self):
+ if self._api is not None:
+ return self._api.url
+
+ @api.setter
+ def api(self, value):
+ if not value:
+ self._api = None
+ else:
+ self._api = http.Connection(value)
+
+ def close(self):
+ self._pool.close()
+
+ def recycle(self):
+ self._pool.recycle()
+
+ def launch(self, context, stability='stable', app=None, activity_id=None,
+ object_id=None, uri=None, args=None):
+ if object_id and not activity_id:
+ activity_id = journal.get(object_id, 'activity_id')
+ if not activity_id:
+ activity_id = _activity_id_new()
+ yield {'activity_id': activity_id}
+
+ yield {'event': 'launch', 'state': 'init'}
+ releases = []
+ acquired = []
+ checkedin = {}
+ environ = {}
+
+ def acquire(ctx):
+ solution = self._solve(ctx, stability)
+ environ.update({'context': ctx, 'solution': solution})
+ self._pool.pop(solution.values())
+ if ctx in self._checkins:
+ checkedin[ctx] = (self.api, stability, self.seqno)
+ else:
+ _logger.debug('Acquire %r', ctx)
+ acquired.extend(solution.values())
+ releases.extend(solution.values())
+ release = solution[ctx]
+ return release, self._pool.path(release['blob'])
+
+ try:
+ yield {'event': 'launch', 'state': 'solve'}
+ release, path = acquire(context)
+ if app is None and \
+ release['content-type'] != 'application/vnd.olpc-sugar':
+ app = _app_by_mimetype(release['content-type'])
+ enforce(app, 'Cannot find proper application')
+ if app is None:
+ _logger.debug('Execute %r', context)
+ else:
+ uri = path
+ environ['document'] = release['blob']
+ release, path = acquire(app)
+ _logger.debug('Open %r in %r', context, app)
+ context = app
+
+ for event in self._download(releases):
+ event['event'] = 'launch'
+ yield event
+ for event in self._install(releases):
+ event['event'] = 'launch'
+ yield event
+
+ if args is None:
+ args = []
+ args.extend(['-b', context])
+ args.extend(['-a', activity_id])
+ if object_id:
+ args.extend(['-o', object_id])
+ if uri:
+ args.extend(['-u', uri])
+ child = _exec(context, release, path, args, environ)
+ yield {'event': 'launch', 'state': 'exec'}
+
+ yield environ
+ status = child.wait()
+ finally:
+ if acquired:
+ _logger.debug('Release acquired contexts')
+ self._pool.push(acquired)
+
+ if checkedin:
+ with self._checkins as checkins:
+ checkins.update(checkedin)
+
+ _logger.debug('Exit %s[%s]: %r', context, child.pid, status)
+ enforce(status == 0, 'Process exited with %r status', status)
+ yield {'event': 'launch', 'state': 'exit'}
+
+ def checkin(self, context, stability='stable'):
+ if context in self._checkins:
+ _logger.debug('Refresh %r checkin', context)
+ else:
+ _logger.debug('Checkin %r', context)
+ yield {'event': 'checkin', 'state': 'solve'}
+ solution = self._solve(context, stability)
+ for event in self._download(solution.values()):
+ event['event'] = 'checkin'
+ yield event
+ self._pool.pop(solution.values())
+ with self._checkins as checkins:
+ checkins[context] = (self.api, stability, self.seqno)
+ yield {'event': 'checkin', 'state': 'ready'}
+
+ def checkout(self, context):
+ if context not in self._checkins:
+ return False
+ _logger.debug('Checkout %r', context)
+ with file(join(self._root, 'solutions', context)) as f:
+ __, __, __, solution = json.load(f)
+ self._pool.push(solution.values())
+ with self._checkins as checkins:
+ del checkins[context]
+ return True
+
+ def _solve(self, context, stability):
+ path = join(self._root, 'solutions', context)
+ solution = None
+
+ if exists(path):
+ with file(path) as f:
+ api, stability_, seqno, solution = json.load(f)
+ if self.api:
+ if api != self.api or \
+ stability_ and set(stability_) != set(stability) or \
+ seqno < self.seqno or \
+ int(os.stat(path).st_mtime) < packagekit.mtime():
+ _logger.debug('Reset stale %r solution', context)
+ solution = None
+ else:
+ _logger.debug('Reuse cached %r solution', context)
+ else:
+ _logger.debug('Reuse cached %r solution in offline', context)
+
+ if not solution:
+ enforce(self.api, 'Cannot solve in offline')
+ _logger.debug('Solve %r', context)
+ solution = self._api.get(['context', context], cmd='solve',
+ stability=stability, lsb_id=lsb_release.distributor_id(),
+ lsb_release=lsb_release.release())
+ with toolkit.new_file(path) as f:
+ json.dump((self.api, stability, self.seqno, solution), f)
+
+ return solution
+
+ def _download(self, solution):
+ to_download = []
+ download_size = 0
+ size = 0
+
+ for release in solution:
+ digest = release.get('blob')
+ if not digest or exists(self._pool.path(digest)):
+ continue
+ enforce(self._api is not None, 'Cannot download in offline')
+ download_size = max(download_size, release['size'])
+ size += release.get('unpack_size') or release['size']
+ to_download.append((digest, release))
+
+ if not to_download:
+ return
+
+ self._pool.ensure(size, download_size)
+ for digest, release in to_download:
+ yield {'state': 'download'}
+ with toolkit.NamedTemporaryFile() as tmp_file:
+ self._api.download(['blobs', digest], tmp_file.name)
+ path = self._pool.path(digest)
+ if 'unpack_size' in release:
+ with Bundle(tmp_file, 'application/zip') as bundle:
+ bundle.extractall(path, prefix=bundle.rootdir)
+ for exec_dir in ('bin', 'activity'):
+ bin_path = join(path, exec_dir)
+ if not exists(bin_path):
+ continue
+ for filename in os.listdir(bin_path):
+ os.chmod(join(bin_path, filename), 0755)
+ else:
+ os.rename(tmp_file.name, path)
+
+ def _install(self, solution):
+ to_install = []
+
+ for release in solution:
+ packages = release.get('packages')
+ if packages:
+ to_install.extend(packages)
+
+ if to_install:
+ yield {'state': 'install'}
+ packagekit.install(to_install)
+
+
+class _PreemptivePool(object):
+
+ def __init__(self, root, lifetime, limit_bytes, limit_percent):
+ self._root = root
+ self._lifetime = lifetime
+ self._limit_bytes = limit_bytes
+ self._limit_percent = limit_percent
+ self._lru = None
+ self._du = None
+
+ def __iter__(self):
+ """Least recently to most recently used iterator."""
+ if self._lru is None:
+ self._init()
+ i = self._lru.head.prev
+ while True:
+ while i.empty:
+ if i is self._lru.head:
+ return
+ i = i.prev
+ yield i.key, i.value
+ if i is self._lru.head:
+ break
+ i = i.prev
+
+ def close(self):
+ if self._lru is not None:
+ with toolkit.new_file(self._root + '.index') as f:
+ json.dump((self._du, [i for i in self]), f)
+ self._lru = None
+
+ def path(self, digest):
+ return join(self._root, digest)
+
+ def push(self, solution):
+ if self._lru is None:
+ self._init()
+ for release in solution:
+ digest = release.get('blob')
+ if not digest:
+ continue
+ path = join(self._root, digest)
+ if not exists(path):
+ continue
+ size = release.get('unpack_size') or release['size']
+ self._lru[digest] = (size, os.stat(path).st_mtime)
+ self._du += size
+ _logger.debug('Push %r release %s bytes', digest, size)
+
+ def pop(self, solution):
+ if self._lru is None:
+ self._init()
+ found = False
+ for release in solution:
+ digest = release.get('blob')
+ if digest and digest in self._lru:
+ self._pop(digest, False)
+ found = True
+ return found
+
+ def ensure(self, requested_size, temp_size=0):
+ if self._lru is None:
+ self._init()
+ to_free = self._to_free(requested_size, temp_size)
+ if to_free <= 0:
+ return
+ enforce(self._du >= to_free, 'No free disk space')
+ for digest, (size, __) in self:
+ self._pop(digest)
+ to_free -= size
+ if to_free <= 0:
+ break
+
+ def recycle(self):
+ if self._lru is None:
+ self._init()
+ ts = time.time()
+ to_free = self._to_free(0, 0)
+ for digest, (size, mtime) in self:
+ if to_free > 0:
+ self._pop(digest)
+ to_free -= size
+ elif self._lifetime and self._lifetime < (ts - mtime) / 86400.0:
+ self._pop(digest)
+ else:
+ break
+
+ def _init(self):
+ self._lru = pylru.lrucache(_PREEMPTIVE_POOL_SIZE, self._pop)
+ if not exists(self._root + '.index'):
+ self._du = 0
+ else:
+ with file(self._root + '.index') as f:
+ self._du, items = json.load(f)
+ for key, value in items:
+ self._lru[key] = value
+
+ def _pop(self, digest, unlink=True):
+ size, __ = self._lru.peek(digest)
+ _logger.debug('Pop %r release and save %s bytes', digest, size)
+ self._du -= size
+ del self._lru[digest]
+ path = join(self._root, digest)
+ if unlink and exists(path):
+ os.unlink(path)
+
+ def _to_free(self, requested_size, temp_size):
+ if not self._limit_bytes and not self._limit_percent:
+ return 0
+
+ stat = os.statvfs(self._root)
+ if stat.f_blocks == 0:
+ # TODO Sounds like a tmpfs or so
+ return 0
+
+ limit = sys.maxint
+ free = stat.f_bfree * stat.f_frsize
+ if self._limit_percent:
+ total = stat.f_blocks * stat.f_frsize
+ limit = self._limit_percent * total / 100
+ if self._limit_bytes:
+ limit = min(limit, self._limit_bytes)
+ to_free = max(limit, temp_size) - (free - requested_size)
+
+ if to_free > 0:
+ _logger.debug(
+ 'Need to recycle %d bytes, '
+ 'free_size=%d requested_size=%d temp_size=%d',
+ to_free, free, requested_size, temp_size)
+ return to_free
+
+
+def _exec(context, release, path, args, environ):
+ # pylint: disable-msg=W0212
+ datadir = profile_path('data', context)
+ logdir = profile_path('logs')
+
+ for i in [
+ join(datadir, 'instance'),
+ join(datadir, 'data'),
+ join(datadir, 'tmp'),
+ logdir,
+ ]:
+ if not exists(i):
+ os.makedirs(i)
+
+ log_path = toolkit.unique_filename(logdir, context + '.log')
+ environ['logs'] = [
+ profile_path('logs', 'shell.log'),
+ profile_path('logs', 'sugar-network-client.log'),
+ log_path,
+ ]
+
+ __, command = release['command']
+ args = command.split() + args
+ environ['args'] = args
+
+ child = coroutine.fork()
+ if child is not None:
+ _logger.debug('Exec %s[%s]: %r', context, child.pid, args)
+ return child
+ try:
+ with file('/dev/null', 'r') as f:
+ os.dup2(f.fileno(), 0)
+ with file(log_path, 'a+') as f:
+ os.dup2(f.fileno(), 1)
+ os.dup2(f.fileno(), 2)
+ toolkit.init_logging()
+
+ os.chdir(path)
+
+ environ = os.environ
+ environ['PATH'] = ':'.join([
+ join(path, 'activity'),
+ join(path, 'bin'),
+ environ['PATH'],
+ ])
+ environ['PYTHONPATH'] = path + ':' + environ.get('PYTHONPATH', '')
+ environ['SUGAR_BUNDLE_PATH'] = path
+ environ['SUGAR_BUNDLE_ID'] = context
+ environ['SUGAR_BUNDLE_NAME'] = i18n.decode(release['title'])
+ environ['SUGAR_BUNDLE_VERSION'] = format_version(release['version'])
+ environ['SUGAR_ACTIVITY_ROOT'] = datadir
+ environ['SUGAR_LOCALEDIR'] = join(path, 'locale')
+
+ os.execvpe(args[0], args, environ)
+ except BaseException:
+ logging.exception('Failed to execute %r args=%r', release, args)
+ finally:
+ os._exit(1)
+
+
+def _activity_id_new():
+ from uuid import getnode
+ data = '%s%s%s' % (
+ time.time(),
+ random.randint(10000, 100000),
+ getnode())
+ return hashlib.sha1(data).hexdigest()
+
+
+def _app_by_mimetype(mime_type):
+ import gconf
+ mime_type = _MIMETYPE_INVALID_CHARS.sub('_', mime_type)
+ key = '/'.join([_MIMETYPE_DEFAULTS_KEY, mime_type])
+ return gconf.client_get_default().get_string(key)
+
+
+_MIMETYPE_DEFAULTS_KEY = '/desktop/sugar/journal/defaults'
+_MIMETYPE_INVALID_CHARS = re.compile('[^a-zA-Z0-9-_/.]')
diff --git a/sugar_network/client/packagekit.py b/sugar_network/client/packagekit.py
index 782f09e..68772e8 100644
--- a/sugar_network/client/packagekit.py
+++ b/sugar_network/client/packagekit.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2010-2013 Aleksey Lim
+# Copyright (C) 2010-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -14,6 +14,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
+import re
import logging
from sugar_network.toolkit import lsb_release, gbus, enforce
@@ -37,7 +38,23 @@ def mtime():
return os.stat(_pms_path).st_mtime
-def resolve(names):
+def install(names):
+ ids = [i['pk_id'] for i in _resolve(names)]
+ while ids:
+ chunk = ids[:min(len(ids), _PK_MAX_INSTALL)]
+ del ids[:len(chunk)]
+
+ _logger.debug('Install %r', chunk)
+
+ resp = gbus.call(_pk, 'InstallPackages', True, chunk)
+ enforce(resp.error_code in (
+ 'package-already-installed',
+ 'all-packages-already-installed', None),
+ 'Installation failed: %s (%s)',
+ resp.error_details, resp.error_code)
+
+
+def _resolve(names):
result = {}
while names:
@@ -54,22 +71,6 @@ def resolve(names):
return result
-def install(packages):
- ids = [i['pk_id'] for i in packages]
- while ids:
- chunk = ids[:min(len(ids), _PK_MAX_INSTALL)]
- del ids[:len(chunk)]
-
- _logger.debug('Install %r', chunk)
-
- resp = gbus.call(_pk, 'InstallPackages', True, chunk)
- enforce(resp.error_code in (
- 'package-already-installed',
- 'all-packages-already-installed', None),
- 'Installation failed: %s (%s)',
- resp.error_details, resp.error_code)
-
-
class _Response(object):
def __init__(self):
@@ -104,10 +105,8 @@ def _pk(result, op, *args):
resp.error_details = details
def Package_cb(status, pk_id, summary):
- from sugar_network.client import solver
-
package_name, version, arch, __ = pk_id.split(';')
- clean_version = solver.try_cleanup_distro_version(version)
+ clean_version = _cleanup_distro_version(version)
if not clean_version:
_logger.warn('Cannot parse distribution version "%s" '
'for package "%s"', version, package_name)
@@ -117,7 +116,7 @@ def _pk(result, op, *args):
'pk_id': str(pk_id),
'version': clean_version,
'name': package_name,
- 'arch': solver.canonicalize_machine(arch),
+ 'arch': _canonicalize_machine(arch),
'installed': (status == 'installed'),
}
_logger.debug('Found: %r', package)
@@ -148,6 +147,51 @@ def _pk(result, op, *args):
op(*args)
+def _canonicalize_machine(arch):
+ arch = arch.lower()
+ if arch == 'x86':
+ return 'i386'
+ elif arch == 'amd64':
+ return 'x86_64'
+ elif arch == 'power macintosh':
+ return 'ppc'
+ elif arch == 'i86pc':
+ return 'i686'
+
+
+def _cleanup_distro_version(version):
+ if ':' in version:
+ # Skip 'epoch'
+ version = version.split(':', 1)[1]
+ version = version.replace('_', '-')
+ if '~' in version:
+ version, suffix = version.split('~', 1)
+ if suffix.startswith('pre'):
+ suffix = suffix[3:]
+ suffix = '-pre' + (_cleanup_distro_version(suffix) or '')
+ else:
+ suffix = ''
+ match = _VERSION_RE.match(version)
+ if match:
+ major, version, revision = match.groups()
+ if major is not None:
+ version = major[:-1].rstrip('.') + '.' + version
+ if revision is not None:
+ version = '%s-%s' % (version, revision[2:])
+ return version + suffix
+ return None
+
+
+_DOTTED_RE = r'[0-9]+(?:\.[0-9]+)*'
+# Matches a version number that would be a valid version without modification
+_RELEASE_RE = '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_DOTTED_RE, _DOTTED_RE)
+# This matches the interesting bits of distribution version numbers
+# (first matching group is for Java-style 6b17 or 7u9 syntax, or "major")
+_VERSION_RE = re.compile(
+ r'(?:[a-z])?({ints}\.?[bu])?({zero})(-r{ints})?'.format(
+ zero=_RELEASE_RE, ints=_DOTTED_RE))
+
+
if __name__ == '__main__':
import sys
from pprint import pprint
@@ -158,6 +202,6 @@ if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
if sys.argv[1] == 'install':
- install(resolve(sys.argv[2:]).values())
+ install(_resolve(sys.argv[2:]).values())
else:
- pprint(resolve(sys.argv[1:]))
+ pprint(_resolve(sys.argv[1:]))
diff --git a/sugar_network/client/releases.py b/sugar_network/client/releases.py
deleted file mode 100644
index c93a91a..0000000
--- a/sugar_network/client/releases.py
+++ /dev/null
@@ -1,392 +0,0 @@
-# Copyright (C) 2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# pylint: disable=E1101
-
-import os
-import re
-import sys
-import time
-import json
-import random
-import shutil
-import hashlib
-import logging
-from copy import deepcopy
-from os.path import join, exists, basename, dirname, relpath
-
-from sugar_network import client, toolkit
-from sugar_network.client.cache import Cache
-from sugar_network.client import journal, packagekit
-from sugar_network.toolkit.router import Request, Response, route
-from sugar_network.toolkit.bundle import Bundle
-from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import i18n, http, coroutine, enforce
-
-
-_MIMETYPE_DEFAULTS_KEY = '/desktop/sugar/journal/defaults'
-_MIMETYPE_INVALID_CHARS = re.compile('[^a-zA-Z0-9-_/.]')
-
-_logger = logging.getLogger('releases')
-
-
-class Routes(object):
-
- def __init__(self):
- self._node_mtime = None
- self._call = lambda **kwargs: \
- self._map_exceptions(self.fallback, **kwargs)
- self._cache = Cache()
-
- def invalidate_solutions(self, mtime):
- self._node_mtime = mtime
-
- @route('GET', ['context', None], cmd='path')
- def path(self, request):
- clone = self._solve(request)
- enforce(clone is not None, http.NotFound, 'No clones')
- return clone['path']
-
- @route('GET', ['context', None], cmd='launch', arguments={'args': list},
- mime_type='text/event-stream')
- def launch(self, request):
- activity_id = request.get('activity_id')
- if 'object_id' in request and not activity_id:
- activity_id = journal.get(request['object_id'], 'activity_id')
- if not activity_id:
- activity_id = _activity_id_new()
- request.session['activity_id'] = activity_id
-
- for context in self._checkin_context(request):
- yield {'event': 'launch', 'activity_id': activity_id}, request
-
- acquired = []
- try:
- impl = self._solve(request, context['type'])
- if 'activity' not in context['type']:
- app = request.get('context') or \
- _mimetype_context(impl['data']['mime_type'])
- enforce(app, 'Cannot find proper application')
- acquired += self._checkin(
- context, request, self._cache.acquire)
- request = Request(path=['context', app],
- object_id=impl['path'], session=request.session)
- for context in self._checkin_context(request):
- impl = self._solve(request, context['type'])
- acquired += self._checkin(
- context, request, self._cache.acquire)
-
- child = _exec(context, request, impl)
- yield {'event': 'exec', 'activity_id': activity_id}
- status = child.wait()
- finally:
- self._cache.release(*acquired)
-
- _logger.debug('Exit %s[%s]: %r', context.guid, child.pid, status)
- enforce(status == 0, 'Process exited with %r status', status)
- yield {'event': 'exit', 'activity_id': activity_id}
-
- @route('PUT', ['context', None], cmd='clone', arguments={'requires': list},
- mime_type='text/event-stream')
- def clone(self, request):
- enforce(not request.content or self.inline(), http.ServiceUnavailable,
- 'Not available in offline')
- for context in self._checkin_context(request, 'clone'):
- if request.content:
- impl = self._solve(request, context['type'])
- self._checkin(context, request, self._cache.checkout)
- yield {'event': 'ready'}
- else:
- clone = self._solve(request)
- meta = this.volume['release'].get(clone['guid']).meta('data')
- size = meta.get('unpack_size') or meta['blob_size']
- self._cache.checkin(clone['guid'], size)
-
- @route('GET', ['context', None], cmd='clone',
- arguments={'requires': list})
- def get_clone(self, request, response):
- return self._get_clone(request, response)
-
- @route('HEAD', ['context', None], cmd='clone',
- arguments={'requires': list})
- def head_clone(self, request, response):
- self._get_clone(request, response)
-
- @route('PUT', ['context', None], cmd='favorite')
- def favorite(self, request):
- for __ in self._checkin_context(request, 'favorite'):
- pass
-
- @route('GET', cmd='recycle')
- def recycle(self):
- return self._cache.recycle()
-
- def _map_exceptions(self, fun, *args, **kwargs):
- try:
- return fun(*args, **kwargs)
- except http.NotFound, error:
- if self.inline():
- raise
- raise http.ServiceUnavailable, error, sys.exc_info()[2]
-
- def _checkin_context(self, request, layer=None):
- contexts = this.volume['context']
- guid = request.guid
- if layer and not request.content and not contexts.exists(guid):
- return
-
- if not contexts.exists(guid):
- patch = self._call(method='GET', path=['context', guid], cmd='diff')
- contexts.merge(guid, patch)
- context = contexts.get(guid)
- if layer and bool(request.content) == (layer in context['layer']):
- return
-
- yield context
-
- if layer:
- if request.content:
- layer_value = set(context['layer']) | set([layer])
- else:
- layer_value = set(context['layer']) - set([layer])
- contexts.update(guid, {'layer': list(layer_value)})
- _logger.debug('Checked %r in: %r', guid, layer_value)
-
- def _solve(self, request, force_type=None):
- stability = request.get('stability') or \
- client.stability(request.guid)
-
- request.session['stability'] = stability
- request.session['logs'] = [
- client.profile_path('logs', 'shell.log'),
- client.profile_path('logs', 'sugar-network-client.log'),
- ]
-
- _logger.debug('Solving %r stability=%r', request.guid, stability)
-
- solution, stale = self._cache_solution_get(request.guid, stability)
- if stale is False:
- _logger.debug('Reuse cached %r solution', request.guid)
- elif solution is not None and (not force_type or not self.inline()):
- _logger.debug('Reuse stale %r solution', request.guid)
- elif not force_type:
- return None
- elif 'activity' in force_type:
- from sugar_network.client import solver
- solution = self._map_exceptions(solver.solve,
- self.fallback, request.guid, stability)
- else:
- response = Response()
- blob = self._call(method='GET', path=['context', request.guid],
- cmd='clone', stability=stability, response=response)
- release = response.meta
- release['mime_type'] = response.content_type
- release['size'] = response.content_length
- files.post(blob, digest=release['spec']['*-*']['bundle'])
- solution = [release]
-
- request.session['solution'] = solution
- return solution[0]
-
- def _checkin(self, context, request, cache_call):
- if 'clone' in context['layer']:
- cache_call = self._cache.checkout
-
- if 'activity' in context['type']:
- to_install = []
- for sel in request.session['solution']:
- if 'install' in sel:
- enforce(self.inline(), http.ServiceUnavailable,
- 'Installation is not available in offline')
- to_install.extend(sel.pop('install'))
- if to_install:
- packagekit.install(to_install)
-
- def cache_impl(sel):
- guid = sel['guid']
-
-
-
-
- data = files.get(guid)
-
- if data is not None:
- return cache_call(guid, data['unpack_size'])
-
- response = Response()
- blob = self._call(method='GET', path=['release', guid, 'data'],
- response=response)
-
- if 'activity' not in context['type']:
- self._cache.ensure(response.content_length)
- files.post(blob, response.meta, sel['data'])
- return cache_call(guid, response.content_length)
-
- with toolkit.mkdtemp(dir=files.path(sel['data'])) as blob_dir:
- self._cache.ensure(
- response.meta['unpack_size'],
- response.content_length)
- with toolkit.TemporaryFile() as tmp_file:
- shutil.copyfileobj(blob, tmp_file)
- tmp_file.seek(0)
- with Bundle(tmp_file, 'application/zip') as bundle:
- bundle.extractall(blob_dir, prefix=bundle.rootdir)
- for exec_dir in ('bin', 'activity'):
- bin_path = join(blob_dir, exec_dir)
- if not exists(bin_path):
- continue
- for filename in os.listdir(bin_path):
- os.chmod(join(bin_path, filename), 0755)
-
- files.update(sel['data'], response.meta)
- return cache_call(guid, response.meta['unpack_size'])
-
- result = []
- for sel in request.session['solution']:
- if 'path' not in sel and sel['stability'] != 'packaged':
- result.append(cache_impl(sel))
- self._cache_solution_set(context.guid,
- request.session['stability'], request.session['solution'])
- return result
-
- def _cache_solution_get(self, guid, stability):
- path = client.path('solutions', guid)
- solution = None
- if exists(path):
- try:
- with file(path) as f:
- cached_api_url, cached_stability, solution = json.load(f)
- except Exception, error:
- _logger.debug('Cannot open %r solution: %s', path, error)
- if solution is None:
- return None, None
-
- stale = (cached_api_url != client.api_url.value)
- if not stale and cached_stability is not None:
- stale = set(cached_stability) != set(stability)
- if not stale and self._node_mtime is not None:
- stale = (self._node_mtime > os.stat(path).st_mtime)
- if not stale:
- stale = (packagekit.mtime() > os.stat(path).st_mtime)
- return _CachedSolution(solution), stale
-
- def _cache_solution_set(self, guid, stability, solution):
- if isinstance(solution, _CachedSolution):
- return
- path = client.path('solutions', guid)
- if not exists(dirname(path)):
- os.makedirs(dirname(path))
- with file(path, 'w') as f:
- json.dump([client.api_url.value, stability, solution], f)
-
- def _get_clone(self, request, response):
- for context in self._checkin_context(request):
- if 'clone' not in context['layer']:
- return self._map_exceptions(self.fallback, request, response)
- release = this.volume['release'].get(self._solve(request)['guid'])
- response.meta = release.properties([
- 'guid', 'ctime', 'layer', 'author', 'tags',
- 'context', 'version', 'stability', 'license', 'notes', 'data',
- ])
- return release.meta('data')
-
-
-def _activity_id_new():
- from uuid import getnode
- data = '%s%s%s' % (
- time.time(),
- random.randint(10000, 100000),
- getnode())
- return hashlib.sha1(data).hexdigest()
-
-
-def _mimetype_context(mime_type):
- import gconf
- mime_type = _MIMETYPE_INVALID_CHARS.sub('_', mime_type)
- key = '/'.join([_MIMETYPE_DEFAULTS_KEY, mime_type])
- return gconf.client_get_default().get_string(key)
-
-
-def _exec(context, request, sel):
- # pylint: disable-msg=W0212
- datadir = client.profile_path('data', context.guid)
- logdir = client.profile_path('logs')
-
- for path in [
- join(datadir, 'instance'),
- join(datadir, 'data'),
- join(datadir, 'tmp'),
- logdir,
- ]:
- if not exists(path):
- os.makedirs(path)
-
- cmd = sel['data']['spec']['*-*']['commands']['activity']['exec']
- args = cmd.split() + [
- '-b', request.guid,
- '-a', request.session['activity_id'],
- ]
- if 'object_id' in request:
- args.extend(['-o', request['object_id']])
- if 'uri' in request:
- args.extend(['-u', request['uri']])
- if 'args' in request:
- args.extend(request['args'])
- request.session['args'] = args
-
- log_path = toolkit.unique_filename(logdir, context.guid + '.log')
- request.session['logs'].append(log_path)
-
- child = coroutine.fork()
- if child is not None:
- _logger.debug('Exec %s[%s]: %r', request.guid, child.pid, args)
- return child
-
- try:
- with file('/dev/null', 'r') as f:
- os.dup2(f.fileno(), 0)
- with file(log_path, 'a+') as f:
- os.dup2(f.fileno(), 1)
- os.dup2(f.fileno(), 2)
- toolkit.init_logging()
-
- impl_path = sel['path']
- os.chdir(impl_path)
-
- environ = os.environ
- environ['PATH'] = ':'.join([
- join(impl_path, 'activity'),
- join(impl_path, 'bin'),
- environ['PATH'],
- ])
- environ['PYTHONPATH'] = impl_path + ':' + \
- environ.get('PYTHONPATH', '')
- environ['SUGAR_BUNDLE_PATH'] = impl_path
- environ['SUGAR_BUNDLE_ID'] = context.guid
- environ['SUGAR_BUNDLE_NAME'] = \
- i18n.decode(context['title']).encode('utf8')
- environ['SUGAR_BUNDLE_VERSION'] = sel['version']
- environ['SUGAR_ACTIVITY_ROOT'] = datadir
- environ['SUGAR_LOCALEDIR'] = join(impl_path, 'locale')
-
- os.execvpe(args[0], args, environ)
- except BaseException:
- logging.exception('Failed to execute %r args=%r', sel, args)
- finally:
- os._exit(1)
-
-
-class _CachedSolution(list):
- pass
diff --git a/sugar_network/client/solver.py b/sugar_network/client/solver.py
deleted file mode 100644
index 84eb9cf..0000000
--- a/sugar_network/client/solver.py
+++ /dev/null
@@ -1,407 +0,0 @@
-# Copyright (C) 2010-2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# pylint: disable-msg=W0611,F0401,W0201,E1101,W0232
-
-import sys
-import logging
-from os.path import isabs, join, dirname
-
-from sugar_network.client import packagekit
-from sugar_network.toolkit.router import ACL
-from sugar_network.toolkit.spec import parse_version
-from sugar_network.toolkit import http, lsb_release
-
-sys.path.insert(0, join(dirname(__file__), '..', 'lib', 'zeroinstall'))
-
-from zeroinstall.injector import reader, model, arch as _arch
-from zeroinstall.injector.config import Config
-from zeroinstall.injector.driver import Driver
-from zeroinstall.injector.requirements import Requirements
-from zeroinstall.injector.arch import machine_ranks
-from zeroinstall.injector.distro import try_cleanup_distro_version
-
-
-_SUGAR_API_COMPATIBILITY = {
- '0.94': frozenset(['0.86', '0.88', '0.90', '0.92', '0.94']),
- }
-
-model.Interface.__init__ = lambda *args: _interface_init(*args)
-reader.check_readable = lambda *args, **kwargs: True
-reader.update_from_cache = lambda *args, **kwargs: None
-reader.load_feed_from_cache = lambda url, **kwargs: _load_feed(url)
-
-_logger = logging.getLogger('solver')
-_stability = None
-_call = None
-
-
-def canonicalize_machine(arch):
- if arch in ('noarch', 'all'):
- return None
- return _arch.canonicalize_machine(arch)
-
-
-def select_architecture(arches):
- """Select most appropriate, for the host system, machine architecture
-
- :param arches:
- list of architecture names to select
- :returns:
- one of passed architecture names, or, `None` if not any
-
- """
- result_rank = 9999
- result_arch = None
- for arch in arches:
- rank = machine_ranks.get(canonicalize_machine(arch))
- if rank is not None and rank < result_rank:
- result_rank = rank
- result_arch = arch
- return result_arch
-
-
-def solve(call, context, stability):
- global _call, _stability
-
- _call = call
- _stability = stability
-
- req = Requirements(context)
- # TODO
- req.command = 'activity'
- config = Config()
- driver = Driver(config, req)
- solver = driver.solver
- solver.record_details = True
- status = None
- ready = False
-
- while True:
- solver.solve(context, driver.target_arch, command_name=req.command)
- if ready and solver.ready:
- break
- ready = solver.ready
-
- resolved = None
- for url in solver.feeds_used:
- feed = config.iface_cache.get_feed(url)
- if feed is None:
- continue
- while feed.to_resolve:
- try:
- resolved = packagekit.resolve(feed.to_resolve.pop(0))
- except Exception, error:
- if feed.to_resolve:
- continue
- if status is None:
- status = call(method='GET', cmd='whoami')
- if status['route'] == 'offline':
- raise http.ServiceUnavailable(str(error))
- else:
- raise
- feed.resolve(resolved.values())
- feed.to_resolve = None
- if not resolved:
- break
-
- selections = solver.selections.selections
- missed = []
-
- top_summary = []
- dep_summary = []
- for iface, impls in solver.details.items():
- summary = (top_summary if iface.uri == context else dep_summary)
- summary.append(iface.uri)
- if impls:
- sel = selections.get(iface.uri)
- for impl, reason in impls:
- if not reason and sel is None:
- reason = 'wrong version'
- missed.append(iface.uri)
- if reason:
- reason = '(%s)' % reason
- summary.append('%s v%s %s' % (
- '*' if sel is not None and sel.impl is impl else ' ',
- impl.get_version(),
- reason or '',
- ))
- else:
- summary.append(' (no versions)')
- missed.append(iface.uri)
- _logger.debug('[%s] Solving results:\n%s',
- context, '\n'.join(top_summary + dep_summary))
-
- if not ready:
- # pylint: disable-msg=W0212
- reason_exception = solver.get_failure_reason()
- if reason_exception is not None:
- reason = reason_exception.message
- else:
- reason = 'Cannot find releases for %s' % ', '.join(missed)
- raise http.NotFound(reason)
-
- solution = []
- solution.append(_impl_new(config, context, selections[context]))
- for iface, sel in selections.items():
- if sel is not None and iface != context:
- solution.append(_impl_new(config, iface, sel))
-
- return solution
-
-
-def _interface_init(self, url):
- self.uri = url
- self.reset()
-
-
-def _impl_new(config, iface, sel):
- impl = sel.impl.sn_impl
- impl['context'] = iface
- if sel.local_path:
- impl['path'] = sel.local_path
- if sel.impl.to_install:
- impl['install'] = sel.impl.to_install
- return impl
-
-
-def _load_feed(context):
- feed = _Feed(context)
-
- if context == 'sugar':
- try:
- from jarabe import config
- host_version = '.'.join(config.version.split('.', 2)[:2])
- except ImportError:
- # XXX sweets-sugar binding might be not sourced
- host_version = '0.94'
- for version in _SUGAR_API_COMPATIBILITY.get(host_version) or []:
- feed.implement_sugar(version)
- feed.name = context
- return feed
-
- releases = None
- try:
- releases = _call(method='GET', path=['context', context, 'releases'])
- _logger.trace('[%s] Found feed: %r', context, releases)
- except http.ServiceUnavailable:
- _logger.trace('[%s] Failed to fetch the feed', context)
- raise
- except Exception:
- _logger.exception('[%s] Failed to fetch the feed', context)
- return None
-
- """
- for digest, release in releases:
- if [i for i in release['author'].values()
- if i['role'] & ACL.ORIGINAL] and \
- release['stability'] == _stability and \
- f
-
-
-
-
-
- stability=_stability,
- distro=lsb_release.distributor_id())
- """
-
- for impl in feed_content['releases']:
- feed.implement(impl)
-
-
-
- # XXX 0install fails on non-ascii `name` values
- feed.name = context
- feed.to_resolve = feed_content.get('packages')
- if not feed.to_resolve:
- _logger.trace('[%s] No compatible packages', context)
-
-
- if not feed.to_resolve and not feed.implementations:
- _logger.trace('[%s] No releases', context)
-
- return feed
-
-
-class _Feed(model.ZeroInstallFeed):
- # pylint: disable-msg=E0202
-
- def __init__(self, context):
- self.context = context
- self.local_path = None
- self.implementations = {}
- self.last_modified = None
- self.feeds = []
- self.metadata = []
- self.last_checked = None
- self.to_resolve = None
- self._package_implementations = []
-
- @property
- def url(self):
- return self.context
-
- @property
- def feed_for(self):
- return set([self.context])
-
- def resolve(self, packages):
- top_package = packages[0]
-
- impl = _Release(self, self.context, None)
- impl.version = parse_version(top_package['version'])
- impl.released = 0
- impl.arch = '*-%s' % (top_package['arch'] or '*')
- impl.upstream_stability = model.stability_levels['packaged']
- impl.to_install = [i for i in packages if not i['installed']]
- impl.add_download_source(self.context, 0, None)
- impl.sn_impl = {
- 'guid': self.context,
- 'license': None,
- 'version': top_package['version'],
- 'stability': 'packaged',
- }
-
- self.implementations[self.context] = impl
-
- def implement(self, release):
- impl_id = release['guid']
- spec = release['data']['spec']['*-*']
-
- impl = _Release(self, impl_id, None)
- impl.version = parse_version(release['version'])
- impl.released = 0
- impl.arch = '*-*'
- impl.upstream_stability = model.stability_levels['stable']
- impl.license = release.get('license') or []
- impl.requires = _read_requires(spec.get('requires'))
- impl.requires.extend(_read_requires(release.get('requires')))
- impl.sn_impl = release
-
- if isabs(impl_id):
- impl.local_path = impl_id
- else:
- impl.add_download_source(impl_id, 0, None)
-
- for name, command in spec['commands'].items():
- impl.commands[name] = _Command(name, command)
-
- for name, insert, mode in spec.get('bindings') or []:
- binding = model.EnvironmentBinding(name, insert, mode=mode)
- impl.bindings.append(binding)
-
- self.implementations[impl_id] = impl
-
- def implement_sugar(self, sugar_version):
- impl_id = 'sugar-%s' % sugar_version
- impl = _Release(self, impl_id, None)
- impl.version = parse_version(sugar_version)
- impl.released = 0
- impl.arch = '*-*'
- impl.upstream_stability = model.stability_levels['packaged']
- self.implementations[impl_id] = impl
- impl.sn_impl = {
- 'guid': impl_id,
- 'license': None,
- 'version': sugar_version,
- 'stability': 'packaged',
- }
-
-
-class _Release(model.ZeroInstallImplementation):
-
- to_install = None
- sn_impl = None
- license = None
-
- def is_available(self, stores):
- # Simplify solving
- return True
-
-
-class _Dependency(model.InterfaceDependency):
-
- def __init__(self, guid, data):
- self._importance = data.get('importance', model.Dependency.Essential)
- self._metadata = {}
- self.qdom = None
- self.interface = guid
- self.restrictions = []
- self.bindings = []
-
- for not_before, before in data.get('restrictions') or []:
- restriction = model.VersionRangeRestriction(
- not_before=parse_version(not_before),
- before=parse_version(before))
- self.restrictions.append(restriction)
-
- @property
- def context(self):
- return self.interface
-
- @property
- def metadata(self):
- return self._metadata
-
- @property
- def importance(self):
- return self._importance
-
- def get_required_commands(self):
- return []
-
- @property
- def command(self):
- pass
-
-
-class _Command(model.Command):
-
- def __init__(self, name, command):
- self.qdom = None
- self.name = name
- self._requires = _read_requires(command.get('requires'))
-
- @property
- def path(self):
- return 'doesnt_matter'
-
- @property
- def requires(self):
- return self._requires
-
- def get_runner(self):
- pass
-
- def __str__(self):
- return ''
-
- @property
- def bindings(self):
- return []
-
-
-def _read_requires(data):
- result = []
- for guid, dep_data in (data or {}).items():
- result.append(_Dependency(guid, dep_data))
- return result
-
-
-if __name__ == '__main__':
- from pprint import pprint
- logging.basicConfig(level=logging.DEBUG)
- pprint(solve(*sys.argv[1:]))
diff --git a/sugar_network/model/__init__.py b/sugar_network/model/__init__.py
index 77a322c..9e1aaf5 100644
--- a/sugar_network/model/__init__.py
+++ b/sugar_network/model/__init__.py
@@ -140,9 +140,10 @@ def load_bundle(blob, context=None, initial=False, extra_deps=None):
release['license'] = this.request['license']
if isinstance(release['license'], basestring):
release['license'] = [release['license']]
+ release['stability'] = 'stable'
release['bundles'] = {
'*-*': {
- 'bundle': blob.digest,
+ 'blob': blob.digest,
},
}
else:
diff --git a/sugar_network/node/model.py b/sugar_network/node/model.py
index 559f6b4..8de6038 100644
--- a/sugar_network/node/model.py
+++ b/sugar_network/node/model.py
@@ -21,7 +21,7 @@ from sugar_network.model import Release, context as base_context
from sugar_network.node import obs
from sugar_network.toolkit.router import ACL
from sugar_network.toolkit.coroutine import this
-from sugar_network.toolkit import spec, sat, http, coroutine, enforce
+from sugar_network.toolkit import spec, sat, http, coroutine, i18n, enforce
_logger = logging.getLogger('node.model')
@@ -133,7 +133,7 @@ def solve(volume, top_context, command=None, lsb_id=None, lsb_release=None,
top_context.guid, lsb_id, lsb_release, stability, top_requires)
def rate_release(digest, release):
- return [command in release['commands'],
+ return [command in release.get('commands', []),
_STABILITY_RATES.get(release['stability']) or 0,
release['version'],
digest,
@@ -186,17 +186,23 @@ def solve(volume, top_context, command=None, lsb_id=None, lsb_release=None,
for release in reversed(candidates):
digest = release[-1]
release = releases[digest]['value']
- release_info = {'version': release['version'], 'blob': digest}
+ release_info = {
+ 'title': i18n.decode(context['title'],
+ this.request.accept_language),
+ 'version': release['version'],
+ 'blob': digest,
+ }
blob = volume.blobs.get(digest)
if blob is not None:
release_info['size'] = blob.size
+ release_info['content-type'] = blob['content-type']
unpack_size = release['bundles']['*-*'].get('unpack_size')
if unpack_size is not None:
release_info['unpack_size'] = unpack_size
requires = release.get('requires') or {}
if top_requires and context.guid == top_context.guid:
requires.update(top_requires)
- if context.guid == top_context.guid:
+ if context.guid == top_context.guid and 'commands' in release:
cmd = release['commands'].get(command)
if cmd is None:
cmd_name, cmd = release['commands'].items()[0]
diff --git a/sugar_network/node/routes.py b/sugar_network/node/routes.py
index 86e4ce1..5fdb27e 100644
--- a/sugar_network/node/routes.py
+++ b/sugar_network/node/routes.py
@@ -113,22 +113,6 @@ class NodeRoutes(db.Routes, FrontRoutes):
content_type='application/json', content=release)
return blob.digest
- @route('PUT', [None, None], cmd='attach', acl=ACL.AUTH | ACL.SUPERUSER)
- def attach(self, request):
- # TODO Reading layer here is a race
- directory = self.volume[request.resource]
- doc = directory.get(request.guid)
- layer = list(set(doc['layer']) | set(request.content))
- directory.update(request.guid, {'layer': layer})
-
- @route('PUT', [None, None], cmd='detach', acl=ACL.AUTH | ACL.SUPERUSER)
- def detach(self, request):
- # TODO Reading layer here is a race
- directory = self.volume[request.resource]
- doc = directory.get(request.guid)
- layer = list(set(doc['layer']) - set(request.content))
- directory.update(request.guid, {'layer': layer})
-
@route('GET', ['context', None], cmd='solve',
arguments={'requires': list, 'stability': list},
mime_type='application/json')
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index 67ee7da..70868c0 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -21,7 +21,6 @@ import shutil
import logging
import tempfile
import collections
-from copy import deepcopy
from cStringIO import StringIO
from os.path import exists, join, islink, isdir, dirname, basename, abspath
from os.path import lexists, isfile
@@ -497,14 +496,12 @@ class Bin(object):
def __init__(self, path, default_value=None):
self._path = abspath(path)
self.value = default_value
- self._orig_value = None
if exists(self._path):
with file(self._path) as f:
self.value = json.load(f)
else:
self.commit()
- self._orig_value = deepcopy(self.value)
@property
def mtime(self):
@@ -514,27 +511,33 @@ class Bin(object):
return 0
def commit(self):
- """Store current value in a file.
-
- :returns:
- `True` if commit was happened
-
- """
- if self.value == self._orig_value:
- return False
+ """Store current value in a file."""
with new_file(self._path) as f:
json.dump(self.value, f)
f.flush()
os.fsync(f.fileno())
- self._orig_value = deepcopy(self.value)
- return True
def __enter__(self):
- return self
+ return self.value
def __exit__(self, exc_type, exc_value, traceback):
self.commit()
+ def __contains__(self, key):
+ return key in self.value
+
+ def __getitem__(self, key):
+ return self.value.get(key)
+
+ def __setitem__(self, key, value):
+ self.value[key] = value
+
+ def __delitem__(self, key):
+ del self.value[key]
+
+ def __getattr__(self, name):
+ return getattr(self.value, name)
+
class Seqno(Bin):
"""Sequence number counter with persistent storing in a file."""
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index 254e6f3..9b9754e 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -112,8 +112,8 @@ class Connection(object):
_Session = None
- def __init__(self, api='', auth=None, max_retries=0, **session_args):
- self.api = api
+ def __init__(self, url='', auth=None, max_retries=0, **session_args):
+ self.url = url
self.auth = auth
self._max_retries = max_retries
self._session_args = session_args
@@ -121,7 +121,7 @@ class Connection(object):
self._nonce = None
def __repr__(self):
- return '<Connection api=%s>' % self.api
+ return '<Connection url=%s>' % self.url
def __enter__(self):
return self
@@ -183,13 +183,10 @@ class Connection(object):
finally:
if isinstance(dst, basestring):
f.close()
+ return reply
def upload(self, path, data, **kwargs):
- if isinstance(data, basestring):
- with file(data, 'rb') as f:
- reply = self.request('POST', path, f, params=kwargs)
- else:
- reply = self.request('POST', path, data, params=kwargs)
+ reply = self.request('POST', path, data, params=kwargs)
if reply.headers.get('Content-Type') == 'application/json':
return json.loads(reply.content)
else:
@@ -203,7 +200,7 @@ class Connection(object):
if not path:
path = ['']
if not isinstance(path, basestring):
- path = '/'.join([i.strip('/') for i in [self.api] + path])
+ path = '/'.join([i.strip('/') for i in [self.url] + path])
try_ = 0
while True:
@@ -283,7 +280,7 @@ class Connection(object):
break
path = reply.headers['location']
if path.startswith('/'):
- path = self.api + path
+ path = self.url + path
if request.method != 'HEAD':
if reply.headers.get('Content-Type') == 'application/json':
@@ -432,7 +429,7 @@ class _Subscription(object):
if try_ == 0:
raise
toolkit.exception('Failed to read from %r subscription, '
- 'will resubscribe', self._client.api)
+ 'will resubscribe', self._client.url)
self._content = None
return _parse_event(line)
@@ -441,7 +438,7 @@ class _Subscription(object):
return self._content
params.update(self._condition)
params['cmd'] = 'subscribe'
- _logger.debug('Subscribe to %r, %r', self._client.api, params)
+ _logger.debug('Subscribe to %r, %r', self._client.url, params)
response = self._client.request('GET', params=params)
self._content = response.raw
return self._content
diff --git a/sugar_network/toolkit/parcel.py b/sugar_network/toolkit/parcel.py
index 43e6960..f09bdb5 100644
--- a/sugar_network/toolkit/parcel.py
+++ b/sugar_network/toolkit/parcel.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+#
# Copyright (C) 2012-2014 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index 48a04fe..8e23863 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -118,14 +118,13 @@ class Unauthorized(http.Unauthorized):
class Request(dict):
def __init__(self, environ=None, method=None, path=None, cmd=None,
- content=None, content_stream=None, content_type=None, session=None,
+ content=None, content_stream=None, content_type=None,
principal=None, **kwargs):
dict.__init__(self)
self.path = []
self.cmd = None
self.environ = {}
- self.session = session or {}
self.principal = principal
self._content = _NOT_SET
@@ -756,15 +755,17 @@ class Router(object):
commons['prop'] = request.prop
try:
for event in _event_stream(request, stream):
- event.update(commons)
- this.localcast(event)
+ if 'event' not in event:
+ commons.update(event)
+ else:
+ event.update(commons)
+ this.localcast(event)
except Exception, error:
_logger.exception('Event stream %r failed', request)
event = {'event': 'failure',
'exception': type(error).__name__,
'error': str(error),
}
- event.update(request.session)
event.update(commons)
this.localcast(event)
diff --git a/tests/__init__.py b/tests/__init__.py
index dc10cdb..1f5118c 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -15,13 +15,15 @@ import subprocess
from os.path import dirname, join, exists, abspath, isfile
from M2Crypto import DSA
-from gevent import monkey
-from sugar_network.toolkit import coroutine, http, mountpoints, Option, gbus, i18n, languages, parcel
+from sugar_network.toolkit import coroutine
+coroutine.inject()
+
+from sugar_network.toolkit import http, mountpoints, Option, gbus, i18n, languages, parcel
from sugar_network.toolkit.router import Router, Request
from sugar_network.toolkit.coroutine import this
-from sugar_network.client import IPCConnection, journal, routes as client_routes
-from sugar_network.client.routes import ClientRoutes, _Auth
+#from sugar_network.client import IPCConnection, journal, routes as client_routes
+#from sugar_network.client.routes import ClientRoutes, _Auth
from sugar_network import db, client, node, toolkit, model
from sugar_network.model.user import User
from sugar_network.model.context import Context
@@ -37,11 +39,6 @@ root = abspath(dirname(__file__))
tmproot = '/tmp/sugar_network.tests'
tmpdir = None
-monkey.patch_socket()
-monkey.patch_select()
-monkey.patch_ssl()
-monkey.patch_time()
-
gettext._default_localedir = join(root, 'data', 'locale')
languages.LANGUAGES = ['en', 'es', 'fr']
@@ -99,7 +96,8 @@ class Test(unittest.TestCase):
client.cache_limit_percent.value = 0
client.cache_lifetime.value = 0
client.keyfile.value = join(root, 'data', UID)
- client_routes._RECONNECT_TIMEOUT = 0
+ #client_routes._RECONNECT_TIMEOUT = 0
+ #journal._ds_root = tmpdir + '/datastore'
mountpoints._connects.clear()
mountpoints._found.clear()
mountpoints._COMPLETE_MOUNT_TIMEOUT = .1
@@ -107,7 +105,6 @@ class Test(unittest.TestCase):
obs._repos = {'base': [], 'presolve': []}
http._RECONNECTION_NUMBER = 0
toolkit.cachedir.value = tmpdir + '/tmp'
- journal._ds_root = tmpdir + '/datastore'
gbus.join()
db.Volume.model = [
@@ -117,11 +114,11 @@ class Test(unittest.TestCase):
'sugar_network.model.report',
]
- if tmp_root is None:
- self.override(_Auth, 'profile', lambda self: {
- 'name': 'test',
- 'pubkey': PUBKEY,
- })
+ #if tmp_root is None:
+ # self.override(_Auth, 'profile', lambda self: {
+ # 'name': 'test',
+ # 'pubkey': PUBKEY,
+ # })
os.makedirs('tmp')
@@ -286,9 +283,11 @@ class Test(unittest.TestCase):
coroutine.sleep(.1)
return pid
- def start_client(self, classes=None, routes=ClientRoutes):
+ def start_client(self, classes=None, routes=None):
if classes is None:
classes = [User, Context]
+ if routes is None:
+ routes = ClientRoutes
volume = db.Volume('client', classes)
self.client_routes = routes(volume, client.api.value)
self.client = coroutine.WSGIServer(
diff --git a/tests/units/client/__main__.py b/tests/units/client/__main__.py
index 849cf0b..5d48161 100644
--- a/tests/units/client/__main__.py
+++ b/tests/units/client/__main__.py
@@ -3,13 +3,12 @@
from __init__ import tests
from journal import *
-from solver import *
from routes import *
from offline_routes import *
from online_routes import *
from server_routes import *
-from cache import *
-from releases import *
+from injector import *
+from packagekit import *
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/client/cache.py b/tests/units/client/cache.py
deleted file mode 100755
index 51245ee..0000000
--- a/tests/units/client/cache.py
+++ /dev/null
@@ -1,319 +0,0 @@
-#!/usr/bin/env python
-# sugar-lint: disable
-
-import os
-import time
-import json
-import shutil
-from cStringIO import StringIO
-from os.path import exists
-
-from __init__ import tests
-
-from sugar_network import db
-from sugar_network.model.context import Context
-from sugar_network.client import cache_limit, cache_limit_percent, cache_lifetime, IPCConnection
-from sugar_network.client.cache import Cache
-from sugar_network.toolkit import http
-
-
-class CacheTest(tests.Test):
-
- def setUp(self):
- tests.Test.setUp(self)
-
- class statvfs(object):
- f_blocks = 100
- f_bfree = 100
- f_frsize = 1
-
- self.statvfs = statvfs
- self.override(os, 'statvfs', lambda *args: statvfs())
-
- def test_open(self):
- volume = db.Volume('db', [Context])
-
- volume['release'].create({
- 'guid': '1',
- 'context': 'context',
- 'license': ['GPL'],
- 'version': '1',
- 'stability': 'stable',
- 'data': {'blob_size': 1},
- })
- os.utime('db/release/1/1', (1, 1))
- volume['release'].create({
- 'guid': '5',
- 'context': 'context',
- 'license': ['GPL'],
- 'version': '5',
- 'stability': 'stable',
- 'data': {'blob_size': 5},
- })
- os.utime('db/release/5/5', (5, 5))
- volume['release'].create({
- 'guid': '2',
- 'context': 'context',
- 'license': ['GPL'],
- 'version': '2',
- 'stability': 'stable',
- 'data': {},
- })
- os.utime('db/release/2/2', (2, 2))
- volume['release'].create({
- 'guid': '3',
- 'context': 'context',
- 'license': ['GPL'],
- 'version': '3',
- 'stability': 'stable',
- })
- os.utime('db/release/3/3', (3, 3))
- volume['release'].create({
- 'guid': '4',
- 'context': 'context',
- 'license': ['GPL'],
- 'version': '4',
- 'stability': 'stable',
- 'data': {'blob_size': 4, 'unpack_size': 44},
- })
- os.utime('db/release/4/4', (4, 4))
-
- cache = Cache(volume)
- self.assertEqual(['5', '4', '1'], [i for i in cache])
-
- def test_open_IgnoreClones(self):
- volume = db.Volume('db', [Context])
-
- volume['context'].create({
- 'guid': 'context',
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- volume['release'].create({
- 'guid': 'impl',
- 'context': 'context',
- 'license': ['GPL'],
- 'version': '1',
- 'stability': 'stable',
- 'data': {'blob_size': 1},
- })
-
- cache = Cache(volume)
- self.assertEqual(['impl'], [i for i in cache])
-
- os.symlink('../../../release/im/impl', 'db/context/co/context/.clone')
- cache = Cache(volume)
- self.assertEqual([], [i for i in cache])
-
- def test_ensure_AfterOpen(self):
- volume = db.Volume('db', [Context])
-
- volume['release'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- os.utime('db/release/1/1', (1, 1))
- volume['release'].create({'data': {'blob_size': 2}, 'guid': '2', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- os.utime('db/release/2/2', (2, 2))
- volume['release'].create({'data': {'blob_size': 3}, 'guid': '3', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- os.utime('db/release/3/3', (3, 3))
- cache = Cache(volume)
- cache_limit.value = 10
- self.statvfs.f_bfree = 11
-
- self.assertRaises(RuntimeError, cache.ensure, 100, 0)
- assert volume['release'].exists('1')
- assert volume['release'].exists('2')
- assert volume['release'].exists('3')
-
- cache.ensure(1, 0)
- assert volume['release'].exists('1')
- assert volume['release'].exists('2')
- assert volume['release'].exists('3')
-
- cache.ensure(2, 0)
- assert not volume['release'].exists('1')
- assert volume['release'].exists('2')
- assert volume['release'].exists('3')
-
- cache.ensure(4, 0)
- assert not volume['release'].exists('2')
- assert not volume['release'].exists('3')
-
- self.assertRaises(RuntimeError, cache.ensure, 2, 0)
-
- def test_ensure_Live(self):
- volume = db.Volume('db', [Context])
-
- cache = Cache(volume)
- # To initiate the cache
- cache.ensure(0, 0)
-
- volume['release'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- cache.checkin('1', 1)
-
- cache_limit.value = 10
- self.statvfs.f_bfree = 10
- cache.ensure(1, 0)
- assert not volume['release'].exists('1')
- self.assertRaises(RuntimeError, cache.ensure, 1, 0)
-
- def test_ensure_ConsiderTmpSize(self):
- volume = db.Volume('db', [Context])
- volume['release'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
-
- cache = Cache(volume)
- cache_limit.value = 10
- self.statvfs.f_bfree = 10
-
- self.assertRaises(RuntimeError, cache.ensure, 1, 11)
- assert volume['release'].exists('1')
-
- cache.ensure(1, 10)
- assert not volume['release'].exists('1')
-
- def test_recycle(self):
- ts = time.time()
-
- volume = db.Volume('db', [Context])
- volume['release'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- os.utime('db/release/1/1', (ts - 1.5 * 86400, ts - 1.5 * 86400))
- volume['release'].create({'data': {'blob_size': 1}, 'guid': '2', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- os.utime('db/release/2/2', (ts - 2.5 * 86400, ts - 2.5 * 86400))
- volume['release'].create({'data': {'blob_size': 1}, 'guid': '3', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- os.utime('db/release/3/3', (ts - 3.5 * 86400, ts - 3.5 * 86400))
- cache = Cache(volume)
-
- cache_lifetime.value = 4
- cache.recycle()
- assert volume['release'].exists('1')
- assert volume['release'].exists('2')
- assert volume['release'].exists('3')
-
- cache_lifetime.value = 3
- cache.recycle()
- assert volume['release'].exists('1')
- assert volume['release'].exists('2')
- assert not volume['release'].exists('3')
-
- cache_lifetime.value = 1
- cache.recycle()
- assert not volume['release'].exists('1')
- assert not volume['release'].exists('2')
- assert not volume['release'].exists('3')
-
- cache.recycle()
-
- def test_checkin(self):
- volume = db.Volume('db', [Context])
- cache = Cache(volume)
-
- volume['release'].create({'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- volume['release'].create({'guid': '2', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- volume['release'].create({'guid': '3', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
-
- cache.checkin('1', 1)
- self.assertEqual(['1'], [i for i in cache])
- self.assertEqual(1, cache.du)
-
- cache.checkin('2', 2)
- self.assertEqual(['2', '1'], [i for i in cache])
- self.assertEqual(3, cache.du)
-
- cache.checkin('3', 3)
- self.assertEqual(['3', '2', '1'], [i for i in cache])
- self.assertEqual(6, cache.du)
-
- def test_checkout(self):
- local_volume = self.start_online_client()
- conn = IPCConnection()
- self.statvfs.f_blocks = 0
-
- bundle = self.zips(['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = context',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = stable',
- ]])
- impl1 = conn.upload(['release'], StringIO(bundle), cmd='submit', initial=True)
- print self.blobs[str(hash(bundle))]
- conn.put(['context', 'context'], True, cmd='clone')
- print self.blobs[str(hash(bundle))]
- return
- self.assertEqual([], [i for i in self.client_routes._cache])
- assert local_volume['release'].exists(impl1)
-
- conn.put(['context', 'context'], False, cmd='clone')
- self.assertEqual([impl1], [i for i in self.client_routes._cache])
- assert local_volume['release'].exists(impl1)
-
- impl2 = conn.upload(['release'], StringIO(self.zips(['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = context',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- 'stability = stable',
- ]])), cmd='submit', initial=True)
-
- shutil.rmtree('solutions')
- conn.put(['context', 'context'], True, cmd='clone')
- self.assertEqual([impl1], [i for i in self.client_routes._cache])
- assert local_volume['release'].exists(impl1)
- assert local_volume['release'].exists(impl2)
-
- conn.put(['context', 'context'], False, cmd='clone')
- self.assertEqual([impl2, impl1], [i for i in self.client_routes._cache])
- assert local_volume['release'].exists(impl1)
- assert local_volume['release'].exists(impl2)
-
- def test_Acquiring(self):
- volume = db.Volume('db', [Context])
- cache = Cache(volume)
-
- volume['release'].create({'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- volume['release'].create({'guid': '2', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
- volume['release'].create({'guid': '3', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
-
- cache.checkin('1', 1)
- self.assertEqual(['1'], [i for i in cache])
- self.assertEqual(1, cache.du)
-
- cache.acquire('1', 2)
- self.assertEqual([], [i for i in cache])
- self.assertEqual(0, cache.du)
- cache.acquire('1', 3)
- self.assertEqual([], [i for i in cache])
- self.assertEqual(0, cache.du)
- cache.acquire('2', 1)
- self.assertEqual([], [i for i in cache])
- self.assertEqual(0, cache.du)
- cache.acquire('2', 2)
- self.assertEqual([], [i for i in cache])
- self.assertEqual(0, cache.du)
- cache.acquire('2', 3)
- self.assertEqual([], [i for i in cache])
- self.assertEqual(0, cache.du)
-
- cache.release('1', '2')
- self.assertEqual([], [i for i in cache])
- self.assertEqual(0, cache.du)
- cache.release('1', '2')
- self.assertEqual(['1'], [i for i in cache])
- self.assertEqual(2, cache.du)
- cache.release('2')
- self.assertEqual(['2', '1'], [i for i in cache])
- self.assertEqual(3, cache.du)
-
- cache.release('1', '2')
- self.assertEqual(['2', '1'], [i for i in cache])
- self.assertEqual(3, cache.du)
-
-
-if __name__ == '__main__':
- tests.main()
diff --git a/tests/units/client/injector.py b/tests/units/client/injector.py
new file mode 100755
index 0000000..4b12fe2
--- /dev/null
+++ b/tests/units/client/injector.py
@@ -0,0 +1,968 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import os
+import time
+import json
+import shutil
+import hashlib
+from cStringIO import StringIO
+from os.path import exists, join, basename
+
+from __init__ import tests
+
+from sugar_network import db, client
+from sugar_network.client import Connection, keyfile, api, packagekit, injector as injector_
+from sugar_network.client.injector import _PreemptivePool, Injector
+from sugar_network.toolkit import http, lsb_release
+
+
+class InjectorTest(tests.Test):
+
+ def setUp(self):
+ tests.Test.setUp(self)
+
+ class statvfs(object):
+ f_blocks = 100
+ f_bfree = 999999999
+ f_frsize = 1
+
+ self.statvfs = statvfs
+ self.override(os, 'statvfs', lambda *args: statvfs())
+
+ def test_PreemptivePool_push(self):
+ cache = _PreemptivePool('releases', None, None, None)
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ self.touch(('releases/2', '2'))
+ self.utime('releases/2', 2)
+ self.touch(('releases/3', '3'))
+ self.utime('releases/3', 3)
+
+ cache.push([{'blob': '1', 'size': 11}])
+ cache.push([{'blob': '2', 'size': 1000, 'unpack_size': 22}])
+ cache.push([{'size': 2000}])
+ self.assertEqual([
+ ('1', (11, 1)),
+ ('2', (22, 2)),
+ ],
+ [i for i in cache])
+ self.assertEqual(33, cache._du)
+
+ cache.push([{'size': 3000}])
+ cache.push([{'blob': '3', 'size': 1000, 'unpack_size': 33}])
+ self.assertEqual([
+ ('1', (11, 1)),
+ ('2', (22, 2)),
+ ('3', (33, 3)),
+ ],
+ [i for i in cache])
+ self.assertEqual(66, cache._du)
+
+ def test_PreemptivePool_pop(self):
+ cache = _PreemptivePool('releases', None, None, None)
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ self.touch(('releases/2', '2'))
+ self.utime('releases/2', 2)
+ self.touch(('releases/3', '3'))
+ self.utime('releases/3', 3)
+
+ cache.push([{'blob': '1', 'size': 1}])
+ cache.push([{'blob': '2', 'size': 2}])
+ cache.push([{'blob': '3', 'size': 3}])
+ self.assertEqual(
+ [('1', (1, 1)), ('2', (2, 2)), ('3', (3, 3))],
+ [i for i in cache])
+ self.assertEqual(6, cache._du)
+
+ assert not cache.pop([{'blob': 'fake'}])
+ self.assertEqual(
+ [('1', (1, 1)), ('2', (2, 2)), ('3', (3, 3))],
+ [i for i in cache])
+ self.assertEqual(6, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+ assert exists('releases/3')
+
+ assert cache.pop([{'blob': '2'}])
+ self.assertEqual(
+ [('1', (1, 1)), ('3', (3, 3))],
+ [i for i in cache])
+ self.assertEqual(4, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+ assert exists('releases/3')
+
+ assert cache.pop([{'blob': '1'}])
+ self.assertEqual(
+ [('3', (3, 3))],
+ [i for i in cache])
+ self.assertEqual(3, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+ assert exists('releases/3')
+
+ assert cache.pop([{'blob': '3'}])
+ self.assertEqual(
+ [],
+ [i for i in cache])
+ self.assertEqual(0, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+ assert exists('releases/3')
+
+ def test_PreemptivePool_RestoreAfterClosing(self):
+ cache = _PreemptivePool('./releases', None, None, None)
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ cache.push([{'blob': '1', 'size': 1}])
+
+ self.assertEqual(
+ [('1', (1, 1))],
+ [i for i in cache])
+ self.assertEqual(1, cache._du)
+
+ assert not exists('releases.index')
+ cache.close()
+ assert exists('releases.index')
+
+ cache2 = _PreemptivePool('./releases', None, None, None)
+ self.assertEqual(
+ [('1', [1, 1])],
+ [i for i in cache2])
+ self.assertEqual(1, cache2._du)
+
+ def test_PreemptivePool_EnsureLimitInBytes(self):
+ cache = _PreemptivePool('./releases', None, 10, None)
+ self.statvfs.f_bfree = 11
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ cache.push([{'blob': '1', 'size': 5}])
+
+ self.touch(('releases/2', '2'))
+ self.utime('releases/2', 2)
+ cache.push([{'blob': '2', 'size': 5}])
+
+ self.assertRaises(RuntimeError, cache.ensure, 12, 0)
+ self.assertEqual(
+ [('1', (5, 1)), ('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(10, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(1, 0)
+ self.assertEqual(
+ [('1', (5, 1)), ('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(10, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(2, 0)
+ self.assertEqual(
+ [('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(1, 0)
+ self.assertEqual(
+ [('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ self.assertRaises(RuntimeError, cache.ensure, 7, 0)
+ self.assertEqual(
+ [('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(6, 0)
+ self.assertEqual(
+ [],
+ [i for i in cache])
+ self.assertEqual(0, cache._du)
+ assert not exists('releases/1')
+ assert not exists('releases/2')
+
+ def test_PreemptivePool_EnsureLimitInPercents(self):
+ cache = _PreemptivePool('./releases', None, None, 10)
+ self.statvfs.f_bfree = 11
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ cache.push([{'blob': '1', 'size': 5}])
+
+ self.touch(('releases/2', '2'))
+ self.utime('releases/2', 2)
+ cache.push([{'blob': '2', 'size': 5}])
+
+ self.assertRaises(RuntimeError, cache.ensure, 12, 0)
+ self.assertEqual(
+ [('1', (5, 1)), ('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(10, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(1, 0)
+ self.assertEqual(
+ [('1', (5, 1)), ('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(10, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(2, 0)
+ self.assertEqual(
+ [('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(1, 0)
+ self.assertEqual(
+ [('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ self.assertRaises(RuntimeError, cache.ensure, 7, 0)
+ self.assertEqual(
+ [('2', (5, 2))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ cache.ensure(6, 0)
+ self.assertEqual(
+ [],
+ [i for i in cache])
+ self.assertEqual(0, cache._du)
+ assert not exists('releases/1')
+ assert not exists('releases/2')
+
+ def test_PreemptivePool_EnsureWithTmpSize(self):
+ cache = _PreemptivePool('./releases', None, 10, None)
+ self.statvfs.f_bfree = 11
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ cache.push([{'blob': '1', 'size': 5}])
+
+ self.assertRaises(RuntimeError, cache.ensure, 7, 0)
+ self.assertEqual(
+ [('1', (5, 1))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert exists('releases/1')
+
+ cache.ensure(6, 0)
+ self.assertEqual(
+ [],
+ [i for i in cache])
+ self.assertEqual(0, cache._du)
+ assert not exists('releases/1')
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ cache.push([{'blob': '1', 'size': 5}])
+
+ cache.ensure(6, 10)
+ self.assertEqual(
+ [],
+ [i for i in cache])
+ self.assertEqual(0, cache._du)
+ assert not exists('releases/1')
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 1)
+ cache.push([{'blob': '1', 'size': 5}])
+
+ self.assertRaises(RuntimeError, cache.ensure, 6, 11)
+ self.assertEqual(
+ [('1', (5, 1))],
+ [i for i in cache])
+ self.assertEqual(5, cache._du)
+ assert exists('releases/1')
+
+ def test_PreemptivePool_RecycleByLifetime(self):
+ cache = _PreemptivePool('./releases', 1, None, None)
+
+ self.touch(('releases/1', '1'))
+ self.utime('releases/1', 0)
+ cache.push([{'blob': '1', 'size': 1}])
+ self.touch(('releases/2', '2'))
+ self.utime('releases/2', 86400)
+ cache.push([{'blob': '2', 'size': 1}])
+
+ self.override(time, 'time', lambda: 86400)
+ cache.recycle()
+ self.assertEqual(
+ [('1', (1, 0)), ('2', (1, 86400))],
+ [i for i in cache])
+ self.assertEqual(2, cache._du)
+ assert exists('releases/1')
+ assert exists('releases/2')
+
+ self.override(time, 'time', lambda: 86400 * 1.5)
+ cache.recycle()
+ self.assertEqual(
+ [('2', (1, 86400))],
+ [i for i in cache])
+ self.assertEqual(1, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ self.override(time, 'time', lambda: 86400 * 1.5)
+ cache.recycle()
+ self.assertEqual(
+ [('2', (1, 86400))],
+ [i for i in cache])
+ self.assertEqual(1, cache._du)
+ assert not exists('releases/1')
+ assert exists('releases/2')
+
+ self.override(time, 'time', lambda: 86400 * 2.5)
+ cache.recycle()
+ self.assertEqual(
+ [],
+ [i for i in cache])
+ self.assertEqual(0, cache._du)
+ assert not exists('releases/1')
+ assert not exists('releases/2')
+
+ def test_solve(self):
+ self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ activity_bundle = self.zips(('topdir/activity/activity.info', activity_info))
+ release = conn.upload(['context'], activity_bundle, cmd='submit', initial=True)
+
+ solution = {
+ 'context': {
+ 'blob': release,
+ 'command': ['activity', 'true'],
+ 'content-type': 'application/vnd.olpc-sugar',
+ 'size': len(activity_bundle),
+ 'title': 'Activity',
+ 'unpack_size': len(activity_info),
+ 'version': [[1], 0],
+ },
+ }
+ self.assertEqual(solution, injector._solve('context', 'stable'))
+ self.assertEqual([client.api.value, 'stable', 0, solution], json.load(file('client/solutions/context')))
+
+ def test_solve_FailInOffline(self):
+ self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = None
+ injector.seqno = 0
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ activity_bundle = self.zips(('topdir/activity/activity.info', activity_info))
+ release = conn.upload(['context'], activity_bundle, cmd='submit', initial=True)
+
+ self.assertRaises(RuntimeError, injector._solve, 'context', 'stable')
+
+ def test_solve_ReuseCachedSolution(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ]))), cmd='submit', initial=True)
+
+ assert 'context' in injector._solve('context', 'stable')
+ volume['context'].delete('context')
+ assert 'context' in injector._solve('context', 'stable')
+ os.unlink('client/solutions/context')
+ self.assertRaises(http.NotFound, injector._solve, 'context', 'stable')
+
+ def test_solve_InvalidateCachedSolution(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = 'http://127.0.0.1:7777'
+ injector.seqno = 1
+
+ conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ]))), cmd='submit', initial=True)
+ self.assertEqual([[1], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual(['http://127.0.0.1:7777', 'stable', 1], json.load(file('client/solutions/context'))[:-1])
+
+ conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ]))), cmd='submit')
+ self.assertEqual([[1], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual(['http://127.0.0.1:7777', 'stable', 1], json.load(file('client/solutions/context'))[:-1])
+ injector.seqno = 2
+ self.assertEqual([[2], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual(['http://127.0.0.1:7777', 'stable', 2], json.load(file('client/solutions/context'))[:-1])
+
+ conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 0',
+ 'license = Public Domain',
+ 'stability = testing',
+ ]))), cmd='submit')
+ self.assertEqual([[2], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual(['http://127.0.0.1:7777', 'stable', 2], json.load(file('client/solutions/context'))[:-1])
+ self.assertEqual([[0], 0], injector._solve('context', 'testing')['context']['version'])
+ self.assertEqual(['http://127.0.0.1:7777', 'testing', 2], json.load(file('client/solutions/context'))[:-1])
+
+ self.assertEqual([[2], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual(['http://127.0.0.1:7777', 'stable', 2], json.load(file('client/solutions/context'))[:-1])
+ conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 3',
+ 'license = Public Domain',
+ ]))), cmd='submit')
+ self.assertEqual([[2], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual(['http://127.0.0.1:7777', 'stable', 2], json.load(file('client/solutions/context'))[:-1])
+ injector.api = 'http://localhost:7777'
+ self.assertEqual([[3], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual(['http://localhost:7777', 'stable', 2], json.load(file('client/solutions/context'))[:-1])
+
+ def test_solve_ForceUsingStaleCachedSolutionInOffline(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ]))), cmd='submit', initial=True)
+ self.assertEqual([[1], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual([client.api.value, 'stable', 0], json.load(file('client/solutions/context'))[:-1])
+
+ injector.api = None
+ injector.seqno = 1
+ self.assertEqual([[1], 0], injector._solve('context', 'stable')['context']['version'])
+ self.assertEqual([client.api.value, 'stable', 0], json.load(file('client/solutions/context'))[:-1])
+
+ os.unlink('client/solutions/context')
+ self.assertRaises(RuntimeError, injector._solve, 'context', 'stable')
+
+ def test_download_SetExecPermissions(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ release = conn.upload(['context'], self.zips(
+ ('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])),
+ 'topdir/activity/foo',
+ 'topdir/bin/bar',
+ 'topdir/bin/probe',
+ 'topdir/file1',
+ 'topdir/test/file2',
+ ), cmd='submit', initial=True)
+ for __ in injector.checkin('context'):
+ pass
+
+ path = 'client/releases/%s/' % release
+ assert os.access(path + 'activity/foo', os.X_OK)
+ assert os.access(path + 'bin/bar', os.X_OK)
+ assert os.access(path + 'bin/probe', os.X_OK)
+ assert not os.access(path + 'file1', os.X_OK)
+ assert not os.access(path + 'test/file2', os.X_OK)
+
+ def test_checkin(self):
+ self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ activity_bundle = self.zips(('topdir/activity/activity.info', activity_info))
+ release = conn.upload(['context'], activity_bundle, cmd='submit', initial=True)
+
+ self.assertEqual([
+ {'event': 'checkin', 'state': 'solve'},
+ {'event': 'checkin', 'state': 'download'},
+ {'event': 'checkin', 'state': 'ready'},
+ ],
+ [i for i in injector.checkin('context')])
+
+ self.assertEqual(activity_info, file(join('client', 'releases', release, 'activity', 'activity.info')).read())
+ self.assertEqual([client.api.value, 'stable', 0, {
+ 'context': {
+ 'title': 'Activity',
+ 'unpack_size': len(activity_info),
+ 'version': [[1], 0],
+ 'command': ['activity', 'true'],
+ 'blob': release,
+ 'size': len(activity_bundle),
+ 'content-type': 'application/vnd.olpc-sugar',
+ }}],
+ json.load(file('client/solutions/context')))
+ self.assertEqual({
+ 'context': [client.api.value, 'stable', 0],
+ },
+ json.load(file('client/checkins')))
+ self.assertEqual(0, injector._pool._du)
+
+ self.assertEqual([
+ {'event': 'checkin', 'state': 'solve'},
+ {'event': 'checkin', 'state': 'ready'},
+ ],
+ [i for i in injector.checkin('context')])
+
+ def test_checkin_PreemptivePool(self):
+ self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ activity_bundle = self.zips(('topdir/activity/activity.info', activity_info))
+ release = conn.upload(['context'], activity_bundle, cmd='submit', initial=True)
+
+ for __ in injector.checkin('context'):
+ pass
+ assert exists(join('client', 'releases', release))
+ self.assertEqual({
+ 'context': [client.api.value, 'stable', 0],
+ },
+ json.load(file('client/checkins')))
+ self.assertEqual(0, injector._pool._du)
+
+ assert injector.checkout('context')
+ assert exists(join('client', 'releases', release))
+ self.assertEqual({
+ },
+ json.load(file('client/checkins')))
+ self.assertEqual(len(activity_info), injector._pool._du)
+
+ for __ in injector.checkin('context'):
+ pass
+ assert exists(join('client', 'releases', release))
+ self.assertEqual({
+ 'context': [client.api.value, 'stable', 0],
+ },
+ json.load(file('client/checkins')))
+ self.assertEqual(0, injector._pool._du)
+
+ assert injector.checkout('context')
+ assert not injector.checkout('context')
+
+ assert exists(join('client', 'releases', release))
+ self.assertEqual({
+ },
+ json.load(file('client/checkins')))
+ self.assertEqual(len(activity_info), injector._pool._du)
+
+ def test_checkin_Refresh(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ release1 = conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ]))), cmd='submit', initial=True)
+ for __ in injector.checkin('context'):
+ pass
+ assert exists('client/releases/%s' % release1)
+
+ release2 = conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ]))), cmd='submit')
+ injector.seqno = 1
+ for __ in injector.checkin('context'):
+ pass
+ assert exists('client/releases/%s' % release2)
+
+ def test_launch(self):
+ self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ activity_bundle = self.zips(('topdir/activity/activity.info', activity_info))
+ release = conn.upload(['context'], activity_bundle, cmd='submit', initial=True)
+
+ self.assertEqual([
+ {'activity_id': 'activity_id'},
+ {'event': 'launch', 'state': 'init'},
+ {'event': 'launch', 'state': 'solve'},
+ {'event': 'launch', 'state': 'download'},
+ {'event': 'launch', 'state': 'exec'},
+ {'context': 'context',
+ 'solution': {
+ 'context': {
+ 'title': 'Activity',
+ 'command': ['activity', 'true'],
+ 'content-type': 'application/vnd.olpc-sugar',
+ 'blob': hashlib.sha1(activity_bundle).hexdigest(),
+ 'size': len(activity_bundle),
+ 'unpack_size': len(activity_info),
+ 'version': [[1], 0],
+ },
+ },
+ 'logs': [
+ tests.tmpdir + '/.sugar/default/logs/shell.log',
+ tests.tmpdir + '/.sugar/default/logs/sugar-network-client.log',
+ tests.tmpdir + '/.sugar/default/logs/context.log',
+ ],
+ 'args': ['true', '-b', 'context', '-a', 'activity_id'],
+ },
+ {'event': 'launch', 'state': 'exit'},
+ ],
+ [i for i in injector.launch('context', activity_id='activity_id')])
+
+ def test_launch_PreemptivePool(self):
+ self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ activity_bundle = self.zips(('topdir/activity/activity.info', activity_info))
+ release = conn.upload(['context'], activity_bundle, cmd='submit', initial=True)
+
+ launch = injector.launch('context')
+ for event in launch:
+ if event.get('state') == 'exec':
+ break
+ assert exists(join('client', 'releases', release))
+ self.assertEqual(0, injector._pool._du)
+ for event in launch:
+ pass
+ assert exists(join('client', 'releases', release))
+ self.assertEqual(len(activity_info), injector._pool._du)
+
+ launch = injector.launch('context')
+ for event in launch:
+ if event.get('state') == 'exec':
+ break
+ assert exists(join('client', 'releases', release))
+ self.assertEqual(0, injector._pool._du)
+ for event in launch:
+ pass
+ assert exists(join('client', 'releases', release))
+ self.assertEqual(len(activity_info), injector._pool._du)
+
+ def test_launch_DonntAcquireCheckins(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector('client')
+ injector.api = client.api.value
+ injector.seqno = 0
+
+ conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ]))), cmd='submit', initial=True)
+ for __ in injector.launch('context'):
+ pass
+ assert injector._pool._du > 0
+
+ for __ in injector.checkin('context'):
+ pass
+ assert injector._pool._du == 0
+ for __ in injector.launch('context'):
+ pass
+ assert injector._pool._du == 0
+
+ def test_launch_RefreshCheckins(self):
+ self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector(tests.tmpdir + '/client')
+ injector.api = client.api.value
+ injector.seqno = 1
+
+ release1 = conn.upload(['context'], self.zips(
+ ('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = runner',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])),
+ ('topdir/activity/runner', '\n'.join([
+ '#!/bin/sh',
+ 'echo -n 1 > output',
+ ])),
+ ), cmd='submit', initial=True)
+ for __ in injector.checkin('context'):
+ pass
+ self.assertEqual(
+ {'event': 'launch', 'state': 'exit'},
+ [i for i in injector.launch('context')][-1])
+ self.assertEqual([client.api.value, 'stable', 1], json.load(file('client/solutions/context'))[:-1])
+ self.assertEqual('1', file('client/releases/%s/output' % release1).read())
+
+ release2 = conn.upload(['context'], self.zips(
+ ('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = runner',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ ])),
+ ('topdir/activity/runner', '\n'.join([
+ '#!/bin/sh',
+ 'echo -n 2 > output',
+ ])),
+ ), cmd='submit')
+ injector.seqno = 2
+ self.assertEqual(
+ {'event': 'launch', 'state': 'exit'},
+ [i for i in injector.launch('context')][-1])
+ self.assertEqual([client.api.value, 'stable', 2], json.load(file('client/solutions/context'))[:-1])
+ self.assertEqual('2', file('client/releases/%s/output' % release2).read())
+
+ def test_launch_InstallDeps(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector(tests.tmpdir + '/client')
+ injector.api = client.api.value
+ injector.seqno = 1
+
+ release1 = conn.upload(['context'], self.zips(('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'requires = package1; package2',
+ ]))), cmd='submit', initial=True)
+ distro = '%s-%s' % (lsb_release.distributor_id(), lsb_release.release())
+ volume['context'].create({
+ 'guid': 'package1', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ 'resolves': {
+ distro: {'version': [[1], 0], 'packages': ['pkg1', 'pkg2']},
+ },
+ },
+ })
+ volume['context'].create({
+ 'guid': 'package2', 'type': ['package'], 'title': {}, 'summary': {}, 'description': {}, 'releases': {
+ 'resolves': {
+ distro: {'version': [[1], 0], 'packages': ['pkg3', 'pkg4']},
+ },
+ },
+ })
+
+ packages = []
+ self.override(packagekit, 'install', lambda names: packages.extend(names))
+ events = [i for i in injector.launch('context')]
+ self.assertEqual({'event': 'launch', 'state': 'exit'}, events[-1])
+ assert {'event': 'launch', 'state': 'install'} in events
+ self.assertEqual(['pkg1', 'pkg2', 'pkg3', 'pkg4'], sorted(packages))
+
+ def test_launch_Document(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector(tests.tmpdir + '/client')
+ injector.api = client.api.value
+ injector.seqno = 1
+
+ volume['context'].create({'guid': 'book', 'type': ['book'], 'title': {}, 'summary': {}, 'description': {}})
+ book = conn.upload(['context'], 'book', cmd='submit', context='book', version='1', license='Public Domain')
+
+ app = conn.upload(['context'], self.zips(
+ ('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = app',
+ 'exec = runner',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])),
+ ('topdir/activity/runner', '\n'.join([
+ '#!/bin/sh',
+ 'echo -n $@ > output',
+ ])),
+ ), cmd='submit', initial=True)
+
+ self.assertEqual(
+ {'event': 'launch', 'state': 'exit'},
+ [i for i in injector.launch('book', activity_id='activity_id', app='app')][-1])
+
+ self.assertEqual(
+ '-b app -a activity_id -u %s/client/releases/%s' % (tests.tmpdir, book),
+ file('client/releases/%s/output' % app).read())
+
+ def test_launch_DocumentWithDetectingAppByMIMEType(self):
+ volume = self.start_master()
+ conn = Connection(auth=http.SugarAuth(keyfile.value))
+ injector = Injector(tests.tmpdir + '/client')
+ injector.api = client.api.value
+ injector.seqno = 1
+
+ volume['context'].create({'guid': 'book', 'type': ['book'], 'title': {}, 'summary': {}, 'description': {}})
+ book = conn.upload(['context'], 'book', cmd='submit', context='book', version='1', license='Public Domain')
+
+ app = conn.upload(['context'], self.zips(
+ ('topdir/activity/activity.info', '\n'.join([
+ '[Activity]',
+ 'name = Activity',
+ 'bundle_id = app',
+ 'exec = runner',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])),
+ ('topdir/activity/runner', '\n'.join([
+ '#!/bin/sh',
+ 'echo -n $@ > output',
+ ])),
+ ), cmd='submit', initial=True)
+
+ self.override(injector_, '_app_by_mimetype', lambda mime_type: 'app')
+ self.assertEqual(
+ {'event': 'launch', 'state': 'exit'},
+ [i for i in injector.launch('book', activity_id='activity_id')][-1])
+
+ self.assertEqual(
+ '-b app -a activity_id -u %s/client/releases/%s' % (tests.tmpdir, book),
+ file('client/releases/%s/output' % app).read())
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/client/packagekit.py b/tests/units/client/packagekit.py
new file mode 100755
index 0000000..05cd8eb
--- /dev/null
+++ b/tests/units/client/packagekit.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+from __init__ import tests
+
+from sugar_network.client.packagekit import _cleanup_distro_version
+
+
+class Packagekit(tests.Test):
+
+ def test_cleanup_distro_version(self):
+ self.assertEqual('0.3.1-1', _cleanup_distro_version('1:0.3.1-1'))
+ self.assertEqual('0.3.1-1', _cleanup_distro_version('0.3.1-1ubuntu0'))
+ self.assertEqual('0.3-post1-rc2', _cleanup_distro_version('0.3-post1-rc2'))
+ self.assertEqual('0.3.1-2', _cleanup_distro_version('0.3.1-r2-r3'))
+ self.assertEqual('6.17', _cleanup_distro_version('6b17'))
+ self.assertEqual('20-1', _cleanup_distro_version('b20_1'))
+ self.assertEqual('17', _cleanup_distro_version('p17'))
+ self.assertEqual('7-pre3-2.1.1-3', _cleanup_distro_version('7~u3-2.1.1-3')) # Debian snapshot
+ self.assertEqual('7-pre3-2.1.1-pre1-1', _cleanup_distro_version('7~u3-2.1.1~pre1-1ubuntu2'))
+ self.assertEqual(None, _cleanup_distro_version('cvs'))
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/client/releases.py b/tests/units/client/releases.py
deleted file mode 100755
index 30f938e..0000000
--- a/tests/units/client/releases.py
+++ /dev/null
@@ -1,502 +0,0 @@
-#!/usr/bin/env python
-# sugar-lint: disable
-
-import os
-import imp
-import json
-import time
-import pickle
-import shutil
-import zipfile
-import logging
-import hashlib
-from cStringIO import StringIO
-from os.path import exists, dirname
-
-from __init__ import tests
-
-from sugar_network.client import journal, releases, cache_limit
-from sugar_network.toolkit import coroutine, lsb_release
-from sugar_network.node import obs
-from sugar_network.model.user import User
-from sugar_network.model.context import Context
-from sugar_network.model.release import Release
-from sugar_network.client import IPCConnection, packagekit, solver
-from sugar_network.toolkit import http, Option
-from sugar_network import client
-
-
-class Releases(tests.Test):
-
- def setUp(self, fork_num=0):
- tests.Test.setUp(self, fork_num)
- self.override(obs, 'get_repos', lambda: [])
- self.override(obs, 'presolve', lambda *args: None)
-
- def test_InstallDeps(self):
- self.start_online_client()
- conn = IPCConnection()
-
- blob = self.zips(['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'requires = dep1; dep2',
- ]])
- impl = conn.upload(['release'], StringIO(blob), cmd='submit', initial=True)
-
- conn.post(['context'], {
- 'guid': 'dep1',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep1.bin']],
- },
- },
- })
- conn.post(['context'], {
- 'guid': 'dep2',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep2.bin']],
- },
- },
- })
-
- def resolve(names):
- with file('resolve', 'a') as f:
- pickle.dump(names, f)
- return dict([(i, {'name': i, 'pk_id': i, 'version': '0', 'arch': '*', 'installed': i == 'dep1.bin'}) for i in names])
-
- def install(packages):
- with file('install', 'a') as f:
- pickle.dump([i['name'] for i in packages], f)
-
- self.override(packagekit, 'resolve', resolve)
- self.override(packagekit, 'install', install)
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
-
- with file('resolve') as f:
- deps = [pickle.load(f),
- pickle.load(f),
- ]
- self.assertRaises(EOFError, pickle.load, f)
- self.assertEqual(
- sorted([['dep1.bin'], ['dep2.bin']]),
- sorted(deps))
- with file('install') as f:
- self.assertEqual(['dep2.bin'], pickle.load(f))
- self.assertRaises(EOFError, pickle.load, f)
-
- def test_SetExecPermissions(self):
- self.start_online_client()
- conn = IPCConnection()
-
- blob = self.zips(
- ['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]],
- 'TestActivity/activity/foo',
- 'TestActivity/bin/bar',
- 'TestActivity/bin/probe',
- 'TestActivity/file1',
- 'TestActivity/test/file2',
- )
- impl = conn.upload(['release'], StringIO(blob), cmd='submit', initial=True)
-
- conn.put(['context', 'bundle_id'], True, cmd='clone')
-
- path = 'client/release/%s/%s/data.blob/' % (impl[:2], impl)
- assert os.access(path + 'activity/foo', os.X_OK)
- assert os.access(path + 'bin/bar', os.X_OK)
- assert os.access(path + 'bin/probe', os.X_OK)
- assert not os.access(path + 'file1', os.X_OK)
- assert not os.access(path + 'test/file2', os.X_OK)
-
- def test_ReuseCachedSolution(self):
- self.start_online_client()
- conn = IPCConnection()
-
- activity_info = '\n'.join([
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = stable',
- ])
- blob = self.zips(['TestActivity/activity/activity.info', activity_info])
- impl = conn.upload(['release'], StringIO(blob), cmd='submit', initial=True)
- solution = ['http://127.0.0.1:8888', ['stable'], [{
- 'license': ['Public Domain'],
- 'stability': 'stable',
- 'version': '1',
- 'context': 'bundle_id',
- 'path': tests.tmpdir + '/client/release/%s/%s/data.blob' % (impl[:2], impl),
- 'guid': impl,
- 'layer': ['origin'],
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'ctime': self.node_volume['release'].get(impl).ctime,
- 'notes': {'en-us': ''},
- 'tags': [],
- 'data': {
- 'unpack_size': len(activity_info),
- 'blob_size': len(blob),
- 'digest': hashlib.sha1(blob).hexdigest(),
- 'mime_type': 'application/vnd.olpc-sugar',
- 'spec': {'*-*': {'commands': {'activity': {'exec': 'true'}}, 'requires': {}}},
- },
- }]]
- cached_path = 'solutions/bu/bundle_id'
-
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
- self.assertEqual(solution, json.load(file(cached_path)))
-
- os.utime(cached_path, (0, 0))
- self.assertEqual(solution, json.load(file(cached_path)))
- assert os.stat(cached_path).st_mtime == 0
-
- def test_InvalidaeCachedSolutions(self):
- self.start_online_client()
- conn = IPCConnection()
-
- conn.post(['context'], {
- 'guid': 'bundle_id',
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- solution = json.dumps(['http://127.0.0.1:8888', ['stable'], [{
- 'license': ['Public Domain'],
- 'stability': 'stable',
- 'version': '1',
- 'context': 'bundle_id',
- 'path': tests.tmpdir,
- 'guid': 'impl',
- 'data': {
- 'spec': {'*-*': {'commands': {'activity': {'exec': 'true'}}, 'requires': {}}},
- },
- }]])
- cached_path = 'solutions/bu/bundle_id'
- self.touch([cached_path, solution])
- cached_mtime = int(os.stat(cached_path).st_mtime)
-
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
-
- client.api.value = 'fake'
- self.assertEqual('NotFound', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['exception'])
- self.assertEqual(solution, file(cached_path).read())
-
- client.api.value = 'http://127.0.0.1:8888'
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
-
- self.client_routes._node_mtime = cached_mtime + 2
- self.assertEqual('NotFound', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['exception'])
- self.assertEqual(solution, file(cached_path).read())
-
- self.client_routes._node_mtime = cached_mtime
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
-
- self.override(packagekit, 'mtime', lambda: cached_mtime + 2)
- self.assertEqual('NotFound', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['exception'])
- self.assertEqual(solution, file(cached_path).read())
-
- self.override(packagekit, 'mtime', lambda: cached_mtime)
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
-
- self.touch(('config', [
- '[stabilities]',
- 'bundle_id = buggy',
- ]))
- Option.load(['config'])
- self.assertEqual('NotFound', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['exception'])
- self.assertEqual(solution, file(cached_path).read())
-
- self.touch(('config', [
- '[stabilities]',
- 'bundle_id = stable',
- ]))
- Option.load(['config'])
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
-
- def test_DeliberateReuseCachedSolutionInOffline(self):
- self.start_online_client()
- conn = IPCConnection()
-
- conn.post(['context'], {
- 'guid': 'bundle_id',
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- solution = json.dumps(['http://127.0.0.1:8888', ['stable'], [{
- 'license': ['Public Domain'],
- 'stability': 'stable',
- 'version': '1',
- 'context': 'bundle_id',
- 'path': tests.tmpdir,
- 'guid': 'impl',
- 'data': {
- 'spec': {'*-*': {'commands': {'activity': {'exec': 'true'}}, 'requires': {}}},
- },
- }]])
- self.touch(['solutions/bu/bundle_id', solution])
-
- client.api.value = 'fake'
- self.assertEqual('NotFound', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['exception'])
-
- self.node.stop()
- coroutine.sleep(.1)
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
-
- def test_StabilityPreferences(self):
- self.start_online_client()
- conn = IPCConnection()
-
- conn.upload(['release'], StringIO(self.zips(['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = stable',
- ]])), cmd='submit', initial=True)
- conn.upload(['release'], StringIO(self.zips(['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- 'stability = testing',
- ]])), cmd='submit')
- conn.upload(['release'], StringIO(self.zips(['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 3',
- 'license = Public Domain',
- 'stability = buggy',
- ]])), cmd='submit')
- cached_path = 'solutions/bu/bundle_id'
-
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
- self.assertEqual('1', json.load(file(cached_path))[2][0]['version'])
-
- self.touch(('config', [
- '[stabilities]',
- 'bundle_id = testing',
- ]))
- Option.load(['config'])
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
- self.assertEqual('2', json.load(file(cached_path))[2][0]['version'])
-
- self.touch(('config', [
- '[stabilities]',
- 'bundle_id = testing buggy',
- ]))
- Option.load(['config'])
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
- self.assertEqual('3', json.load(file(cached_path))[2][0]['version'])
-
- self.touch(('config', [
- '[stabilities]',
- 'default = testing',
- ]))
- Option.load(['config'])
- self.assertEqual('exit', [i for i in conn.get(['context', 'bundle_id'], cmd='launch')][-1]['event'])
- self.assertEqual('2', json.load(file(cached_path))[2][0]['version'])
-
- def test_LaunchContext(self):
- self.start_online_client()
- conn = IPCConnection()
-
- app = conn.upload(['release'], StringIO(self.zips(
- ['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = activity',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]],
- ['TestActivity/bin/activity', [
- '#!/bin/sh',
- 'cat $6',
- ]],
- )), cmd='submit', initial=True)
-
- conn.post(['context'], {
- 'guid': 'document',
- 'type': 'book',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- doc = conn.post(['release'], {
- 'context': 'document',
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- })
- self.node_volume['release'].update(doc, {'data': {
- 'mime_type': 'application/octet-stream',
- 'blob': StringIO('content'),
- }})
-
- self.assertEqual('exit', [i for i in conn.get(['context', 'document'], cmd='launch', context='bundle_id')][-1]['event'])
- coroutine.sleep(.1)
- self.assertEqual('content', file('.sugar/default/logs/bundle_id.log').read())
-
- def test_CreateAllImplPropsOnCheckin(self):
- home_volume = self.start_online_client()
- conn = IPCConnection()
-
- blob = self.zips(
- ['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]],
- )
- impl = conn.upload(['release'], StringIO(blob), cmd='submit', initial=True)
- conn.put(['context', 'bundle_id'], True, cmd='clone')
-
- doc = home_volume['release'].get(impl)
- assert doc.meta('ctime') is not None
- assert doc.meta('mtime') is not None
- assert doc.meta('seqno') is not None
- self.assertEqual({tests.UID: {'name': 'test', 'order': 0, 'role': 3}}, doc.meta('author')['value'])
- self.assertEqual(['origin'], doc.meta('layer')['value'])
- self.assertEqual('bundle_id', doc.meta('context')['value'])
- self.assertEqual(['Public Domain'], doc.meta('license')['value'])
- self.assertEqual('1', doc.meta('version')['value'])
- self.assertEqual('stable', doc.meta('stability')['value'])
- self.assertEqual({'en-us': ''}, doc.meta('notes')['value'])
- self.assertEqual([], doc.meta('tags')['value'])
-
- def test_LaunchAcquiring(self):
- volume = self.start_online_client()
- conn = IPCConnection()
-
- app = conn.upload(['release'], StringIO(self.zips(
- ['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = activity',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]],
- ['TestActivity/bin/activity', [
- '#!/bin/sh',
- 'sleep 1',
- ]],
- )), cmd='submit', initial=True)
-
- conn.post(['context'], {
- 'guid': 'document',
- 'type': 'book',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- doc = conn.post(['release'], {
- 'context': 'document',
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- })
- self.node_volume['release'].update(doc, {'data': {
- 'mime_type': 'application/octet-stream',
- 'blob': StringIO('content'),
- }})
-
- launch = conn.get(['context', 'document'], cmd='launch', context='bundle_id')
- self.assertEqual('launch', next(launch)['event'])
- self.assertEqual('exec', next(launch)['event'])
-
- class statvfs(object):
- f_blocks = 100
- f_bfree = 10
- f_frsize = 1
- self.override(os, 'statvfs', lambda *args: statvfs())
- cache_limit.value = 10
-
- self.assertRaises(RuntimeError, self.client_routes._cache.ensure, 1, 0)
- assert volume['release'].exists(app)
- assert volume['release'].exists(doc)
- self.assertEqual([], [i for i in self.client_routes._cache])
-
- self.assertEqual('exit', next(launch)['event'])
- self.assertEqual([app, doc], [i for i in self.client_routes._cache])
-
- def test_NoAcquiringForClones(self):
- volume = self.start_online_client()
- conn = IPCConnection()
-
- app = conn.upload(['release'], StringIO(self.zips(
- ['TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = activity',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]],
- ['TestActivity/bin/activity', [
- '#!/bin/sh',
- 'sleep 1',
- ]],
- )), cmd='submit', initial=True)
-
- conn.put(['context', 'bundle_id'], True, cmd='clone')
- self.assertEqual([], [i for i in self.client_routes._cache])
-
- launch = conn.get(['context', 'bundle_id'], cmd='launch')
- self.assertEqual('launch', next(launch)['event'])
- self.assertEqual('exec', next(launch)['event'])
- self.assertEqual([], [i for i in self.client_routes._cache])
- self.assertEqual('exit', next(launch)['event'])
- self.assertEqual([], [i for i in self.client_routes._cache])
-
-
-if __name__ == '__main__':
- tests.main()
diff --git a/tests/units/client/solver.py b/tests/units/client/solver.py
deleted file mode 100755
index 29e1472..0000000
--- a/tests/units/client/solver.py
+++ /dev/null
@@ -1,277 +0,0 @@
-#!/usr/bin/env python
-# sugar-lint: disable
-
-import os
-import imp
-
-from __init__ import tests
-
-from sugar_network.client import IPCConnection, packagekit, solver
-from sugar_network.toolkit import lsb_release
-
-
-class SolverTest(tests.Test):
-
- def test_select_architecture(self):
- host_arch = os.uname()[-1]
- all_arches = [i for i in solver.machine_ranks.keys() if i]
-
- self.assertEqual(host_arch, solver.select_architecture(
- sorted(all_arches, cmp=lambda x, y: cmp(solver.machine_ranks[x], solver.machine_ranks[y]))))
- self.assertEqual(host_arch, solver.select_architecture(
- sorted(all_arches, cmp=lambda x, y: cmp(solver.machine_ranks[y], solver.machine_ranks[x]))))
- self.assertEqual(host_arch, solver.select_architecture([host_arch]))
- self.assertEqual(host_arch, solver.select_architecture(['foo', host_arch, 'bar']))
-
- def test_ProcessCommonDependencies(self):
- self.start_online_client()
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'dependencies': ['dep1', 'dep2'],
- })
- impl = conn.post(['release'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['release'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'dep2': {'restrictions': [['1', '2']]},
- 'dep3': {},
- },
- },
- },
- }})
- conn.post(['context'], {
- 'guid': 'dep1',
- 'type': 'package',
- 'title': 'title1',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep1.bin']],
- },
- },
- })
- conn.post(['context'], {
- 'guid': 'dep2',
- 'type': 'package',
- 'title': 'title2',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep2.bin']],
- },
- },
- })
- conn.post(['context'], {
- 'guid': 'dep3',
- 'type': 'package',
- 'title': 'title3',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep3.bin']],
- },
- },
- })
-
- def resolve(names):
- return dict([(i, {'name': i, 'pk_id': i, 'version': '1', 'arch': '*', 'installed': True}) for i in names])
-
- self.override(packagekit, 'resolve', resolve)
-
- self.assertEqual(
- sorted([
- {'version': '1', 'guid': 'dep1', 'context': 'dep1', 'stability': 'packaged', 'license': None},
- {'version': '1', 'guid': 'dep2', 'context': 'dep2', 'stability': 'packaged', 'license': None},
- {'version': '1', 'guid': 'dep3', 'context': 'dep3', 'stability': 'packaged', 'license': None},
- {'version': '1', 'context': context, 'guid': impl, 'stability': 'stable', 'license': ['GPLv3+'],
- 'layer': ['origin'],
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'ctime': self.node_volume['release'].get(impl).ctime,
- 'notes': {'en-us': ''},
- 'tags': [],
- 'data': {'spec': {'*-*': {'commands': {'activity': {'exec': 'echo'}}, 'requires':
- {'dep2': {'restrictions': [['1', '2']]}, 'dep3': {}}}}},
- 'requires': {'dep1': {}, 'dep2': {}}},
- ]),
- sorted(solver.solve(self.client_routes.fallback, context, ['stable'])))
-
- def test_SolveSugar(self):
- self.touch(('__init__.py', ''))
- self.touch(('jarabe.py', 'class config: version = "0.94"'))
- file_, pathname_, description_ = imp.find_module('jarabe', ['.'])
- imp.load_module('jarabe', file_, pathname_, description_)
-
- self.start_online_client()
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.post(['context'], {
- 'guid': 'sugar',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- impl = conn.post(['release'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['release'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'sugar': {},
- },
- },
- },
- }})
- self.assertEqual([
- {
- 'version': '1',
- 'context': context,
- 'guid': impl,
- 'stability': 'stable',
- 'license': ['GPLv3+'],
- 'layer': ['origin'],
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'ctime': self.node_volume['release'].get(impl).ctime,
- 'notes': {'en-us': ''},
- 'tags': [],
- 'data': {'spec': {'*-*': {'commands': {'activity': {'exec': 'echo'}}, 'requires': {'sugar': {}}}}}},
- {'version': '0.94', 'context': 'sugar', 'guid': 'sugar-0.94', 'stability': 'packaged', 'license': None},
- ],
- solver.solve(self.client_routes.fallback, context, ['stable']))
-
- self.node_volume['release'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'sugar': {'restrictions': [['0.80', '0.87']]},
- },
- },
- },
- }})
- self.assertEqual([
- {
- 'version': '1',
- 'context': context,
- 'guid': impl,
- 'stability': 'stable',
- 'license': ['GPLv3+'],
- 'layer': ['origin'],
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'ctime': self.node_volume['release'].get(impl).ctime,
- 'notes': {'en-us': ''},
- 'tags': [],
- 'data': {'spec': {'*-*': {'commands': {'activity': {'exec': 'echo'}}, 'requires':
- {'sugar': {'restrictions': [['0.80', '0.87']]}}}}}},
- {'version': '0.86', 'context': 'sugar', 'guid': 'sugar-0.86', 'stability': 'packaged', 'license': None},
- ],
- solver.solve(self.client_routes.fallback, context, ['stable']))
-
- def test_StripSugarVersion(self):
- self.touch(('__init__.py', ''))
- self.touch(('jarabe.py', 'class config: version = "0.94.1"'))
- file_, pathname_, description_ = imp.find_module('jarabe', ['.'])
- imp.load_module('jarabe', file_, pathname_, description_)
-
- self.start_online_client()
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.post(['context'], {
- 'guid': 'sugar',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- impl = conn.post(['release'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['release'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'sugar': {},
- },
- },
- },
- }})
- self.assertEqual([
- {
- 'version': '1',
- 'context': context,
- 'guid': impl,
- 'stability': 'stable',
- 'license': ['GPLv3+'],
- 'layer': ['origin'],
- 'author': {tests.UID: {'name': 'test', 'order': 0, 'role': 3}},
- 'ctime': self.node_volume['release'].get(impl).ctime,
- 'notes': {'en-us': ''},
- 'tags': [],
- 'data': {'spec': {'*-*': {'commands': {'activity': {'exec': 'echo'}}, 'requires': {'sugar': {}}}}}},
- {'version': '0.94', 'context': 'sugar', 'guid': 'sugar-0.94', 'stability': 'packaged', 'license': None},
- ],
- solver.solve(self.client_routes.fallback, context, ['stable']))
-
-
-if __name__ == '__main__':
- tests.main()
diff --git a/tests/units/model/context.py b/tests/units/model/context.py
index d33cb73..bd39c04 100755
--- a/tests/units/model/context.py
+++ b/tests/units/model/context.py
@@ -81,7 +81,6 @@ class ContextTest(tests.Test):
'summary': 'summary',
'description': 'description',
})
- return
activity_info1 = '\n'.join([
'[Activity]',
diff --git a/tests/units/node/model.py b/tests/units/node/model.py
index 024c148..6788105 100755
--- a/tests/units/node/model.py
+++ b/tests/units/node/model.py
@@ -269,7 +269,7 @@ class ModelTest(tests.Test):
},
})
self.assertEqual(
- {context: {'command': ('activity', 3), 'blob': '3', 'version': [[3], 0]}},
+ {context: {'command': ('activity', 3), 'title': '', 'blob': '3', 'version': [[3], 0]}},
model.solve(volume, context))
context = volume['context'].create({
@@ -280,7 +280,7 @@ class ModelTest(tests.Test):
},
})
self.assertEqual(
- {context: {'command': ('activity', 3), 'blob': '3', 'version': [[3], 0]}},
+ {context: {'command': ('activity', 3), 'title': '', 'blob': '3', 'version': [[3], 0]}},
model.solve(volume, context))
def test_solve_SortByStability(self):
@@ -295,7 +295,7 @@ class ModelTest(tests.Test):
},
})
self.assertEqual(
- {context: {'command': ('activity', 2), 'blob': '2', 'version': [[2], 0]}},
+ {context: {'command': ('activity', 2), 'title': '', 'blob': '2', 'version': [[2], 0]}},
model.solve(volume, context))
def test_solve_CollectDeps(self):
@@ -334,10 +334,10 @@ class ModelTest(tests.Test):
})
self.assertEqual({
- 'context1': {'blob': '1', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'context2': {'blob': '2', 'version': [[2], 0]},
- 'context3': {'blob': '3', 'version': [[3], 0]},
- 'context4': {'blob': '4', 'version': [[4], 0]},
+ 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'context2': {'title': '', 'blob': '2', 'version': [[2], 0]},
+ 'context3': {'title': '', 'blob': '3', 'version': [[3], 0]},
+ 'context4': {'title': '', 'blob': '4', 'version': [[4], 0]},
},
model.solve(volume, 'context1'))
@@ -370,12 +370,12 @@ class ModelTest(tests.Test):
})
self.assertEqual({
- 'context1': {'blob': '1', 'version': [[1], 0], 'command': ('activity', 1)},
- 'context2': {'blob': '2', 'version': [[2], 0]},
+ 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('activity', 1)},
+ 'context2': {'title': '', 'blob': '2', 'version': [[2], 0]},
},
model.solve(volume, 'context1', command='activity'))
self.assertEqual({
- 'context1': {'blob': '1', 'version': [[1], 0], 'command': ('application', 2)},
+ 'context1': {'title': '', 'blob': '1', 'version': [[1], 0], 'command': ('application', 2)},
},
model.solve(volume, 'context1', command='application'))
@@ -401,8 +401,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context1': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '2', 'version': [[2], 0]},
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '2', 'version': [[2], 0]},
},
model.solve(volume, 'context1'))
@@ -414,8 +414,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context1': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '3', 'version': [[3], 0]},
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
},
model.solve(volume, 'context1'))
@@ -427,8 +427,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context1': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '5', 'version': [[5], 0]},
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
},
model.solve(volume, 'context1'))
@@ -440,8 +440,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context1': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '5', 'version': [[5], 0]},
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
},
model.solve(volume, 'context1'))
@@ -453,8 +453,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context1': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '4', 'version': [[4], 0]},
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
},
model.solve(volume, 'context1'))
@@ -466,8 +466,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context1': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '3', 'version': [[3], 0]},
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
},
model.solve(volume, 'context1'))
@@ -479,8 +479,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context1': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '1', 'version': [[1], 0]},
+ 'context1': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '1', 'version': [[1], 0]},
},
model.solve(volume, 'context1'))
@@ -516,8 +516,8 @@ class ModelTest(tests.Test):
})
self.assertEqual({
- 'context1': {'blob': '6', 'version': [[1], 0], 'command': ('activity', 6)},
- 'context4': {'blob': '5', 'version': [[1], 0]},
+ 'context1': {'title': '', 'blob': '6', 'version': [[1], 0], 'command': ('activity', 6)},
+ 'context4': {'title': '', 'blob': '5', 'version': [[1], 0]},
},
model.solve(volume, 'context1'))
@@ -545,8 +545,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '2', 'version': [[2], 0]},
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '2', 'version': [[2], 0]},
},
model.solve(volume, 'context'))
@@ -560,8 +560,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '4', 'version': [[4], 0]},
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
},
model.solve(volume, 'context'))
@@ -575,8 +575,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '3', 'version': [[3], 0]},
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '3', 'version': [[3], 0]},
},
model.solve(volume, 'context'))
@@ -602,7 +602,7 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
},
model.solve(volume, 'context'))
@@ -614,8 +614,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '5', 'version': [[5], 0]},
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '5', 'version': [[5], 0]},
},
model.solve(volume, 'context'))
@@ -627,8 +627,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
- 'dep': {'blob': '4', 'version': [[4], 0]},
+ 'context': {'title': '', 'blob': '10', 'version': [[1], 0], 'command': ('activity', 'command')},
+ 'dep': {'title': '', 'blob': '4', 'version': [[4], 0]},
},
model.solve(volume, 'context'))
@@ -682,7 +682,7 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
'package': {'packages': ['pkg1', 'pkg2'], 'version': [[1], 0]},
},
model.solve(volume, context, lsb_id='Ubuntu', lsb_release='10.04'))
@@ -700,8 +700,8 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
- 'dep': {'blob': '2', 'version': [[1], 0]},
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'dep': {'title': '', 'blob': '2', 'version': [[1], 0]},
'package': {'packages': ['pkg1', 'pkg2'], 'version': [[1], 0]},
},
model.solve(volume, context, lsb_id='Ubuntu', lsb_release='10.04'))
@@ -724,7 +724,7 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
'package1': {'packages': ['bin1', 'bin2', 'devel1', 'devel2'], 'version': []},
},
model.solve(volume, context, lsb_id='Ubuntu'))
@@ -745,7 +745,7 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
'package2': {'packages': ['bin'], 'version': []},
},
model.solve(volume, context, lsb_id='Ubuntu', lsb_release='fake'))
@@ -772,17 +772,17 @@ class ModelTest(tests.Test):
},
})
self.assertEqual({
- 'context': {'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
'package1': {'packages': ['pkg1'], 'version': []},
},
model.solve(volume, context))
self.assertEqual({
- 'context': {'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
'package1': {'packages': ['pkg1'], 'version': []},
},
model.solve(volume, context, lsb_id='Fake'))
self.assertEqual({
- 'context': {'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
+ 'context': {'title': '', 'blob': '1', 'command': ('activity', 'command'), 'version': [[1], 0]},
'package1': {'packages': ['pkg1'], 'version': []},
},
model.solve(volume, context, lsb_id='Fake', lsb_release='fake'))
diff --git a/tests/units/node/node.py b/tests/units/node/node.py
index a8c68e8..0f934d4 100755
--- a/tests/units/node/node.py
+++ b/tests/units/node/node.py
@@ -361,7 +361,7 @@ class NodeTest(tests.Test):
'title': 'title1',
'summary': 'summary',
'description': 'description',
- 'artifact_icon': '',
+ 'artefact_icon': '',
'icon': '',
'logo': '',
})
@@ -562,6 +562,7 @@ class NodeTest(tests.Test):
self.assertEqual({
'activity': {
+ 'title': 'activity',
'blob': activity_blob,
'command': ['activity', 'true'],
'version': [[1], 0],
@@ -569,6 +570,7 @@ class NodeTest(tests.Test):
'unpack_size': len(activity_unpack),
},
'dep': {
+ 'title': 'dep',
'blob': dep_blob,
'version': [[2], 0],
'size': len(dep_pack),
@@ -638,6 +640,7 @@ class NodeTest(tests.Test):
self.assertEqual({
'activity': {
+ 'title': 'activity',
'blob': activity_blob,
'command': ['activity', 'true'],
'version': [[1], 0],
@@ -645,6 +648,7 @@ class NodeTest(tests.Test):
'unpack_size': len(activity_unpack),
},
'dep': {
+ 'title': 'dep',
'blob': dep_blob,
'version': [[2], 0],
'size': len(dep_pack),
diff --git a/tests/units/node/slave.py b/tests/units/node/slave.py
index 184da43..55da003 100755
--- a/tests/units/node/slave.py
+++ b/tests/units/node/slave.py
@@ -25,6 +25,14 @@ class SlaveTest(tests.Test):
def setUp(self):
tests.Test.setUp(self)
+ class statvfs(object):
+ f_blocks = 100
+ f_bfree = 999999999
+ f_frsize = 1
+
+ self.statvfs = statvfs
+ self.override(os, 'statvfs', lambda *args: statvfs())
+
class Document(db.Resource):
@db.indexed_property(db.Localized, slot=1, prefix='N', full_text=True)
diff --git a/tests/units/toolkit/router.py b/tests/units/toolkit/router.py
index 52a721e..0c18cee 100755
--- a/tests/units/toolkit/router.py
+++ b/tests/units/toolkit/router.py
@@ -1306,8 +1306,8 @@ class RouterTest(tests.Test):
@route('GET', [None, None, None], cmd='cmd', mime_type='text/event-stream')
def ok(self):
- yield {}
- yield {'foo': 'bar'}
+ yield {'event': 'probe'}
+ yield {'event': 'probe', 'foo': 'bar'}
events = []
def localcast(event):
@@ -1324,8 +1324,8 @@ class RouterTest(tests.Test):
coroutine.sleep(.1)
self.assertEqual([
- {'method': 'GET', 'resource': 'resource', 'guid': 'guid', 'prop': 'prop', 'cmd': 'cmd'},
- {'method': 'GET', 'resource': 'resource', 'guid': 'guid', 'prop': 'prop', 'cmd': 'cmd', 'foo': 'bar'},
+ {'method': 'GET', 'resource': 'resource', 'guid': 'guid', 'prop': 'prop', 'cmd': 'cmd', 'event': 'probe'},
+ {'method': 'GET', 'resource': 'resource', 'guid': 'guid', 'prop': 'prop', 'cmd': 'cmd', 'foo': 'bar', 'event': 'probe'},
],
events)
del events[:]
@@ -1336,9 +1336,9 @@ class RouterTest(tests.Test):
@route('GET', mime_type='text/event-stream')
def error(self, request):
- request.session['bar'] = 'foo'
- yield {}
- yield {'foo': 'bar'}, {'add': 'on'}
+ yield {'foo': 'bar'}
+ yield {'bar': 'foo'}
+ yield {'event': 'probe'}
raise RuntimeError('error')
events = []
@@ -1356,9 +1356,8 @@ class RouterTest(tests.Test):
coroutine.sleep(.1)
self.assertEqual([
- {'method': 'GET'},
- {'method': 'GET', 'foo': 'bar', 'add': 'on'},
- {'method': 'GET', 'bar': 'foo', 'event': 'failure', 'exception': 'RuntimeError', 'error': 'error'},
+ {'method': 'GET', 'foo': 'bar', 'bar': 'foo', 'event': 'probe'},
+ {'method': 'GET', 'foo': 'bar', 'bar': 'foo', 'event': 'failure', 'exception': 'RuntimeError', 'error': 'error'},
],
events)
del events[:]
@@ -1369,8 +1368,8 @@ class RouterTest(tests.Test):
@route('GET', mime_type='text/event-stream')
def get(self, request):
- yield {}
- yield {'request': request.content}
+ yield {'event': 'probe'}
+ yield {'event': 'probe', 'request': request.content}
events = []
def localcast(event):
@@ -1389,8 +1388,8 @@ class RouterTest(tests.Test):
coroutine.sleep(.1)
self.assertEqual([
- {'method': 'GET'},
- {'method': 'GET', 'request': 'probe'},
+ {'method': 'GET', 'event': 'probe'},
+ {'method': 'GET', 'request': 'probe', 'event': 'probe'},
],
events)
del events[:]