author     Aleksey Lim <alsroot@sugarlabs.org>    2013-08-30 03:01:43 (GMT)
committer  Aleksey Lim <alsroot@sugarlabs.org>    2013-08-31 13:20:17 (GMT)
commit     b4e415ab9f70373ce1d5572467d58f95b5a9c755 (patch)
tree       ccfb23f1cec2bb291c3a2d75776106fb7938aa57
parent     6c16b4c366b33f8020cd414b88abcd8aa2e43ead (diff)
More internal polishing
* keep all launching related procedures in client process only
* do not monitor activity directories
* revert lazy db open on client side
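The bulk of this commit replaces the old module-level helpers in sugar_network/client/cache.py with a Cache class that keeps checked-in implementations in an LRU pool and recycles the least recently used entries when disk space runs low (see the cache.py diff below). The following is a minimal, self-contained sketch of that check-in/check-out/recycle idea, not the committed code: the real Cache reads entry sizes from the volume's implementation metadata, keeps its pool in pylru, and measures free space with os.statvfs(); the LruCacheSketch class, its limit_bytes budget, and the explicit size arguments are illustrative simplifications.

from collections import OrderedDict


class LruCacheSketch(object):
    """Track cached implementation sizes and evict least recently used ones."""

    def __init__(self, limit_bytes):
        self._limit = limit_bytes     # hypothetical byte budget; the committed code
                                      # derives it from cache_limit and statvfs()
        self._pool = OrderedDict()    # guid -> size in bytes, most recent last
        self._du = 0                  # total bytes currently checked in

    def checkin(self, guid, size):
        # Register a cached implementation, or refresh its LRU position
        if guid in self._pool:
            self._pool[guid] = self._pool.pop(guid)   # move to "most recent" end
            return
        self._pool[guid] = size
        self._du += size

    def checkout(self, guid):
        # Forget an entry, e.g. because it is now in active use
        self._du -= self._pool.pop(guid, 0)

    def recycle(self):
        # Drop least recently used entries until usage fits the budget;
        # a caller would then remove the corresponding directories on disk
        evicted = []
        while self._du > self._limit and self._pool:
            guid = next(iter(self._pool))             # oldest entry first
            self._du -= self._pool.pop(guid)
            evicted.append(guid)
        return evicted


if __name__ == '__main__':
    cache = LruCacheSketch(limit_bytes=100)
    cache.checkin('impl-1', 60)
    cache.checkin('impl-2', 60)
    cache.checkin('impl-1', 60)    # touching impl-1 makes impl-2 the oldest
    print(cache.recycle())         # -> ['impl-2']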
-rw-r--r--  TODO | 5
-rw-r--r--  doc/objects.dia | 853
-rwxr-xr-x  sugar-network | 16
-rwxr-xr-x  sugar-network-client | 15
-rwxr-xr-x  sugar-network-node | 3
-rw-r--r--  sugar_network/client/__init__.py | 8
-rw-r--r--  sugar_network/client/cache.py | 243
-rw-r--r--  sugar_network/client/clones.py | 405
-rw-r--r--  sugar_network/client/implementations.py | 385
-rw-r--r--  sugar_network/client/injector.py | 267
-rw-r--r--  sugar_network/client/journal.py | 18
-rw-r--r--  sugar_network/client/packagekit.py | 255
-rw-r--r--  sugar_network/client/routes.py | 381
-rw-r--r--  sugar_network/client/solver.py | 70
-rw-r--r--  sugar_network/db/directory.py | 33
-rw-r--r--  sugar_network/db/index.py | 51
-rw-r--r--  sugar_network/db/resource.py | 23
-rw-r--r--  sugar_network/db/routes.py | 76
-rw-r--r--  sugar_network/db/storage.py | 9
-rw-r--r--  sugar_network/db/volume.py | 21
-rw-r--r--  sugar_network/model/__init__.py | 2
-rw-r--r--  sugar_network/model/artifact.py | 10
-rw-r--r--  sugar_network/model/context.py | 15
-rw-r--r--  sugar_network/model/implementation.py | 5
-rw-r--r--  sugar_network/model/routes.py | 42
-rw-r--r--  sugar_network/node/master.py | 5
-rw-r--r--  sugar_network/node/routes.py | 88
-rw-r--r--  sugar_network/toolkit/__init__.py | 4
-rw-r--r--  sugar_network/toolkit/bundle.py | 24
-rw-r--r--  sugar_network/toolkit/coroutine.py | 56
-rw-r--r--  sugar_network/toolkit/gbus.py | 116
-rw-r--r--  sugar_network/toolkit/http.py | 124
-rw-r--r--  sugar_network/toolkit/lsb_release.py | 5
-rw-r--r--  sugar_network/toolkit/pipe.py | 189
-rw-r--r--  sugar_network/toolkit/router.py | 180
-rw-r--r--  sugar_network/toolkit/zeroconf.py | 22
-rw-r--r--  tests/__init__.py | 35
-rw-r--r--  tests/data/node/context/ac/activity/author | 2
-rw-r--r--  tests/data/node/context/ac/activity2/aliases | 1
-rw-r--r--  tests/data/node/context/ac/activity2/author | 1
-rw-r--r--  tests/data/node/context/ac/activity2/clone | 1
-rw-r--r--  tests/data/node/context/ac/activity2/ctime | 1
-rw-r--r--  tests/data/node/context/ac/activity2/dependencies | 1
-rw-r--r--  tests/data/node/context/ac/activity2/description | 1
-rw-r--r--  tests/data/node/context/ac/activity2/favorite | 1
-rw-r--r--  tests/data/node/context/ac/activity2/guid | 1
-rw-r--r--  tests/data/node/context/ac/activity2/homepage | 1
-rw-r--r--  tests/data/node/context/ac/activity2/implement | 1
-rw-r--r--  tests/data/node/context/ac/activity2/keep | 8
-rw-r--r--  tests/data/node/context/ac/activity2/keep_impl | 8
-rw-r--r--  tests/data/node/context/ac/activity2/layer | 1
-rw-r--r--  tests/data/node/context/ac/activity2/mime_types | 1
-rw-r--r--  tests/data/node/context/ac/activity2/mtime | 1
-rw-r--r--  tests/data/node/context/ac/activity2/packages | 1
-rw-r--r--  tests/data/node/context/ac/activity2/position | 10
-rw-r--r--  tests/data/node/context/ac/activity2/presolve | 8
-rw-r--r--  tests/data/node/context/ac/activity2/rating | 1
-rw-r--r--  tests/data/node/context/ac/activity2/reviews | 1
-rw-r--r--  tests/data/node/context/ac/activity2/seqno | 1
-rw-r--r--  tests/data/node/context/ac/activity2/summary | 1
-rw-r--r--  tests/data/node/context/ac/activity2/tags | 1
-rw-r--r--  tests/data/node/context/ac/activity2/title | 1
-rw-r--r--  tests/data/node/context/ac/activity2/type | 1
-rw-r--r--  tests/data/node/context/ac/activity2/user | 10
-rw-r--r--  tests/data/node/context/ac/activity2/versions | 8
-rw-r--r--  tests/data/node/context/co/context/author | 2
-rw-r--r--  tests/data/node/context/co/context/layer | 2
-rw-r--r--  tests/data/node/context/de/dep1/author | 2
-rw-r--r--  tests/data/node/context/de/dep2/author | 2
-rw-r--r--  tests/data/node/context/de/dep3/author | 2
-rw-r--r--  tests/data/node/context/pa/package/author | 2
-rw-r--r--  tests/data/node/implementation/im/implementation/author | 2
-rw-r--r--  tests/data/node/implementation/im/implementation/data | 2
-rw-r--r--  tests/data/node/implementation/im/implementation2/author | 2
-rwxr-xr-x  tests/integration/master_personal.py | 2
-rwxr-xr-x  tests/integration/node_client.py | 151
-rwxr-xr-x  tests/integration/node_packages.py | 12
-rw-r--r--  tests/units/client/__main__.py | 7
-rwxr-xr-x  tests/units/client/cache.py | 307
-rwxr-xr-x  tests/units/client/clones.py | 439
-rwxr-xr-x  tests/units/client/implementations.py | 368
-rwxr-xr-x  tests/units/client/injector.py | 1059
-rwxr-xr-x  tests/units/client/offline_routes.py | 557
-rwxr-xr-x  tests/units/client/online_routes.py | 1778
-rwxr-xr-x  tests/units/client/routes.py | 175
-rwxr-xr-x  tests/units/client/server_routes.py | 182
-rwxr-xr-x  tests/units/client/solver.py | 238
-rwxr-xr-x  tests/units/db/routes.py | 59
-rwxr-xr-x  tests/units/model/routes.py | 6
-rwxr-xr-x  tests/units/node/node.py | 49
-rwxr-xr-x  tests/units/node/volume.py | 6
-rw-r--r--  tests/units/toolkit/__main__.py | 1
-rwxr-xr-x  tests/units/toolkit/gbus.py | 31
-rwxr-xr-x  tests/units/toolkit/http.py | 107
94 files changed, 4283 insertions(+), 5403 deletions(-)
diff --git a/TODO b/TODO
index 58b9661..d4a542c 100644
--- a/TODO
+++ b/TODO
@@ -1,3 +1,7 @@
+- on launch failures, upload system logs
+- do failure upload in background
+- show only packages in /packages API call
+
- (!) Editors' workflows:
- (?) log all (including editors) posters of particular document to minimize conflicts about why something was changed
or better, detailed log for every editor's change
@@ -11,4 +15,3 @@
- increase granularity for sync.chunked_encode()
- slave._Pooler might leak events if pullers are not in time to call wait()
- revert per-document "downloads" property as "launches", a part of unpersonalized user_stats
-- reuse "layer" for home volume instead of "clone" and "favorite"
diff --git a/doc/objects.dia b/doc/objects.dia
index 727b7e3..5ff9450 100644
--- a/doc/objects.dia
+++ b/doc/objects.dia
@@ -2,10 +2,10 @@
<dia:diagram xmlns:dia="http://www.lysator.liu.se/~alla/dia/">
<dia:diagramdata>
<dia:attribute name="background">
- <dia:color val="#ffffffff"/>
+ <dia:color val="#ffffff"/>
</dia:attribute>
<dia:attribute name="pagebreak">
- <dia:color val="#000099ff"/>
+ <dia:color val="#000099"/>
</dia:attribute>
<dia:attribute name="paper">
<dia:composite type="paper">
@@ -37,9 +37,6 @@
</dia:attribute>
<dia:attribute name="grid">
<dia:composite type="grid">
- <dia:attribute name="dynamic">
- <dia:boolean val="true"/>
- </dia:attribute>
<dia:attribute name="width_x">
<dia:real val="0.40000000000000002"/>
</dia:attribute>
@@ -56,7 +53,7 @@
</dia:composite>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#d8e5e5ff"/>
+ <dia:color val="#d8e5e5"/>
</dia:attribute>
<dia:attribute name="guides">
<dia:composite type="guides">
@@ -64,25 +61,6 @@
<dia:attribute name="vguides"/>
</dia:composite>
</dia:attribute>
- <dia:attribute name="display">
- <dia:composite type="display">
- <dia:attribute name="antialiased">
- <dia:boolean val="true"/>
- </dia:attribute>
- <dia:attribute name="snap-to-grid">
- <dia:boolean val="true"/>
- </dia:attribute>
- <dia:attribute name="snap-to-object">
- <dia:boolean val="true"/>
- </dia:attribute>
- <dia:attribute name="show-grid">
- <dia:boolean val="false"/>
- </dia:attribute>
- <dia:attribute name="show-connection-points">
- <dia:boolean val="true"/>
- </dia:attribute>
- </dia:composite>
- </dia:attribute>
</dia:diagramdata>
<dia:layer name="Background" visible="true" active="true">
<dia:object type="UML - Class" version="0" id="O0">
@@ -90,7 +68,7 @@
<dia:point val="30,8"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="29.985,7.985;48.6675,42.415"/>
+ <dia:rectangle val="29.985,7.985;48.6675,36.815"/>
</dia:attribute>
<dia:attribute name="elem_corner">
<dia:point val="30,8"/>
@@ -99,7 +77,7 @@
<dia:real val="18.6525"/>
</dia:attribute>
<dia:attribute name="elem_height">
- <dia:real val="34.400000000000006"/>
+ <dia:real val="28.800000000000001"/>
</dia:attribute>
<dia:attribute name="name">
<dia:string>#Context#</dia:string>
@@ -140,20 +118,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#c1ccccff"/>
+ <dia:color val="#c1cccc"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -401,52 +376,6 @@
</dia:composite>
<dia:composite type="umlattribute">
<dia:attribute name="name">
- <dia:string>#favorite#</dia:string>
- </dia:attribute>
- <dia:attribute name="type">
- <dia:string>#bool [R L]#</dia:string>
- </dia:attribute>
- <dia:attribute name="value">
- <dia:string>#False#</dia:string>
- </dia:attribute>
- <dia:attribute name="comment">
- <dia:string>#Context is favorited by local user#</dia:string>
- </dia:attribute>
- <dia:attribute name="visibility">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="abstract">
- <dia:boolean val="false"/>
- </dia:attribute>
- <dia:attribute name="class_scope">
- <dia:boolean val="false"/>
- </dia:attribute>
- </dia:composite>
- <dia:composite type="umlattribute">
- <dia:attribute name="name">
- <dia:string>#clone#</dia:string>
- </dia:attribute>
- <dia:attribute name="type">
- <dia:string>#int [R L]#</dia:string>
- </dia:attribute>
- <dia:attribute name="value">
- <dia:string>#0#</dia:string>
- </dia:attribute>
- <dia:attribute name="comment">
- <dia:string>#If there are implementations stored locally. 0 - no implementations; 1 - cloning in progress; 2 - there are local implementations.#</dia:string>
- </dia:attribute>
- <dia:attribute name="visibility">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="abstract">
- <dia:boolean val="false"/>
- </dia:attribute>
- <dia:attribute name="class_scope">
- <dia:boolean val="false"/>
- </dia:attribute>
- </dia:composite>
- <dia:composite type="umlattribute">
- <dia:attribute name="name">
<dia:string>#downloads#</dia:string>
</dia:attribute>
<dia:attribute name="type">
@@ -576,20 +505,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#90ee90ff"/>
+ <dia:color val="#90ee90"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -851,20 +777,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#ffffffff"/>
+ <dia:color val="#ffffff"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -1049,11 +972,47 @@
<dia:attribute name="templates"/>
</dia:object>
<dia:object type="UML - Association" version="2" id="O3">
+ <dia:attribute name="name">
+ <dia:string>##</dia:string>
+ </dia:attribute>
+ <dia:attribute name="direction">
+ <dia:enum val="0"/>
+ </dia:attribute>
+ <dia:attribute name="show_direction">
+ <dia:boolean val="true"/>
+ </dia:attribute>
+ <dia:attribute name="assoc_type">
+ <dia:enum val="0"/>
+ </dia:attribute>
+ <dia:attribute name="role_a">
+ <dia:string>##</dia:string>
+ </dia:attribute>
+ <dia:attribute name="multipicity_a">
+ <dia:string>##</dia:string>
+ </dia:attribute>
+ <dia:attribute name="visibility_a">
+ <dia:enum val="0"/>
+ </dia:attribute>
+ <dia:attribute name="show_arrow_a">
+ <dia:boolean val="false"/>
+ </dia:attribute>
+ <dia:attribute name="role_b">
+ <dia:string>##</dia:string>
+ </dia:attribute>
+ <dia:attribute name="multipicity_b">
+ <dia:string>#*#</dia:string>
+ </dia:attribute>
+ <dia:attribute name="visibility_b">
+ <dia:enum val="0"/>
+ </dia:attribute>
+ <dia:attribute name="show_arrow_b">
+ <dia:boolean val="false"/>
+ </dia:attribute>
<dia:attribute name="obj_pos">
<dia:point val="48.6525,33.1"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="48.6025,32.34;54.05,68.74"/>
+ <dia:rectangle val="48.6025,32.3;54.05,68.7"/>
</dia:attribute>
<dia:attribute name="meta">
<dia:composite type="dict"/>
@@ -1072,6 +1031,17 @@
<dia:attribute name="orth_autoroute">
<dia:boolean val="false"/>
</dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:attribute name="line_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:connections>
+ <dia:connection handle="1" to="O14" connection="12"/>
+ </dia:connections>
+ </dia:object>
+ <dia:object type="UML - Association" version="2" id="O4">
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -1108,32 +1078,11 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
- </dia:attribute>
- <dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:connections>
- <dia:connection handle="0" to="O0" connection="29"/>
- <dia:connection handle="1" to="O14" connection="12"/>
- </dia:connections>
- </dia:object>
- <dia:object type="UML - Association" version="2" id="O4">
<dia:attribute name="obj_pos">
<dia:point val="54,69.9"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="46.95,67.9293;54.05,70.74"/>
+ <dia:rectangle val="46.95,67.9293;54.05,70.7"/>
</dia:attribute>
<dia:attribute name="meta">
<dia:composite type="dict"/>
@@ -1152,6 +1101,18 @@
<dia:attribute name="orth_autoroute">
<dia:boolean val="false"/>
</dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:attribute name="line_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:connections>
+ <dia:connection handle="0" to="O14" connection="14"/>
+ <dia:connection handle="1" to="O11" connection="4"/>
+ </dia:connections>
+ </dia:object>
+ <dia:object type="UML - Association" version="2" id="O5">
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -1188,39 +1149,18 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
- </dia:attribute>
- <dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:connections>
- <dia:connection handle="0" to="O14" connection="14"/>
- <dia:connection handle="1" to="O11" connection="4"/>
- </dia:connections>
- </dia:object>
- <dia:object type="UML - Association" version="2" id="O5">
<dia:attribute name="obj_pos">
- <dia:point val="39.3263,42.4"/>
+ <dia:point val="39.3263,36.8"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="39.2763,41.64;40.1,49.84"/>
+ <dia:rectangle val="39.2763,36;40.1,49.8"/>
</dia:attribute>
<dia:attribute name="meta">
<dia:composite type="dict"/>
</dia:attribute>
<dia:attribute name="orth_points">
- <dia:point val="39.3263,42.4"/>
- <dia:point val="40,42.4"/>
+ <dia:point val="39.3263,36.8"/>
+ <dia:point val="40,36.8"/>
<dia:point val="40,49"/>
<dia:point val="39.5,49"/>
</dia:attribute>
@@ -1232,6 +1172,18 @@
<dia:attribute name="orth_autoroute">
<dia:boolean val="false"/>
</dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:attribute name="line_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:connections>
+ <dia:connection handle="0" to="O0" connection="6"/>
+ <dia:connection handle="1" to="O11" connection="1"/>
+ </dia:connections>
+ </dia:object>
+ <dia:object type="UML - Association" version="2" id="O6">
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -1268,32 +1220,11 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
- </dia:attribute>
- <dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:connections>
- <dia:connection handle="0" to="O0" connection="6"/>
- <dia:connection handle="1" to="O11" connection="1"/>
- </dia:connections>
- </dia:object>
- <dia:object type="UML - Association" version="2" id="O6">
<dia:attribute name="obj_pos">
<dia:point val="30,33.1"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="26.385,32.34;30.05,73.14"/>
+ <dia:rectangle val="26.385,32.3;30.05,73.1"/>
</dia:attribute>
<dia:attribute name="meta">
<dia:composite type="dict"/>
@@ -1312,6 +1243,17 @@
<dia:attribute name="orth_autoroute">
<dia:boolean val="true"/>
</dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:attribute name="line_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:connections>
+ <dia:connection handle="1" to="O8" connection="11"/>
+ </dia:connections>
+ </dia:object>
+ <dia:object type="UML - Association" version="2" id="O7">
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -1348,32 +1290,11 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
- </dia:attribute>
- <dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:connections>
- <dia:connection handle="0" to="O0" connection="28"/>
- <dia:connection handle="1" to="O8" connection="11"/>
- </dia:connections>
- </dia:object>
- <dia:object type="UML - Association" version="2" id="O7">
<dia:attribute name="obj_pos">
<dia:point val="26.435,75.1"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="26.385,67.24;32.0516,75.94"/>
+ <dia:rectangle val="26.385,67.2;32,75.9"/>
</dia:attribute>
<dia:attribute name="meta">
<dia:composite type="dict"/>
@@ -1382,7 +1303,7 @@
<dia:point val="26.435,75.1"/>
<dia:point val="30,75.1"/>
<dia:point val="30,68"/>
- <dia:point val="32.0016,68"/>
+ <dia:point val="31.95,68"/>
</dia:attribute>
<dia:attribute name="orth_orient">
<dia:enum val="0"/>
@@ -1392,56 +1313,11 @@
<dia:attribute name="orth_autoroute">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="name">
- <dia:string>##</dia:string>
- </dia:attribute>
- <dia:attribute name="direction">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="show_direction">
- <dia:boolean val="true"/>
- </dia:attribute>
- <dia:attribute name="assoc_type">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="role_a">
- <dia:string>##</dia:string>
- </dia:attribute>
- <dia:attribute name="multipicity_a">
- <dia:string>##</dia:string>
- </dia:attribute>
- <dia:attribute name="visibility_a">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="show_arrow_a">
- <dia:boolean val="false"/>
- </dia:attribute>
- <dia:attribute name="role_b">
- <dia:string>##</dia:string>
- </dia:attribute>
- <dia:attribute name="multipicity_b">
- <dia:string>#*#</dia:string>
- </dia:attribute>
- <dia:attribute name="visibility_b">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="show_arrow_b">
- <dia:boolean val="false"/>
- </dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
<dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:connections>
<dia:connection handle="0" to="O8" connection="13"/>
@@ -1459,10 +1335,10 @@
<dia:point val="14,66"/>
</dia:attribute>
<dia:attribute name="elem_width">
- <dia:real val="12.434999999999999"/>
+ <dia:real val="12.435"/>
</dia:attribute>
<dia:attribute name="elem_height">
- <dia:real val="14.800000000000001"/>
+ <dia:real val="14.799999999999999"/>
</dia:attribute>
<dia:attribute name="name">
<dia:string>#Feedback#</dia:string>
@@ -1503,20 +1379,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#d9e6e6ff"/>
+ <dia:color val="#d9e6e6"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -1732,20 +1605,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#d8e5e5ff"/>
+ <dia:color val="#d8e5e5"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -1861,29 +1731,6 @@
<dia:attribute name="templates"/>
</dia:object>
<dia:object type="UML - Association" version="2" id="O10">
- <dia:attribute name="obj_pos">
- <dia:point val="14,72.3"/>
- </dia:attribute>
- <dia:attribute name="obj_bb">
- <dia:rectangle val="9.6625,72.25;14.2,76.1"/>
- </dia:attribute>
- <dia:attribute name="meta">
- <dia:composite type="dict"/>
- </dia:attribute>
- <dia:attribute name="orth_points">
- <dia:point val="14,72.3"/>
- <dia:point val="14,75"/>
- <dia:point val="9.7125,75"/>
- <dia:point val="9.7125,74.5"/>
- </dia:attribute>
- <dia:attribute name="orth_orient">
- <dia:enum val="1"/>
- <dia:enum val="0"/>
- <dia:enum val="1"/>
- </dia:attribute>
- <dia:attribute name="orth_autoroute">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -1920,20 +1767,34 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
+ <dia:attribute name="obj_pos">
+ <dia:point val="14,72.3"/>
</dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
+ <dia:attribute name="obj_bb">
+ <dia:rectangle val="9.6625,72.25;14.2,76.1"/>
</dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
+ <dia:attribute name="meta">
+ <dia:composite type="dict"/>
</dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
+ <dia:attribute name="orth_points">
+ <dia:point val="14,72.3"/>
+ <dia:point val="14,75"/>
+ <dia:point val="9.7125,75"/>
+ <dia:point val="9.7125,74.5"/>
+ </dia:attribute>
+ <dia:attribute name="orth_orient">
+ <dia:enum val="1"/>
+ <dia:enum val="0"/>
+ <dia:enum val="1"/>
+ </dia:attribute>
+ <dia:attribute name="orth_autoroute">
+ <dia:boolean val="false"/>
+ </dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:connections>
<dia:connection handle="0" to="O8" connection="10"/>
@@ -1959,29 +1820,23 @@
<dia:attribute name="elem_height">
<dia:real val="38"/>
</dia:attribute>
- <dia:attribute name="name">
- <dia:string>#Common objects #</dia:string>
- </dia:attribute>
- <dia:attribute name="stereotype">
- <dia:string>##</dia:string>
- </dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.035277776420116425"/>
</dia:attribute>
<dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_colour">
- <dia:color val="#f3f3f3ff"/>
+ <dia:color val="#f3f3f3"/>
+ </dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:attribute name="stereotype">
+ <dia:string>##</dia:string>
+ </dia:attribute>
+ <dia:attribute name="name">
+ <dia:string>#Common objects #</dia:string>
</dia:attribute>
</dia:object>
<dia:object type="UML - Class" version="0" id="O12">
@@ -1989,16 +1844,16 @@
<dia:point val="33,69"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="32.985,68.985;45.645,85.415"/>
+ <dia:rectangle val="32.985,68.985;45.63,85.415"/>
</dia:attribute>
<dia:attribute name="elem_corner">
<dia:point val="33,69"/>
</dia:attribute>
<dia:attribute name="elem_width">
- <dia:real val="12.629999999999999"/>
+ <dia:real val="12.615"/>
</dia:attribute>
<dia:attribute name="elem_height">
- <dia:real val="16.400000000000002"/>
+ <dia:real val="16.399999999999999"/>
</dia:attribute>
<dia:attribute name="name">
<dia:string>#Comment#</dia:string>
@@ -2039,20 +1894,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#f1ffffff"/>
+ <dia:color val="#f1ffff"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -2268,20 +2120,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#f1ffffff"/>
+ <dia:color val="#f1ffff"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -2447,7 +2296,7 @@
<dia:point val="54,58"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="53.985,57.985;69.145,89.615"/>
+ <dia:rectangle val="53.985,57.985;69.145,83.215"/>
</dia:attribute>
<dia:attribute name="elem_corner">
<dia:point val="54,58"/>
@@ -2456,7 +2305,7 @@
<dia:real val="15.130000000000001"/>
</dia:attribute>
<dia:attribute name="elem_height">
- <dia:real val="31.600000000000005"/>
+ <dia:real val="25.200000000000003"/>
</dia:attribute>
<dia:attribute name="name">
<dia:string>#Artifact#</dia:string>
@@ -2497,20 +2346,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#d8e5e5ff"/>
+ <dia:color val="#d8e5e5"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -2689,52 +2535,6 @@
</dia:composite>
<dia:composite type="umlattribute">
<dia:attribute name="name">
- <dia:string>#favorite#</dia:string>
- </dia:attribute>
- <dia:attribute name="type">
- <dia:string>#bool [R L]#</dia:string>
- </dia:attribute>
- <dia:attribute name="value">
- <dia:string>#False#</dia:string>
- </dia:attribute>
- <dia:attribute name="comment">
- <dia:string>#Artifact is favorited by local user#</dia:string>
- </dia:attribute>
- <dia:attribute name="visibility">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="abstract">
- <dia:boolean val="false"/>
- </dia:attribute>
- <dia:attribute name="class_scope">
- <dia:boolean val="false"/>
- </dia:attribute>
- </dia:composite>
- <dia:composite type="umlattribute">
- <dia:attribute name="name">
- <dia:string>#clone#</dia:string>
- </dia:attribute>
- <dia:attribute name="type">
- <dia:string>#int [R L]#</dia:string>
- </dia:attribute>
- <dia:attribute name="value">
- <dia:string>#0#</dia:string>
- </dia:attribute>
- <dia:attribute name="comment">
- <dia:string>#If Artifact stored in local Journal. 0 - not stored; 1 - cloning in progress; 2 - stored#</dia:string>
- </dia:attribute>
- <dia:attribute name="visibility">
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="abstract">
- <dia:boolean val="false"/>
- </dia:attribute>
- <dia:attribute name="class_scope">
- <dia:boolean val="false"/>
- </dia:attribute>
- </dia:composite>
- <dia:composite type="umlattribute">
- <dia:attribute name="name">
<dia:string>#downloads#</dia:string>
</dia:attribute>
<dia:attribute name="type">
@@ -2864,20 +2664,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#d8e5e5ff"/>
+ <dia:color val="#d8e5e5"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -3075,7 +2872,7 @@
<dia:real val="17.055"/>
</dia:attribute>
<dia:attribute name="elem_height">
- <dia:real val="16.000000000000004"/>
+ <dia:real val="16"/>
</dia:attribute>
<dia:attribute name="name">
<dia:string>#Report#</dia:string>
@@ -3116,20 +2913,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#d8e5e5ff"/>
+ <dia:color val="#d8e5e5"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -3268,29 +3062,6 @@
<dia:attribute name="templates"/>
</dia:object>
<dia:object type="UML - Association" version="2" id="O17">
- <dia:attribute name="obj_pos">
- <dia:point val="14,51.5"/>
- </dia:attribute>
- <dia:attribute name="obj_bb">
- <dia:rectangle val="9.005,51.4293;14.2,55.94"/>
- </dia:attribute>
- <dia:attribute name="meta">
- <dia:composite type="dict"/>
- </dia:attribute>
- <dia:attribute name="orth_points">
- <dia:point val="14,51.5"/>
- <dia:point val="14,51.5"/>
- <dia:point val="14,55.1"/>
- <dia:point val="9.055,55.1"/>
- </dia:attribute>
- <dia:attribute name="orth_orient">
- <dia:enum val="0"/>
- <dia:enum val="1"/>
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="orth_autoroute">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -3327,50 +3098,41 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
- </dia:attribute>
- <dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:connections>
- <dia:connection handle="0" to="O15" connection="12"/>
- <dia:connection handle="1" to="O16" connection="11"/>
- </dia:connections>
- </dia:object>
- <dia:object type="UML - Association" version="2" id="O18">
<dia:attribute name="obj_pos">
- <dia:point val="30,31.1"/>
+ <dia:point val="14,51.5"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="19.7562,31.05;30.2,40.8"/>
+ <dia:rectangle val="9.005,51.5;14.2,55.9"/>
</dia:attribute>
<dia:attribute name="meta">
<dia:composite type="dict"/>
</dia:attribute>
<dia:attribute name="orth_points">
- <dia:point val="30,31.1"/>
- <dia:point val="30,33"/>
- <dia:point val="19.8062,33"/>
- <dia:point val="19.8062,40"/>
+ <dia:point val="14,51.5"/>
+ <dia:point val="14,51.5"/>
+ <dia:point val="14,55.1"/>
+ <dia:point val="9.055,55.1"/>
</dia:attribute>
<dia:attribute name="orth_orient">
- <dia:enum val="1"/>
<dia:enum val="0"/>
<dia:enum val="1"/>
+ <dia:enum val="0"/>
</dia:attribute>
<dia:attribute name="orth_autoroute">
<dia:boolean val="false"/>
</dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:attribute name="line_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:connections>
+ <dia:connection handle="0" to="O15" connection="12"/>
+ <dia:connection handle="1" to="O16" connection="11"/>
+ </dia:connections>
+ </dia:object>
+ <dia:object type="UML - Association" version="2" id="O18">
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -3407,23 +3169,36 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
+ <dia:attribute name="obj_pos">
+ <dia:point val="30,31.1"/>
</dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
+ <dia:attribute name="obj_bb">
+ <dia:rectangle val="19.7562,31.05;30.2,40.8"/>
</dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
+ <dia:attribute name="meta">
+ <dia:composite type="dict"/>
</dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
+ <dia:attribute name="orth_points">
+ <dia:point val="30,31.1"/>
+ <dia:point val="30,33"/>
+ <dia:point val="19.8062,33"/>
+ <dia:point val="19.8062,40"/>
+ </dia:attribute>
+ <dia:attribute name="orth_orient">
+ <dia:enum val="1"/>
+ <dia:enum val="0"/>
+ <dia:enum val="1"/>
+ </dia:attribute>
+ <dia:attribute name="orth_autoroute">
+ <dia:boolean val="false"/>
+ </dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:connections>
- <dia:connection handle="0" to="O0" connection="26"/>
<dia:connection handle="1" to="O15" connection="1"/>
</dia:connections>
</dia:object>
@@ -3438,10 +3213,10 @@
<dia:point val="54,40"/>
</dia:attribute>
<dia:attribute name="elem_width">
- <dia:real val="12.434999999999999"/>
+ <dia:real val="12.435"/>
</dia:attribute>
<dia:attribute name="elem_height">
- <dia:real val="15.600000000000001"/>
+ <dia:real val="15.6"/>
</dia:attribute>
<dia:attribute name="name">
<dia:string>#Review#</dia:string>
@@ -3482,20 +3257,17 @@
<dia:attribute name="comment_tagging">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="allow_resizing">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="line_width">
<dia:real val="0.029999999999999999"/>
</dia:attribute>
<dia:attribute name="line_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="fill_color">
- <dia:color val="#d9e6e6ff"/>
+ <dia:color val="#d9e6e6"/>
</dia:attribute>
<dia:attribute name="text_color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="normal_font">
<dia:font family="monospace" style="0" name="Courier"/>
@@ -3657,29 +3429,6 @@
<dia:attribute name="templates"/>
</dia:object>
<dia:object type="UML - Association" version="2" id="O20">
- <dia:attribute name="obj_pos">
- <dia:point val="48.6525,33.1"/>
- </dia:attribute>
- <dia:attribute name="obj_bb">
- <dia:rectangle val="48.6025,32.34;60.2675,40.84"/>
- </dia:attribute>
- <dia:attribute name="meta">
- <dia:composite type="dict"/>
- </dia:attribute>
- <dia:attribute name="orth_points">
- <dia:point val="48.6525,33.1"/>
- <dia:point val="60,33.1"/>
- <dia:point val="60,40"/>
- <dia:point val="60.2175,40"/>
- </dia:attribute>
- <dia:attribute name="orth_orient">
- <dia:enum val="0"/>
- <dia:enum val="1"/>
- <dia:enum val="0"/>
- </dia:attribute>
- <dia:attribute name="orth_autoroute">
- <dia:boolean val="false"/>
- </dia:attribute>
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -3716,41 +3465,20 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
- </dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
- </dia:attribute>
- <dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:connections>
- <dia:connection handle="0" to="O0" connection="29"/>
- <dia:connection handle="1" to="O19" connection="1"/>
- </dia:connections>
- </dia:object>
- <dia:object type="UML - Association" version="2" id="O21">
<dia:attribute name="obj_pos">
- <dia:point val="61.565,58"/>
+ <dia:point val="48.6525,33.1"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="59.6325,54.84;61.615,58.84"/>
+ <dia:rectangle val="48.6025,32.3;60.2675,40.8"/>
</dia:attribute>
<dia:attribute name="meta">
<dia:composite type="dict"/>
</dia:attribute>
<dia:attribute name="orth_points">
- <dia:point val="61.565,58"/>
- <dia:point val="60,58"/>
- <dia:point val="60,55.6"/>
- <dia:point val="60.2175,55.6"/>
+ <dia:point val="48.6525,33.1"/>
+ <dia:point val="60,33.1"/>
+ <dia:point val="60,40"/>
+ <dia:point val="60.2175,40"/>
</dia:attribute>
<dia:attribute name="orth_orient">
<dia:enum val="0"/>
@@ -3760,6 +3488,17 @@
<dia:attribute name="orth_autoroute">
<dia:boolean val="false"/>
</dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:attribute name="line_colour">
+ <dia:color val="#000000"/>
+ </dia:attribute>
+ <dia:connections>
+ <dia:connection handle="1" to="O19" connection="1"/>
+ </dia:connections>
+ </dia:object>
+ <dia:object type="UML - Association" version="2" id="O21">
<dia:attribute name="name">
<dia:string>##</dia:string>
</dia:attribute>
@@ -3796,20 +3535,34 @@
<dia:attribute name="show_arrow_b">
<dia:boolean val="false"/>
</dia:attribute>
- <dia:attribute name="text_font">
- <dia:font family="monospace" style="0" name="Courier"/>
+ <dia:attribute name="obj_pos">
+ <dia:point val="61.565,58"/>
</dia:attribute>
- <dia:attribute name="text_height">
- <dia:real val="0.80000000000000004"/>
+ <dia:attribute name="obj_bb">
+ <dia:rectangle val="59.6325,54.8;61.615,58.8"/>
</dia:attribute>
- <dia:attribute name="text_colour">
- <dia:color val="#000000ff"/>
+ <dia:attribute name="meta">
+ <dia:composite type="dict"/>
</dia:attribute>
- <dia:attribute name="line_width">
- <dia:real val="0.10000000000000001"/>
+ <dia:attribute name="orth_points">
+ <dia:point val="61.565,58"/>
+ <dia:point val="60,58"/>
+ <dia:point val="60,55.6"/>
+ <dia:point val="60.2175,55.6"/>
+ </dia:attribute>
+ <dia:attribute name="orth_orient">
+ <dia:enum val="0"/>
+ <dia:enum val="1"/>
+ <dia:enum val="0"/>
+ </dia:attribute>
+ <dia:attribute name="orth_autoroute">
+ <dia:boolean val="false"/>
+ </dia:attribute>
+ <dia:attribute name="text_colour">
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="line_colour">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:connections>
<dia:connection handle="0" to="O14" connection="1"/>
@@ -3817,13 +3570,12 @@
</dia:connections>
</dia:object>
<dia:group>
- <dia:attribute name="matrix"/>
<dia:object type="Standard - Box" version="0" id="O22">
<dia:attribute name="obj_pos">
<dia:point val="62,8"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="61.9995,7.99947;84.0005,21.0005"/>
+ <dia:rectangle val="61.9995,7.99947;84.0005,19.2005"/>
</dia:attribute>
<dia:attribute name="elem_corner">
<dia:point val="62,8"/>
@@ -3832,13 +3584,13 @@
<dia:real val="22"/>
</dia:attribute>
<dia:attribute name="elem_height">
- <dia:real val="13"/>
+ <dia:real val="11.199999999999999"/>
</dia:attribute>
<dia:attribute name="border_width">
<dia:real val="0.0010583332689479003"/>
</dia:attribute>
<dia:attribute name="inner_color">
- <dia:color val="#ffffccff"/>
+ <dia:color val="#ffffcc"/>
</dia:attribute>
<dia:attribute name="show_background">
<dia:boolean val="true"/>
@@ -3852,7 +3604,7 @@
<dia:point val="63,9.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,8.705;71.67,9.45"/>
+ <dia:rectangle val="63,8.705;71.67,9.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -3869,7 +3621,7 @@
<dia:point val="63,9.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -3885,7 +3637,7 @@
<dia:point val="63,12.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,11.705;79.725,12.45"/>
+ <dia:rectangle val="63,11.705;79.725,12.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -3902,7 +3654,7 @@
<dia:point val="63,12.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -3918,7 +3670,7 @@
<dia:point val="63,13.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,12.705;81.39,13.45"/>
+ <dia:rectangle val="63,12.705;81.39,13.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -3935,7 +3687,7 @@
<dia:point val="63,13.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -3951,7 +3703,7 @@
<dia:point val="63,11.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,10.705;76.6375,11.45"/>
+ <dia:rectangle val="63,10.705;76.6375,11.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -3968,7 +3720,7 @@
<dia:point val="63,11.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -3984,7 +3736,7 @@
<dia:point val="63,14.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,13.705;80.82,14.45"/>
+ <dia:rectangle val="63,13.705;80.82,14.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -4001,7 +3753,7 @@
<dia:point val="63,14.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -4017,7 +3769,7 @@
<dia:point val="63,10.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,9.705;74.7525,10.45"/>
+ <dia:rectangle val="63,9.705;74.7525,10.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -4034,7 +3786,7 @@
<dia:point val="63,10.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -4047,10 +3799,10 @@
</dia:object>
<dia:object type="Standard - Text" version="1" id="O29">
<dia:attribute name="obj_pos">
- <dia:point val="63,20"/>
+ <dia:point val="63,18.5"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,19.405;80.41,20.15"/>
+ <dia:rectangle val="63,17.905;80.41,18.6525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -4064,10 +3816,10 @@
<dia:real val="0.80000000000000004"/>
</dia:attribute>
<dia:attribute name="pos">
- <dia:point val="63,20"/>
+ <dia:point val="63,18.5"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -4083,7 +3835,7 @@
<dia:point val="63,15.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,14.705;76.82,15.45"/>
+ <dia:rectangle val="63,14.705;76.82,15.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -4100,7 +3852,7 @@
<dia:point val="63,15.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -4116,7 +3868,7 @@
<dia:point val="63,16.3"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,15.705;82.6725,16.45"/>
+ <dia:rectangle val="63,15.705;82.6725,16.4525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -4133,7 +3885,7 @@
<dia:point val="63,16.3"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -4149,7 +3901,7 @@
<dia:point val="62,7"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="62,6.405;84.375,7.15"/>
+ <dia:rectangle val="62,6.405;84.375,7.1525"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -4166,7 +3918,7 @@
<dia:point val="62,7"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
@@ -4179,10 +3931,10 @@
</dia:object>
<dia:object type="Standard - Text" version="1" id="O33">
<dia:attribute name="obj_pos">
- <dia:point val="63,18.1414"/>
+ <dia:point val="63,17.1414"/>
</dia:attribute>
<dia:attribute name="obj_bb">
- <dia:rectangle val="63,17.5464;73.7075,18.2914"/>
+ <dia:rectangle val="63,16.5464;73.7075,17.2939"/>
</dia:attribute>
<dia:attribute name="text">
<dia:composite type="text">
@@ -4196,43 +3948,10 @@
<dia:real val="0.80000000000000004"/>
</dia:attribute>
<dia:attribute name="pos">
- <dia:point val="63,18.1414"/>
- </dia:attribute>
- <dia:attribute name="color">
- <dia:color val="#000000ff"/>
- </dia:attribute>
- <dia:attribute name="alignment">
- <dia:enum val="0"/>
- </dia:attribute>
- </dia:composite>
- </dia:attribute>
- <dia:attribute name="valign">
- <dia:enum val="3"/>
- </dia:attribute>
- </dia:object>
- <dia:object type="Standard - Text" version="1" id="O34">
- <dia:attribute name="obj_pos">
- <dia:point val="63,17.2475"/>
- </dia:attribute>
- <dia:attribute name="obj_bb">
- <dia:rectangle val="63,16.6525;75.62,17.3975"/>
- </dia:attribute>
- <dia:attribute name="text">
- <dia:composite type="text">
- <dia:attribute name="string">
- <dia:string>#[L] Property is accessible only locally#</dia:string>
- </dia:attribute>
- <dia:attribute name="font">
- <dia:font family="sans" style="0" name="Helvetica"/>
- </dia:attribute>
- <dia:attribute name="height">
- <dia:real val="0.80000000000000004"/>
- </dia:attribute>
- <dia:attribute name="pos">
- <dia:point val="63,17.2475"/>
+ <dia:point val="63,17.1414"/>
</dia:attribute>
<dia:attribute name="color">
- <dia:color val="#000000ff"/>
+ <dia:color val="#000000"/>
</dia:attribute>
<dia:attribute name="alignment">
<dia:enum val="0"/>
diff --git a/sugar-network b/sugar-network
index 99a5e1c..642af7f 100755
--- a/sugar-network
+++ b/sugar-network
@@ -63,14 +63,14 @@ _ESCAPE_VALUE_RE = re.compile(r'([^\[\]\{\}0-9][^\]\[\{\}]+)')
_LIST_RE = re.compile(r'\s*[;,:]+\s*')
-class ClientRouter(ClientRoutes, Router):
+class ClientRouter(Router, ClientRoutes):
def __init__(self):
home = db.Volume(client.path('db'), RESOURCES)
+ Router.__init__(self, self)
ClientRoutes.__init__(self, home,
client.api_url.value if not offline.value else None,
no_subscription=True)
- Router.__init__(self, self)
if not offline.value:
for __ in self.subscribe(event='inline', state='online'):
@@ -199,10 +199,14 @@ class Application(application.Application):
'specifies all ARGUMENTs the particular API call requires',
args='PATH [ARGUMENT=VALUE]')
def head(self):
- request = Request()
+ request = Request(method='HEAD')
self._parse_path(request)
self._parse_args(request)
- result = self._connect().meta(request.path, request.query)
+ response = Response()
+ self._connect().call(request, response)
+ result = {}
+ result.update(response)
+ result.update(response.meta)
self._dump(result)
@application.command(hidden=True)
@@ -319,7 +323,7 @@ class Application(application.Application):
def porcelain_dump(value):
if type(value) is dict:
- for i in value.items():
+ for i in sorted(value.items()):
print '%-18s%s' % i
else:
if type(value) not in (list, tuple):
@@ -330,7 +334,7 @@ class Application(application.Application):
if type(i) is dict and len(i) == 1:
i = i.values()[0]
print i,
- print ''
+ print ''
if type(result) in (list, tuple):
for i in result:
diff --git a/sugar-network-client b/sugar-network-client
index 5cc1ca2..a528f27 100755
--- a/sugar-network-client
+++ b/sugar-network-client
@@ -26,7 +26,6 @@ from gevent import monkey
import sugar_network_webui as webui
from sugar_network import db, toolkit, client, node
-from sugar_network.client import clones, cache
from sugar_network.client.routes import CachedClientRoutes
from sugar_network.node import stats_node, stats_user
from sugar_network.model import RESOURCES
@@ -71,7 +70,6 @@ class Application(application.Daemon):
volume = db.Volume(client.path('db'), RESOURCES)
try:
volume.populate()
- clones.populate(volume['context'], client.activity_dirs.value)
finally:
volume.close()
@@ -103,7 +101,7 @@ class Application(application.Daemon):
def run(self):
toolkit.ensure_key(client.key_path())
- volume = db.Volume(client.path('db'), RESOURCES)
+ volume = db.Volume(client.path('db'), RESOURCES, lazy_open=True)
routes = CachedClientRoutes(volume,
client.api_url.value if not client.server_mode.value else None)
router = Router(routes)
@@ -119,14 +117,13 @@ class Application(application.Daemon):
def final_start():
volume.populate()
- self.jobs.spawn(clones.monitor, volume['context'],
- client.activity_dirs.value)
if webui.webui.value:
host = (webui.webui_host.value, webui.webui_port.value)
logging.info('Start Web server on %s:%s', *host)
webui_app = webui.get_app(
- lambda x: router.call(Request(x), Response()),
+ lambda **kwargs: router.call(Request(**kwargs),
+ Response()),
'http://localhost:%s' % client.ipc_port.value)
server = coroutine.WSGIServer(host, webui_app)
self.jobs.spawn(server.serve_forever)
@@ -138,7 +135,7 @@ class Application(application.Daemon):
self.jobs.spawn(mountpoints.monitor, mounts_root)
if client.cache_timeout.value:
- self.jobs.spawn(self._recycle_cache)
+ self.jobs.spawn(self._recycle_cache, routes)
def delayed_start(event=None):
for __ in routes.subscribe(event='delayed-start'):
@@ -162,12 +159,12 @@ class Application(application.Daemon):
def shutdown(self):
self.jobs.kill()
- def _recycle_cache(self):
+ def _recycle_cache(self, routes):
while True:
logging.debug('Start cache recycling in %d seconds',
client.cache_timeout.value)
coroutine.sleep(client.cache_timeout.value)
- cache.recycle()
+ routes.recycle()
def __SIGCHLD_cb(self):
while True:
diff --git a/sugar-network-node b/sugar-network-node
index 0975b57..ee52b32 100755
--- a/sugar-network-node
+++ b/sugar-network-node
@@ -89,7 +89,8 @@ class Application(application.Daemon):
logging.info('Start Web server on %s:%s port', *host)
server = coroutine.WSGIServer(host,
webui.get_app(
- lambda x: client_app.call(Request(x), Response()),
+ lambda **kwargs: client_app.call(Request(**kwargs),
+ Response()),
client.api_url.value, True))
self.jobs.spawn(server.serve_forever)
diff --git a/sugar_network/client/__init__.py b/sugar_network/client/__init__.py
index ff845f3..077027a 100644
--- a/sugar_network/client/__init__.py
+++ b/sugar_network/client/__init__.py
@@ -70,14 +70,6 @@ local_root = Option(
'path to the directory to keep all local data',
default=profile_path('network'), name='local_root')
-activity_dirs = Option(
- 'colon separated list of paths to directories with Sugar '
- 'activities; first path will be used to keep check-in activities',
- type_cast=Option.paths_cast, type_repr=Option.paths_repr, default=[
- expanduser('~/Activities'),
- '/usr/share/sugar/activities',
- ])
-
server_mode = Option(
'start server to share local documents',
default=False, type_cast=Option.bool_cast,
diff --git a/sugar_network/client/cache.py b/sugar_network/client/cache.py
index 76c3dd0..09eb40a 100644
--- a/sugar_network/client/cache.py
+++ b/sugar_network/client/cache.py
@@ -16,135 +16,142 @@
import os
import json
import time
-import shutil
import logging
-from os.path import exists, join, isdir
+from os.path import exists
-from sugar_network import toolkit, client
-from sugar_network.toolkit.bundle import Bundle
-from sugar_network.toolkit import pipe, enforce
+from sugar_network import client
+from sugar_network.toolkit import pylru, enforce
-_logger = logging.getLogger('cache')
-
+_POOL_SIZE = 256
-def recycle():
- stat = os.statvfs(client.local_root.value)
- total = stat.f_blocks * stat.f_frsize
- free = stat.f_bfree * stat.f_frsize
- to_free = client.cache_limit.value * total / 100 - free
- ts = time.time()
-
- __, items = _list()
- for mtime, neg_size, path in items:
- if to_free > 0:
- shutil.rmtree(path, ignore_errors=True)
- _logger.debug('Recycled %r to save %s bytes', path, -neg_size)
- to_free += neg_size
- elif mtime == 0:
- shutil.rmtree(path, ignore_errors=True)
- _logger.debug('Recycled malformed cache item %r', path)
- elif client.cache_lifetime.value and \
- client.cache_lifetime.value < (ts - mtime) / 86400.0:
- shutil.rmtree(path, ignore_errors=True)
- _logger.debug('Recycled stale %r to get %s bytes', path, -neg_size)
- else:
- break
+_logger = logging.getLogger('cache')
-def ensure(requested_size=0, temp_size=0):
- stat = os.statvfs(client.local_root.value)
- if stat.f_blocks == 0:
- # TODO Sonds like a tmpfs or so
- return
- total = stat.f_blocks * stat.f_frsize
- free = stat.f_bfree * stat.f_frsize
+class Cache(object):
- to_free = max(client.cache_limit.value * total / 100, temp_size) - \
- (free - requested_size)
- if to_free <= 0:
- return
+ def __init__(self, volume):
+ self._volume = volume
+ self._pool = None
+ self._du = 0
- _logger.debug('Recycle %s bytes free=%d requested_size=%d temp_size=%d',
- to_free, free, requested_size, temp_size)
+ def __iter__(self):
+ self._ensure_open()
+ return iter(self._pool)
- cached_total, items = _list()
- enforce(cached_total >= to_free, 'No free disk space')
+ def ensure(self, requested_size, temp_size=0):
+ self._ensure_open()
+ stat = os.statvfs(client.local_root.value)
+ if stat.f_blocks == 0:
+ # TODO Sounds like a tmpfs or so
+ return
+ total = stat.f_blocks * stat.f_frsize
+ free = stat.f_bfree * stat.f_frsize
- for __, neg_size, path in items:
- shutil.rmtree(path, ignore_errors=True)
- _logger.debug('Recycled %r to save %s bytes', path, -neg_size)
- to_free += neg_size
+ to_free = max(client.cache_limit.value * total / 100, temp_size) - \
+ (free - requested_size)
if to_free <= 0:
- break
-
-
-def get(guid, hints=None):
- path = join(client.local_root.value, 'cache', 'implementation', guid)
- if exists(path):
- pipe.trace('Reuse cached %s implementation from %r', guid, path)
+ return
+
+ _logger.debug('Recycle %s bytes free=%d requested_size=%d temp_size=%d',
+ to_free, free, requested_size, temp_size)
+ enforce(self._du >= to_free, 'No free disk space')
+
+ for guid, size, mtime in self._reversed_iter():
+ self._checkout(guid, (size, mtime))
+ to_free -= size
+ if to_free <= 0:
+ break
+
+ def checkin(self, guid, meta=None):
+ self._ensure_open()
+ if guid in self._pool:
+ self._pool.__getitem__(guid)
+ return
+ _logger.debug('Checkin %r', guid)
+ impls = self._volume['implementation']
+ if meta is None:
+ meta = impls.get(guid).meta('data')
+ size = meta.get('unpack_size') or meta['blob_size']
+ mtime = os.stat(impls.path(guid)).st_mtime
+ self._pool[guid] = (size, mtime)
+
+ def checkout(self, guid):
+ self._ensure_open()
+ if guid not in self._pool:
+ return
+ _logger.debug('Checkout %r', guid)
+ size, __ = self._pool.peek(guid)
+ self._du -= size
+ del self._pool[guid]
+
+ def recycle(self):
+ self._ensure_open()
+ stat = os.statvfs(client.local_root.value)
+ total = stat.f_blocks * stat.f_frsize
+ free = stat.f_bfree * stat.f_frsize
+ to_free = client.cache_limit.value * total / 100 - free
ts = time.time()
- os.utime(path, (ts, ts))
- return path
-
- pipe.trace('Download %s implementation', guid)
- # TODO Per download progress
- pipe.feedback('download')
-
- ensure(hints.get('unpack_size') or 0, hints.get('bundle_size') or 0)
- blob = client.IPCConnection().download(['implementation', guid, 'data'])
- _unpack_stream(blob, path)
- with toolkit.new_file(join(path, '.unpack_size')) as f:
- json.dump(hints.get('unpack_size') or 0, f)
-
- topdir = os.listdir(path)[-1:]
- if topdir:
- for exec_dir in ('bin', 'activity'):
- bin_path = join(path, topdir[0], exec_dir)
- if not exists(bin_path):
+
+ for guid, size, mtime in self._reversed_iter():
+ if to_free > 0:
+ self._checkout(guid, (size, mtime))
+ to_free -= size
+ elif client.cache_lifetime.value and \
+ client.cache_lifetime.value < (ts - mtime) / 86400.0:
+ self._checkout(guid, (size, None))
+ else:
+ break
+
+ def _ensure_open(self):
+ if self._pool is not None:
+ return
+
+ _logger.debug('Open implementations pool')
+
+ pool = []
+ contexts = self._volume['context']
+ impls = self._volume['implementation']
+ for res in impls.find(not_layer=['local'])[0]:
+ meta = res.meta('data')
+ if not meta or 'blob_size' not in meta:
continue
- for filename in os.listdir(bin_path):
- os.chmod(join(bin_path, filename), 0755)
-
- return path
-
-
-def _list():
- total = 0
- result = []
- root = join(client.local_root.value, 'cache', 'implementation')
-
- if not exists(root):
- os.makedirs(root)
- return 0, []
-
- for filename in os.listdir(root):
- path = join(root, filename)
- if not isdir(path):
- continue
- try:
- with file(join(path, '.unpack_size')) as f:
- unpack_size = json.load(f)
- total += unpack_size
- # Negative `unpack_size` to process large impls at first
- result.append((os.stat(path).st_mtime, -unpack_size, path))
- except Exception:
- toolkit.exception('Cannot list %r cached implementation', path)
- result.append((0, 0, path))
-
- return total, sorted(result)
-
-
-def _unpack_stream(stream, dst):
- with toolkit.NamedTemporaryFile() as tmp_file:
- for chunk in stream:
- tmp_file.write(chunk)
- tmp_file.flush()
- if not exists(dst):
- os.makedirs(dst)
- try:
- with Bundle(tmp_file.name, 'application/zip') as bundle:
- bundle.extractall(dst)
- except Exception:
- shutil.rmtree(dst, ignore_errors=True)
- raise
+ clone = contexts.path(res['context'], 'clone')
+ if exists(clone):
+ with file(clone) as f:
+ if json.load(f) == res.guid:
+ continue
+ pool.append((
+ os.stat(impls.path(res.guid)).st_mtime,
+ res.guid,
+ meta.get('unpack_size') or meta['blob_size'],
+ ))
+
+ self._pool = pylru.lrucache(_POOL_SIZE, self._checkout)
+ for mtime, guid, size in sorted(pool):
+ self._pool[guid] = (size, mtime)
+ self._du += size
+
+ def _reversed_iter(self):
+ i = self._pool.head.prev
+ while True:
+ while i.empty:
+ if i is self._pool.head:
+ return
+ i = i.prev
+ size, mtime = i.value
+ yield i.key, size, mtime
+ if i is self._pool.head:
+ break
+ i = i.next
+
+ def _checkout(self, guid, value):
+ size, mtime = value
+ if mtime is None:
+ _logger.debug('Recycle stale %r to save %s bytes', guid, size)
+ else:
+ _logger.debug('Recycle %r to save %s bytes', guid, size)
+ self._volume['implementation'].delete(guid)
+ self._du -= size
+ if guid in self._pool:
+ del self._pool[guid]
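
Note: a minimal usage sketch of the new Cache class, assuming a db.Volume with 'context' and 'implementation' resources as the code above implies (not part of the patch):

    from sugar_network import client, db
    from sugar_network.client.cache import Cache

    # RESOURCES stands for the resource list used by the launcher scripts
    # above; 'impl-guid' is a placeholder implementation guid.
    volume = db.Volume(client.path('db'), RESOURCES)
    cache = Cache(volume)
    cache.ensure(10 * 1024 * 1024)  # make room before storing a new implementation
    cache.checkin('impl-guid')      # start tracking an implementation stored in the volume
    cache.checkout('impl-guid')     # stop tracking it, e.g. while its context is cloned
    cache.recycle()                 # periodic cleanup driven by cache_limit/cache_lifetime
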
diff --git a/sugar_network/client/clones.py b/sugar_network/client/clones.py
deleted file mode 100644
index bd8850f..0000000
--- a/sugar_network/client/clones.py
+++ /dev/null
@@ -1,405 +0,0 @@
-# Copyright (C) 2012-2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import errno
-import shutil
-import hashlib
-import logging
-from os.path import join, exists, lexists, relpath, dirname, basename, isdir
-from os.path import abspath, islink
-
-from sugar_network import client, toolkit
-from sugar_network.model.context import Context
-from sugar_network.toolkit.spec import Spec
-from sugar_network.toolkit.inotify import Inotify, \
- IN_DELETE_SELF, IN_CREATE, IN_DELETE, IN_CLOSE_WRITE, \
- IN_MOVED_TO, IN_MOVED_FROM
-from sugar_network.toolkit import coroutine
-
-
-_logger = logging.getLogger('client.clones')
-
-
-def walk(context):
- root = _context_path(context, '')
- if not exists(root):
- return
-
- for filename in os.listdir(root):
- path = join(root, filename)
- if exists(path):
- yield os.readlink(path)
-
-
-def wipeout(context):
- for path in walk(context):
- _logger.info('Wipe out %r implementation from %r', context, path)
- if isdir(path):
- shutil.rmtree(path)
- else:
- os.unlink(path)
-
-
-def monitor(contexts, paths):
- inotify = _Inotify(contexts)
- inotify.setup(paths)
- inotify.serve_forever()
-
-
-def populate(contexts, paths):
- inotify = _Inotify(contexts)
- inotify.add_watch = lambda *args: None
- inotify.setup(paths)
-
-
-class _Inotify(Inotify):
-
- def __init__(self, contexts):
- Inotify.__init__(self)
-
- self._contexts = contexts
- self._roots = []
- self._jobs = coroutine.Pool()
-
- xdg_data_home = os.environ.get('XDG_DATA_HOME') or \
- join(os.environ['HOME'], '.local', 'share')
- self._icons_dir = join(xdg_data_home,
- 'icons', 'sugar', 'scalable', 'mimetypes')
- self._mime_dir = join(xdg_data_home, 'mime')
-
- def setup(self, paths):
- mtime = 0
- for path in paths:
- path = abspath(path)
- if not exists(path):
- if not os.access(dirname(path), os.W_OK):
- _logger.warning('No permissions to create %s '
- 'directory, do not monitor it', path)
- continue
- os.makedirs(path)
- mtime = max(mtime, os.stat(path).st_mtime)
- self._roots.append(_Root(self, path))
-
- if mtime <= self._contexts.mtime:
- return
-
- docs, __ = self._contexts.find(clone=[1, 2])
- for context in docs:
- root = _context_path(context.guid, '')
- found = False
- if exists(root):
- for filename in os.listdir(root):
- path = join(root, filename)
- if lexists(path):
- if not exists(path):
- os.unlink(path)
- else:
- found = True
- break
- if found:
- if context['clone'] != 2:
- self._contexts.update(context.guid, {'clone': 2})
- else:
- self._contexts.update(context.guid, {'clone': 0})
-
- def serve_forever(self):
- while True:
- coroutine.select([self.fileno()], [], [])
- if self.closed:
- break
- for filename, event, cb in self.read():
- try:
- cb(filename, event)
- except Exception:
- toolkit.exception('Cannot dispatch 0x%X event for %r',
- event, filename)
- coroutine.dispatch()
-
- def found(self, clone_path):
- hashed_path, checkin_path = _checkin_path(clone_path)
- if exists(checkin_path):
- return
-
- _logger.debug('Checking in activity from %r', clone_path)
-
- try:
- spec = Spec(root=clone_path)
- except Exception:
- toolkit.exception(_logger, 'Cannot read %r spec', clone_path)
- return
-
- context = spec['context']
-
- context_path = _context_path(context, hashed_path)
- _ensure_path(context_path)
- os.symlink(clone_path, context_path)
-
- _ensure_path(checkin_path)
- os.symlink(relpath(context_path, dirname(checkin_path)), checkin_path)
-
- if self._contexts.exists(context):
- self._contexts.update(context, {'clone': 2})
- else:
- _logger.debug('Register unknown local activity, %r', context)
-
- mtime = os.stat(spec.root).st_mtime
- self._contexts.create({
- 'guid': context,
- 'type': 'activity',
- 'title': spec['name'],
- 'summary': spec['summary'],
- 'description': spec['description'],
- 'clone': 2,
- 'ctime': mtime,
- 'mtime': mtime,
- })
-
- icon_path = join(spec.root, spec['icon'])
- if exists(icon_path):
- with file(icon_path, 'rb') as svg:
- self._contexts.update(context, Context.image_props(svg))
-
- self._checkin_activity(spec)
-
- def found_mimetypes(self, impl_path):
- hashed_path, __ = _checkin_path(impl_path)
- src_path = join(impl_path, 'activity', 'mimetypes.xml')
- dst_path = join(self._mime_dir, 'packages', hashed_path + '.xml')
-
- if exists(dst_path):
- return
-
- _logger.debug('Update MIME database to process found %r', src_path)
-
- toolkit.symlink(src_path, dst_path)
- toolkit.spawn('update-mime-database', self._mime_dir)
-
- def lost(self, clone_path):
- __, checkin_path = _checkin_path(clone_path)
- if not lexists(checkin_path):
- return
-
- _logger.debug('Checking out activity from %r', clone_path)
-
- context_path = _read_checkin_path(checkin_path)
- context_dir = dirname(context_path)
- impls = set(os.listdir(context_dir)) - set([basename(context_path)])
-
- if not impls:
- context = basename(context_dir)
- if self._contexts.exists(context):
- self._contexts.update(context, {'clone': 0})
-
- if lexists(context_path):
- os.unlink(context_path)
- os.unlink(checkin_path)
-
- self._checkout_activity(clone_path)
-
- def lost_mimetypes(self, impl_path):
- hashed_path, __ = _checkin_path(impl_path)
- dst_path = join(self._mime_dir, 'packages', hashed_path + '.xml')
-
- if not lexists(dst_path):
- return
-
- _logger.debug('Update MIME database to process lost %r', impl_path)
-
- os.unlink(dst_path)
- toolkit.spawn('update-mime-database', self._mime_dir)
-
- def _checkin_activity(self, spec):
- icon_path = join(spec.root, spec['icon'])
- if spec['mime_types'] and exists(icon_path):
- _logger.debug('Register %r icons for %r',
- spec['mime_types'], spec)
- if not exists(self._icons_dir):
- os.makedirs(self._icons_dir)
- for mime_type in spec['mime_types']:
- toolkit.symlink(icon_path,
- join(self._icons_dir,
- mime_type.replace('/', '-') + '.svg'))
-
- def _checkout_activity(self, clone_path):
- if exists(self._icons_dir):
- for filename in os.listdir(self._icons_dir):
- path = join(self._icons_dir, filename)
- if islink(path) and \
- os.readlink(path).startswith(clone_path + os.sep):
- os.unlink(path)
-
-
-class _Root(object):
-
- def __init__(self, monitor_, path):
- self.path = path
- self._monitor = monitor_
- self._nodes = {}
-
- _logger.info('Start monitoring %r implementations root', self.path)
-
- self._monitor.add_watch(self.path,
- IN_DELETE_SELF | IN_CREATE | IN_DELETE |
- IN_MOVED_TO | IN_MOVED_FROM,
- self.__watch_cb)
-
- for filename in os.listdir(self.path):
- path = join(self.path, filename)
- if isdir(path):
- self._nodes[filename] = _Node(self._monitor, path)
-
- def __watch_cb(self, filename, event):
- if event & IN_DELETE_SELF:
- _logger.warning('Lost ourselves, cannot monitor anymore')
- self._nodes.clear()
- return
-
- if event & (IN_CREATE | IN_MOVED_TO):
- path = join(self.path, filename)
- if isdir(path):
- self._nodes[filename] = _Node(self._monitor, path)
- elif event & (IN_DELETE | IN_MOVED_FROM):
- node = self._nodes.get(filename)
- if node is not None:
- node.unlink()
- del self._nodes[filename]
-
-
-class _Node(object):
-
- def __init__(self, monitor_, path):
- self._path = path
- self._monitor = monitor_
- self._activity_path = join(path, 'activity')
- self._activity_dir = None
-
- _logger.debug('Start monitoring %r root activity directory', path)
-
- self._wd = self._monitor.add_watch(path,
- IN_CREATE | IN_DELETE | IN_MOVED_TO | IN_MOVED_FROM,
- self.__watch_cb)
-
- if exists(self._activity_path):
- self._activity_dir = \
- _ActivityDir(self._monitor, self._activity_path)
-
- def unlink(self):
- if self._activity_dir is not None:
- self._activity_dir.unlink()
- self._activity_dir = None
- _logger.debug('Stop monitoring %r root activity directory', self._path)
- self._monitor.rm_watch(self._wd)
-
- def __watch_cb(self, filename, event):
- if filename != 'activity':
- return
- if event & (IN_CREATE | IN_MOVED_TO):
- self._activity_dir = \
- _ActivityDir(self._monitor, self._activity_path)
- elif event & (IN_DELETE | IN_MOVED_FROM):
- self._activity_dir.unlink()
- self._activity_dir = None
-
-
-class _ActivityDir(object):
-
- def __init__(self, monitor_, path):
- self._path = path
- self._monitor = monitor_
- self._found = False
- self._node_path = dirname(path)
-
- _logger.debug('Start monitoring %r activity directory', path)
-
- self._wd = self._monitor.add_watch(path,
- IN_CREATE | IN_CLOSE_WRITE | IN_DELETE | IN_MOVED_TO |
- IN_MOVED_FROM,
- self.__watch_cb)
-
- for filename in ('activity.info', 'mimetypes.xml'):
- if exists(join(path, filename)):
- self.found(filename)
-
- def unlink(self):
- self.lost('activity.info')
- _logger.debug('Stop monitoring %r activity directory', self._path)
- self._monitor.rm_watch(self._wd)
-
- def found(self, filename):
- if filename == 'mimetypes.xml':
- self._monitor.found_mimetypes(self._node_path)
- return
- if self._found:
- return
- _logger.debug('Found %r', self._node_path)
- self._found = True
- self._monitor.found(self._node_path)
- if exists(join(self._path, 'mimetypes.xml')):
- self._monitor.found_mimetypes(self._node_path)
-
- def lost(self, filename):
- if filename == 'mimetypes.xml':
- self._monitor.lost_mimetypes(self._node_path)
- return
- if not self._found:
- return
- _logger.debug('Lost %r', self._node_path)
- self._found = False
- self._monitor.lost(self._node_path)
-
- def __watch_cb(self, filename, event):
- if filename not in ('activity.info', 'mimetypes.xml'):
- return
- if event & IN_CREATE:
- # There is only one case when newly created file can be read,
- # if number of hardlinks is bigger than one, i.e., its content
- # already populated
- if os.stat(join(self._path, filename)).st_nlink > 1:
- self.found(filename)
- elif event & (IN_CLOSE_WRITE | IN_MOVED_TO):
- self.found(filename)
- elif event & (IN_DELETE | IN_MOVED_FROM):
- self.lost(filename)
-
-
-def _checkin_path(clone_path):
- hashed_path = hashlib.sha1(clone_path).hexdigest()
- return hashed_path, client.path('clones', 'checkin', hashed_path)
-
-
-def _read_checkin_path(checkin_path):
- return join(dirname(checkin_path), os.readlink(checkin_path))
-
-
-def _context_path(context, hashed_path):
- return client.path('clones', 'context', context, hashed_path)
-
-
-def _ensure_path(path):
- if lexists(path):
- os.unlink(path)
- return
-
- dir_path = dirname(path)
- if exists(dir_path):
- return
-
- try:
- os.makedirs(dir_path)
- except OSError, error:
- # In case if another process already create directory
- if error.errno != errno.EEXIST:
- raise
diff --git a/sugar_network/client/implementations.py b/sugar_network/client/implementations.py
new file mode 100644
index 0000000..34a9145
--- /dev/null
+++ b/sugar_network/client/implementations.py
@@ -0,0 +1,385 @@
+# Copyright (C) 2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# pylint: disable-msg=E1101,W0611
+
+import os
+import re
+import sys
+import uuid
+import time
+import json
+import random
+import shutil
+import hashlib
+import logging
+from os.path import join, exists, basename, dirname, relpath
+
+from sugar_network import client, toolkit
+from sugar_network.client.cache import Cache
+from sugar_network.client import journal, packagekit
+from sugar_network.toolkit.router import Request, Response, route, postroute
+from sugar_network.toolkit.bundle import Bundle
+from sugar_network.toolkit import http, coroutine, exception, enforce
+
+
+_MIMETYPE_DEFAULTS_KEY = '/desktop/sugar/journal/defaults'
+_MIMETYPE_INVALID_CHARS = re.compile('[^a-zA-Z0-9-_/.]')
+
+_logger = logging.getLogger('implementations')
+
+
+class Routes(object):
+
+ def __init__(self, local_volume):
+ self._volume = local_volume
+ self._node_mtime = None
+ self._call = lambda **kwargs: \
+ self._map_exceptions(self.fallback, **kwargs)
+ self._cache = Cache(local_volume)
+
+ def invalidate_solutions(self, mtime):
+ self._node_mtime = mtime
+
+ @route('GET', ['context', None], cmd='launch', arguments={'args': list})
+ def launch(self, request, no_spawn):
+ for context in self._checkin_context(request):
+ impl = self._checkin_impl(context, request)
+ if 'activity' in context['type']:
+ self._exec(request, context, impl)
+ else:
+ app = request.get('context') or \
+ _mimetype_context(impl['mime_type'])
+ enforce(app, 'Cannot find proper application')
+ doc = self._volume['implementation'].path(impl['guid'], 'data')
+ app_request = Request(path=['context', app], object_id=doc)
+ for app_context in self._checkin_context(app_request):
+ app_impl = self._checkin_impl(app_context, app_request)
+ self._exec(app_request, app_context, app_impl)
+
+ @route('PUT', ['context', None], cmd='clone', arguments={'requires': list})
+ def clone(self, request):
+ enforce(not request.content or self.inline(), http.ServiceUnavailable,
+ 'Not available in offline mode')
+ for context in self._checkin_context(request, 'clone'):
+ cloned_path = context.path('.clone')
+ if request.content:
+ impl = self._checkin_impl(context, request)
+ impl_path = relpath(dirname(impl['path']), context.path())
+ os.symlink(impl_path, cloned_path)
+ self._cache.checkout(impl['guid'])
+ else:
+ cloned_impl = basename(os.readlink(cloned_path))
+ self._cache.checkin(cloned_impl)
+ os.unlink(cloned_path)
+
+ @route('GET', ['context', None], cmd='clone',
+ arguments={'requires': list})
+ def get_clone(self, request, response):
+ return self._get_clone(request, response)
+
+ @route('HEAD', ['context', None], cmd='clone',
+ arguments={'requires': list})
+ def head_clone(self, request, response):
+ self._get_clone(request, response)
+
+ @route('PUT', ['context', None], cmd='favorite')
+ def favorite(self, request):
+ for __ in self._checkin_context(request, 'favorite'):
+ pass
+
+ @route('GET', cmd='recycle')
+ def recycle(self):
+ return self._cache.recycle()
+
+ def _map_exceptions(self, fun, *args, **kwargs):
+ try:
+ return fun(*args, **kwargs)
+ except http.NotFound, error:
+ if self.inline():
+ raise
+ raise http.ServiceUnavailable, error, sys.exc_info()[2]
+
+ def _checkin_context(self, request, layer=None):
+ guid = request.guid
+ if layer and not request.content and \
+ not self._volume['context'].exists(guid):
+ return
+ contexts = self._volume['context']
+
+ if not contexts.exists(guid):
+ context = self._call(method='GET', path=['context', guid])
+ contexts.create(context, setters=True)
+ for prop in ('icon', 'artifact_icon', 'preview'):
+ blob = self._call(method='GET', path=['context', guid, prop])
+ if blob is not None:
+ contexts.update(guid, {prop: {'blob': blob}})
+ context = contexts.get(guid)
+ if layer and bool(request.content) == (layer in context['layer']):
+ return
+
+ yield context
+
+ if layer:
+ if request.content:
+ layer_value = set(context['layer']) | set([layer])
+ else:
+ layer_value = set(context['layer']) - set([layer])
+ contexts.update(guid, {'layer': list(layer_value)})
+ self.broadcast({
+ 'event': 'update',
+ 'resource': 'context',
+ 'guid': guid,
+ })
+ _logger.debug('Checked %r in: %r', guid, layer_value)
+
+ def _checkin_impl(self, context, request, clone=None):
+ stability = request.get('stability') or \
+ client.stability(request.guid)
+
+ if 'activity' not in context['type']:
+ _logger.debug('Cloning %r', request.guid)
+ response = Response()
+ blob = self._call(method='GET', path=['context', request.guid],
+ cmd='clone', stability=stability, response=response)
+ impl = response.meta
+ self._cache_impl(context, impl, blob, impl.pop('data'))
+ return impl
+
+ _logger.debug('Making %r', request.guid)
+
+ solution, stale = self._cache_solution_get(request.guid, stability)
+ if stale is False:
+ _logger.debug('Reuse cached %r solution', request.guid)
+ elif solution is not None and not self.inline():
+ _logger.debug('Reuse stale %r solution in offline mode', request.guid)
+ else:
+ _logger.debug('Solve %r', request.guid)
+ from sugar_network.client import solver
+ solution = self._map_exceptions(solver.solve,
+ self.fallback, request.guid, stability)
+ request.session['solution'] = solution
+
+ to_install = []
+ for sel in solution:
+ if 'install' in sel:
+ enforce(self.inline(), http.ServiceUnavailable,
+ 'Installation is not available in offline mode')
+ to_install.extend(sel.pop('install'))
+ if to_install:
+ packagekit.install(to_install)
+
+ for sel in solution:
+ if 'path' not in sel and sel['stability'] != 'packaged':
+ self._cache_impl(context, sel)
+
+ self._cache_solution(request.guid, stability, solution)
+ return solution[0]
+
+ def _exec(self, request, context, sel):
+ # pylint: disable-msg=W0212
+ datadir = client.profile_path('data', context.guid)
+ logdir = client.profile_path('logs')
+
+ args = sel['command'] + (request.get('args') or [])
+ object_id = request.get('object_id')
+ if object_id:
+ if 'activity_id' not in request:
+ activity_id = journal.get(object_id, 'activity_id')
+ if activity_id:
+ request['activity_id'] = activity_id
+ args.extend(['-o', object_id])
+ activity_id = request.get('activity_id')
+ if not activity_id:
+ activity_id = request['activity_id'] = _activity_id_new()
+ uri = request.get('uri')
+ if uri:
+ args.extend(['-u', uri])
+ args.extend([
+ '-b', request.guid,
+ '-a', activity_id,
+ ])
+
+ for path in [
+ join(datadir, 'instance'),
+ join(datadir, 'data'),
+ join(datadir, 'tmp'),
+ logdir,
+ ]:
+ if not exists(path):
+ os.makedirs(path)
+
+ event = {'event': 'exec',
+ 'cmd': 'launch',
+ 'guid': request.guid,
+ 'args': args,
+ 'log_path':
+ toolkit.unique_filename(logdir, context.guid + '.log'),
+ }
+ event.update(request)
+ event.update(request.session)
+ self.broadcast(event)
+
+ child = coroutine.fork()
+ if child is not None:
+ _logger.debug('Exec %s[%s]: %r', request.guid, child.pid, args)
+ child.watch(self.__sigchld_cb, child.pid, event)
+ return
+
+ try:
+ with file('/dev/null', 'r') as f:
+ os.dup2(f.fileno(), 0)
+ with file(event['log_path'], 'a+') as f:
+ os.dup2(f.fileno(), 1)
+ os.dup2(f.fileno(), 2)
+ toolkit.init_logging()
+
+ impl_path = sel['path']
+ os.chdir(impl_path)
+
+ environ = os.environ
+ environ['PATH'] = ':'.join([
+ join(impl_path, 'activity'),
+ join(impl_path, 'bin'),
+ environ['PATH'],
+ ])
+ environ['PYTHONPATH'] = impl_path + ':' + \
+ environ.get('PYTHONPATH', '')
+ environ['SUGAR_BUNDLE_PATH'] = impl_path
+ environ['SUGAR_BUNDLE_ID'] = context.guid
+ environ['SUGAR_BUNDLE_NAME'] = \
+ toolkit.gettext(context['title']).encode('utf8')
+ environ['SUGAR_BUNDLE_VERSION'] = sel['version']
+ environ['SUGAR_ACTIVITY_ROOT'] = datadir
+ environ['SUGAR_LOCALEDIR'] = join(impl_path, 'locale')
+
+ os.execvpe(args[0], args, environ)
+ except BaseException:
+ logging.exception('Failed to execute %r args=%r', sel, args)
+ finally:
+ os._exit(1)
+
+ def _cache_solution_path(self, guid):
+ return client.path('cache', 'solutions', guid[:2], guid)
+
+ def _cache_solution_get(self, guid, stability):
+ path = self._cache_solution_path(guid)
+ solution = None
+ if exists(path):
+ try:
+ with file(path) as f:
+ cached_api_url, cached_stability, solution = json.load(f)
+ except Exception, error:
+ _logger.debug('Cannot open %r solution: %s', path, error)
+ if solution is None:
+ return None, None
+
+ stale = (cached_api_url != client.api_url.value)
+ if not stale and cached_stability is not None:
+ stale = set(cached_stability) != set(stability)
+ if not stale and self._node_mtime is not None:
+ stale = (self._node_mtime > os.stat(path).st_mtime)
+ if not stale:
+ stale = (packagekit.mtime() > os.stat(path).st_mtime)
+
+ return solution, stale
+
+ def _cache_solution(self, guid, stability, solution):
+ path = self._cache_solution_path(guid)
+ if not exists(dirname(path)):
+ os.makedirs(dirname(path))
+ with file(path, 'w') as f:
+ json.dump([client.api_url.value, stability, solution], f)
+
+ def _cache_impl(self, context, sel, blob=None, data=None):
+ guid = sel['guid']
+ impls = self._volume['implementation']
+ data_path = sel['path'] = impls.path(guid, 'data')
+
+ if impls.exists(guid):
+ self._cache.checkin(guid, data)
+ return
+
+ if blob is None:
+ response = Response()
+ blob = self._call(method='GET',
+ path=['implementation', guid, 'data'], response=response)
+ data = response.meta
+ for key in ('seqno', 'url'):
+ if key in data:
+ del data[key]
+
+ try:
+ if not exists(dirname(data_path)):
+ os.makedirs(dirname(data_path))
+ if 'activity' in context['type']:
+ self._cache.ensure(data['unpack_size'], data['blob_size'])
+ with toolkit.TemporaryFile() as tmp_file:
+ shutil.copyfileobj(blob, tmp_file)
+ tmp_file.seek(0)
+ with Bundle(tmp_file, 'application/zip') as bundle:
+ bundle.extractall(data_path,
+ extract=data.get('extract'))
+ for exec_dir in ('bin', 'activity'):
+ bin_path = join(data_path, exec_dir)
+ if not exists(bin_path):
+ continue
+ for filename in os.listdir(bin_path):
+ os.chmod(join(bin_path, filename), 0755)
+ else:
+ self._cache.ensure(data['blob_size'])
+ with file(data_path, 'wb') as f:
+ shutil.copyfileobj(blob, f)
+ impl = sel.copy()
+ impl['data'] = data
+ impls.create(impl)
+ self._cache.checkin(guid)
+ except Exception:
+ shutil.rmtree(data_path, ignore_errors=True)
+ raise
+
+ def _get_clone(self, request, response):
+ for context in self._checkin_context(request):
+ if 'clone' not in context['layer']:
+ return self._map_exceptions(self.fallback, request, response)
+ guid = basename(os.readlink(context.path('.clone')))
+ impl = self._volume['implementation'].get(guid)
+ response.meta = impl.properties([
+ 'guid', 'context', 'license', 'version', 'stability', 'data'])
+ return impl.meta('data')
+
+ def __sigchld_cb(self, returncode, pid, event):
+ _logger.debug('Exit %s[%s]: %r', event['guid'], pid, returncode)
+ if returncode:
+ event['event'] = 'failure'
+ event['error'] = 'Process exited with %r status' % returncode
+ else:
+ event['event'] = 'exit'
+ self.broadcast(event)
+
+
+def _activity_id_new():
+ data = '%s%s%s' % (
+ time.time(),
+ random.randint(10000, 100000),
+ uuid.getnode())
+ return hashlib.sha1(data).hexdigest()
+
+
+def _mimetype_context(mime_type):
+ import gconf
+ mime_type = _MIMETYPE_INVALID_CHARS.sub('_', mime_type)
+ key = '/'.join([_MIMETYPE_DEFAULTS_KEY, mime_type])
+ return gconf.client_get_default().get_string(key)
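
Note: the solution cache above encodes several staleness rules; restated as a standalone helper for clarity (illustrative only, mirroring _cache_solution_get rather than adding behaviour):

    from sugar_network import client

    def solution_is_stale(cached_api_url, cached_stability, cached_mtime,
                          stability, node_mtime, pms_mtime):
        """Restate the checks performed by _cache_solution_get."""
        if cached_api_url != client.api_url.value:
            return True      # solved against another node API
        if cached_stability is not None and \
                set(cached_stability) != set(stability):
            return True      # stability requirements changed
        if node_mtime is not None and node_mtime > cached_mtime:
            return True      # the node published newer implementations
        # packagekit.mtime() reports the system package database mtime
        return pms_mtime is not None and pms_mtime > cached_mtime
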
diff --git a/sugar_network/client/injector.py b/sugar_network/client/injector.py
deleted file mode 100644
index 04a6765..0000000
--- a/sugar_network/client/injector.py
+++ /dev/null
@@ -1,267 +0,0 @@
-# Copyright (C) 2010-2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import json
-import shutil
-import logging
-from os.path import join, exists, basename, dirname
-
-from sugar_network import client, toolkit
-from sugar_network.client import journal, cache
-from sugar_network.toolkit import pipe, lsb_release
-
-
-_PMS_PATHS = {
- 'Debian': '/var/lib/dpkg/status',
- 'Fedora': '/var/lib/rpm/Packages',
- 'Ubuntu': '/var/lib/dpkg/status',
- }
-
-_logger = logging.getLogger('client.injector')
-_pms_path = _PMS_PATHS.get(lsb_release.distributor_id())
-_mtime = None
-
-
-def make(guid):
- return pipe.fork(_make, log_path=client.profile_path('logs', guid),
- context=guid, session={'context': guid})
-
-
-def launch(guid, args=None, activity_id=None, object_id=None, uri=None,
- color=None):
- if object_id:
- if not activity_id:
- activity_id = journal.get(object_id, 'activity_id')
- if not color:
- color = journal.get(object_id, 'icon-color')
-
- if not activity_id:
- activity_id = journal.create_activity_id()
-
- if args is None:
- args = []
- args.extend([
- '-b', guid,
- '-a', activity_id,
- ])
- if object_id:
- args.extend(['-o', object_id])
- if uri:
- args.extend(['-u', uri])
-
- return pipe.fork(_launch, log_path=client.profile_path('logs', guid),
- context=guid, args=args, session={
- 'context': guid,
- 'activity_id': activity_id,
- 'color': color,
- })
-
-
-def clone(guid):
- return pipe.fork(_clone, log_path=client.profile_path('logs', guid),
- context=guid, session={'context': guid})
-
-
-def clone_impl(context, **params):
- return pipe.fork(_clone_impl,
- log_path=client.profile_path('logs', context),
- context_guid=context, params=params, session={'context': context})
-
-
-def invalidate_solutions(mtime):
- global _mtime
- _mtime = mtime
-
-
-def _make(context):
- pipe.feedback('analyze')
- solution, stability = _solve(context)
- pipe.feedback('solved', environ={'solution': solution})
-
- to_install = []
- for impl in solution:
- if 'install' in impl:
- to_install.extend(impl.pop('install'))
- if to_install:
- pipe.trace('Install %s package(s)',
- ', '.join([i['name'] for i in to_install]))
- from sugar_network.client import packagekit
- packagekit.install(to_install)
-
- for impl in solution:
- if 'path' in impl or impl['stability'] == 'packaged':
- continue
- impl_path = cache.get(impl['id'], impl)
- if 'prefix' in impl:
- impl_path = join(impl_path, impl['prefix'])
- impl['path'] = impl_path
-
- if stability is not None:
- _set_cached_solution(context, stability, solution)
-
- pipe.feedback('ready')
- return solution
-
-
-def _launch(context, args):
- solution = _make(context)
-
- args = solution[0]['command'] + (args or [])
- _logger.info('Executing %r feed: %s', context, args)
- pipe.feedback('exec')
-
- _activity_env(solution[0], os.environ)
- os.execvpe(args[0], args, os.environ)
-
-
-def _clone(context):
- solution = _make(context)
-
- cloned = []
- try:
- for impl in solution:
- path = impl.get('path')
- if not path or \
- path == '/': # Fake path set by "sugar" dependency
- continue
- dst_path = toolkit.unique_filename(
- client.activity_dirs.value[0], basename(path))
- cloned.append(dst_path)
- _logger.info('Clone implementation to %r', dst_path)
- toolkit.cptree(path, dst_path)
- impl['path'] = dst_path
- except Exception:
- while cloned:
- shutil.rmtree(cloned.pop(), ignore_errors=True)
- raise
-
- _set_cached_solution(context, None, solution)
-
-
-def _clone_impl(context_guid, params):
- conn = client.IPCConnection()
-
- context = conn.get(['context', context_guid], reply=['title'])
- impl = conn.meta(['context', context_guid], cmd='clone', **params)
-
- src_path = cache.get(impl['guid'], impl)
- if 'extract' in impl:
- src_path = join(src_path, impl['extract'])
- dst_path = toolkit.unique_filename(
- client.activity_dirs.value[0], basename(src_path))
-
- _logger.info('Clone implementation to %r', dst_path)
- toolkit.cptree(src_path, dst_path)
-
- _set_cached_solution(context_guid, None, [{
- 'id': dst_path,
- 'context': context_guid,
- 'version': impl['version'],
- 'name': context['title'],
- 'stability': impl['stability'],
- 'spec': join(dst_path, 'activity', 'activity.info'),
- 'path': dst_path,
- 'command': impl['commands']['activity']['exec'].split(),
- }])
-
-
-def _solve(context):
- pipe.trace('Start solving %s feed', context)
- stability = client.stability(context)
-
- solution, stale = _get_cached_solution(context, stability)
- if stale is False:
- pipe.trace('Reuse cached solution')
- return solution, None
-
- conn = client.IPCConnection()
- if solution is not None and conn.get(cmd='status')['route'] == 'offline':
- pipe.trace('Reuse stale cached solution in offline mode')
- return solution, None
-
- from sugar_network.client import solver
-
- return solver.solve(conn, context, stability), stability
-
-
-def _activity_env(impl, environ):
- root = client.profile_path('data', impl['context'])
- impl_path = impl['path']
-
- for path in ['instance', 'data', 'tmp']:
- path = join(root, path)
- if not exists(path):
- os.makedirs(path)
-
- environ['PATH'] = ':'.join([
- join(impl_path, 'activity'),
- join(impl_path, 'bin'),
- environ['PATH'],
- ])
- environ['SUGAR_BUNDLE_PATH'] = impl_path
- environ['SUGAR_BUNDLE_ID'] = impl['context']
- environ['SUGAR_BUNDLE_NAME'] = impl['name'].encode('utf8')
- environ['SUGAR_BUNDLE_VERSION'] = impl['version']
- environ['SUGAR_ACTIVITY_ROOT'] = root
- environ['PYTHONPATH'] = impl_path + ':' + environ.get('PYTHONPATH', '')
- environ['SUGAR_LOCALEDIR'] = join(impl_path, 'locale')
-
- os.chdir(impl_path)
-
-
-def _cached_solution_path(guid):
- return client.path('cache', 'solutions', guid[:2], guid)
-
-
-def _get_cached_solution(guid, stability):
- path = _cached_solution_path(guid)
- solution = None
- if exists(path):
- try:
- with file(path) as f:
- cached_api_url, cached_stability, solution = json.load(f)
- except Exception, error:
- _logger.debug('Cannot open %r solution: %s', path, error)
- if solution is None:
- return None, None
-
- stale = (cached_api_url != client.api_url.value)
- if not stale and cached_stability is not None:
- stale = set(cached_stability) != set(stability)
- if not stale and _mtime is not None:
- stale = (_mtime > os.stat(path).st_mtime)
- if not stale and _pms_path is not None:
- stale = (os.stat(_pms_path).st_mtime > os.stat(path).st_mtime)
-
- for impl in solution:
- impl_path = impl.get('path')
- if impl_path and not exists(impl_path):
- os.unlink(path)
- return None, None
- if not stale:
- spec = impl.get('spec')
- if spec and exists(spec):
- stale = (os.stat(spec).st_mtime > os.stat(path).st_mtime)
-
- return solution, stale
-
-
-def _set_cached_solution(guid, stability, solution):
- path = _cached_solution_path(guid)
- if not exists(dirname(path)):
- os.makedirs(dirname(path))
- with file(path, 'w') as f:
- json.dump([client.api_url.value, stability, solution], f)
diff --git a/sugar_network/client/journal.py b/sugar_network/client/journal.py
index 8f6c023..646bced 100644
--- a/sugar_network/client/journal.py
+++ b/sugar_network/client/journal.py
@@ -15,10 +15,6 @@
import os
import sys
-import time
-import uuid
-import random
-import hashlib
import logging
from shutil import copyfileobj
from tempfile import NamedTemporaryFile
@@ -32,14 +28,6 @@ _logger = logging.getLogger('client.journal')
_ds_root = client.profile_path('datastore')
-def create_activity_id():
- data = '%s%s%s' % (
- time.time(),
- random.randint(10000, 100000),
- uuid.getnode())
- return hashlib.sha1(data).hexdigest()
-
-
def exists(guid):
return os.path.exists(_ds_path(guid))
@@ -147,19 +135,19 @@ class Routes(object):
subrequest.content = request.content
subrequest.content_type = 'application/json'
# pylint: disable-msg=E1101
- subguid = self.call(subrequest, response)
+ subguid = self.fallback(subrequest, response)
subrequest = Request(method='PUT', document='artifact',
guid=subguid, prop='preview')
subrequest.content_type = 'image/png'
with file(preview_path, 'rb') as subrequest.content_stream:
- self.call(subrequest, response)
+ self.fallback(subrequest, response)
subrequest = Request(method='PUT', document='artifact',
guid=subguid, prop='data')
subrequest.content_type = get(guid, 'mime_type') or 'application/octet'
with file(data_path, 'rb') as subrequest.content_stream:
- self.call(subrequest, response)
+ self.fallback(subrequest, response)
def journal_update(self, guid, data=None, **kwargs):
enforce(self._ds is not None, 'Journal is inaccessible')
diff --git a/sugar_network/client/packagekit.py b/sugar_network/client/packagekit.py
index 8c624c6..782f09e 100644
--- a/sugar_network/client/packagekit.py
+++ b/sugar_network/client/packagekit.py
@@ -1,5 +1,4 @@
-# Copyright (C) 2010-2012 Aleksey Lim
-# Copyright (C) 2010 Thomas Leonard
+# Copyright (C) 2010-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -15,155 +14,96 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
-import locale
import logging
-from ConfigParser import ConfigParser
-from os.path import exists
-from gettext import gettext as _
-import dbus
-import gobject
-from dbus.mainloop.glib import threads_init, DBusGMainLoop
+from sugar_network.toolkit import lsb_release, gbus, enforce
-from sugar_network.toolkit import pipe, enforce
+_PK_MAX_RESOLVE = 100
+_PK_MAX_INSTALL = 2500
-_PK_CONFILE = '/etc/PackageKit/PackageKit.conf'
+_PMS_PATHS = {
+ 'Debian': '/var/lib/dpkg/status',
+ 'Fedora': '/var/lib/rpm/Packages',
+ 'Ubuntu': '/var/lib/dpkg/status',
+ }
-_logger = logging.getLogger('client.packagekit')
+_logger = logging.getLogger('packagekit')
+_pms_path = _PMS_PATHS.get(lsb_release.distributor_id())
-_pk = None
-_pk_max_resolve = 100
-_pk_max_install = 2500
+def mtime():
+ if _pms_path:
+ return os.stat(_pms_path).st_mtime
-def resolve(names):
- enforce(_get_pk() is not None, 'Cannot connect to PackageKit')
- pipe.feedback('resolve',
- message=_('Resolving %s package name(s)') % len(names))
- _logger.debug('Resolve names %r', names)
+def resolve(names):
result = {}
- mainloop = gobject.MainLoop()
while names:
- chunk = names[:min(len(names), _pk_max_resolve)]
+ chunk = names[:min(len(names), _PK_MAX_RESOLVE)]
del names[:len(chunk)]
- transaction = _Transaction(mainloop.quit)
- transaction.resolve(chunk)
- mainloop.run()
+ _logger.debug('Resolve %r', chunk)
- missed = set(chunk) - set([i['name'] for i in transaction.packages])
- enforce(not missed,
- 'Failed to resolve %s package(s)', ', '.join(missed))
- for pkg in transaction.packages:
- result[pkg['name']] = pkg
+ resp = gbus.call(_pk, 'Resolve', 'none', chunk)
+ missed = set(chunk) - set(resp.packages.keys())
+ enforce(not missed, 'Failed to resolve %s', ', '.join(missed))
+ result.update(resp.packages)
return result
def install(packages):
- enforce(_get_pk() is not None, 'Cannot connect to PackageKit')
-
ids = [i['pk_id'] for i in packages]
- pipe.feedback('install',
- message=_('Installing %s package(s)') % len(packages))
- _logger.debug('Ask PackageKit to install %r packages', ids)
-
- mainloop = gobject.MainLoop()
while ids:
- chunk = ids[:min(len(ids), _pk_max_install)]
+ chunk = ids[:min(len(ids), _PK_MAX_INSTALL)]
del ids[:len(chunk)]
- transaction = _Transaction(mainloop.quit)
- transaction.install(chunk)
- mainloop.run()
+ _logger.debug('Install %r', chunk)
- enforce(transaction.error_code is None or
- transaction.error_code in ('package-already-installed',
- 'all-packages-already-installed'),
- 'PackageKit install failed: %s (%s)',
- transaction.error_details, transaction.error_code)
+ resp = gbus.call(_pk, 'InstallPackages', True, chunk)
+ enforce(resp.error_code in (
+ 'package-already-installed',
+ 'all-packages-already-installed', None),
+ 'Installation failed: %s (%s)',
+ resp.error_details, resp.error_code)
-class _Transaction(object):
+class _Response(object):
- def __init__(self, finished_cb):
- self._finished_cb = finished_cb
+ def __init__(self):
self.error_code = None
self.error_details = None
- self.packages = []
-
- self._object = dbus.SystemBus().get_object(
- 'org.freedesktop.PackageKit', _get_pk().GetTid(), False)
- self._proxy = dbus.Interface(self._object,
- 'org.freedesktop.PackageKit.Transaction')
- self._props = dbus.Interface(self._object, dbus.PROPERTIES_IFACE)
-
- self._signals = []
- for signal, cb in [
- ('Finished', self.__finished_cb),
- ('ErrorCode', self.__error_code_cb),
- ('Package', self.__package_cb),
- ]:
- self._signals.append(self._proxy.connect_to_signal(signal, cb))
-
- defaultlocale = locale.getdefaultlocale()[0]
- if defaultlocale is not None:
- self._compat_call([
- ('SetLocale', defaultlocale),
- ('SetHints', ['locale=%s' % defaultlocale]),
- ])
-
- def resolve(self, names):
- self._proxy.Resolve('none', names)
-
- def install(self, names):
- _auth_wrapper('org.freedesktop.packagekit.package-install',
- self._compat_call, [
- ('InstallPackages', names),
- ('InstallPackages', True, names),
- ])
-
- def get_percentage(self):
- if self._object is None:
- return None
- try:
- return self._props.Get('org.freedesktop.PackageKit.Transaction',
- 'Percentage')
- except Exception:
- result, __, __, __ = self._proxy.GetProgress()
- return result
-
- def _compat_call(self, calls):
- for call in calls:
- method = call[0]
- args = call[1:]
- try:
- dbus_method = self._proxy.get_dbus_method(method)
- return dbus_method(*args)
- except dbus.exceptions.DBusException, e:
- if e.get_dbus_name() not in [
- 'org.freedesktop.DBus.Error.UnknownMethod',
- 'org.freedesktop.DBus.Error.InvalidArgs']:
- raise
- raise Exception('Cannot call %r DBus method' % calls)
-
- def __finished_cb(self, status, runtime):
+ self.packages = {}
+
+
+def _pk(result, op, *args):
+ import dbus
+
+ bus = dbus.SystemBus()
+ pk = dbus.Interface(
+ bus.get_object(
+ 'org.freedesktop.PackageKit', '/org/freedesktop/PackageKit',
+ False),
+ 'org.freedesktop.PackageKit')
+ txn = dbus.Interface(
+ bus.get_object('org.freedesktop.PackageKit', pk.GetTid(), False),
+ 'org.freedesktop.PackageKit.Transaction')
+ resp = _Response()
+ signals = []
+
+ def Finished_cb(status, runtime):
_logger.debug('Transaction finished: %s', status)
- for i in self._signals:
+ for i in signals:
i.remove()
- self._finished_cb()
- self._props = None
- self._proxy = None
- self._object = None
+ result.set(resp)
- def __error_code_cb(self, code, details):
- self.error_code = code
- self.error_details = details
+ def ErrorCode_cb(code, details):
+ resp.error_code = code
+ resp.error_details = details
- def __package_cb(self, status, pk_id, summary):
+ def Package_cb(status, pk_id, summary):
from sugar_network.client import solver
package_name, version, arch, __ = pk_id.split(';')
@@ -171,6 +111,8 @@ class _Transaction(object):
if not clean_version:
_logger.warn('Cannot parse distribution version "%s" '
'for package "%s"', version, package_name)
+ if package_name in resp.packages:
+ return
package = {
'pk_id': str(pk_id),
'version': clean_version,
@@ -178,73 +120,32 @@ class _Transaction(object):
'arch': solver.canonicalize_machine(arch),
'installed': (status == 'installed'),
}
- _logger.debug('Resolved PackageKit name: %r', package)
- self.packages.append(package)
-
+ _logger.debug('Found: %r', package)
+ resp.packages[package_name] = package
-def _get_pk():
- global _pk, _pk_max_resolve, _pk_max_install
+ for signal, cb in [
+ ('Finished', Finished_cb),
+ ('ErrorCode', ErrorCode_cb),
+ ('Package', Package_cb),
+ ]:
+ signals.append(txn.connect_to_signal(signal, cb))
- if _pk is not None:
- if _pk is False:
- return None
- else:
- return _pk
-
- gobject.threads_init()
- threads_init()
- DBusGMainLoop(set_as_default=True)
+ op = txn.get_dbus_method(op)
try:
- bus = dbus.SystemBus()
- pk_object = bus.get_object('org.freedesktop.PackageKit',
- '/org/freedesktop/PackageKit', False)
- _pk = dbus.Interface(pk_object, 'org.freedesktop.PackageKit')
- _logger.info('PackageKit dbus service found')
- except Exception, error:
- _pk = False
- _logger.info('PackageKit dbus service not found: %s', error)
- return None
-
- if exists(_PK_CONFILE):
- conf = ConfigParser()
- conf.read(_PK_CONFILE)
- if conf.has_option('Daemon', 'MaximumItemsToResolve'):
- _pk_max_resolve = \
- int(conf.get('Daemon', 'MaximumItemsToResolve'))
- if conf.has_option('Daemon', 'MaximumPackagesToProcess'):
- _pk_max_install = \
- int(conf.get('Daemon', 'MaximumPackagesToProcess'))
-
- return _pk
-
-
-def _auth_wrapper(iface, method, *args):
- _logger.info('Obtain authentication for %s', iface)
-
- def obtain():
- pk_auth = dbus.SessionBus().get_object(
+ op(*args)
+ except dbus.exceptions.DBusException, error:
+ if error.get_dbus_name() != \
+ 'org.freedesktop.PackageKit.Transaction.RefusedByPolicy':
+ raise
+ iface, auth = error.get_dbus_message().split()
+ if not auth.startswith('auth_'):
+ raise
+ auth = dbus.SessionBus().get_object(
'org.freedesktop.PolicyKit.AuthenticationAgent', '/',
'org.freedesktop.PolicyKit.AuthenticationAgent')
- pk_auth.ObtainAuthorization(iface, dbus.UInt32(0),
+ auth.ObtainAuthorization(iface, dbus.UInt32(0),
dbus.UInt32(os.getpid()), timeout=300)
-
- try:
- # PK on f11 needs to obtain authentication at first
- obtain()
- return method(*args)
- except Exception:
- # It seems doesn't work for recent PK
- try:
- return method(*args)
- except dbus.exceptions.DBusException, e:
- if e.get_dbus_name() != \
- 'org.freedesktop.PackageKit.Transaction.RefusedByPolicy':
- raise
- iface, auth = e.get_dbus_message().split()
- if not auth.startswith('auth_'):
- raise
- obtain()
- return method(*args)
+ op(*args)
if __name__ == '__main__':
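
Note: a rough usage sketch of the reworked helpers; gbus.call() is assumed to run the passed function in a D-Bus capable context and return whatever is handed to result.set(), as the code above implies:

    from sugar_network.client import packagekit

    # Package names are placeholders; resolve() maps each name to a dict
    # with 'pk_id', 'version', 'arch' and 'installed' keys (see Package_cb).
    packages = packagekit.resolve(['glibc', 'sugar'])
    missing = [pkg for pkg in packages.values() if not pkg['installed']]
    if missing:
        packagekit.install(missing)  # may trigger a PolicyKit authorization
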
diff --git a/sugar_network/client/routes.py b/sugar_network/client/routes.py
index 942b052..dfbda6f 100644
--- a/sugar_network/client/routes.py
+++ b/sugar_network/client/routes.py
@@ -13,38 +13,38 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+# pylint: disable=W0611
+
import os
import logging
import httplib
from os.path import join
from sugar_network import db, client, node, toolkit, model
-from sugar_network.client import journal, clones, injector
+from sugar_network.client import journal, implementations
from sugar_network.node.slave import SlaveRoutes
from sugar_network.toolkit import netlink, mountpoints
from sugar_network.toolkit.router import ACL, Request, Response, Router
-from sugar_network.toolkit.router import route, fallbackroute
-from sugar_network.toolkit.spec import Spec
+from sugar_network.toolkit.router import route, fallbackroute, postroute
from sugar_network.toolkit import zeroconf, coroutine, http, enforce
# Top-level directory name to keep SN data on mounted devices
_SN_DIRNAME = 'sugar-network'
-_LOCAL_PROPS = frozenset(['favorite', 'clone'])
-
# Flag file to recognize a directory as a synchronization directory
_SYNC_DIRNAME = 'sugar-network-sync'
-
_RECONNECT_TIMEOUT = 3
_RECONNECT_TIMEOUT_MAX = 60 * 15
+_LOCAL_LAYERS = frozenset(['local', 'clone', 'favorite'])
_logger = logging.getLogger('client.routes')
-class ClientRoutes(model.Routes, journal.Routes):
+class ClientRoutes(model.FrontRoutes, implementations.Routes, journal.Routes):
def __init__(self, home_volume, api_url=None, no_subscription=False):
- model.Routes.__init__(self)
+ model.FrontRoutes.__init__(self)
+ implementations.Routes.__init__(self, home_volume)
if not client.no_dbus.value:
journal.Routes.__init__(self)
@@ -57,7 +57,7 @@ class ClientRoutes(model.Routes, journal.Routes):
self._no_subscription = no_subscription
self._server_mode = not api_url
- home_volume.broadcast = self.broadcast
+ self._got_offline()
if self._server_mode:
mountpoints.connect(_SN_DIRNAME,
@@ -74,6 +74,23 @@ class ClientRoutes(model.Routes, journal.Routes):
self._got_offline()
self._local.volume.close()
+ @postroute
+ def postroute(self, request, response, result, error):
+ if error is None or isinstance(error, http.StatusPass):
+ return
+ event = {'event': 'failure',
+ 'exception': type(error).__name__,
+ 'error': str(error),
+ 'method': request.method,
+ 'cmd': request.cmd,
+ 'resource': request.resource,
+ 'guid': request.guid,
+ 'prop': request.prop,
+ }
+ event.update(request)
+ event.update(request.session)
+ self.broadcast(event)
+
@fallbackroute('GET', ['hub'])
def hub(self, request, response):
"""Serve Hub via HTTP instead of file:// for IPC users.
@@ -104,7 +121,7 @@ class ClientRoutes(model.Routes, journal.Routes):
@fallbackroute('GET', ['packages'])
def route_packages(self, request, response):
if self._inline.is_set():
- return self._node_call(request, response)
+ return self.fallback(request, response)
else:
# Let caller know that we are in offline and
# no way to process specified request on the node
@@ -127,158 +144,60 @@ class ClientRoutes(model.Routes, journal.Routes):
def whoami(self, request, response):
if self._inline.is_set():
- return self._node_call(request, response)
+ return self.fallback(request, response)
else:
return {'roles': [], 'guid': client.sugar_uid()}
@route('GET', [None],
- arguments={'reply': ('guid',), 'clone': int, 'favorite': bool},
+ arguments={
+ 'offset': int,
+ 'limit': int,
+ 'reply': ('guid',),
+ 'layer': list,
+ },
mime_type='application/json')
- def find(self, request, response, clone, favorite):
- if not self._inline.is_set() or clone or favorite:
+ def find(self, request, response, layer):
+ if set(request.get('layer', [])) & set(['favorite', 'clone']):
return self._local.call(request, response)
- else:
- return self._proxy_get(request, response)
- @route('GET', [None, None],
- arguments={'reply': list}, mime_type='application/json')
+ reply = request.setdefault('reply', ['guid'])
+ if 'layer' not in reply:
+ return self.fallback(request, response)
+
+ if 'guid' not in reply:
+ # Otherwise there is no way to mixin local `layer`
+ reply.append('guid')
+ result = self.fallback(request, response)
+
+ directory = self._local.volume[request.resource]
+ for item in result['result']:
+ if directory.exists(item['guid']):
+ existing_layer = directory.get(item['guid'])['layer']
+ item['layer'][:] = set(item['layer']) | set(existing_layer)
+
+ return result
+
+ @route('GET', [None, None], mime_type='application/json')
def get(self, request, response):
- return self._proxy_get(request, response)
+ if self._local.volume[request.resource].exists(request.guid):
+ return self._local.call(request, response)
+ else:
+ return self.fallback(request, response)
@route('GET', [None, None, None], mime_type='application/json')
def get_prop(self, request, response):
- return self._proxy_get(request, response)
-
- @route('GET', ['context', None], cmd='make')
- def make(self, request):
- for event in injector.make(request.guid):
- event['event'] = 'make'
- self.broadcast(event)
-
- @route('GET', ['context', None], cmd='launch',
- arguments={'args': list})
- def launch(self, request, args, activity_id=None,
- object_id=None, uri=None, color=None, no_spawn=None):
-
- def do_launch():
- for event in injector.launch(request.guid, args,
- activity_id=activity_id, object_id=object_id, uri=uri,
- color=color):
- event['event'] = 'launch'
- self.broadcast(event)
-
- if no_spawn:
- do_launch()
- else:
- self._jobs.spawn(do_launch)
-
- @route('PUT', ['context', None], cmd='clone',
- arguments={'force': False, 'nodeps': False, 'requires': list})
- def clone_context(self, request):
- enforce(self._inline.is_set(), 'Not available in offline')
-
- context_type = self._node_call(method='GET',
- path=['context', request.guid, 'type'])
- if 'stability' not in request:
- request['stability'] = client.stability(request.guid)
-
- if 'activity' in context_type:
- self._clone_activity(request)
- elif 'content' in context_type:
-
- def get_props():
- impls = self._node_call(method='GET',
- path=['implementation'], context=request.guid,
- stability=request['stability'], order_by='-version',
- limit=1, reply=['guid'])['result']
- enforce(impls, http.NotFound, 'No implementations')
- impl_id = impls[0]['guid']
- props = self._node_call(method='GET',
- path=['context', request.guid],
- reply=['title', 'description'])
- props['preview'] = self._node_call(method='GET',
- path=['context', request.guid, 'preview'])
- data_response = Response()
- props['data'] = self._node_call(response=data_response,
- method='GET',
- path=['implementation', impl_id, 'data'])
- props['mime_type'] = data_response.content_type or \
- 'application/octet'
- props['activity_id'] = impl_id
- return props
-
- self._clone_jobject(request, get_props)
+ if self._local.volume[request.resource].exists(request.guid):
+ return self._local.call(request, response)
else:
- raise RuntimeError('No way to clone')
-
- @route('PUT', ['artifact', None], cmd='clone', arguments={'force': False})
- def clone_artifact(self, request):
- enforce(self._inline.is_set(), 'Not available in offline')
-
- def get_props():
- props = self._node_call(method='GET',
- path=['artifact', request.guid],
- reply=['title', 'description', 'context'])
- props['preview'] = self._node_call(method='GET',
- path=['artifact', request.guid, 'preview'])
- props['data'] = self._node_call(method='GET',
- path=['artifact', request.guid, 'data'])
- props['activity'] = props.pop('context')
- return props
-
- self._clone_jobject(request, get_props)
-
- @route('PUT', ['context', None], cmd='favorite')
- def favorite(self, request):
- if request.content or \
- self._local.volume['context'].exists(request.guid):
- self._checkin_context(request.guid, {'favorite': request.content})
-
- @route('GET', ['context', None], cmd='feed',
- mime_type='application/json')
- def feed(self, request, response):
- try:
- context = self._local.volume['context'].get(request.guid)
- except http.NotFound:
- context = None
- if context is None or context['clone'] != 2:
- if self._inline.is_set():
- return self._node_call(request, response)
- else:
- # Let caller know that we are in offline and
- # no way to process specified request on the node
- raise http.ServiceUnavailable()
-
- versions = []
- for path in clones.walk(context.guid):
- try:
- spec = Spec(root=path)
- except Exception:
- toolkit.exception(_logger, 'Failed to read %r spec file', path)
- continue
- versions.append({
- 'guid': spec.root,
- 'version': spec['version'],
- 'arch': '*-*',
- 'stability': 'stable',
- 'commands': {
- 'activity': {
- 'exec': spec['Activity', 'exec'],
- },
- },
- 'requires': spec.requires,
- })
-
- return {'name': context.get('title',
- accept_language=request.accept_language),
- 'implementations': versions,
- }
+ return self.fallback(request, response)
@fallbackroute()
- def _node_call(self, request=None, response=None, method=None, path=None,
- **kwargs):
+ def fallback(self, request=None, response=None, method=None, path=None,
+ cmd=None, **kwargs):
if request is None:
- request = Request(method=method, path=path)
+ request = Request(method=method, path=path, cmd=cmd)
+ if response is None:
+ response = Response()
request.update(kwargs)
if self._inline.is_set():
if client.layers.value and \
@@ -301,6 +220,7 @@ class ClientRoutes(model.Routes, journal.Routes):
_logger.debug('Got online on %r', self._node)
self._inline.set()
self.broadcast({'event': 'inline', 'state': 'online'})
+ self._local.volume.broadcast = None
def _got_offline(self):
if self._inline.is_set():
@@ -308,10 +228,12 @@ class ClientRoutes(model.Routes, journal.Routes):
self._node.close()
self._inline.clear()
self.broadcast({'event': 'inline', 'state': 'offline'})
+ self._local.volume.broadcast = self.broadcast
def _fall_offline(self):
- _logger.debug('Fall to offline on %r', self._node)
- self._inline_job.kill()
+ if self._inline_job:
+ _logger.debug('Fall to offline on %r', self._node)
+ self._inline_job.kill()
def _restart_online(self):
self._fall_offline()
@@ -338,7 +260,7 @@ class ClientRoutes(model.Routes, journal.Routes):
if event.get('resource') == 'implementation':
mtime = event.get('mtime')
if mtime:
- injector.invalidate_solutions(mtime)
+ self.invalidate_solutions(mtime)
self.broadcast(event)
def handshake(url):
@@ -347,7 +269,7 @@ class ClientRoutes(model.Routes, journal.Routes):
info = self._node.get(cmd='info')
impl_info = info['documents'].get('implementation')
if impl_info:
- injector.invalidate_solutions(impl_info['mtime'])
+ self.invalidate_solutions(impl_info['mtime'])
if self._inline.is_set():
_logger.info('Reconnected to %r node', url)
else:
@@ -415,127 +337,14 @@ class ClientRoutes(model.Routes, journal.Routes):
self._inline_job.kill()
self._got_offline()
- def _checkin_context(self, guid, props):
- contexts = self._local.volume['context']
-
- if contexts.exists(guid):
- contexts.update(guid, props)
- else:
- copy = self._node_call(method='GET', path=['context', guid],
- reply=[
- 'type', 'title', 'summary', 'description',
- 'homepage', 'mime_types', 'dependencies',
- ])
- copy.update(props)
- copy['guid'] = guid
- contexts.create(copy)
- for prop in ('icon', 'artifact_icon', 'preview'):
- blob = self._node_call(method='GET',
- path=['context', guid, prop])
- if blob is not None:
- contexts.update(guid, {prop: {'blob': blob}})
-
- def _proxy_get(self, request, response):
- resource = request.resource
- if resource not in ('context', 'artifact'):
- return self._node_call(request, response)
-
- if not self._inline.is_set():
- return self._local.call(request, response)
-
- request_guid = request.guid if len(request.path) > 1 else None
- if request_guid and self._local.volume[resource].exists(request_guid):
- return self._local.call(request, response)
-
- if request.prop is not None:
- mixin = None
- else:
- reply = request.setdefault('reply', ['guid'])
- mixin = set(reply) & _LOCAL_PROPS
- if mixin:
- # Otherwise there is no way to mixin _LOCAL_PROPS
- if not request_guid and 'guid' not in reply:
- reply.append('guid')
- if resource == 'context' and 'type' not in reply:
- reply.append('type')
-
- result = self._node_call(request, response)
- if not mixin:
- return result
-
- if request_guid:
- items = [result]
- else:
- items = result['result']
-
- def mixin_jobject(props, guid):
- if 'clone' in mixin:
- props['clone'] = 2 if journal.exists(guid) else 0
- if 'favorite' in mixin:
- props['favorite'] = bool(int(journal.get(guid, 'keep') or 0))
-
- if resource == 'context':
- contexts = self._local.volume['context']
- for props in items:
- guid = request_guid or props['guid']
- if 'activity' in props['type']:
- if contexts.exists(guid):
- patch = contexts.get(guid).properties(mixin)
- else:
- patch = dict([(i, contexts.metadata[i].default)
- for i in mixin])
- props.update(patch)
- elif 'content' in props['type']:
- mixin_jobject(props, guid)
- elif resource == 'artifact':
- for props in items:
- mixin_jobject(props, request_guid or props['guid'])
-
- return result
-
- def _clone_activity(self, request):
- if not request.content:
- clones.wipeout(request.guid)
- return
- for __ in clones.walk(request.guid):
- if not request.get('force'):
- return
- break
- self._checkin_context(request.guid, {'clone': 1})
- if request.get('nodeps'):
- pipe = injector.clone_impl(request.guid,
- stability=request['stability'],
- requires=request.get('requires'))
- else:
- pipe = injector.clone(request.guid)
- event = {}
- for event in pipe:
- event['event'] = 'clone'
- self.broadcast(event)
- if event.get('state') == 'failure':
- self._checkin_context(request.guid, {'clone': 0})
- raise RuntimeError(event['error'])
-
- def _clone_jobject(self, request, get_props):
- if request.content:
- if request['force'] or not journal.exists(request.guid):
- self.journal_update(request.guid, **get_props())
- self.broadcast({
- 'event': 'show_journal',
- 'uid': request.guid,
- })
- else:
- if journal.exists(request.guid):
- self.journal_delete(request.guid)
-
class CachedClientRoutes(ClientRoutes):
def __init__(self, home_volume, api_url=None, no_subscription=False):
- ClientRoutes.__init__(self, home_volume, api_url, no_subscription)
self._push_seq = toolkit.PersistentSequence(
join(home_volume.root, 'push.sequence'), [1, None])
self._push_job = coroutine.Pool()
+ ClientRoutes.__init__(self, home_volume, api_url, no_subscription)
def _got_online(self):
ClientRoutes._got_online(self)
@@ -548,6 +357,7 @@ class CachedClientRoutes(ClientRoutes):
def _push(self):
pushed_seq = toolkit.Sequence()
skiped_seq = toolkit.Sequence()
+ volume = self._local.volume
def push(request, seq):
try:
@@ -559,44 +369,45 @@ class CachedClientRoutes(ClientRoutes):
else:
pushed_seq.include(seq)
- for document, directory in self._local.volume.items():
- if directory.mtime <= self._push_seq.mtime:
+ for res in volume.resources:
+ if volume.mtime(res) <= self._push_seq.mtime:
continue
- _logger.debug('Check %r local cache to push', document)
+ _logger.debug('Check %r local cache to push', res)
- for guid, patch in directory.diff(self._push_seq, layer='local'):
+ for guid, patch in volume[res].diff(self._push_seq, layer='local'):
diff = {}
diff_seq = toolkit.Sequence()
post_requests = []
for prop, meta, seqno in patch:
if 'blob' in meta:
- request = Request(method='PUT',
- path=[document, guid, prop])
+ request = Request(method='PUT', path=[res, guid, prop])
request.content_type = meta['mime_type']
request.content_length = os.stat(meta['blob']).st_size
request.content_stream = \
toolkit.iter_file(meta['blob'])
post_requests.append((request, seqno))
elif 'url' in meta:
- request = Request(method='PUT',
- path=[document, guid, prop])
+ request = Request(method='PUT', path=[res, guid, prop])
request.content_type = 'application/json'
request.content = meta
post_requests.append((request, seqno))
else:
- diff[prop] = meta['value']
+ value = meta['value']
+ if prop == 'layer':
+ value = list(set(value) - _LOCAL_LAYERS)
+ diff[prop] = value
diff_seq.include(seqno, seqno)
if not diff:
continue
if 'guid' in diff:
- request = Request(method='POST', path=[document])
+ request = Request(method='POST', path=[res])
access = ACL.CREATE | ACL.WRITE
else:
- request = Request(method='PUT', path=[document, guid])
+ request = Request(method='PUT', path=[res, guid])
access = ACL.WRITE
for name in diff.keys():
- if not (directory.metadata[name].acl & access):
+ if not (volume[res].metadata[name].acl & access):
del diff[name]
request.content_type = 'application/json'
request.content = diff
@@ -613,23 +424,25 @@ class CachedClientRoutes(ClientRoutes):
self._push_seq.exclude(pushed_seq)
if not skiped_seq:
self._push_seq.stretch()
- # No any decent reasons to keep fail reports after uploding.
- # TODO The entire offlile synchronization should be improved,
- # for now, it is possible to have a race here
- self._local.volume['report'].wipe()
+ if 'report' in volume:
+ # There is no decent reason to keep fail reports after uploading.
+ # TODO The entire offline synchronization should be improved;
+ # for now, it is possible to have a race here
+ volume['report'].wipe()
+
self._push_seq.commit()
self.broadcast({'event': 'push'})
-class _LocalRoutes(db.Routes, Router):
+class _LocalRoutes(model.VolumeRoutes, Router):
def __init__(self, volume):
- db.Routes.__init__(self, volume)
+ model.VolumeRoutes.__init__(self, volume)
Router.__init__(self, self)
def on_create(self, request, props, event):
props['layer'] = tuple(props['layer']) + ('local',)
- db.Routes.on_create(self, request, props, event)
+ model.VolumeRoutes.on_create(self, request, props, event)
class _NodeRoutes(SlaveRoutes, Router):
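With `_node_call` folded into the `fallback` route, proxied node calls are now built the same way as ordinary fallback requests. A minimal sketch, assuming a `ClientRoutes` instance named `routes` and a placeholder context guid (the offline branch of `fallback` is outside this hunk):

    # keyword arguments are merged into the Request and end up as query parameters
    feed = routes.fallback(method='GET', path=['context', 'context-guid'],
            cmd='feed', stability='stable')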
diff --git a/sugar_network/client/solver.py b/sugar_network/client/solver.py
index 107c7c5..13bc7a4 100644
--- a/sugar_network/client/solver.py
+++ b/sugar_network/client/solver.py
@@ -21,7 +21,7 @@ from os.path import isabs, join, dirname
from sugar_network.client import packagekit, SUGAR_API_COMPATIBILITY
from sugar_network.toolkit.spec import parse_version
-from sugar_network.toolkit import http, lsb_release, pipe, exception
+from sugar_network.toolkit import http, lsb_release
sys.path.insert(0, join(dirname(__file__), '..', 'lib', 'zeroinstall'))
@@ -38,9 +38,9 @@ reader.check_readable = lambda *args, **kwargs: True
reader.update_from_cache = lambda *args, **kwargs: None
reader.load_feed_from_cache = lambda url, **kwargs: _load_feed(url)
-_logger = logging.getLogger('zeroinstall')
+_logger = logging.getLogger('solver')
_stability = None
-_conn = None
+_call = None
def canonicalize_machine(arch):
@@ -68,10 +68,10 @@ def select_architecture(arches):
return result_arch
-def solve(conn, context, stability):
- global _conn, _stability
+def solve(call, context, stability):
+ global _call, _stability
- _conn = conn
+ _call = call
_stability = stability
req = Requirements(context)
@@ -102,7 +102,7 @@ def solve(conn, context, stability):
if feed.to_resolve:
continue
if status is None:
- status = conn.get(cmd='status')
+ status = call(method='GET', cmd='status')
if status['route'] == 'offline':
raise http.ServiceUnavailable(str(error))
else:
@@ -136,7 +136,8 @@ def solve(conn, context, stability):
else:
summary.append(' (no versions)')
missed.append(iface.uri)
- pipe.trace('\n '.join(['Solving results:'] + top_summary + dep_summary))
+ _logger.debug('[%s] Solving results:\n%s',
+ context, '\n'.join(top_summary + dep_summary))
if not ready:
# pylint: disable-msg=W0212
@@ -145,7 +146,7 @@ def solve(conn, context, stability):
reason = reason_exception.message
else:
reason = 'Cannot find implementations for %s' % ', '.join(missed)
- raise RuntimeError(reason)
+ raise http.NotFound(reason)
solution = []
solution.append(_impl_new(config, context, selections[context]))
@@ -162,29 +163,21 @@ def _interface_init(self, url):
def _impl_new(config, iface, sel):
- feed = config.iface_cache.get_feed(iface)
- impl = {'id': sel.id,
+ impl = {'guid': sel.id,
'context': iface,
+ 'license': sel.impl.license,
'version': sel.version,
- 'name': feed.title,
'stability': sel.impl.upstream_stability.name,
}
- if sel.impl.hints:
- for key in ('mime_type', 'blob_size', 'unpack_size'):
- value = sel.impl.hints.get(key)
- if value is not None:
- impl[key] = value
-
- if isabs(sel.id):
- impl['spec'] = join(sel.id, 'activity', 'activity.info')
+
if sel.local_path:
impl['path'] = sel.local_path
if sel.impl.to_install:
impl['install'] = sel.impl.to_install
if sel.impl.download_sources:
- prefix = sel.impl.download_sources[0].extract
- if prefix:
- impl['prefix'] = prefix
+ extract = sel.impl.download_sources[0].extract
+ if extract:
+ impl['extract'] = extract
commands = sel.get_commands()
if commands:
impl['command'] = commands.values()[0].path.split()
@@ -203,32 +196,31 @@ def _load_feed(context):
host_version = '0.94'
for version in SUGAR_API_COMPATIBILITY.get(host_version) or []:
feed.implement_sugar(version)
- feed.name = feed.title = context
+ feed.name = context
return feed
feed_content = None
try:
- feed_content = _conn.get(['context', context], cmd='feed',
- stability=_stability, distro=lsb_release.distributor_id())
- pipe.trace('Found %s feed: %r', context, feed_content)
+ feed_content = _call(method='GET', path=['context', context],
+ cmd='feed', stability=_stability,
+ distro=lsb_release.distributor_id())
+ _logger.trace('[%s] Found feed: %r', context, feed_content)
except http.ServiceUnavailable:
- pipe.trace('Failed to fetch %s feed', context)
+ _logger.trace('[%s] Failed to fetch the feed', context)
raise
except Exception:
- exception(_logger, 'Failed to fetch %r feed', context)
- pipe.trace('No feeds for %s', context)
+ _logger.exception('[%s] Failed to fetch the feed', context)
return None
# XXX 0install fails on non-ascii `name` values
feed.name = context
- feed.title = feed_content['name']
feed.to_resolve = feed_content.get('packages')
if not feed.to_resolve:
- pipe.trace('No compatible packages for %s', context)
+ _logger.trace('[%s] No compatible packages', context)
for impl in feed_content['implementations']:
feed.implement(impl)
if not feed.to_resolve and not feed.implementations:
- pipe.trace('No implementations for %s', context)
+ _logger.trace('[%s] No implementations', context)
return feed
@@ -246,7 +238,6 @@ class _Feed(model.ZeroInstallFeed):
self.last_checked = None
self.to_resolve = None
self._package_implementations = []
- self.title = None
@property
def url(self):
@@ -279,12 +270,12 @@ class _Feed(model.ZeroInstallFeed):
impl.upstream_stability = model.stability_levels['stable']
impl.requires.extend(_read_requires(release.get('requires')))
impl.hints = release
+ impl.license = release.get('license') or []
if isabs(impl_id):
impl.local_path = impl_id
else:
- impl.add_download_source(impl_id,
- release.get('size') or 0, release.get('extract'))
+ impl.add_download_source(impl_id, 0, release.get('extract'))
for name, command in release['commands'].items():
impl.commands[name] = _Command(name, command)
@@ -302,7 +293,6 @@ class _Feed(model.ZeroInstallFeed):
impl.released = 0
impl.arch = '*-*'
impl.upstream_stability = model.stability_levels['packaged']
- impl.local_path = '/'
self.implementations[impl_id] = impl
@@ -310,6 +300,11 @@ class _Implementation(model.ZeroInstallImplementation):
to_install = None
hints = None
+ license = None
+
+ def is_available(self, stores):
+ # Simplify solving
+ return True
class _Dependency(model.InterfaceDependency):
@@ -385,5 +380,4 @@ def _read_requires(data):
if __name__ == '__main__':
from pprint import pprint
logging.basicConfig(level=logging.DEBUG)
- pipe.trace = logging.info
pprint(solve(*sys.argv[1:]))
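The solver now takes a plain `call` callable instead of a `Connection`, so anything that accepts `method`, `path`, `cmd` and extra keyword arguments will do. A minimal sketch, assuming `http.Connection` is constructed with the node API URL (the URL, context guid and stability below are placeholders):

    from sugar_network.toolkit import http
    from sugar_network.toolkit.router import Request
    from sugar_network.client import solver

    conn = http.Connection('http://node.example.org')   # hypothetical node URL

    def call(**kwargs):
        # forward keyword arguments to the node as a Request
        return conn.call(Request(**kwargs))

    solution = solver.solve(call, 'context-guid', 'stable')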
diff --git a/sugar_network/db/directory.py b/sugar_network/db/directory.py
index 915e508..a59992c 100644
--- a/sugar_network/db/directory.py
+++ b/sugar_network/db/directory.py
@@ -67,10 +67,19 @@ class Directory(object):
def mtime(self):
return self._index.mtime
- @mtime.setter
- def mtime(self, value):
- self._index.mtime = value
- self.broadcast({'event': 'populate', 'mtime': value})
+ def checkpoint(self):
+ ts = self._index.checkpoint()
+ self.broadcast({'event': 'populate', 'mtime': ts})
+
+ def path(self, guid, prop=None):
+ record = self._storage.get(guid)
+ if not prop:
+ return record.path()
+ if prop in self.metadata and \
+ isinstance(self.metadata[prop], BlobProperty):
+ return record.blob_path(prop)
+ else:
+ return record.path(prop)
def wipe(self):
self.close()
@@ -90,7 +99,7 @@ class Directory(object):
"""Flush pending chnages to disk."""
self._index.commit()
- def create(self, props, event=None):
+ def create(self, props, event=None, setters=False):
"""Create new document.
If `guid` property is not specified, it will be auto set.
@@ -104,6 +113,15 @@ class Directory(object):
guid = props.get('guid')
if not guid:
guid = props['guid'] = toolkit.uuid()
+ if setters:
+ # XXX Setters are processed at the routes level but,
+ # while creating resources received from routes, it is important
+ # to call setters as well, e.g., the `author` property
+ doc = self.document_class(guid, None, props)
+ for key, value in props.items():
+ prop = self.metadata.get(key)
+ if prop is not None and prop.on_set is not None:
+ props[key] = prop.on_set(doc, value)
_logger.debug('Create %s[%s]: %r', self.metadata.name, guid, props)
post_event = {'event': 'create', 'guid': guid}
if event:
@@ -201,10 +219,9 @@ class Directory(object):
record.invalidate()
if found:
- self._index.checkpoint()
self._save_layout()
self.commit()
- self.broadcast({'event': 'populate', 'mtime': self.mtime})
+ self.checkpoint()
def diff(self, seq, exclude_seq=None, **params):
if exclude_seq is None:
@@ -288,7 +305,7 @@ class Directory(object):
self._storage = Storage(self._root, self.metadata)
self._index = self._index_class(index_path, self.metadata,
self._post_commit)
- _logger.debug('Initiated %r document', self.document_class)
+ _logger.debug('Open %r resource', self.document_class)
def _pre_store(self, guid, changes, event=None):
seqno = changes.get('seqno')
diff --git a/sugar_network/db/index.py b/sugar_network/db/index.py
index 708c609..e9a4093 100644
--- a/sugar_network/db/index.py
+++ b/sugar_network/db/index.py
@@ -71,19 +71,19 @@ class IndexReader(object):
@property
def mtime(self):
"""UNIX seconds of the last `commit()` call."""
- if exists(self._mtime_path):
- return int(os.stat(self._mtime_path).st_mtime)
- else:
- return 0
+ return int(os.stat(self._mtime_path).st_mtime)
- @mtime.setter
- def mtime(self, value):
- with file(self._mtime_path, 'w'):
- pass
- os.utime(self._mtime_path, (value, value))
+ def checkpoint(self):
+ ts = time.time()
+ os.utime(self._mtime_path, (ts, ts))
+ return int(ts)
def ensure_open(self):
- pass
+ if not exists(self._mtime_path):
+ with file(self._mtime_path, 'w'):
+ pass
+ # Outer code should understand the initial state
+ os.utime(self._mtime_path, (0, 0))
def get_cached(self, guid):
"""Return cached document.
@@ -407,22 +407,18 @@ class IndexWriter(IndexReader):
# Trigger the condition to reset the `index_flush_timeout` wait
self._commit_cond.set()
- def checkpoint(self):
- with file(self._mtime_path, 'w'):
- pass
-
def ensure_open(self):
- if self._db is not None:
- return
- try:
- self._db = xapian.WritableDatabase(self._path,
- xapian.DB_CREATE_OR_OPEN)
- except xapian.DatabaseError:
- exception('Cannot open Xapian index in %r, will rebuild it',
- self.metadata.name)
- shutil.rmtree(self._path, ignore_errors=True)
- self._db = xapian.WritableDatabase(self._path,
- xapian.DB_CREATE_OR_OPEN)
+ if self._db is None:
+ try:
+ self._db = xapian.WritableDatabase(self._path,
+ xapian.DB_CREATE_OR_OPEN)
+ except xapian.DatabaseError:
+ exception('Cannot open Xapian index in %r, will rebuild it',
+ self.metadata.name)
+ shutil.rmtree(self._path, ignore_errors=True)
+ self._db = xapian.WritableDatabase(self._path,
+ xapian.DB_CREATE_OR_OPEN)
+ IndexReader.ensure_open(self)
def _commit(self):
if self._pending_updates <= 0:
@@ -436,11 +432,10 @@ class IndexWriter(IndexReader):
self._db.commit()
else:
self._db.flush()
- self.checkpoint()
+ ts = self.checkpoint() - ts
self._pending_updates = 0
- _logger.debug('Commit %r changes took %s seconds',
- self.metadata.name, time.time() - ts)
+ _logger.debug('Commit to %r took %s seconds', self.metadata.name, ts)
if self._commit_cb is not None:
self._commit_cb()
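The index mtime is now just the stat time of a marker file: `ensure_open()` seeds it at the epoch so callers can recognize a freshly created index, and `checkpoint()` bumps it to the current time. A standalone sketch of that protocol (the marker location mirrors what `Volume.mtime()` reads below):

    import os
    import time
    import tempfile

    root = tempfile.mkdtemp()
    mtime_path = os.path.join(root, 'mtime')

    # ensure_open(): create the marker and reset it to the epoch
    with file(mtime_path, 'w'):
        pass
    os.utime(mtime_path, (0, 0))
    assert int(os.stat(mtime_path).st_mtime) == 0

    # checkpoint(): bump the marker; consumers compare it against saved sequences
    ts = time.time()
    os.utime(mtime_path, (ts, ts))
    assert int(os.stat(mtime_path).st_mtime) == int(ts)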
diff --git a/sugar_network/db/resource.py b/sugar_network/db/resource.py
index 7209c49..0e7217d 100644
--- a/sugar_network/db/resource.py
+++ b/sugar_network/db/resource.py
@@ -14,7 +14,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from sugar_network import toolkit
-from sugar_network.db.metadata import StoredProperty, indexed_property
+from sugar_network.db.metadata import indexed_property
+from sugar_network.db.metadata import StoredProperty, BlobProperty
from sugar_network.toolkit.router import Blob, ACL
@@ -73,6 +74,17 @@ class Resource(object):
})
return result
+ @author.setter
+ def author(self, value):
+ if type(value) not in (list, tuple):
+ return value
+ result = {}
+ for order, author in enumerate(value):
+ user = author.pop('guid')
+ author['order'] = order
+ result[user] = author
+ return result
+
@indexed_property(prefix='RL', typecast=[], default=[])
def layer(self, value):
return value
@@ -81,6 +93,15 @@ class Resource(object):
def tags(self, value):
return value
+ def path(self, prop=None):
+ if not prop:
+ return self._record.path()
+ if prop in self.metadata and \
+ isinstance(self.metadata[prop], BlobProperty):
+ return self._record.blob_path(prop)
+ else:
+ return self._record.path(prop)
+
def get(self, prop, accept_language=None):
"""Get document's property value.
diff --git a/sugar_network/db/routes.py b/sugar_network/db/routes.py
index a796834..197c215 100644
--- a/sugar_network/db/routes.py
+++ b/sugar_network/db/routes.py
@@ -108,50 +108,12 @@ class Routes(object):
return self._get_props(doc, request, reply)
@route('GET', [None, None, None], mime_type='application/json')
- def get_prop(self, request):
- directory = self.volume[request.resource]
- prop = directory.metadata[request.prop]
- doc = directory.get(request.guid)
- doc.request = request
-
- prop.assert_access(ACL.READ)
-
- if isinstance(prop, StoredProperty):
- value = doc.get(prop.name, request.accept_language)
- value = prop.on_get(doc, value)
- if value is None:
- value = prop.default
- return value
- else:
- meta = prop.on_get(doc, doc.meta(prop.name))
- enforce(meta is not None and ('blob' in meta or 'url' in meta),
- http.NotFound, 'BLOB does not exist')
- return meta
+ def get_prop(self, request, response):
+ return self._prop_meta(request, response)
@route('HEAD', [None, None, None])
def get_prop_meta(self, request, response):
- directory = self.volume[request.resource]
- prop = directory.metadata[request.prop]
- doc = directory.get(request.guid)
- doc.request = request
-
- prop.assert_access(ACL.READ)
-
- if isinstance(prop, StoredProperty):
- meta = doc.meta(prop.name)
- value = meta.pop('value')
- response.content_length = len(json.dumps(value))
- else:
- meta = prop.on_get(doc, doc.meta(prop.name))
- enforce(meta is not None and ('blob' in meta or 'url' in meta),
- http.NotFound, 'BLOB does not exist')
- if 'blob' in meta:
- meta.pop('blob')
- meta['url'] = '/'.join([request.static_prefix] + request.path)
- response.content_length = meta['blob_size']
-
- response.meta.update(meta)
- response.last_modified = meta['mtime']
+ self._prop_meta(request, response)
@route('PUT', [None, None], cmd='useradd',
arguments={'role': 0}, acl=ACL.AUTH | ACL.AUTHOR)
@@ -256,6 +218,38 @@ class Routes(object):
self.after_post(doc)
+ def _prop_meta(self, request, response):
+ directory = self.volume[request.resource]
+ prop = directory.metadata[request.prop]
+ doc = directory.get(request.guid)
+ doc.request = request
+
+ prop.assert_access(ACL.READ)
+
+ if isinstance(prop, StoredProperty):
+ meta = doc.meta(prop.name) or {}
+ if 'value' in meta:
+ del meta['value']
+ value = doc.get(prop.name, request.accept_language)
+ value = prop.on_get(doc, value)
+ response.content_length = len(json.dumps(value))
+ else:
+ value = prop.on_get(doc, doc.meta(prop.name))
+ enforce(value is not None and ('blob' in value or 'url' in value),
+ http.NotFound, 'BLOB does not exist')
+ if 'blob' in value:
+ meta = value.copy()
+ meta.pop('blob')
+ else:
+ meta = value
+ response.content_length = meta.get('blob_size') or 0
+
+ response.meta.update(meta)
+ if 'mtime' in meta:
+ response.last_modified = meta['mtime']
+
+ return value
+
def _preget(self, request):
reply = request.get('reply')
if not reply:
diff --git a/sugar_network/db/storage.py b/sugar_network/db/storage.py
index 69d8896..f8587d9 100644
--- a/sugar_network/db/storage.py
+++ b/sugar_network/db/storage.py
@@ -123,6 +123,12 @@ class Record(object):
def consistent(self):
return exists(join(self._root, 'guid'))
+ def path(self, *args):
+ return join(self._root, *args)
+
+ def blob_path(self, prop):
+ return join(self._root, prop + _BLOB_SUFFIX)
+
def invalidate(self):
guid_path = join(self._root, 'guid')
if exists(guid_path):
@@ -137,7 +143,8 @@ class Record(object):
blob_path = path + _BLOB_SUFFIX
if exists(blob_path):
meta['blob'] = blob_path
- meta['blob_size'] = os.stat(blob_path).st_size
+ if 'blob_size' not in meta:
+ meta['blob_size'] = os.stat(blob_path).st_size
meta['mtime'] = int(os.stat(path).st_mtime)
return meta
diff --git a/sugar_network/db/volume.py b/sugar_network/db/volume.py
index 03af0fc..3080eb8 100644
--- a/sugar_network/db/volume.py
+++ b/sugar_network/db/volume.py
@@ -30,8 +30,10 @@ class Volume(dict):
_flush_pool = []
- def __init__(self, root, documents, broadcast=None, index_class=None):
+ def __init__(self, root, documents, broadcast=None, index_class=None,
+ lazy_open=False):
Volume._flush_pool.append(self)
+ self.resources = {}
self.broadcast = broadcast or (lambda event: None)
self._populators = coroutine.Pool()
@@ -51,12 +53,21 @@ class Volume(dict):
name = document.split('.')[-1]
else:
name = document.__name__.lower()
- self[name] = self._open(name, document)
+ self.resources[name] = document
+ if not lazy_open:
+ self[name] = self._open(name, document)
@property
def root(self):
return self._root
+ def mtime(self, name):
+ path = join(self._root, name, 'index', 'mtime')
+ if exists(path):
+ return int(os.stat(path).st_mtime)
+ else:
+ return 0
+
def close(self):
"""Close operations with the server."""
_logger.info('Closing documents in %r', self._root)
@@ -78,8 +89,10 @@ class Volume(dict):
def __getitem__(self, name):
directory = self.get(name)
- enforce(directory is not None, http.BadRequest,
- 'Unknown %r resource', name)
+ if directory is None:
+ enforce(name in self.resources, http.BadRequest,
+ 'Unknown %r resource', name)
+ directory = self[name] = self._open(name, self.resources[name])
return directory
def _open(self, name, resource):
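`Volume` now remembers resource classes in `self.resources` and, with `lazy_open=True`, postpones opening a directory until the first `volume[name]` access; `mtime()` reads the marker file directly, so it answers even for directories that were never opened. A minimal sketch, assuming `db.Volume` is importable this way (the root path is a placeholder):

    from sugar_network import db
    from sugar_network.model.context import Context

    volume = db.Volume('/tmp/example-db', [Context], lazy_open=True)
    print(volume.mtime('context'))   # 0 until the index gets its first checkpoint
    directory = volume['context']    # opened on first access, not in __init__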
diff --git a/sugar_network/model/__init__.py b/sugar_network/model/__init__.py
index b0ba07a..d30b697 100644
--- a/sugar_network/model/__init__.py
+++ b/sugar_network/model/__init__.py
@@ -13,7 +13,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from sugar_network.model.routes import Routes
+from sugar_network.model.routes import VolumeRoutes, FrontRoutes
CONTEXT_TYPES = ['activity', 'project', 'package', 'content']
diff --git a/sugar_network/model/artifact.py b/sugar_network/model/artifact.py
index 32ae506..5dba159 100644
--- a/sugar_network/model/artifact.py
+++ b/sugar_network/model/artifact.py
@@ -68,13 +68,3 @@ class Artifact(db.Resource):
if value:
value['name'] = self['title']
return value
-
- @db.indexed_property(prefix='K', typecast=bool, default=False,
- acl=ACL.READ | ACL.LOCAL)
- def favorite(self, value):
- return value
-
- @db.indexed_property(prefix='L', typecast=[0, 1, 2], default=0,
- acl=ACL.READ | ACL.LOCAL)
- def clone(self, value):
- return value
diff --git a/sugar_network/model/context.py b/sugar_network/model/context.py
index 41bf46b..a9962a9 100644
--- a/sugar_network/model/context.py
+++ b/sugar_network/model/context.py
@@ -102,15 +102,12 @@ class Context(db.Resource):
else:
return value[0]
- @db.indexed_property(prefix='K', typecast=bool, default=False,
- acl=ACL.READ | ACL.LOCAL)
- def favorite(self, value):
- return value
-
- @db.indexed_property(prefix='L', typecast=[0, 1, 2], default=0,
- acl=ACL.READ | ACL.LOCAL)
- def clone(self, value):
- return value
+ @reviews.setter
+ def reviews(self, value):
+ if isinstance(value, int):
+ return [value, 0]
+ else:
+ return value
@db.stored_property(typecast=[], default=[], acl=ACL.PUBLIC | ACL.LOCAL)
def dependencies(self, value):
diff --git a/sugar_network/model/implementation.py b/sugar_network/model/implementation.py
index 55636e3..f1c1c23 100644
--- a/sugar_network/model/implementation.py
+++ b/sugar_network/model/implementation.py
@@ -31,8 +31,9 @@ class Implementation(db.Resource):
@context.setter
def context(self, value):
- context = self.volume['context'].get(value)
- enforce(self.request.principal in context['author'], http.Forbidden,
+ authors = self.volume['context'].get(value)['author']
+ enforce(not self.request.principal and not authors or
+ self.request.principal in authors, http.Forbidden,
'Only Context authors can submit new Implementations')
return value
diff --git a/sugar_network/model/routes.py b/sugar_network/model/routes.py
index dc92554..5bb82a1 100644
--- a/sugar_network/model/routes.py
+++ b/sugar_network/model/routes.py
@@ -18,7 +18,7 @@ import logging
import mimetypes
from os.path import join, split
-from sugar_network import static
+from sugar_network import static, db
from sugar_network.toolkit.router import route, fallbackroute, Blob, ACL
from sugar_network.toolkit import coroutine
@@ -26,7 +26,44 @@ from sugar_network.toolkit import coroutine
_logger = logging.getLogger('model.routes')
-class Routes(object):
+class VolumeRoutes(db.Routes):
+
+ @route('GET', ['context', None], cmd='feed',
+ mime_type='application/json')
+ def feed(self, request, distro):
+ context = self.volume['context'].get(request.guid)
+ implementations = self.volume['implementation']
+ versions = []
+
+ impls, __ = implementations.find(context=context.guid,
+ not_layer='deleted', **request)
+ for impl in impls:
+ for arch, spec in impl.meta('data')['spec'].items():
+ spec['guid'] = impl.guid
+ spec['version'] = impl['version']
+ spec['arch'] = arch
+ spec['stability'] = impl['stability']
+ spec['license'] = impl['license']
+ if context['dependencies']:
+ requires = spec.setdefault('requires', {})
+ for i in context['dependencies']:
+ requires.setdefault(i, {})
+ blob = implementations.get(impl.guid).meta('data')
+ if blob:
+ for key in ('blob_size', 'unpack_size', 'extract'):
+ if key in blob:
+ spec[key] = blob[key]
+ versions.append(spec)
+
+ result = {'implementations': versions}
+ if distro:
+ aliases = context['aliases'].get(distro)
+ if aliases and 'binary' in aliases:
+ result['packages'] = aliases['binary']
+ return result
+
+
+class FrontRoutes(object):
def __init__(self):
self._pooler = _Pooler()
@@ -74,7 +111,6 @@ class Routes(object):
event = request.content
_logger.debug('Broadcast event: %r', event)
self._pooler.notify_all(event)
- coroutine.dispatch()
@fallbackroute('GET', ['static'])
def get_static(self, request):
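For reference, the relocated `feed` route returns a plain dictionary; roughly the following shape, with placeholder values (`packages` appears only when the requested distro has binary aliases, and `blob_size`, `unpack_size` and `extract` only when the data blob carries them):

    feed = {
        'implementations': [{
            'guid': 'impl-guid',
            'version': '1',
            'arch': '*-*',
            'stability': 'stable',
            'license': ['GPLv3+'],
            'commands': {'activity': {'exec': 'true'}},
            'requires': {'dep': {}},    # merged from context dependencies
            'blob_size': 1024,
        }],
        'packages': ['package-name'],
    }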
diff --git a/sugar_network/node/master.py b/sugar_network/node/master.py
index b2a7630..3d63d2f 100644
--- a/sugar_network/node/master.py
+++ b/sugar_network/node/master.py
@@ -13,7 +13,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import time
import json
import base64
import logging
@@ -148,8 +147,8 @@ class MasterRoutes(NodeRoutes):
coroutine.spawn(self._resolve_aliases, doc)
shift_implementations = True
if shift_implementations and not doc.is_new:
- # Shift mtime to invalidate solutions
- self.volume['implementation'].mtime = int(time.time())
+ # Shift checkpoint to invalidate solutions
+ self.volume['implementation'].checkpoint()
NodeRoutes.after_post(self, doc)
def _push(self, stream):
diff --git a/sugar_network/node/routes.py b/sugar_network/node/routes.py
index 8b607cb..0458258 100644
--- a/sugar_network/node/routes.py
+++ b/sugar_network/node/routes.py
@@ -22,12 +22,12 @@ from contextlib import contextmanager
from ConfigParser import ConfigParser
from os.path import join, isdir, exists
-from sugar_network import db, node, toolkit, model
+from sugar_network import node, toolkit, model
from sugar_network.node import stats_node, stats_user
from sugar_network.model.context import Context
# pylint: disable-msg=W0611
from sugar_network.toolkit.router import route, preroute, postroute
-from sugar_network.toolkit.router import ACL, fallbackroute
+from sugar_network.toolkit.router import Request, ACL, fallbackroute
from sugar_network.toolkit.spec import EMPTY_LICENSE
from sugar_network.toolkit.spec import parse_requires, ensure_requires
from sugar_network.toolkit.bundle import Bundle
@@ -39,11 +39,11 @@ _MAX_STATS_LENGTH = 100
_logger = logging.getLogger('node.routes')
-class NodeRoutes(db.Routes, model.Routes):
+class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
def __init__(self, guid, volume):
- db.Routes.__init__(self, volume)
- model.Routes.__init__(self)
+ model.VolumeRoutes.__init__(self, volume)
+ model.FrontRoutes.__init__(self)
volume.broadcast = self.broadcast
self._guid = guid
@@ -153,9 +153,8 @@ class NodeRoutes(db.Routes, model.Routes):
def delete(self, request):
# Server data should not be deleted immediately
# to keep master-slave synchronization possible
- request.method = 'PUT'
- request.content = {'layer': ['deleted']}
- self.update(request)
+ request.call(method='PUT', path=request.path,
+ content={'layer': ['deleted']})
@route('PUT', [None, None], cmd='attach', acl=ACL.AUTH | ACL.SUPERUSER)
def attach(self, request):
@@ -175,18 +174,13 @@ class NodeRoutes(db.Routes, model.Routes):
@route('GET', ['context', None], cmd='clone',
arguments={'requires': list})
- def clone(self, request, response):
- impl = self._solve(request)
- request.path = ['implementation', impl.guid, 'data']
- return self.get_prop(request)
+ def get_clone(self, request, response):
+ return self._get_clone(request, response)
@route('HEAD', ['context', None], cmd='clone',
arguments={'requires': list})
- def meta_clone(self, request, response):
- impl = self._solve(request)
- props = impl.properties(['guid', 'license', 'version', 'stability'])
- response.meta.update(props)
- response.meta.update(impl.meta('data')['spec']['*-*'])
+ def head_clone(self, request, response):
+ self._get_clone(request, response)
@route('GET', ['context', None], cmd='deplist',
mime_type='application/json', arguments={'requires': list})
@@ -219,43 +213,6 @@ class NodeRoutes(db.Routes, model.Routes):
return result
- @route('GET', ['context', None], cmd='feed',
- mime_type='application/json')
- def feed(self, request, distro):
- context = self.volume['context'].get(request.guid)
- implementations = self.volume['implementation']
- versions = []
-
- impls, __ = implementations.find(context=context.guid,
- not_layer='deleted', **request)
- for impl in impls:
- for arch, spec in impl.meta('data')['spec'].items():
- spec['guid'] = impl.guid
- spec['version'] = impl['version']
- spec['arch'] = arch
- spec['stability'] = impl['stability']
- if context['dependencies']:
- requires = spec.setdefault('requires', {})
- for i in context['dependencies']:
- requires.setdefault(i, {})
- blob = implementations.get(impl.guid).meta('data')
- if blob:
- spec['blob_size'] = blob.get('blob_size')
- spec['unpack_size'] = blob.get('unpack_size')
- versions.append(spec)
-
- result = {
- 'name': context.get('title',
- accept_language=request.accept_language),
- 'implementations': versions,
- }
- if distro:
- aliases = context['aliases'].get(distro)
- if aliases and 'binary' in aliases:
- result['packages'] = aliases['binary']
-
- return result
-
@route('GET', ['user', None], cmd='stats-info',
mime_type='application/json', acl=ACL.AUTH)
def user_stats_info(self, request):
@@ -323,10 +280,10 @@ class NodeRoutes(db.Routes, model.Routes):
def on_create(self, request, props, event):
if request.resource == 'user':
props['guid'], props['pubkey'] = _load_pubkey(props['pubkey'])
- db.Routes.on_create(self, request, props, event)
+ model.VolumeRoutes.on_create(self, request, props, event)
def on_update(self, request, props, event):
- db.Routes.on_update(self, request, props, event)
+ model.VolumeRoutes.on_update(self, request, props, event)
if 'deleted' in props.get('layer', []):
event['event'] = 'delete'
@@ -343,13 +300,13 @@ class NodeRoutes(db.Routes, model.Routes):
_logger.warning('Requesting "deleted" layer')
layer.remove('deleted')
request.add('not_layer', 'deleted')
- return db.Routes.find(self, request, reply)
+ return model.VolumeRoutes.find(self, request, reply)
def get(self, request, reply):
doc = self.volume[request.resource].get(request.guid)
enforce('deleted' not in doc['layer'], http.NotFound,
'Resource deleted')
- return db.Routes.get(self, request, reply)
+ return model.VolumeRoutes.get(self, request, reply)
def authorize(self, user, role):
if role == 'user' and user:
@@ -404,6 +361,17 @@ class NodeRoutes(db.Routes, model.Routes):
raise http.NotFound('No implementations found')
return impl
+ def _get_clone(self, request, response):
+ impl = self._solve(request)
+ result = request.call(method=request.method,
+ path=['implementation', impl['guid'], 'data'],
+ response=response)
+ props = impl.properties(
+ ['guid', 'context', 'license', 'version', 'stability'])
+ props['data'] = response.meta
+ response.meta = props
+ return result
+
@contextmanager
def load_bundle(volume, request, bundle_path):
@@ -442,10 +410,10 @@ def load_bundle(volume, request, bundle_path):
data['spec'] = {'*-*': {
'commands': spec.commands,
'requires': spec.requires,
- 'extract': extract,
}}
data['unpack_size'] = unpack_size
data['mime_type'] = 'application/vnd.olpc-sugar'
+ data['extract'] = extract
if initial and not contexts.exists(impl['context']):
context['guid'] = impl['context']
@@ -455,7 +423,7 @@ def load_bundle(volume, request, bundle_path):
enforce('context' in impl, 'Context is not specified')
enforce('version' in impl, 'Version is not specified')
- enforce(context_type in contexts.get(spec['context'])['type'],
+ enforce(context_type in contexts.get(impl['context'])['type'],
http.BadRequest, 'Inappropriate bundle type')
if impl.get('license') in (None, EMPTY_LICENSE):
existing, total = volume['implementation'].find(
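With `_get_clone` proxying to the implementation data, a client can probe a clone without downloading it: the HEAD variant fills `response.meta` with the solved implementation properties plus a nested `data` dictionary describing the blob. A minimal sketch using the new `Connection.head()` helper (URL and guid are placeholders):

    from sugar_network.toolkit import http

    conn = http.Connection('http://node.example.org')   # hypothetical node URL
    meta = conn.head(['context', 'context-guid'], cmd='clone', stability='stable')
    # guid, context, license, version and stability describe the solved
    # implementation; meta['data'] carries the blob details
    print(meta.get('version'))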
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index e586d31..f7e59ea 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -199,7 +199,7 @@ def init_logging(debug_level=None, **kwargs):
else:
logging_level = 0
if debug_level < 3:
- if logging_level <= 0:
+ if debug_level <= 0:
logging_level = logging.WARNING
elif debug_level == 1:
logging_level = logging.INFO
@@ -753,6 +753,8 @@ class PersistentSequence(Sequence):
def mtime(self):
if exists(self._path):
return os.stat(self._path).st_mtime
+ else:
+ return 0
def commit(self):
dir_path = dirname(self._path)
diff --git a/sugar_network/toolkit/bundle.py b/sugar_network/toolkit/bundle.py
index eb3a18b..0e7a548 100644
--- a/sugar_network/toolkit/bundle.py
+++ b/sugar_network/toolkit/bundle.py
@@ -14,7 +14,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
-from os.path import join
+import shutil
+from os.path import join, exists, dirname
from sugar_network.toolkit.spec import Spec
@@ -67,8 +68,25 @@ class Bundle(object):
def extractfile(self, name):
return self._do_extractfile(name)
- def extractall(self, path, members=None):
- self._bundle.extractall(path=path, members=members)
+ def extractall(self, dst_root, members=None, extract=None):
+ if not extract:
+ self._bundle.extractall(path=dst_root, members=members)
+ return
+ try:
+ extract = extract.strip(os.sep) + os.sep
+ for arcname in self.get_names():
+ dst_path = arcname.strip(os.sep)
+ if dst_path.startswith(extract):
+ dst_path = dst_path[len(extract):]
+ dst_path = join(dst_root, dst_path)
+ if not exists(dirname(dst_path)):
+ os.makedirs(dirname(dst_path))
+ with file(dst_path, 'wb') as dst:
+ shutil.copyfileobj(self.extractfile(arcname), dst)
+ except Exception:
+ if exists(dst_root):
+ shutil.rmtree(dst_root)
+ raise
def getmember(self, name):
return self._cast_info(self._do_getmember(name))
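The new `extract` argument strips a single leading directory while unpacking, which is how the `extract` prefix recorded in `data` gets applied at checkout time; on any failure the destination is removed again. A minimal sketch, assuming `Bundle` is constructed from the bundle path as in `load_bundle()` (file names are placeholders):

    from sugar_network.toolkit.bundle import Bundle

    bundle = Bundle('Activity-1.xo')    # hypothetical bundle file
    # drop the leading 'Activity.activity/' directory from every member
    bundle.extractall('/tmp/activity-1', extract='Activity.activity')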
diff --git a/sugar_network/toolkit/coroutine.py b/sugar_network/toolkit/coroutine.py
index 22a8590..8d0c8aa 100644
--- a/sugar_network/toolkit/coroutine.py
+++ b/sugar_network/toolkit/coroutine.py
@@ -17,12 +17,15 @@
# pylint: disable-msg=W0621
+import os
import logging
import gevent
import gevent.pool
import gevent.hub
+from sugar_network.toolkit import enforce
+
#: Process one events loop round.
dispatch = gevent.sleep
@@ -79,6 +82,12 @@ def signal(*args, **kwargs):
return gevent.signal(*args, **kwargs)
+def fork():
+ pid = os.fork()
+ if pid:
+ return _Child(pid)
+
+
def Server(*args, **kwargs):
import gevent.server
kwargs['spawn'] = spawn
@@ -131,17 +140,32 @@ def RLock(*args, **kwargs):
return gevent.lock.RLock(*args, **kwargs)
-class AsyncEvent(object):
+class ThreadEvent(object):
def __init__(self):
self._async = gevent.get_hub().loop.async()
+ def set(self):
+ self._async.send()
+
def wait(self):
gevent.get_hub().wait(self._async)
- def send(self):
+
+class ThreadResult(object):
+
+ def __init__(self):
+ self._async = gevent.get_hub().loop.async()
+ self._value = None
+
+ def set(self, value):
+ self._value = value
self._async.send()
+ def get(self):
+ gevent.get_hub().wait(self._async)
+ return self._value
+
class Empty(Exception):
pass
@@ -228,6 +252,34 @@ class Pool(gevent.pool.Pool):
self.kill()
+class _Child(object):
+
+ def __init__(self, pid):
+ self.pid = pid
+ self._watcher = None
+
+ def watch(self, cb, *args, **kwargs):
+ enforce(self._watcher is None, 'Watching already started')
+ loop = gevent.get_hub().loop
+ loop.install_sigchld()
+ self._watcher = loop.child(self.pid)
+ self._watcher.start(self.__sigchld_cb, cb, args, kwargs)
+
+ def wait(self):
+ result = AsyncResult()
+ self.watch(result.set)
+ return result.get()
+
+ def __sigchld_cb(self, cb, args, kwargs):
+ self._watcher.stop()
+ status = self._watcher.rstatus
+ if os.WIFSIGNALED(status):
+ returncode = -os.WTERMSIG(status)
+ else:
+ returncode = os.WEXITSTATUS(status)
+ cb(returncode, *args, **kwargs)
+
+
def _print_exception(context, klass, value, tb):
self = gevent.hub.get_hub()
if issubclass(klass, self.NOT_ERROR + self.SYSTEM_ERROR):
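`fork()` returns a `_Child` handle in the parent and `None` in the forked child; `watch()` installs libev's SIGCHLD child watcher, so `wait()` blocks only the calling coroutine. A minimal sketch, assuming a running gevent hub and that `coroutine.AsyncResult` (used by `wait()`) is available:

    import os

    from sugar_network.toolkit import coroutine

    child = coroutine.fork()
    if child is None:
        # child branch: do the work and leave without returning to the caller
        os._exit(0)
    status = child.wait()   # cooperative: other coroutines keep running
    print(status)           # 0 on clean exit, -signum if the child was killed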
diff --git a/sugar_network/toolkit/gbus.py b/sugar_network/toolkit/gbus.py
new file mode 100644
index 0000000..e1b24eb
--- /dev/null
+++ b/sugar_network/toolkit/gbus.py
@@ -0,0 +1,116 @@
+# Copyright (C) 2013 Aleksey Lim
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import sys
+import json
+import struct
+import logging
+
+from sugar_network.toolkit import coroutine, exception
+
+
+_logger = logging.getLogger('gbus')
+_dbus_thread = None
+_dbus_loop = None
+
+
+def call(op, *args, **kwargs):
+ result = coroutine.ThreadResult()
+
+ class _Exception(tuple):
+ pass
+
+ def do_call():
+ try:
+ op(result, *args, **kwargs)
+ except Exception:
+ result.set(_Exception(sys.exc_info()))
+
+ _logger.trace('Call %s(%r, %r)', op, args, kwargs)
+
+ _call(do_call)
+ value = result.get()
+ if type(value) is _Exception:
+ etype, error, traceback = value
+ raise etype, error, traceback
+
+ return value
+
+
+def pipe(op, *args, **kwargs):
+ fd_r, fd_w = os.pipe()
+
+ def feedback(event=None):
+ if event is None:
+ os.close(fd_w)
+ return
+ event = json.dumps(event)
+ os.write(fd_w, struct.pack('i', len(event)))
+ os.write(fd_w, event)
+
+ def do_call():
+ try:
+ op(feedback, *args, **kwargs)
+ except Exception:
+ exception('Failed to call %r(%r, %r)', op, args, kwargs)
+ os.close(fd_w)
+
+ _logger.trace('Pipe %s(%r, %r)', op, args, kwargs)
+
+ try:
+ _call(do_call)
+ while True:
+ coroutine.select([fd_r], [], [])
+ length = os.read(fd_r, struct.calcsize('i'))
+ if not length:
+ break
+ length = struct.unpack('i', length)[0]
+ yield json.loads(os.read(fd_r, length))
+ finally:
+ os.close(fd_r)
+
+
+def join():
+ global _dbus_thread
+
+ if _dbus_thread is None:
+ return
+
+ import gobject
+
+ gobject.idle_add(_dbus_loop.quit)
+ _dbus_thread.join()
+ _dbus_thread = None
+
+
+def _call(op):
+ import threading
+ import gobject
+ from dbus.mainloop import glib
+
+ global _dbus_loop
+ global _dbus_thread
+
+ if _dbus_thread is None:
+ gobject.threads_init()
+ glib.threads_init()
+ glib.DBusGMainLoop(set_as_default=True)
+ _dbus_loop = gobject.MainLoop()
+ _dbus_thread = threading.Thread(target=_dbus_loop.run)
+ _dbus_thread.daemon = True
+ _dbus_thread.start()
+
+ gobject.idle_add(op)
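`gbus` runs a private GLib main loop in a daemon thread and hands results back either through `coroutine.ThreadResult` (for `call`) or through an OS pipe of JSON events (for `pipe`). A minimal sketch of both entry points, assuming `gobject` and `dbus` are installed; the callbacks stand in for real D-Bus work such as PackageKit requests:

    from sugar_network.toolkit import gbus

    def resolve(result, name):
        # runs inside the GLib thread; hand one value back to the coroutine
        result.set({'package': name})

    def download(feedback, names):
        # runs inside the GLib thread; stream progress events to the caller
        for name in names:
            feedback({'state': 'download', 'package': name})
        feedback()   # no event closes the pipe and ends the iteration

    info = gbus.call(resolve, 'vim')
    for event in gbus.pipe(download, ['vim', 'gcc']):
        print(event)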
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index 215ec03..ada63da 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -29,6 +29,8 @@ from sugar_network import client, toolkit
from sugar_network.toolkit import coroutine, enforce
+_REDIRECT_CODES = frozenset([301, 302, 303, 307, 308])
+
_logger = logging.getLogger('http')
@@ -45,11 +47,13 @@ class StatusPass(Status):
class NotModified(StatusPass):
status = '304 Not Modified'
+ status_code = 304
class Redirect(StatusPass):
status = '303 See Other'
+ status_code = 303
def __init__(self, location):
StatusPass.__init__(self)
@@ -123,66 +127,66 @@ class Connection(object):
self._session.close()
def exists(self, path):
- response = self.request('GET', path, allowed=[404])
- return response.status_code != 404
+ reply = self.request('GET', path, allowed=[404])
+ return reply.status_code != 404
+
+ def head(self, path_=None, **kwargs):
+ from sugar_network.toolkit.router import Request, Response
+ request = Request(method='HEAD', path=path_, **kwargs)
+ response = Response()
+ self.call(request, response)
+ return response.meta
def get(self, path_=None, query_=None, **kwargs):
- response = self.request('GET', path_, params=kwargs)
- return self._decode_reply(response)
-
- def meta(self, path_=None, query_=None, **kwargs):
- response = self.request('HEAD', path_, params=query_ or kwargs)
- result = {}
- for key, value in response.headers.items():
- if key.startswith('x-sn-'):
- result[key[5:]] = json.loads(value)
- else:
- result[key] = value
- return result
+ reply = self.request('GET', path_, params=query_ or kwargs)
+ return self._decode_reply(reply)
def post(self, path_=None, data_=None, query_=None, **kwargs):
- response = self.request('POST', path_, json.dumps(data_),
+ reply = self.request('POST', path_, json.dumps(data_),
headers={'Content-Type': 'application/json'},
params=query_ or kwargs)
- return self._decode_reply(response)
+ return self._decode_reply(reply)
def put(self, path_=None, data_=None, query_=None, **kwargs):
- response = self.request('PUT', path_, json.dumps(data_),
+ reply = self.request('PUT', path_, json.dumps(data_),
headers={'Content-Type': 'application/json'},
params=query_ or kwargs)
- return self._decode_reply(response)
+ return self._decode_reply(reply)
def delete(self, path_=None, query_=None, **kwargs):
- response = self.request('DELETE', path_, params=query_ or kwargs)
- return self._decode_reply(response)
+ reply = self.request('DELETE', path_, params=query_ or kwargs)
+ return self._decode_reply(reply)
def download(self, path, dst=None):
- response = self.request('GET', path, allow_redirects=True)
+ reply = self.request('GET', path, allow_redirects=True)
- content_length = response.headers.get('Content-Length')
+ content_length = reply.headers.get('Content-Length')
if content_length:
chunk_size = min(int(content_length), toolkit.BUFFER_SIZE)
else:
chunk_size = toolkit.BUFFER_SIZE
if dst is None:
- return response.iter_content(chunk_size=chunk_size)
+ return reply.iter_content(chunk_size=chunk_size)
f = file(dst, 'wb') if isinstance(dst, basestring) else dst
try:
- for chunk in response.iter_content(chunk_size=chunk_size):
+ for chunk in reply.iter_content(chunk_size=chunk_size):
f.write(chunk)
finally:
if isinstance(dst, basestring):
f.close()
def upload(self, path, data, **kwargs):
- with file(data, 'rb') as f:
- response = self.request('POST', path, f, params=kwargs)
- if response.headers.get('Content-Type') == 'application/json':
- return json.loads(response.content)
+ if isinstance(data, basestring):
+ with file(data, 'rb') as f:
+ reply = self.request('POST', path, f, params=kwargs)
+ else:
+ reply = self.request('POST', path, data, params=kwargs)
+ if reply.headers.get('Content-Type') == 'application/json':
+ return json.loads(reply.content)
else:
- return response.raw
+ return reply.raw
def request(self, method, path=None, data=None, headers=None, allowed=None,
params=None, **kwargs):
@@ -198,14 +202,13 @@ class Connection(object):
while True:
a_try += 1
try:
- response = self._session.request(method, path, data=data,
+ reply = self._session.request(method, path, data=data,
headers=headers, params=params, **kwargs)
except SSLError:
_logger.warning('Use --no-check-certificate to avoid checks')
raise
-
- if response.status_code != 200:
- if response.status_code == 401:
+ if reply.status_code != 200:
+ if reply.status_code == 401:
enforce(method not in ('PUT', 'POST') or
not hasattr(data, 'read'),
'Cannot resend data after authentication')
@@ -216,9 +219,9 @@ class Connection(object):
self.post(['user'], self._get_profile())
a_try = 0
continue
- if allowed and response.status_code in allowed:
- return response
- content = response.content
+ if allowed and reply.status_code in allowed:
+ break
+ content = reply.content
try:
error = json.loads(content)['error']
except Exception:
@@ -228,16 +231,17 @@ class Connection(object):
# If so, try to resend request.
if a_try <= self._max_retries and method == 'GET':
continue
- error = content or response.headers.get('x-sn-error') or \
+ error = content or reply.headers.get('x-sn-error') or \
'No error message provided'
- _logger.trace('Request failed, method=%s path=%r params=%r '
+ _logger.debug('Request failed, method=%s path=%r params=%r '
'headers=%r status_code=%s error=%s',
- method, path, params, headers, response.status_code,
+ method, path, params, headers, reply.status_code,
'\n' + error)
- cls = _FORWARD_STATUSES.get(response.status_code, RuntimeError)
+ cls = _FORWARD_STATUSES.get(reply.status_code, RuntimeError)
raise cls(error)
+ break
- return response
+ return reply
def call(self, request, response=None):
if request.content_type == 'application/json':
@@ -268,15 +272,27 @@ class Connection(object):
if value is not None:
headers[key] = value
- reply = self.request(request.method, request.path,
- data=request.content, params=request.query or request,
- headers=headers, allow_redirects=True)
-
- if response is not None:
- if 'transfer-encoding' in reply.headers:
- # `requests` library handles encoding on its own
- del reply.headers['transfer-encoding']
- response.update(reply.headers)
+ path = request.path
+ while True:
+ reply = self.request(request.method, path,
+ data=request.content, params=request.query or request,
+ headers=headers, allowed=_REDIRECT_CODES,
+ allow_redirects=False)
+ resend = reply.status_code in _REDIRECT_CODES
+ if response is not None:
+ if 'transfer-encoding' in reply.headers:
+ # `requests` library handles encoding on its own
+ del reply.headers['transfer-encoding']
+ for key, value in reply.headers.items():
+ if key.startswith('x-sn-'):
+ response.meta[key[5:]] = json.loads(value)
+ elif not resend:
+ response[key] = value
+ if not resend:
+ break
+ path = reply.headers['location']
+ if path.startswith('/'):
+ path = self.api_url + path
if request.method != 'HEAD':
if reply.headers.get('Content-Type') == 'application/json':
@@ -287,11 +303,11 @@ class Connection(object):
def subscribe(self, **condition):
return _Subscription(self, condition)
- def _decode_reply(self, response):
- if response.headers.get('Content-Type') == 'application/json':
- return json.loads(response.content)
+ def _decode_reply(self, reply):
+ if reply.headers.get('Content-Type') == 'application/json':
+ return json.loads(reply.content)
else:
- return response.content
+ return reply.content
class _Subscription(object):
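`Connection.call()` now follows redirects by hand so it can keep decoding `x-sn-*` headers into `response.meta` while dropping the headers of intermediate redirect replies. A minimal sketch of reading blob metadata through it (URL and guid are placeholders):

    from sugar_network.toolkit import http
    from sugar_network.toolkit.router import Request, Response

    conn = http.Connection('http://node.example.org')   # hypothetical node URL
    response = Response()
    data = conn.call(Request(method='GET',
            path=['implementation', 'impl-guid', 'data']), response)
    # x-sn-* headers were json-decoded into response.meta; plain headers of the
    # final, non-redirect reply were copied into response itself
    print(response.meta)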
diff --git a/sugar_network/toolkit/lsb_release.py b/sugar_network/toolkit/lsb_release.py
index 704c557..a7a61d0 100644
--- a/sugar_network/toolkit/lsb_release.py
+++ b/sugar_network/toolkit/lsb_release.py
@@ -44,6 +44,11 @@ _DERIVATES = {
(int(float(x)) + 6, 4 if float(x) == int(float(x)) else 10)
],
),
+ 'Debian': (
+ 'Debian', [
+ lambda x: '%d.0' % int(float(x)),
+ ],
+ ),
}
diff --git a/sugar_network/toolkit/pipe.py b/sugar_network/toolkit/pipe.py
deleted file mode 100644
index 7a53201..0000000
--- a/sugar_network/toolkit/pipe.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright (C) 2010-2013 Aleksey Lim
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import sys
-import json
-import struct
-import signal
-import logging
-import threading
-from os.path import exists, dirname, basename
-
-from sugar_network import toolkit
-from sugar_network.toolkit import coroutine
-
-
-_logger = logging.getLogger('pipe')
-_pipe = None
-_trace = None
-
-
-def feedback(state, **event):
- if _pipe is None:
- return
- event['state'] = state
- event = json.dumps(event)
- os.write(_pipe, struct.pack('i', len(event)))
- os.write(_pipe, event)
-
-
-def trace(message, *args):
- global _trace
- if _trace is None:
- _trace = []
- if args:
- message = message % args
- _logger.debug(message)
- _trace.append(message)
-
-
-def fork(callback, log_path=None, session=None, **kwargs):
- fd_r, fd_w = os.pipe()
-
- pid = os.fork()
- if pid:
- os.close(fd_w)
- _logger.debug('Fork %s%r with %s pid', callback, kwargs, pid)
- return _Pipe(pid, fd_r)
-
- os.close(fd_r)
- global _pipe
- _pipe = fd_w
-
- def thread_func():
- environ = {}
- if log_path:
- environ['log_path'] = _setup_logging(log_path)
- feedback('fork', session=session, environ=environ)
- try:
- callback(**kwargs)
- except Exception, error:
- feedback('failure', error_type=type(error).__name__,
- error=str(error), environ={'trace': _trace})
- _logger.exception('%r(%r) failed', callback, kwargs)
-
- if session is None:
- session = {}
- # Avoid a mess with current thread coroutines
- thread = threading.Thread(target=thread_func)
- thread.start()
- thread.join()
-
- os.close(fd_w)
- sys.stdout.flush()
- sys.stderr.flush()
- # pylint: disable-msg=W0212
- os._exit(0)
-
-
-class _Pipe(object):
-
- def __init__(self, pid, fd):
- self._pid = pid
- self._fd = fd
- self._session = {}
- self._environ = {}
-
- def fileno(self):
- return self._fd
-
- def read(self):
- if self._fd is None:
- return None
-
- event = None
- failed = False
-
- event_length = os.read(self._fd, struct.calcsize('i'))
- if event_length:
- event_length = struct.unpack('i', event_length)[0]
- event = json.loads(os.read(self._fd, event_length))
- if 'session' in event:
- self._session.update(event.pop('session'))
- if 'environ' in event:
- self._environ.update(event.pop('environ'))
- failed = (event['state'] == 'failure')
-
- if event is None or failed:
- status = 0
- try:
- __, status = os.waitpid(self._pid, 0)
- except OSError:
- pass
- if event is None:
- failure = _decode_exit_failure(status)
- if failure:
- _logger.debug('Process %s failed: %s', self._pid, failure)
- event = {'state': 'failure', 'error': failure}
- failed = True
- else:
- _logger.debug('Process %s successfully exited', self._pid)
- event = {'state': 'exit'}
- os.close(self._fd)
- self._fd = None
-
- if failed:
- event.update(self._environ)
- event.update(self._session)
-
- return event
-
- def __iter__(self):
- try:
- while self._fd is not None:
- coroutine.select([self._fd], [], [])
- event = self.read()
- if event is None:
- break
- yield event
- finally:
- if self._fd is not None:
- _logger.debug('Kill %s process', self._pid)
- os.kill(self._pid, signal.SIGTERM)
- while self.read() is not None:
- pass
-
-
-def _decode_exit_failure(status):
- failure = None
- if os.WIFEXITED(status):
- status = os.WEXITSTATUS(status)
- if status:
- failure = 'Exited with status %s' % status
- elif os.WIFSIGNALED(status):
- signum = os.WTERMSIG(status)
- if signum not in (signal.SIGINT, signal.SIGKILL, signal.SIGTERM):
- failure = 'Terminated by signal %s' % signum
- else:
- signum = os.WTERMSIG(status)
- failure = 'Undefined status with signal %s' % signum
- return failure
-
-
-def _setup_logging(path):
- log_dir = dirname(path)
- if not exists(log_dir):
- os.makedirs(log_dir)
- path = toolkit.unique_filename(log_dir, basename(path) + '.log')
-
- logfile = file(path, 'a+')
- os.dup2(logfile.fileno(), sys.stdout.fileno())
- os.dup2(logfile.fileno(), sys.stderr.fileno())
- logfile.close()
-
- toolkit.init_logging()
-
- return path
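As context for the deletion above: the removed toolkit/pipe.py framed every child-process event as a native-int length header followed by a JSON payload, written by feedback() and consumed by _Pipe.read(). A minimal sketch of reading one such frame back (read_event is a hypothetical helper, not part of the codebase):

import json
import os
import struct

def read_event(fd):
    # One frame = struct-packed length ('i') + JSON body, as written by feedback().
    header = os.read(fd, struct.calcsize('i'))
    if not header:
        return None  # writer side closed the pipe
    (length,) = struct.unpack('i', header)
    return json.loads(os.read(fd, length))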
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index c67ec97..189556f 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -31,6 +31,7 @@ from sugar_network.toolkit import http, coroutine, enforce
_logger = logging.getLogger('router')
+_NOT_SET = object()
def route(method, path=None, cmd=None, **kwargs):
@@ -96,70 +97,59 @@ class ACL(object):
class Request(dict):
- environ = None
- url = None
- method = None
- path = None
- cmd = None
- content = None
- content_type = None
- content_stream = None
- content_length = 0
principal = None
subcall = lambda *args: enforce(False)
def __init__(self, environ=None, method=None, path=None, cmd=None,
- content=None, **kwargs):
+ content=None, content_type=None, **kwargs):
dict.__init__(self)
+
+ self.path = []
+ self.cmd = None
+ self.environ = {}
+ self.session = {}
+ self._content = _NOT_SET
+
self._dirty_query = False
- self._if_modified_since = None
- self._accept_language = None
+ self._if_modified_since = _NOT_SET
+ self._accept_language = _NOT_SET
+ self._content_stream = _NOT_SET
+ self._content_type = content_type or _NOT_SET
+
+ if environ:
+ url = environ.get('PATH_INFO', '').strip('/')
+ self.path = [i for i in url.split('/') if i]
+ query = environ.get('QUERY_STRING') or ''
+ for key, value in parse_qsl(query, keep_blank_values=True):
+ key = str(key)
+ param = self.get(key)
+ if type(param) is list:
+ param.append(value)
+ else:
+ if param is not None:
+ value = [param, value]
+ if key == 'cmd':
+ self.cmd = value
+ else:
+ dict.__setitem__(self, key, value)
+ self.environ = environ
- if environ is None:
- self.environ = {}
- self.method = method
+ if method:
+ self.environ['REQUEST_METHOD'] = method
+ if path:
+ self.environ['PATH_INFO'] = '/' + '/'.join(path)
self.path = path
+ if cmd:
self.cmd = cmd
+ self._dirty_query = True
+ if content is not None:
+ self._content = content
+ if kwargs:
self.update(kwargs)
- self.content = content
- return
-
- self.environ = environ
- self.url = '/' + environ['PATH_INFO'].strip('/')
- self.path = [i for i in self.url[1:].split('/') if i]
- self.method = environ['REQUEST_METHOD']
+ self._dirty_query = True
enforce('..' not in self.path, 'Relative url path')
- query = environ.get('QUERY_STRING') or ''
- for key, value in parse_qsl(query, keep_blank_values=True):
- key = str(key)
- param = self.get(key)
- if type(param) is list:
- param.append(value)
- else:
- if param is not None:
- value = [param, value]
- if key == 'cmd':
- self.cmd = value
- else:
- dict.__setitem__(self, key, value)
- if query:
- self.url += '?' + query
-
- content_length = environ.get('CONTENT_LENGTH')
- if content_length is not None:
- self.content_length = int(content_length)
-
- content_type, __ = cgi.parse_header(environ.get('CONTENT_TYPE', ''))
- self.content_type = content_type.lower()
- if self.content_type == 'application/json':
- self.content = json.load(environ['wsgi.input'])
-
- stream = environ.get('wsgi.input')
- if stream is not None:
- self.content_stream = _ContentStream(stream, self.content_length)
-
def __setitem__(self, key, value):
self._dirty_query = True
if key == 'cmd':
@@ -172,6 +162,58 @@ class Request(dict):
return self.get(key)
@property
+ def method(self):
+ return self.environ.get('REQUEST_METHOD')
+
+ @property
+ def url(self):
+ result = self.environ['PATH_INFO']
+ if self.query:
+ result += '?' + self.query
+ return result
+
+ @property
+ def content_type(self):
+ if self._content_type is _NOT_SET:
+ value, __ = cgi.parse_header(
+ self.environ.get('CONTENT_TYPE', ''))
+ self._content_type = value.lower()
+ return self._content_type
+
+ @content_type.setter
+ def content_type(self, value):
+ self._content_type = value
+
+ @property
+ def content(self):
+ if self._content is _NOT_SET:
+ if self.content_type == 'application/json':
+ self._content = json.load(self.environ['wsgi.input'])
+ else:
+ self._content = None
+ return self._content
+
+ @content.setter
+ def content(self, value):
+ self._content = value
+
+ @property
+ def content_length(self):
+ value = self.environ.get('CONTENT_LENGTH')
+ if value is not None:
+ return int(value)
+
+ @property
+ def content_stream(self):
+ if self._content_stream is _NOT_SET:
+ s = self.environ.get('wsgi.input')
+ if s is None:
+ self._content_stream = None
+ else:
+ self._content_stream = _ContentStream(s, self.content_length)
+ return self._content_stream
+
+ @property
def resource(self):
if self.path:
return self.path[0]
@@ -194,7 +236,7 @@ class Request(dict):
@property
def if_modified_since(self):
- if self._if_modified_since is None:
+ if self._if_modified_since is _NOT_SET:
value = parsedate(self.environ.get('HTTP_IF_MODIFIED_SINCE'))
if value is not None:
self._if_modified_since = calendar.timegm(value)
@@ -204,7 +246,7 @@ class Request(dict):
@property
def accept_language(self):
- if self._accept_language is None:
+ if self._accept_language is _NOT_SET:
self._accept_language = _parse_accept_language(
self.environ.get('HTTP_ACCEPT_LANGUAGE'))
return self._accept_language
@@ -239,13 +281,21 @@ class Request(dict):
else:
existing_value = self[key] = [existing_value, value]
- def call(self, request=None, response=None, **kwargs):
- if request is None:
- request = Request(**kwargs)
+ def call(self, response=None, **kwargs):
+ environ = {}
+ for key in ('HTTP_HOST',
+ 'HTTP_ACCEPT_LANGUAGE',
+ 'HTTP_ACCEPT_ENCODING',
+ 'HTTP_IF_MODIFIED_SINCE',
+ 'HTTP_X_SN_LOGIN',
+ 'HTTP_X_SN_SIGNATURE'):
+ if key in self.environ:
+ environ[key] = self.environ[key]
+ request = Request(environ, **kwargs)
if response is None:
response = Response()
request.principal = self.principal
- request.environ = self.environ
+ request.subcall = self.subcall
return self.subcall(request, response)
def __repr__(self):
@@ -334,8 +384,8 @@ class Router(object):
self._host = None
self._routes = _Routes()
self._routes_model = routes_model
- self._preroutes = []
- self._postroutes = []
+ self._preroutes = set()
+ self._postroutes = set()
processed = set()
cls = type(routes_model)
@@ -345,10 +395,10 @@ class Router(object):
if name in processed:
continue
if hasattr(attr, 'is_preroute'):
- self._preroutes.append(getattr(routes_model, name))
+ self._preroutes.add(getattr(routes_model, name))
continue
elif hasattr(attr, 'is_postroute'):
- self._postroutes.append(getattr(routes_model, name))
+ self._postroutes.add(getattr(routes_model, name))
continue
elif not hasattr(attr, 'route'):
continue
@@ -376,7 +426,7 @@ class Router(object):
def call(self, request, response):
request.subcall = self.call
- result = self._call(request, response)
+ result = self._call_route(request, response)
if isinstance(result, Blob):
if 'url' in result:
@@ -489,8 +539,8 @@ class Router(object):
elif result is not None:
yield result
- def _call(self, request, response):
- route_ = self._resolve(request)
+ def _call_route(self, request, response):
+ route_ = self._resolve_route(request)
request.routes = self._routes_model
for arg, cast in route_.arguments.items():
@@ -535,7 +585,7 @@ class Router(object):
return result
- def _resolve(self, request):
+ def _resolve_route(self, request):
found_path = [False]
def resolve_path(routes, path):
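The router.py hunks above replace the eagerly parsed Request attributes (url, method, content_type, content, ...) with properties computed on demand from the WSGI environ, using a module-level _NOT_SET sentinel so that a legitimately cached None value is not recomputed. A stripped-down sketch of that pattern (LazyRequest is a hypothetical stand-in, not the real class):

_NOT_SET = object()

class LazyRequest(object):

    def __init__(self, environ):
        self.environ = environ
        self._content_type = _NOT_SET

    @property
    def content_type(self):
        # Compute once from the environ; None is a valid cached result,
        # hence the sentinel instead of an "is None" check.
        if self._content_type is _NOT_SET:
            self._content_type = self.environ.get('CONTENT_TYPE', '').lower()
        return self._content_type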
diff --git a/sugar_network/toolkit/zeroconf.py b/sugar_network/toolkit/zeroconf.py
index 71ab2e8..9bda514 100644
--- a/sugar_network/toolkit/zeroconf.py
+++ b/sugar_network/toolkit/zeroconf.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012 Aleksey Lim
+# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -15,7 +15,7 @@
import logging
-from sugar_network.toolkit import pipe
+from sugar_network.toolkit import gbus
_LOOKUP_RESULT_LOCAL = 8
@@ -32,20 +32,12 @@ _logger = logging.getLogger('zeroconf')
def browse_workstations():
_logger.info('Start browsing hosts using Avahi')
- # Run zeroconf loop in a subprocess to avoid dbus loop collisions
- for event in pipe.fork(_browser):
- if event['state'] == 'resolve':
- yield event['address']
+ for address in gbus.pipe(_browser):
+ yield address
-def _browser():
+def _browser(pipe):
import dbus
- import gobject
- from dbus.mainloop.glib import threads_init, DBusGMainLoop
-
- gobject.threads_init()
- threads_init()
- DBusGMainLoop(set_as_default=True)
bus = dbus.SystemBus()
server = dbus.Interface(bus.get_object(_DBUS_NAME, '/'),
@@ -62,7 +54,7 @@ def _browser():
def ResolveService_cb(interface, protocol, name, type_, domain,
host, aprotocol, address, port, txt, flags):
_logger.debug('Got new address: %s', address)
- pipe.feedback('resolve', address=str(address))
+ pipe(str(address))
def ItemRemove_cb(interface, protocol, name, type_, domain, *args):
_logger.debug('Got removed workstation: %s', name)
@@ -78,8 +70,6 @@ def _browser():
browser.connect_to_signal('ItemNew', ItemNew_cb)
browser.connect_to_signal('ItemRemove', ItemRemove_cb)
- gobject.MainLoop().run()
-
if __name__ == '__main__':
from pprint import pprint
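The zeroconf.py change above drops the fork-based event pipe in favour of gbus.pipe(), where the worker receives a callback and every callback invocation is yielded back to the caller. The sketch below only illustrates that callback-to-generator shape with a plain thread and queue (callback_pipe is hypothetical; the real gbus.pipe() drives the worker inside a GLib/D-Bus main loop and also handles early termination):

import threading
import Queue

def callback_pipe(worker):
    results = Queue.Queue()
    done = object()

    def run():
        try:
            worker(results.put)  # the worker calls pipe(value) per result
        finally:
            results.put(done)    # signal that the worker has finished

    threading.Thread(target=run).start()
    while True:
        value = results.get()
        if value is done:
            break
        yield value

# Usage mirrors browse_workstations():
#   for address in callback_pipe(_browser):
#       print address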
diff --git a/tests/__init__.py b/tests/__init__.py
index fcd90a6..09149ec 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -16,12 +16,12 @@ from os.path import dirname, join, exists, abspath, isfile
from M2Crypto import DSA
from gevent import monkey
-from sugar_network.toolkit import coroutine, http, mountpoints, Option, pipe
+from sugar_network.toolkit import coroutine, http, mountpoints, Option, gbus
from sugar_network.toolkit.router import Router
-from sugar_network.client import journal, routes as client_routes
+from sugar_network.client import IPCConnection, journal, routes as client_routes
from sugar_network.client.routes import ClientRoutes
-from sugar_network import db, client, node, toolkit
-from sugar_network.client import injector, solver
+from sugar_network import db, client, node, toolkit, model
+from sugar_network.client import solver
from sugar_network.model.user import User
from sugar_network.model.context import Context
from sugar_network.model.implementation import Implementation
@@ -87,7 +87,6 @@ class Test(unittest.TestCase):
node.sync_layers.value = None
db.index_write_queue.value = 10
client.local_root.value = tmpdir
- client.activity_dirs.value = [tmpdir + '/Activities']
client.api_url.value = 'http://127.0.0.1:8888'
client.mounts_root.value = None
client.ipc_port.value = 5555
@@ -108,15 +107,12 @@ class Test(unittest.TestCase):
obs._repos = {'base': [], 'presolve': []}
http._RECONNECTION_NUMBER = 0
toolkit.cachedir.value = tmpdir + '/tmp'
- injector.invalidate_solutions(None)
- injector._pms_path = None
journal._ds_root = tmpdir + '/datastore'
solver.nodeps = False
solver._stability = None
solver._conn = None
downloads._POOL_SIZE = 256
- pipe._pipe = None
- pipe._trace = None
+ gbus.join()
db.Volume.model = [
'sugar_network.model.user',
@@ -306,24 +302,21 @@ class Test(unittest.TestCase):
classes = [User, Context, Implementation]
self.start_master(classes)
volume = db.Volume('client', classes)
- commands = ClientRoutes(volume, client.api_url.value)
- self.wait_for_events(commands, event='inline', state='online').wait()
+ self.client_routes = ClientRoutes(volume, client.api_url.value)
+ self.wait_for_events(self.client_routes, event='inline', state='online').wait()
self.client = coroutine.WSGIServer(
- ('127.0.0.1', client.ipc_port.value), Router(commands))
+ ('127.0.0.1', client.ipc_port.value), Router(self.client_routes))
coroutine.spawn(self.client.serve_forever)
coroutine.dispatch()
return volume
- def start_offline_client(self, classes=None):
- if classes is None:
- classes = [User, Context, Implementation]
- volume = db.Volume('client', classes)
- commands = ClientRoutes(volume)
- self.client = coroutine.WSGIServer(
- ('127.0.0.1', client.ipc_port.value), Router(commands))
- coroutine.spawn(self.client.serve_forever)
+ def start_offline_client(self):
+ self.home_volume = db.Volume('db', model.RESOURCES)
+ commands = ClientRoutes(self.home_volume)
+ server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(commands))
+ coroutine.spawn(server.serve_forever)
coroutine.dispatch()
- return volume
+ return IPCConnection()
def restful_server(self, classes=None):
if not exists('remote'):
diff --git a/tests/data/node/context/ac/activity/author b/tests/data/node/context/ac/activity/author
index 6be2b3e..7f776df 100644
--- a/tests/data/node/context/ac/activity/author
+++ b/tests/data/node/context/ac/activity/author
@@ -1 +1 @@
-{"seqno": 2, "value": ["alsroot"]} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/aliases b/tests/data/node/context/ac/activity2/aliases
deleted file mode 100644
index 9331b9d..0000000
--- a/tests/data/node/context/ac/activity2/aliases
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": {}} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/author b/tests/data/node/context/ac/activity2/author
deleted file mode 100644
index 6be2b3e..0000000
--- a/tests/data/node/context/ac/activity2/author
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": ["alsroot"]} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/clone b/tests/data/node/context/ac/activity2/clone
deleted file mode 100644
index 86e4350..0000000
--- a/tests/data/node/context/ac/activity2/clone
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 0, "value": 0} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/ctime b/tests/data/node/context/ac/activity2/ctime
deleted file mode 100644
index 1d8fcc5..0000000
--- a/tests/data/node/context/ac/activity2/ctime
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": 1350031825} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/dependencies b/tests/data/node/context/ac/activity2/dependencies
deleted file mode 100644
index 8cbfb95..0000000
--- a/tests/data/node/context/ac/activity2/dependencies
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": []} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/description b/tests/data/node/context/ac/activity2/description
deleted file mode 100644
index 0198dfa..0000000
--- a/tests/data/node/context/ac/activity2/description
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": {"en-US": "description"}} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/favorite b/tests/data/node/context/ac/activity2/favorite
deleted file mode 100644
index 051a6dc..0000000
--- a/tests/data/node/context/ac/activity2/favorite
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 0, "value": false} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/guid b/tests/data/node/context/ac/activity2/guid
deleted file mode 100644
index d440ce8..0000000
--- a/tests/data/node/context/ac/activity2/guid
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": "activity2"} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/homepage b/tests/data/node/context/ac/activity2/homepage
deleted file mode 100644
index 7c57ae8..0000000
--- a/tests/data/node/context/ac/activity2/homepage
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": ""} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/implement b/tests/data/node/context/ac/activity2/implement
deleted file mode 100644
index 8cbfb95..0000000
--- a/tests/data/node/context/ac/activity2/implement
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": []} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/keep b/tests/data/node/context/ac/activity2/keep
deleted file mode 100644
index 13c2b1d..0000000
--- a/tests/data/node/context/ac/activity2/keep
+++ /dev/null
@@ -1,8 +0,0 @@
-(dp1
-Vseqno
-p2
-I2
-sVvalue
-p3
-I00
-s. \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/keep_impl b/tests/data/node/context/ac/activity2/keep_impl
deleted file mode 100644
index 2cdb5f0..0000000
--- a/tests/data/node/context/ac/activity2/keep_impl
+++ /dev/null
@@ -1,8 +0,0 @@
-(dp1
-Vseqno
-p2
-I2
-sVvalue
-p3
-I0
-s. \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/layer b/tests/data/node/context/ac/activity2/layer
deleted file mode 100644
index 5584ac9..0000000
--- a/tests/data/node/context/ac/activity2/layer
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": ["public"]} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/mime_types b/tests/data/node/context/ac/activity2/mime_types
deleted file mode 100644
index 8cbfb95..0000000
--- a/tests/data/node/context/ac/activity2/mime_types
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": []} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/mtime b/tests/data/node/context/ac/activity2/mtime
deleted file mode 100644
index 1d8fcc5..0000000
--- a/tests/data/node/context/ac/activity2/mtime
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": 1350031825} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/packages b/tests/data/node/context/ac/activity2/packages
deleted file mode 100644
index 9331b9d..0000000
--- a/tests/data/node/context/ac/activity2/packages
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": {}} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/position b/tests/data/node/context/ac/activity2/position
deleted file mode 100644
index a21b8ca..0000000
--- a/tests/data/node/context/ac/activity2/position
+++ /dev/null
@@ -1,10 +0,0 @@
-(dp1
-Vseqno
-p2
-I2
-sVvalue
-p3
-(lp4
-I-1
-aI-1
-as. \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/presolve b/tests/data/node/context/ac/activity2/presolve
deleted file mode 100644
index 7c94802..0000000
--- a/tests/data/node/context/ac/activity2/presolve
+++ /dev/null
@@ -1,8 +0,0 @@
-(dp1
-Vseqno
-p2
-I2
-sVvalue
-p3
-(dp4
-s. \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/rating b/tests/data/node/context/ac/activity2/rating
deleted file mode 100644
index 86e4350..0000000
--- a/tests/data/node/context/ac/activity2/rating
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 0, "value": 0} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/reviews b/tests/data/node/context/ac/activity2/reviews
deleted file mode 100644
index dd3f63c..0000000
--- a/tests/data/node/context/ac/activity2/reviews
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 0, "value": [0, 0]} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/seqno b/tests/data/node/context/ac/activity2/seqno
deleted file mode 100644
index 829c37d..0000000
--- a/tests/data/node/context/ac/activity2/seqno
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": 2} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/summary b/tests/data/node/context/ac/activity2/summary
deleted file mode 100644
index d15ae6a..0000000
--- a/tests/data/node/context/ac/activity2/summary
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": {"en-US": "summary"}} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/tags b/tests/data/node/context/ac/activity2/tags
deleted file mode 100644
index 8cbfb95..0000000
--- a/tests/data/node/context/ac/activity2/tags
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": []} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/title b/tests/data/node/context/ac/activity2/title
deleted file mode 100644
index 45a74f6..0000000
--- a/tests/data/node/context/ac/activity2/title
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": {"en-US": "title1"}} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/type b/tests/data/node/context/ac/activity2/type
deleted file mode 100644
index bf9b04a..0000000
--- a/tests/data/node/context/ac/activity2/type
+++ /dev/null
@@ -1 +0,0 @@
-{"seqno": 2, "value": ["activity"]} \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/user b/tests/data/node/context/ac/activity2/user
deleted file mode 100644
index fba3c78..0000000
--- a/tests/data/node/context/ac/activity2/user
+++ /dev/null
@@ -1,10 +0,0 @@
-(dp1
-Vseqno
-p2
-I2
-sVvalue
-p3
-(lp4
-V25c081e29242cf7a19ae893a420ab3de56e9e989
-p5
-as. \ No newline at end of file
diff --git a/tests/data/node/context/ac/activity2/versions b/tests/data/node/context/ac/activity2/versions
deleted file mode 100644
index 14ad676..0000000
--- a/tests/data/node/context/ac/activity2/versions
+++ /dev/null
@@ -1,8 +0,0 @@
-(dp1
-Vseqno
-p2
-I2
-sVvalue
-p3
-(lp4
-s. \ No newline at end of file
diff --git a/tests/data/node/context/co/context/author b/tests/data/node/context/co/context/author
index 6be2b3e..7f776df 100644
--- a/tests/data/node/context/co/context/author
+++ b/tests/data/node/context/co/context/author
@@ -1 +1 @@
-{"seqno": 2, "value": ["alsroot"]} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/data/node/context/co/context/layer b/tests/data/node/context/co/context/layer
index 5584ac9..a942f27 100644
--- a/tests/data/node/context/co/context/layer
+++ b/tests/data/node/context/co/context/layer
@@ -1 +1 @@
-{"seqno": 2, "value": ["public"]} \ No newline at end of file
+{"seqno": 2, "value": []} \ No newline at end of file
diff --git a/tests/data/node/context/de/dep1/author b/tests/data/node/context/de/dep1/author
index 87e524a..7f776df 100644
--- a/tests/data/node/context/de/dep1/author
+++ b/tests/data/node/context/de/dep1/author
@@ -1 +1 @@
-{"seqno": 5, "value": {}} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/data/node/context/de/dep2/author b/tests/data/node/context/de/dep2/author
index 87e524a..7f776df 100644
--- a/tests/data/node/context/de/dep2/author
+++ b/tests/data/node/context/de/dep2/author
@@ -1 +1 @@
-{"seqno": 5, "value": {}} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/data/node/context/de/dep3/author b/tests/data/node/context/de/dep3/author
index 87e524a..7f776df 100644
--- a/tests/data/node/context/de/dep3/author
+++ b/tests/data/node/context/de/dep3/author
@@ -1 +1 @@
-{"seqno": 5, "value": {}} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/data/node/context/pa/package/author b/tests/data/node/context/pa/package/author
index 87e524a..7f776df 100644
--- a/tests/data/node/context/pa/package/author
+++ b/tests/data/node/context/pa/package/author
@@ -1 +1 @@
-{"seqno": 5, "value": {}} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/data/node/implementation/im/implementation/author b/tests/data/node/implementation/im/implementation/author
index 8600883..7f776df 100644
--- a/tests/data/node/implementation/im/implementation/author
+++ b/tests/data/node/implementation/im/implementation/author
@@ -1 +1 @@
-{"seqno": 3, "value": ["alsroot"]} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/data/node/implementation/im/implementation/data b/tests/data/node/implementation/im/implementation/data
index d8d70ef..8827a89 100644
--- a/tests/data/node/implementation/im/implementation/data
+++ b/tests/data/node/implementation/im/implementation/data
@@ -1 +1 @@
-{"seqno": 5, "mime_type": "application/octet-stream", "digest": "fdb59f1ebd6ee26a00396747b3a733f4fd274604", "spec": {"*-*": {"commands": {"activity": {"exec": "true"}}, "extract": "Chat.activity"}}} \ No newline at end of file
+{"blob_size": 1, "unpack_size": 1, "extract": "Chat.activity", "seqno": 5, "mime_type": "application/octet-stream", "digest": "fdb59f1ebd6ee26a00396747b3a733f4fd274604", "spec": {"*-*": {"commands": {"activity": {"exec": "true"}}}}} \ No newline at end of file
diff --git a/tests/data/node/implementation/im/implementation2/author b/tests/data/node/implementation/im/implementation2/author
index 8600883..7f776df 100644
--- a/tests/data/node/implementation/im/implementation2/author
+++ b/tests/data/node/implementation/im/implementation2/author
@@ -1 +1 @@
-{"seqno": 3, "value": ["alsroot"]} \ No newline at end of file
+{"seqno": 5, "value": {"25c081e29242cf7a19ae893a420ab3de56e9e989": {"role": 3, "name": "test", "order": 0}}} \ No newline at end of file
diff --git a/tests/integration/master_personal.py b/tests/integration/master_personal.py
index 8acc959..f484836 100755
--- a/tests/integration/master_personal.py
+++ b/tests/integration/master_personal.py
@@ -40,7 +40,7 @@ class MasterPersonalTest(tests.Test):
self.client_pid = self.popen([join(src_root, 'sugar-network-client'), '-F', 'start',
'--api-url=http://127.0.0.1:8100', '--cachedir=client/tmp',
'-DDD', '--rundir=client/run', '--server-mode', '--layers=pilot',
- '--local-root=client', '--activity-dirs=client/activities',
+ '--local-root=client',
'--port=8101', '--index-flush-threshold=1',
'--mounts-root=client/mnt', '--ipc-port=8102',
'--stats-user', '--stats-user-step=1',
diff --git a/tests/integration/node_client.py b/tests/integration/node_client.py
index ec30271..3bb7999 100755
--- a/tests/integration/node_client.py
+++ b/tests/integration/node_client.py
@@ -37,80 +37,96 @@ class NodeClientTest(tests.Test):
tests.Test.tearDown(self)
def test_ReleaseActivity(self):
+ blob1 = self.zips(['TestActivitry/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivitry',
+ 'bundle_id = activity2',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = developer',
+ ]])
with file('bundle', 'wb') as f:
- f.write(self.zips(['TestActivitry/activity/activity.info', [
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = activity2',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'stability = developper',
- ]]))
- self.cli(['release', 'bundle', '--porcelain'])
-
- self.assertEqual([
- {'version': '1', 'stability': 'developper', 'license': ['Public Domain']},
- ],
- self.cli(['GET', '/implementation', 'context=activity2', 'reply=version,stability,license', 'order_by=version'])['result'])
-
+ f.write(blob1)
+ impl1 = self.cli(['release', 'bundle', '--porcelain', 'initial'])
+
+ blob2 = self.zips(['TestActivitry/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivitry',
+ 'bundle_id = activity2',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])
with file('bundle', 'wb') as f:
- f.write(self.zips(['TestActivitry/activity/activity.info', [
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = activity2',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- ]]))
- self.cli(['release', 'bundle', '--porcelain'])
+ f.write(blob2)
+ impl2 = self.cli(['release', 'bundle', '--porcelain'])
self.assertEqual([
- {'version': '1', 'stability': 'developper', 'license': ['Public Domain']},
- {'version': '2', 'stability': 'stable', 'license': ['Public Domain']},
+ {'guid': impl1, 'version': '1', 'stability': 'developer', 'license': ['Public Domain']},
+ {'guid': impl2, 'version': '2', 'stability': 'stable', 'license': ['Public Domain']},
],
- self.cli(['GET', '/implementation', 'context=activity2', 'reply=version,stability,license', 'order_by=version'])['result'])
+ self.cli(['GET', '/implementation', 'context=activity2', 'reply=guid,version,stability,license', 'order_by=version'])['result'])
+ assert blob1 == file('node/implementation/%s/%s/data.blob' % (impl1[:2], impl1)).read()
+ assert blob2 == file('node/implementation/%s/%s/data.blob' % (impl2[:2], impl2)).read()
- def test_CloneContext(self):
+ def test_ReleaseContext(self):
context = self.cli(['POST', '/context'], stdin={
- 'type': 'activity',
- 'title': 'title1',
+ 'type': 'content',
+ 'title': 'title',
'summary': 'summary',
'description': 'description',
})
- spec = ['[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]
+ blob1 = 'content1'
with file('bundle', 'wb') as f:
- f.write(self.zips(['TestActivitry/activity/activity.info', spec]))
- impl = self.cli(['release', 'bundle'])
-
- self.cli(['PUT', '/context/%s' % context, 'cmd=clone', '-jd1'])
- assert exists('client/Activities/TestActivitry/activity/activity.info')
- self.assertEqual('\n'.join(spec), file('client/Activities/TestActivitry/activity/activity.info').read())
+ f.write(blob1)
+ impl1 = self.cli(['release', 'bundle', '--porcelain',
+ 'context=%s' % context,
+ 'license=GPLv3+',
+ 'version=1',
+ 'stability=developer',
+ ])
- def test_FavoriteContext(self):
- context = self.cli(['POST', '/context'], stdin={
- 'type': 'activity',
- 'title': 'title1',
- 'summary': 'summary',
- 'description': 'description',
- })
+ blob2 = 'content2'
+ with file('bundle', 'wb') as f:
+ f.write(blob2)
+ impl2 = self.cli(['release', 'bundle', '--porcelain',
+ 'context=%s' % context,
+ 'license=GPLv3+',
+ 'version=2',
+ 'stability=stable',
+ ])
- path = 'client/db/context/%s/%s/favorite' % (context[:2], context)
- assert not exists(path)
+ self.assertEqual([
+ {'guid': impl1, 'version': '1', 'stability': 'developer', 'license': ['GPLv3+']},
+ {'guid': impl2, 'version': '2', 'stability': 'stable', 'license': ['GPLv3+']},
+ ],
+ self.cli(['GET', '/implementation', 'context=%s' % context, 'reply=guid,version,stability,license', 'order_by=version'])['result'])
+ assert blob1 == file('node/implementation/%s/%s/data.blob' % (impl1[:2], impl1)).read()
+ assert blob2 == file('node/implementation/%s/%s/data.blob' % (impl2[:2], impl2)).read()
- self.cli(['PUT', '/context/%s' % context, 'cmd=favorite', '-jdtrue'])
+ def test_CloneContext(self):
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = TestActivitry',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ with file('bundle', 'wb') as f:
+ f.write(self.zips(['TestActivitry/activity/activity.info', activity_info]))
+ impl = self.cli(['release', 'bundle', '--porcelain', 'initial'])
- assert exists(path)
- self.assertEqual(True, json.load(file(path))['value'])
+ self.cli(['PUT', '/context/bundle_id', 'cmd=clone', '-jd1'])
+ self.assertEqual(
+ activity_info,
+ file('client/db/implementation/%s/%s/data.blob/activity/activity.info' % (impl[:2], impl)).read())
def test_UsecaseOOB(self):
privkey_path = '.sugar/default/owner.key'
@@ -118,22 +134,13 @@ class NodeClientTest(tests.Test):
os.unlink(privkey_path)
os.unlink(pubkey_path)
- deplist = self.cli(['GET', '/context/activity', 'cmd=deplist', 'repo=Fedora-14', '--anonymous', '--no-dbus', '--porcelain'])
- assert not exists(privkey_path)
- assert not exists(pubkey_path)
- self.assertEqual(
- sorted(['dep1.rpm', 'dep2.rpm', 'dep3.rpm']),
- sorted([i.strip() for i in deplist.split('\n')]))
-
- self.cli(['PUT', '/context/context', '--anonymous', 'cmd=clone', 'nodeps=1', 'stability=stable', '-jd', '1'])
- assert not exists(privkey_path)
- assert not exists(pubkey_path)
+ self.cli(['PUT', '/context/context', '--no-dbus', '--anonymous', 'cmd=clone', '-jd', '1'])
+ self.cli(['PUT', '/context/context', '--no-dbus', '--anonymous', 'cmd=favorite', '-jd', 'true'])
- self.cli(['PUT', '/context/context', '--anonymous', 'cmd=favorite', '-jd', 'true'])
+ assert exists('client/db/implementation/im/implementation/data.blob/activity/activity.info')
+ self.assertEqual(['clone', 'favorite'], json.load(file('client/db/context/co/context/layer'))['value'])
assert not exists(privkey_path)
assert not exists(pubkey_path)
- assert exists('Activities/Chat.activity/activity/activity.info')
- self.assertEqual(True, json.load(file('client/db/context/co/context/favorite'))['value'])
def cli(self, cmd, stdin=None):
cmd = ['sugar-network', '--local-root=client', '--ipc-port=5101', '--api-url=http://127.0.0.1:8100', '-DDD'] + cmd
@@ -141,7 +148,7 @@ class NodeClientTest(tests.Test):
if '--anonymous' not in cmd and not self.client_pid:
self.client_pid = self.popen([join(src_root, 'sugar-network-client'),
'-DDDF', 'start',
- '--activity-dirs=client/Activities', '--local-root=client',
+ '--local-root=client',
'--mounts-root=mnt', '--cachedir=tmp', '--ipc-port=5101',
'--api-url=http://127.0.0.1:8100',
])
diff --git a/tests/integration/node_packages.py b/tests/integration/node_packages.py
index 1b38c64..a45b788 100755
--- a/tests/integration/node_packages.py
+++ b/tests/integration/node_packages.py
@@ -54,13 +54,13 @@ class NodePackagesSlaveTest(tests.Test):
@fallbackroute('GET', ['resolve'], mime_type='text/xml')
def resolve(self, request, response):
- return '<resolve><binary name="rpm" url="http://127.0.0.1:9999/packages/rpm" arch="arch"/></resolve>'
+ return '<resolve><binary name="rpm" url="http://127.0.0.1:1999/packages/rpm" arch="arch"/></resolve>'
@fallbackroute('GET', ['packages'], mime_type='text/plain')
def packages(self, request, response):
return 'package_content'
- obs = coroutine.WSGIServer(('127.0.0.1', 9999), Router(OBS()))
+ obs = coroutine.WSGIServer(('127.0.0.1', 1999), Router(OBS()))
coroutine.spawn(obs.serve_forever)
# From master
@@ -72,7 +72,7 @@ class NodePackagesSlaveTest(tests.Test):
'--stats-root=master/stats', '--stats-user', '--stats-user-step=1',
'--stats-user-rras=RRA:AVERAGE:0.5:1:100',
'--index-flush-threshold=1', '--pull-timeout=1',
- '--obs-url=http://127.0.0.1:9999',
+ '--obs-url=http://127.0.0.1:1999',
]))
coroutine.sleep(3)
conn = Connection('http://127.0.0.1:8100')
@@ -100,7 +100,7 @@ class NodePackagesSlaveTest(tests.Test):
pid = self.popen([join(src_root, 'sugar-network-client'), '-F', 'start',
'--api-url=http://127.0.0.1:8100', '--cachedir=master.client/tmp',
'-DDD', '--rundir=master.client/run', '--layers=pilot',
- '--local-root=master.client', '--activity-dirs=master.client/activities',
+ '--local-root=master.client',
'--index-flush-threshold=1', '--ipc-port=8200',
])
client.ipc_port.value = 8200
@@ -138,7 +138,7 @@ class NodePackagesSlaveTest(tests.Test):
pid = self.popen([join(src_root, 'sugar-network-client'), '-F', 'start',
'--api-url=http://127.0.0.1:8101', '--cachedir=master.client/tmp',
'-DDD', '--rundir=master.client/run', '--layers=pilot',
- '--local-root=master.client', '--activity-dirs=master.client/activities',
+ '--local-root=master.client',
'--index-flush-threshold=1', '--ipc-port=8200',
])
client.ipc_port.value = 8200
@@ -157,7 +157,7 @@ class NodePackagesSlaveTest(tests.Test):
self.pids.append(self.popen([join(src_root, 'sugar-network-client'), '-F', 'start',
'--api-url=http://127.0.0.1:8100', '--cachedir=client/tmp',
'-DDD', '--rundir=client/run', '--server-mode', '--layers=pilot',
- '--local-root=client', '--activity-dirs=client/activities',
+ '--local-root=client',
'--port=8102', '--index-flush-threshold=1',
'--mounts-root=client/mnt', '--ipc-port=8202',
]))
diff --git a/tests/units/client/__main__.py b/tests/units/client/__main__.py
index fc1d045..f9ed28e 100644
--- a/tests/units/client/__main__.py
+++ b/tests/units/client/__main__.py
@@ -2,15 +2,14 @@
from __init__ import tests
-from clones import *
-from routes import *
-from injector import *
from journal import *
+from solver import *
+from routes import *
from offline_routes import *
from online_routes import *
from server_routes import *
-from solver import *
from cache import *
+from implementations import *
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/client/cache.py b/tests/units/client/cache.py
index 3342ce8..be22a22 100755
--- a/tests/units/client/cache.py
+++ b/tests/units/client/cache.py
@@ -4,12 +4,17 @@
import os
import time
import json
+import shutil
from cStringIO import StringIO
from os.path import exists
from __init__ import tests
-from sugar_network.client import cache, cache_limit, cache_lifetime
+from sugar_network import db
+from sugar_network.model.context import Context
+from sugar_network.model.implementation import Implementation
+from sugar_network.client import cache_limit, cache_lifetime, IPCConnection
+from sugar_network.client.cache import Cache
from sugar_network.toolkit import http
@@ -27,118 +32,260 @@ class CacheTest(tests.Test):
self.override(os, 'statvfs', lambda *args: statvfs())
cache_limit.value = 0
- def test_get(self):
- self.override(http.Connection, 'download', lambda self_, path: StringIO(self.zips(('topdir/probe', '/'.join(path)))))
- cache.get('impl', {'unpack_size': 100})
- self.assertEqual(100, json.load(file('cache/implementation/impl/.unpack_size')))
- self.assertEqual('implementation/impl/data', file('cache/implementation/impl/topdir/probe').read())
+ def test_open(self):
+ volume = db.Volume('db', [Context, Implementation])
- def test_ensure(self):
- self.touch(('cache/implementation/1/.unpack_size', '1', 1))
- self.touch(('cache/implementation/2/.unpack_size', '1', 2))
- self.touch(('cache/implementation/3/.unpack_size', '1', 3))
- cache_limit.value = 10
+ volume['implementation'].create({
+ 'guid': '1',
+ 'context': 'context',
+ 'license': ['GPL'],
+ 'version': '1',
+ 'stability': 'stable',
+ 'data': {'blob_size': 1},
+ })
+ os.utime('db/implementation/1/1', (1, 1))
+ volume['implementation'].create({
+ 'guid': '5',
+ 'context': 'context',
+ 'license': ['GPL'],
+ 'version': '5',
+ 'stability': 'stable',
+ 'data': {'blob_size': 5},
+ })
+ os.utime('db/implementation/5/5', (5, 5))
+ volume['implementation'].create({
+ 'guid': '2',
+ 'context': 'context',
+ 'license': ['GPL'],
+ 'version': '2',
+ 'stability': 'stable',
+ 'data': {},
+ })
+ os.utime('db/implementation/2/2', (2, 2))
+ volume['implementation'].create({
+ 'guid': '3',
+ 'context': 'context',
+ 'license': ['GPL'],
+ 'version': '3',
+ 'stability': 'stable',
+ })
+ os.utime('db/implementation/3/3', (3, 3))
+ volume['implementation'].create({
+ 'guid': '4',
+ 'context': 'context',
+ 'license': ['GPL'],
+ 'version': '4',
+ 'stability': 'stable',
+ 'data': {'blob_size': 4, 'unpack_size': 44},
+ })
+ os.utime('db/implementation/4/4', (4, 4))
- self.statvfs.f_bfree = 11
- cache.ensure(1, 0)
- assert exists('cache/implementation/1')
- assert exists('cache/implementation/2')
- assert exists('cache/implementation/3')
+ cache = Cache(volume)
+ self.assertEqual(['5', '4', '1'], [i for i in cache])
- self.statvfs.f_bfree = 10
- cache.ensure(1, 0)
- assert not exists('cache/implementation/1')
- assert exists('cache/implementation/2')
- assert exists('cache/implementation/3')
+ def test_open_IgnoreClones(self):
+ volume = db.Volume('db', [Context, Implementation])
- self.statvfs.f_bfree = 11
- cache.ensure(3, 0)
- assert not exists('cache/implementation/1')
- assert not exists('cache/implementation/2')
- assert not exists('cache/implementation/3')
+ volume['context'].create({
+ 'guid': 'context',
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ volume['implementation'].create({
+ 'guid': 'impl',
+ 'context': 'context',
+ 'license': ['GPL'],
+ 'version': '1',
+ 'stability': 'stable',
+ 'data': {'blob_size': 1},
+ })
- self.statvfs.f_bfree = 10
- self.assertRaises(RuntimeError, cache.ensure, 1, 0)
+ cache = Cache(volume)
+ self.assertEqual(['impl'], [i for i in cache])
- def test_ensure_FailRightAway(self):
- self.touch(('cache/implementation/1/.unpack_size', '1', 1))
+ with file('db/context/co/context/clone', 'w') as f:
+ json.dump('impl', f)
+ cache = Cache(volume)
+ self.assertEqual([], [i for i in cache])
+
+ def test_ensure(self):
+ volume = db.Volume('db', [Context, Implementation])
+
+ volume['implementation'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
+ os.utime('db/implementation/1/1', (1, 1))
+ volume['implementation'].create({'data': {'blob_size': 2}, 'guid': '2', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
+ os.utime('db/implementation/2/2', (2, 2))
+ volume['implementation'].create({'data': {'blob_size': 3}, 'guid': '3', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
+ os.utime('db/implementation/3/3', (3, 3))
+ cache = Cache(volume)
cache_limit.value = 10
- self.statvfs.f_bfree = 10
+ self.statvfs.f_bfree = 11
- self.assertRaises(RuntimeError, cache.ensure, 2, 0)
- assert exists('cache/implementation/1')
+ self.assertRaises(RuntimeError, cache.ensure, 100, 0)
+ assert volume['implementation'].exists('1')
+ assert volume['implementation'].exists('2')
+ assert volume['implementation'].exists('3')
cache.ensure(1, 0)
- assert not exists('cache/implementation/1')
+ assert volume['implementation'].exists('1')
+ assert volume['implementation'].exists('2')
+ assert volume['implementation'].exists('3')
+
+ cache.ensure(2, 0)
+ assert not volume['implementation'].exists('1')
+ assert volume['implementation'].exists('2')
+ assert volume['implementation'].exists('3')
+
+ cache.ensure(4, 0)
+ assert not volume['implementation'].exists('2')
+ assert not volume['implementation'].exists('3')
+
+ self.assertRaises(RuntimeError, cache.ensure, 2, 0)
def test_ensure_ConsiderTmpSize(self):
- self.touch(('cache/implementation/1/.unpack_size', '1', 1))
+ volume = db.Volume('db', [Context, Implementation])
+ volume['implementation'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
+
+ cache = Cache(volume)
cache_limit.value = 10
self.statvfs.f_bfree = 10
- self.assertRaises(RuntimeError, cache.ensure, 2, 0)
- assert exists('cache/implementation/1')
+ self.assertRaises(RuntimeError, cache.ensure, 1, 11)
+ assert volume['implementation'].exists('1')
- cache.ensure(1, 0)
- assert not exists('cache/implementation/1')
+ cache.ensure(1, 10)
+ assert not volume['implementation'].exists('1')
def test_recycle(self):
ts = time.time()
- self.touch(('cache/implementation/1/.unpack_size', '1'))
- os.utime('cache/implementation/1', (ts - 1.5 * 86400, ts - 1.5 * 86400))
- self.touch(('cache/implementation/2/.unpack_size', '1'))
- os.utime('cache/implementation/2', (ts - 2.5 * 86400, ts - 2.5 * 86400))
- self.touch(('cache/implementation/3/.unpack_size', '1'))
- os.utime('cache/implementation/3', (ts - 3.5 * 86400, ts - 3.5 * 86400))
+
+ volume = db.Volume('db', [Context, Implementation])
+ volume['implementation'].create({'data': {'blob_size': 1}, 'guid': '1', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
+ os.utime('db/implementation/1/1', (ts - 1.5 * 86400, ts - 1.5 * 86400))
+ volume['implementation'].create({'data': {'blob_size': 1}, 'guid': '2', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
+ os.utime('db/implementation/2/2', (ts - 2.5 * 86400, ts - 2.5 * 86400))
+ volume['implementation'].create({'data': {'blob_size': 1}, 'guid': '3', 'context': 'context', 'version': '1', 'license': ['GPL'], 'stability': 'stable'})
+ os.utime('db/implementation/3/3', (ts - 3.5 * 86400, ts - 3.5 * 86400))
+ cache = Cache(volume)
cache_lifetime.value = 4
cache.recycle()
- assert exists('cache/implementation/1')
- assert exists('cache/implementation/2')
- assert exists('cache/implementation/3')
+ assert volume['implementation'].exists('1')
+ assert volume['implementation'].exists('2')
+ assert volume['implementation'].exists('3')
cache_lifetime.value = 3
cache.recycle()
- assert exists('cache/implementation/1')
- assert exists('cache/implementation/2')
- assert not exists('cache/implementation/3')
+ assert volume['implementation'].exists('1')
+ assert volume['implementation'].exists('2')
+ assert not volume['implementation'].exists('3')
cache_lifetime.value = 1
cache.recycle()
- assert not exists('cache/implementation/1')
- assert not exists('cache/implementation/2')
- assert not exists('cache/implementation/3')
-
- def test_recycle_CallEnsure(self):
- self.touch(('cache/implementation/1/.unpack_size', '1', 100))
- cache_limit.value = 10
- cache_lifetime.value = 0
+ assert not volume['implementation'].exists('1')
+ assert not volume['implementation'].exists('2')
+ assert not volume['implementation'].exists('3')
- self.statvfs.f_bfree = 100
cache.recycle()
- assert exists('cache/implementation/1')
- self.statvfs.f_bfree = 0
- cache.recycle()
- assert not exists('cache/implementation/1')
+ def test_checkin(self):
+ local_volume = self.start_online_client()
+ conn = IPCConnection()
+ self.statvfs.f_blocks = 0
- def test_RecycleBadDirs(self):
- cache_limit.value = 10
- self.statvfs.f_bfree = 10
- self.touch('cache/implementation/1/foo')
- self.touch('cache/implementation/2/bar')
- self.touch(('cache/implementation/3/.unpack_size', '1'))
- cache.ensure(1, 0)
- assert not exists('cache/implementation/1')
- assert not exists('cache/implementation/2')
- assert not exists('cache/implementation/3')
+ impl1 = conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = context1',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+ impl2 = conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = context2',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+ impl3 = conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = context3',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
- self.statvfs.f_bfree = 100
- self.touch('cache/implementation/1/foo')
- self.touch('cache/implementation/2/bar')
- cache.recycle()
- assert not exists('cache/implementation/1')
- assert not exists('cache/implementation/2')
+ conn.get(['context', 'context1'], cmd='launch')
+ self.assertEqual([impl1], [i for i in self.client_routes._cache])
+ assert local_volume['implementation'].exists(impl1)
+
+ conn.get(['context', 'context2'], cmd='launch')
+ self.assertEqual([impl2, impl1], [i for i in self.client_routes._cache])
+ assert local_volume['implementation'].exists(impl1)
+ assert local_volume['implementation'].exists(impl2)
+
+ conn.get(['context', 'context3'], cmd='launch')
+ self.assertEqual([impl3, impl2, impl1], [i for i in self.client_routes._cache])
+ assert local_volume['implementation'].exists(impl1)
+ assert local_volume['implementation'].exists(impl2)
+ assert local_volume['implementation'].exists(impl3)
+
+ def test_checkout(self):
+ local_volume = self.start_online_client()
+ conn = IPCConnection()
+ self.statvfs.f_blocks = 0
+
+ impl1 = conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+
+ conn.put(['context', 'context'], True, cmd='clone')
+ self.assertEqual([], [i for i in self.client_routes._cache])
+ assert local_volume['implementation'].exists(impl1)
+
+ conn.put(['context', 'context'], False, cmd='clone')
+ self.assertEqual([impl1], [i for i in self.client_routes._cache])
+ assert local_volume['implementation'].exists(impl1)
+
+ impl2 = conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = context',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+
+ shutil.rmtree('cache')
+ conn.put(['context', 'context'], True, cmd='clone')
+ self.assertEqual([impl1], [i for i in self.client_routes._cache])
+ assert local_volume['implementation'].exists(impl1)
+ assert local_volume['implementation'].exists(impl2)
+
+ conn.put(['context', 'context'], False, cmd='clone')
+ self.assertEqual([impl2, impl1], [i for i in self.client_routes._cache])
+ assert local_volume['implementation'].exists(impl1)
+ assert local_volume['implementation'].exists(impl2)
if __name__ == '__main__':
diff --git a/tests/units/client/clones.py b/tests/units/client/clones.py
deleted file mode 100755
index 974adca..0000000
--- a/tests/units/client/clones.py
+++ /dev/null
@@ -1,439 +0,0 @@
-#!/usr/bin/env python
-# sugar-lint: disable
-
-import os
-import shutil
-import hashlib
-from os.path import abspath, lexists, exists
-
-from __init__ import tests
-
-from sugar_network import db, model
-from sugar_network.model.user import User
-from sugar_network.model.context import Context
-from sugar_network.client import clones
-from sugar_network.toolkit import coroutine
-
-
-class CloneTest(tests.Test):
-
- def setUp(self):
- tests.Test.setUp(self)
- self.volume = db.Volume('local', [User, Context])
- self.job = None
-
- def tearDown(self):
- if self.job is not None:
- self.job.kill()
- self.volume.close()
- tests.Test.tearDown(self)
-
- def test_Inotify_NoPermissions(self):
- assert not exists('/foo/bar')
- inotify = clones._Inotify(self.volume['context'])
- inotify.setup(['/foo/bar'])
- assert not exists('/foo/bar')
-
- def test_Inotify_Walkthrough(self):
- self.touch('file')
- os.makedirs('activity-1')
- os.makedirs('activity-2/activity')
- self.touch('activity-3/activity/activity.info')
- self.touch('activity-4/activity/activity.info')
- self.touch('activity-5/activity/activity.info')
-
- found = []
- lost = []
-
- inotify = clones._Inotify(self.volume['context'])
- inotify.found = found.append
- inotify.lost = lost.append
- inotify.setup(['.'])
- self.job = coroutine.spawn(inotify.serve_forever)
- coroutine.sleep(1)
-
- self.assertEqual(
- sorted([
- tests.tmpdir + '/activity-3',
- tests.tmpdir + '/activity-4',
- tests.tmpdir + '/activity-5',
- ]),
- sorted(found))
- self.assertEqual([], lost)
- del found[:]
-
- with file('activity-4/activity/activity.info', 'w') as f:
- f.close()
- coroutine.sleep(.1)
- self.assertEqual([], found)
- self.assertEqual([], lost)
-
- with file('activity-2/activity/activity.info', 'w') as f:
- f.close()
- coroutine.sleep(.1)
- self.assertEqual([tests.tmpdir + '/activity-2'], found)
- self.assertEqual([], lost)
- del found[:]
-
- os.makedirs('activity-6/activity')
- coroutine.sleep(.1)
- self.assertEqual([], found)
- self.assertEqual([], lost)
-
- with file('activity-6/activity/activity.info', 'w') as f:
- f.close()
- coroutine.sleep(.1)
- self.assertEqual([tests.tmpdir + '/activity-6'], found)
- self.assertEqual([], lost)
- del found[:]
-
- os.unlink('activity-5/activity/activity.info')
- coroutine.sleep(.1)
- self.assertEqual([], found)
- self.assertEqual([tests.tmpdir + '/activity-5'], lost)
- del lost[:]
-
- shutil.rmtree('activity-5')
- coroutine.sleep(.1)
- self.assertEqual([], found)
- self.assertEqual([], lost)
-
- shutil.rmtree('activity-4')
- coroutine.sleep(.1)
- coroutine.sleep(.1)
- self.assertEqual([], found)
- self.assertEqual([tests.tmpdir + '/activity-4'], lost)
- del lost[:]
-
- def test_Inotify_Moves(self):
- self.touch('Activities/activity/activity/activity.info')
-
- found = []
- lost = []
-
- inotify = clones._Inotify(self.volume['context'])
- inotify.found = found.append
- inotify.lost = lost.append
- inotify.setup(['Activities'])
- self.job = coroutine.spawn(inotify.serve_forever)
- coroutine.sleep(.1)
-
- shutil.move('Activities/activity', '.')
- coroutine.sleep(.1)
- self.assertEqual([tests.tmpdir + '/Activities/activity'], found)
- self.assertEqual([tests.tmpdir + '/Activities/activity'], lost)
- del found[:]
- del lost[:]
- shutil.move('activity', 'Activities/')
- coroutine.sleep(.1)
- self.assertEqual([tests.tmpdir + '/Activities/activity'], found)
- self.assertEqual([], lost)
- del found[:]
- del lost[:]
-
- shutil.move('Activities/activity/activity', 'Activities/activity/activity2')
- coroutine.sleep(.1)
- self.assertEqual([], found)
- self.assertEqual([tests.tmpdir + '/Activities/activity'], lost)
- del found[:]
- del lost[:]
- shutil.move('Activities/activity/activity2', 'Activities/activity/activity')
- coroutine.sleep(.1)
- self.assertEqual([tests.tmpdir + '/Activities/activity'], found)
- self.assertEqual([], lost)
- del found[:]
- del lost[:]
-
- shutil.move('Activities/activity/activity/activity.info', 'Activities/activity/activity/activity.info2')
- coroutine.sleep(.1)
- self.assertEqual([], found)
- self.assertEqual([tests.tmpdir + '/Activities/activity'], lost)
- del found[:]
- del lost[:]
- shutil.move('Activities/activity/activity/activity.info2', 'Activities/activity/activity/activity.info')
- coroutine.sleep(.1)
- self.assertEqual([tests.tmpdir + '/Activities/activity'], found)
- self.assertEqual([], lost)
- del found[:]
- del lost[:]
-
- def test_Checkin_Create(self):
- self.job = coroutine.spawn(clones.monitor,
- self.volume['context'], ['Activities'])
- coroutine.sleep()
-
- self.volume['context'].create({
- 'guid': 'org.sugarlabs.HelloWorld',
- 'type': 'activity',
- 'title': {'en': 'title'},
- 'summary': {'en': 'summary'},
- 'description': {'en': 'description'},
- 'user': [tests.UID],
- })
-
- os.makedirs('Activities/activity/activity')
- coroutine.sleep(1)
- self.touch('Activities/activity/activity/icon.svg')
- self.touch(('Activities/activity/activity/mimetypes.xml', [
- '<?xml version="1.0" encoding="UTF-8"?>',
- '<mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">',
- '<mime-type type="application/x-foo-bar">',
- '<comment xml:lang="en">foo-bar</comment>',
- '<glob pattern="*.foo"/>',
- '</mime-type>',
- '</mime-info>',
- ]))
- spec = ['[Activity]',
- 'name = HelloWorld',
- 'activity_version = 1',
- 'bundle_id = org.sugarlabs.HelloWorld',
- 'exec = sugar-activity activity.HelloWorldActivity',
- 'icon = icon',
- 'license = GPLv2+',
- 'mime_types = foo/bar',
- ]
- with file('Activities/activity/activity/activity.info', 'w') as f:
- coroutine.sleep(1)
- f.write('\n'.join(spec))
- coroutine.sleep(1)
-
- hashed_path = hashlib.sha1(tests.tmpdir + '/Activities/activity').hexdigest()
- assert exists('clones/checkin/' + hashed_path)
- self.assertEqual(
- abspath('Activities/activity'),
- os.readlink('clones/context/org.sugarlabs.HelloWorld/' + hashed_path))
- self.assertEqual(
- {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en': 'title'}, 'favorite': False, 'clone': 2},
- self.volume['context'].get('org.sugarlabs.HelloWorld').properties(['guid', 'title', 'favorite', 'clone']))
- assert exists('share/icons/sugar/scalable/mimetypes/foo-bar.svg')
- self.assertEqual(
- tests.tmpdir + '/Activities/activity/activity/icon.svg',
- os.readlink('share/icons/sugar/scalable/mimetypes/foo-bar.svg'))
- assert exists('share/mime/packages/%s.xml' % hashed_path)
- self.assertEqual(
- tests.tmpdir + '/Activities/activity/activity/mimetypes.xml',
- os.readlink('share/mime/packages/%s.xml' % hashed_path))
- assert exists('share/mime/application/x-foo-bar.xml')
-
- def test_Checkin_Copy(self):
- self.job = coroutine.spawn(clones.monitor,
- self.volume['context'], ['Activities'])
- coroutine.sleep()
-
- self.volume['context'].create({
- 'guid': 'org.sugarlabs.HelloWorld',
- 'type': 'activity',
- 'title': {'en': 'title'},
- 'summary': {'en': 'summary'},
- 'description': {'en': 'description'},
- 'user': [tests.UID],
- })
-
- self.touch(('activity/activity/activity.info', [
- '[Activity]',
- 'name = HelloWorld',
- 'activity_version = 1',
- 'bundle_id = org.sugarlabs.HelloWorld',
- 'exec = sugar-activity activity.HelloWorldActivity',
- 'icon = activity-helloworld',
- 'license = GPLv2+',
- ]))
- shutil.copytree('activity', 'Activities/activity')
- coroutine.sleep(1)
-
- hashed_path = hashlib.sha1(tests.tmpdir + '/Activities/activity').hexdigest()
- assert exists('clones/checkin/' + hashed_path)
- self.assertEqual(
- abspath('Activities/activity'),
- os.readlink('clones/context/org.sugarlabs.HelloWorld/' + hashed_path))
- self.assertEqual(
- {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en': 'title'}, 'favorite': False, 'clone': 2},
- self.volume['context'].get('org.sugarlabs.HelloWorld').properties(['guid', 'title', 'favorite', 'clone']))
-
- def test_Checkin_Hardlink(self):
- self.job = coroutine.spawn(clones.monitor,
- self.volume['context'], ['Activities'])
- coroutine.sleep()
-
- self.volume['context'].create({
- 'guid': 'org.sugarlabs.HelloWorld',
- 'type': 'activity',
- 'title': {'en': 'title'},
- 'summary': {'en': 'summary'},
- 'description': {'en': 'description'},
- 'user': [tests.UID],
- })
-
- self.touch(('activity/activity/activity.info', [
- '[Activity]',
- 'name = HelloWorld',
- 'activity_version = 1',
- 'bundle_id = org.sugarlabs.HelloWorld',
- 'exec = sugar-activity activity.HelloWorldActivity',
- 'icon = activity-helloworld',
- 'license = GPLv2+',
- ]))
- os.makedirs('Activities/activity/activity')
- coroutine.sleep(1)
- os.link('activity/activity/activity.info', 'Activities/activity/activity/activity.info')
- coroutine.sleep(1)
-
- hashed_path = hashlib.sha1(tests.tmpdir + '/Activities/activity').hexdigest()
- assert exists('clones/checkin/' + hashed_path)
- self.assertEqual(
- abspath('Activities/activity'),
- os.readlink('clones/context/org.sugarlabs.HelloWorld/' + hashed_path))
- self.assertEqual(
- {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en': 'title'}, 'favorite': False, 'clone': 2},
- self.volume['context'].get('org.sugarlabs.HelloWorld').properties(['guid', 'title', 'favorite', 'clone']))
-
- def test_OfflineCheckin(self):
- self.job = coroutine.spawn(clones.monitor,
- self.volume['context'], ['Activities'])
- coroutine.sleep()
-
- self.touch(('Activities/activity/activity/activity.info', [
- '[Activity]',
- 'name = HelloWorld',
- 'activity_version = 1',
- 'bundle_id = org.sugarlabs.HelloWorld',
- 'exec = sugar-activity activity.HelloWorldActivity',
- 'icon = activity-helloworld',
- 'license = GPLv2+',
- ]))
- coroutine.sleep(1)
-
- hashed_path = hashlib.sha1(tests.tmpdir + '/Activities/activity').hexdigest()
- assert exists('clones/checkin/' + hashed_path)
- self.assertEqual(
- abspath('Activities/activity'),
- os.readlink('clones/context/org.sugarlabs.HelloWorld/' + hashed_path))
-
- self.assertEqual(
- {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en-us': 'HelloWorld'}, 'favorite': False, 'clone': 2},
- self.volume['context'].get('org.sugarlabs.HelloWorld').properties(['guid', 'title', 'favorite', 'clone']))
-
- def test_Checkout(self):
- self.job = coroutine.spawn(clones.monitor,
- self.volume['context'], ['Activities'])
-
- self.volume['context'].create({
- 'guid': 'org.sugarlabs.HelloWorld',
- 'type': 'activity',
- 'title': {'en': 'title'},
- 'summary': {'en': 'summary'},
- 'description': {'en': 'description'},
- 'user': [tests.UID],
- })
-
- self.touch('Activities/activity/activity/icon.svg')
- self.touch(('Activities/activity/activity/mimetypes.xml', [
- '<?xml version="1.0" encoding="UTF-8"?>',
- '<mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">',
- '<mime-type type="application/x-foo-bar">',
- '<comment xml:lang="en">foo-bar</comment>',
- '<glob pattern="*.foo"/>',
- '</mime-type>',
- '</mime-info>',
- ]))
- self.touch(('Activities/activity/activity/activity.info', [
- '[Activity]',
- 'name = HelloWorld',
- 'activity_version = 1',
- 'bundle_id = org.sugarlabs.HelloWorld',
- 'exec = sugar-activity activity.HelloWorldActivity',
- 'icon = icon',
- 'license = GPLv2+',
- 'mime_types = foo/bar',
- ]))
- coroutine.sleep(1)
-
- hashed_path = hashlib.sha1(tests.tmpdir + '/Activities/activity').hexdigest()
- assert exists('clones/checkin/' + hashed_path)
- assert exists('clones/context/org.sugarlabs.HelloWorld/' + hashed_path)
- self.assertEqual(
- {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en': 'title'}, 'favorite': False, 'clone': 2},
- self.volume['context'].get('org.sugarlabs.HelloWorld').properties(['guid', 'title', 'favorite', 'clone']))
- assert exists('share/icons/sugar/scalable/mimetypes/foo-bar.svg')
- assert exists('share/mime/packages/%s.xml' % hashed_path)
- assert exists('share/mime/application/x-foo-bar.xml')
-
- shutil.rmtree('Activities/activity')
- coroutine.sleep(1)
-
- assert not exists('clones/checkin/' + hashed_path)
- assert not exists('clones/context/org.sugarlabs.HelloWorld/' + hashed_path)
- self.assertEqual(
- {'guid': 'org.sugarlabs.HelloWorld', 'title': {'en': 'title'}, 'favorite': False, 'clone': 0},
- self.volume['context'].get('org.sugarlabs.HelloWorld').properties(['guid', 'title', 'favorite', 'clone']))
- assert not lexists('share/icons/sugar/scalable/mimetypes/foo-bar.svg')
- assert not lexists('share/mime/packages/%s.xml' % hashed_path)
- assert not lexists('share/mime/application/x-foo-bar.xml')
-
- def test_Sync(self):
- volume = db.Volume('client', model.RESOURCES)
- volume['context'].create({
- 'guid': 'context1',
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'clone': 0,
- })
- volume['context'].create({
- 'guid': 'context2',
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'clone': 1,
- })
- volume['context'].create({
- 'guid': 'context3',
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'clone': 2,
- })
-
- os.makedirs('Activities')
- os.utime('Activities', (volume['context'].mtime + 1, volume['context'].mtime + 1))
-
- self.touch(clones._context_path('context1', 'clone'))
- self.touch(clones._context_path('context2', 'clone'))
- clones.populate(volume['context'], ['Activities'])
-
- self.assertEqual(0, volume['context'].get('context1')['clone'])
- self.assertEqual(2, volume['context'].get('context2')['clone'])
- self.assertEqual(0, volume['context'].get('context3')['clone'])
-
- def test_SyncByMtime(self):
- volume = db.Volume('client', model.RESOURCES)
- volume['context'].create({
- 'guid': 'context',
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'clone': 2,
- })
-
- os.makedirs('Activities')
- os.utime('Activities', (2, 2))
-
- volume['context'].mtime = 3
- clones.populate(volume['context'], ['Activities'])
- self.assertEqual(2, volume['context'].get('context')['clone'])
-
- volume['context'].mtime = 2
- clones.populate(volume['context'], ['Activities'])
- self.assertEqual(2, volume['context'].get('context')['clone'])
-
- volume['context'].mtime = 1
- clones.populate(volume['context'], ['Activities'])
- self.assertEqual(0, volume['context'].get('context')['clone'])
-
-
-if __name__ == '__main__':
- tests.main()
diff --git a/tests/units/client/implementations.py b/tests/units/client/implementations.py
new file mode 100755
index 0000000..af12d01
--- /dev/null
+++ b/tests/units/client/implementations.py
@@ -0,0 +1,368 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+import os
+import imp
+import json
+import time
+import pickle
+import shutil
+import zipfile
+import logging
+from cStringIO import StringIO
+from os.path import exists, dirname
+
+from __init__ import tests
+
+from sugar_network.client import journal, implementations
+from sugar_network.toolkit import coroutine, enforce, lsb_release
+from sugar_network.node import obs
+from sugar_network.model.user import User
+from sugar_network.model.context import Context
+from sugar_network.model.implementation import Implementation
+from sugar_network.client import IPCConnection, packagekit, solver
+from sugar_network.toolkit import http, Option
+from sugar_network import client
+
+
+class Implementations(tests.Test):
+
+ def setUp(self, fork_num=0):
+ tests.Test.setUp(self, fork_num)
+ self.override(obs, 'get_repos', lambda: [])
+ self.override(obs, 'presolve', lambda *args: None)
+
+ def test_InstallDeps(self):
+ self.start_online_client()
+ conn = IPCConnection()
+
+ blob = self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'requires = dep1; dep2',
+ ]])
+ impl = conn.upload(['implementation'], StringIO(blob), cmd='release', initial=True)
+
+ conn.post(['context'], {
+ 'guid': 'dep1',
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'aliases': {
+ lsb_release.distributor_id(): {
+ 'status': 'success',
+ 'binary': [['dep1.bin']],
+ },
+ },
+ })
+ conn.post(['context'], {
+ 'guid': 'dep2',
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'aliases': {
+ lsb_release.distributor_id(): {
+ 'status': 'success',
+ 'binary': [['dep2.bin']],
+ },
+ },
+ })
+
+ def resolve(names):
+ with file('resolve', 'a') as f:
+ pickle.dump(names, f)
+ return dict([(i, {'name': i, 'pk_id': i, 'version': '0', 'arch': '*', 'installed': i == 'dep1.bin'}) for i in names])
+
+ def install(packages):
+ with file('install', 'a') as f:
+ pickle.dump([i['name'] for i in packages], f)
+
+ self.override(packagekit, 'resolve', resolve)
+ self.override(packagekit, 'install', install)
+ conn.get(['context', 'bundle_id'], cmd='launch')
+
+ with file('resolve') as f:
+ deps = [pickle.load(f),
+ pickle.load(f),
+ ]
+ self.assertRaises(EOFError, pickle.load, f)
+ self.assertEqual(
+ sorted([['dep1.bin'], ['dep2.bin']]),
+ sorted(deps))
+ with file('install') as f:
+ self.assertEqual(['dep2.bin'], pickle.load(f))
+ self.assertRaises(EOFError, pickle.load, f)
+
+ def test_SetExecPermissions(self):
+ self.start_online_client()
+ conn = IPCConnection()
+
+ blob = self.zips(
+ ['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ]],
+ 'TestActivity/activity/foo',
+ 'TestActivity/bin/bar',
+ 'TestActivity/bin/probe',
+ 'TestActivity/file1',
+ 'TestActivity/test/file2',
+ )
+ impl = conn.upload(['implementation'], StringIO(blob), cmd='release', initial=True)
+
+ conn.put(['context', 'bundle_id'], True, cmd='clone')
+
+ path = 'client/implementation/%s/%s/data.blob/' % (impl[:2], impl)
+ assert os.access(path + 'activity/foo', os.X_OK)
+ assert os.access(path + 'bin/bar', os.X_OK)
+ assert os.access(path + 'bin/probe', os.X_OK)
+ assert not os.access(path + 'file1', os.X_OK)
+ assert not os.access(path + 'test/file2', os.X_OK)
+
+ def test_ReuseCachedSolution(self):
+ self.start_online_client()
+ conn = IPCConnection()
+
+ impl = conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+ solution = ['http://127.0.0.1:8888', ['stable'], [{
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'command': ['true'],
+ 'context': 'bundle_id',
+ 'path': tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl),
+ 'extract': 'TestActivity',
+ 'guid': impl,
+ }]]
+ cached_path = 'cache/solutions/bu/bundle_id'
+
+ conn.get(['context', 'bundle_id'], cmd='launch')
+ self.assertEqual(solution, json.load(file(cached_path)))
+
+ os.utime(cached_path, (0, 0))
+ conn.get(['context', 'bundle_id'], cmd='launch')
+ self.assertEqual(solution, json.load(file(cached_path)))
+ assert os.stat(cached_path).st_mtime == 0
+
+ def test_InvalidateCachedSolutions(self):
+ self.start_online_client()
+ conn = IPCConnection()
+
+ conn.post(['context'], {
+ 'guid': 'bundle_id',
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ solution = json.dumps(['http://127.0.0.1:8888', ['stable'], [{
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'command': ['true'],
+ 'context': 'bundle_id',
+ 'path': tests.tmpdir,
+ 'guid': 'impl',
+ }]])
+ cached_path = 'cache/solutions/bu/bundle_id'
+ self.touch([cached_path, solution])
+ cached_mtime = int(os.stat(cached_path).st_mtime)
+
+ conn.get(['context', 'bundle_id'], cmd='launch')
+
+ client.api_url.value = 'fake'
+ self.assertRaises(http.NotFound, conn.get, ['context', 'bundle_id'], cmd='launch')
+ self.assertEqual(solution, file(cached_path).read())
+
+ client.api_url.value = 'http://127.0.0.1:8888'
+ conn.get(['context', 'bundle_id'], cmd='launch')
+
+ self.client_routes._node_mtime = cached_mtime + 1
+ self.assertRaises(http.NotFound, conn.get, ['context', 'bundle_id'], cmd='launch')
+ self.assertEqual(solution, file(cached_path).read())
+
+ self.client_routes._node_mtime = cached_mtime
+ conn.get(['context', 'bundle_id'], cmd='launch')
+
+ self.override(packagekit, 'mtime', lambda: cached_mtime + 1)
+ self.assertRaises(http.NotFound, conn.get, ['context', 'bundle_id'], cmd='launch')
+ self.assertEqual(solution, file(cached_path).read())
+
+ self.override(packagekit, 'mtime', lambda: cached_mtime)
+ conn.get(['context', 'bundle_id'], cmd='launch')
+
+ self.touch(('config', [
+ '[stabilities]',
+ 'bundle_id = buggy',
+ ]))
+ Option.load(['config'])
+ self.assertRaises(http.NotFound, conn.get, ['context', 'bundle_id'], cmd='launch')
+ self.assertEqual(solution, file(cached_path).read())
+
+ self.touch(('config', [
+ '[stabilities]',
+ 'bundle_id = stable',
+ ]))
+ Option.load(['config'])
+ conn.get(['context', 'bundle_id'], cmd='launch')
+
+ def test_DeliberateReuseCachedSolutionInOffline(self):
+ self.start_online_client()
+ conn = IPCConnection()
+
+ conn.post(['context'], {
+ 'guid': 'bundle_id',
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ solution = json.dumps(['http://127.0.0.1:8888', ['stable'], [{
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'command': ['true'],
+ 'context': 'bundle_id',
+ 'path': tests.tmpdir,
+ 'guid': 'impl',
+ }]])
+ self.touch(['cache/solutions/bu/bundle_id', solution])
+
+ client.api_url.value = 'fake'
+ self.assertRaises(http.NotFound, conn.get, ['context', 'bundle_id'], cmd='launch')
+
+ self.node.stop()
+ coroutine.sleep(.1)
+ conn.get(['context', 'bundle_id'], cmd='launch')
+
+ def test_StabilityPreferences(self):
+ self.start_online_client()
+ conn = IPCConnection()
+
+ conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+ conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ 'stability = testing',
+ ]])), cmd='release')
+ conn.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 3',
+ 'license = Public Domain',
+ 'stability = buggy',
+ ]])), cmd='release')
+ cached_path = 'cache/solutions/bu/bundle_id'
+
+ conn.get(['context', 'bundle_id'], cmd='launch')
+ self.assertEqual('1', json.load(file(cached_path))[2][0]['version'])
+
+ self.touch(('config', [
+ '[stabilities]',
+ 'bundle_id = testing',
+ ]))
+ Option.load(['config'])
+ conn.get(['context', 'bundle_id'], cmd='launch')
+ self.assertEqual('2', json.load(file(cached_path))[2][0]['version'])
+
+ self.touch(('config', [
+ '[stabilities]',
+ 'bundle_id = testing buggy',
+ ]))
+ Option.load(['config'])
+ conn.get(['context', 'bundle_id'], cmd='launch')
+ self.assertEqual('3', json.load(file(cached_path))[2][0]['version'])
+
+ self.touch(('config', [
+ '[stabilities]',
+ 'default = testing',
+ ]))
+ Option.load(['config'])
+ conn.get(['context', 'bundle_id'], cmd='launch')
+ self.assertEqual('2', json.load(file(cached_path))[2][0]['version'])
+
+ def test_LaunchContext(self):
+ self.start_online_client()
+ conn = IPCConnection()
+
+ app = conn.upload(['implementation'], StringIO(self.zips(
+ ['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = activity',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ]],
+ ['TestActivity/bin/activity', [
+ '#!/bin/sh',
+ 'cat $2',
+ ]],
+ )), cmd='release', initial=True)
+
+ conn.post(['context'], {
+ 'guid': 'document',
+ 'type': 'content',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ doc = conn.post(['implementation'], {
+ 'context': 'document',
+ 'license': 'GPLv3+',
+ 'version': '1',
+ 'stability': 'stable',
+ })
+ self.node_volume['implementation'].update(doc, {'data': {
+ 'mime_type': 'application/octet-stream',
+ 'blob': StringIO('content'),
+ }})
+
+ conn.get(['context', 'document'], cmd='launch', context='bundle_id')
+ coroutine.sleep(.1)
+ self.assertEqual('content', file('.sugar/default/logs/bundle_id.log').read())
+
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/client/injector.py b/tests/units/client/injector.py
deleted file mode 100755
index 786efa2..0000000
--- a/tests/units/client/injector.py
+++ /dev/null
@@ -1,1059 +0,0 @@
-#!/usr/bin/env python
-# sugar-lint: disable
-
-import os
-import imp
-import json
-import time
-import pickle
-import shutil
-import zipfile
-import logging
-from cStringIO import StringIO
-from os.path import exists, dirname
-
-from __init__ import tests
-
-from sugar_network.client import journal
-from sugar_network.toolkit import coroutine, enforce, pipe as pipe_, lsb_release
-from sugar_network.node import obs
-from sugar_network.model.user import User
-from sugar_network.model.context import Context
-from sugar_network.model.implementation import Implementation
-from sugar_network.client import IPCConnection, packagekit, injector, clones, solver
-from sugar_network.toolkit import Option
-from sugar_network import client
-
-
-class InjectorTest(tests.Test):
-
- def setUp(self, fork_num=0):
- tests.Test.setUp(self, fork_num)
- self.override(pipe_, 'trace', lambda *args: None)
- self.override(obs, 'get_repos', lambda: [])
- self.override(obs, 'presolve', lambda *args: None)
-
- def test_clone(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- pipe = injector.clone(context)
- log_path = tests.tmpdir + '/.sugar/default/logs/%s.log' % context
- self.assertEqual([
- {'state': 'fork', 'context': context},
- {'state': 'analyze', 'context': context},
- {'state': 'failure', 'context': context, 'log_path': log_path, 'trace': None, 'error_type': 'RuntimeError', 'error': """\
-Can't find all required implementations:
-- %s -> (problem)
- No known implementations at all""" % context}],
- [i for i in pipe])
-
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'extract': 'topdir',
- },
- },
- }})
-
- pipe = injector.clone(context)
- log_path = tests.tmpdir + '/.sugar/default/logs/%s_1.log' % context
- self.assertEqual([
- {'state': 'fork', 'context': context},
- {'state': 'analyze', 'context': context},
- {'state': 'solved', 'context': context},
- {'state': 'download', 'context': context},
- {'state': 'failure', 'context': context, 'error': 'BLOB does not exist', 'log_path': log_path, 'trace': None,
- 'error_type': 'NotFound',
- 'solution': [{'name': 'title', 'prefix': 'topdir', 'version': '1', 'command': ['echo'], 'context': context, 'id': impl, 'stability': 'stable'}],
- },
- ][-1],
- [i for i in pipe][-1])
- assert not exists('cache/implementation/%s' % impl)
-
- spec = '\n'.join([
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license=Public Domain',
- ])
- self.touch((
- 'master/implementation/%s/%s/data.blob' % (impl[:2], impl),
- self.zips(['topdir/activity/activity.info', spec]),
- ))
-
- pipe = injector.clone(context)
- log_path = tests.tmpdir + '/.sugar/default/logs/%s_2.log' % context
- self.assertEqual([
- {'state': 'fork', 'context': context},
- {'state': 'analyze', 'context': context},
- {'state': 'solved', 'context': context},
- {'state': 'download', 'context': context},
- {'state': 'ready', 'context': context},
- {'state': 'exit', 'context': context},
- ],
- [i for i in pipe])
- assert exists('cache/implementation/%s' % impl)
- self.assertEqual(spec, file('Activities/topdir/activity/activity.info').read())
-
- def test_clone_impl(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'extract': 'topdir',
- },
- },
- 'blob': StringIO(self.zips(['topdir/probe', [
- 'probe',
- ]])),
- }})
-
- pipe = injector.clone_impl(context)
- log_path = tests.tmpdir + '/.sugar/default/logs/%s.log' % context
- self.assertEqual([
- {'state': 'fork', 'context': context},
- {'state': 'download', 'context': context},
- {'state': 'exit', 'context': context},
- ],
- [i for i in pipe])
- assert exists('cache/implementation/%s' % impl)
- assert exists('Activities/topdir/probe')
- __, __, (solution,) = json.load(file('cache/solutions/%s/%s' % (context[:2], context)))
- self.assertEqual(tests.tmpdir + '/Activities/topdir', solution['path'])
- self.assertEqual('probe', file('Activities/topdir/probe').read())
-
- def test_clone_CachedSolutionPointsToClonedPath(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'extract': 'topdir',
- },
- },
- 'blob': StringIO(self.zips(['topdir/probe', [
- 'probe',
- ]])),
- }})
-
- for event in injector.clone(context):
- pass
- self.assertEqual('exit', event['state'])
- __, __, (solution,) = json.load(file('cache/solutions/%s/%s' % (context[:2], context)))
- self.assertEqual(tests.tmpdir + '/Activities/topdir', solution['path'])
-
- def test_launch_Online(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'extract': 'TestActivitry',
- },
- },
- 'blob': StringIO(self.zips(['TestActivitry/activity/activity.info', [
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license=Public Domain',
- ]])),
- }})
-
- self.override(journal, 'create_activity_id', lambda: 'activity_id')
- pipe = injector.launch(context)
-
- log_path = tests.tmpdir + '/.sugar/default/logs/%s.log' % context
- self.assertEqual([
- {'state': 'fork', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'analyze', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'solved', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'download', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'ready', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'exec', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'exit', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- ],
- [i for i in pipe])
- self.assertEqual([client.api_url.value, ['stable'], [{
- 'command': ['true'],
- 'context': context,
- 'id': impl,
- 'name': 'title',
- 'path': tests.tmpdir + '/cache/implementation/%s/TestActivitry' % impl,
- 'prefix': 'TestActivitry',
- 'stability': 'stable',
- 'version': '1',
- }]],
- json.load(file('cache/solutions/%s/%s' % (context[:2], context))))
-
- impl_2 = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '2',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl_2, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'extract': 'TestActivitry',
- },
- },
- 'blob': StringIO(self.zips(['TestActivitry/activity/activity.info', [
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license=Public Domain',
- ]])),
- }})
-
- shutil.rmtree('cache', ignore_errors=True)
- pipe = injector.launch(context)
- log_path = tests.tmpdir + '/.sugar/default/logs/%s_1.log' % context
- self.assertEqual([
- {'state': 'fork', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'analyze', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'solved', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'download', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'ready', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'exec', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- {'state': 'exit', 'context': context, 'color': None, 'activity_id': 'activity_id'},
- ],
- [i for i in pipe])
- self.assertEqual([client.api_url.value, ['stable'], [{
- 'command': ['true'],
- 'context': context,
- 'id': impl_2,
- 'name': 'title',
- 'path': tests.tmpdir + '/cache/implementation/%s/TestActivitry' % impl_2,
- 'prefix': 'TestActivitry',
- 'stability': 'stable',
- 'version': '2',
- }]],
- json.load(file('cache/solutions/%s/%s' % (context[:2], context))))
-
- def test_launch_Offline(self):
- self.touch(('Activities/activity/activity/activity.info', [
- '[Activity]',
- 'name = title',
- 'bundle_id = bundle_id',
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]))
-
- home_volume = self.start_offline_client()
- monitor = coroutine.spawn(clones.monitor, home_volume['context'], ['Activities'])
- coroutine.sleep()
-
- pipe = injector.launch('bundle_id', activity_id='activity_id')
- log_path = tests.tmpdir + '/.sugar/default/logs/bundle_id.log'
- self.assertEqual([
- {'state': 'fork', 'context': 'bundle_id', 'color': None, 'activity_id': 'activity_id'},
- {'state': 'analyze', 'context': 'bundle_id', 'color': None, 'activity_id': 'activity_id'},
- {'state': 'solved', 'context': 'bundle_id', 'color': None, 'activity_id': 'activity_id'},
- {'state': 'ready', 'context': 'bundle_id', 'color': None, 'activity_id': 'activity_id'},
- {'state': 'exec', 'context': 'bundle_id', 'color': None, 'activity_id': 'activity_id'},
- {'state': 'exit', 'context': 'bundle_id', 'color': None, 'activity_id': 'activity_id'},
- ],
- [i for i in pipe])
- self.assertEqual([client.api_url.value, ['stable'], [{
- 'command': ['true'],
- 'context': 'bundle_id',
- 'id': tests.tmpdir + '/Activities/activity',
- 'name': 'title',
- 'path': tests.tmpdir + '/Activities/activity',
- 'spec': tests.tmpdir + '/Activities/activity/activity/activity.info',
- 'stability': 'stable',
- 'version': '1',
- }]],
- json.load(file('cache/solutions/bu/bundle_id')))
-
- def test_InstallDeps(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'extract': 'topdir',
- 'requires': {
- 'dep1': {},
- 'dep2': {},
- },
- },
- },
- 'blob': StringIO(self.zips(['topdir/probe', [
- 'probe',
- ]])),
- }})
-
- conn.post(['context'], {
- 'guid': 'dep1',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep1.bin']],
- },
- },
- })
- conn.post(['context'], {
- 'guid': 'dep2',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep2.bin']],
- },
- },
- })
-
- def resolve(names):
- with file('resolve', 'a') as f:
- pickle.dump(names, f)
- return dict([(i, {'name': i, 'pk_id': i, 'version': '0', 'arch': '*', 'installed': i == 'dep1.bin'}) for i in names])
-
- def install(packages):
- with file('install', 'a') as f:
- pickle.dump([i['name'] for i in packages], f)
-
- self.override(packagekit, 'resolve', resolve)
- self.override(packagekit, 'install', install)
-
- pipe = injector.launch(context)
- self.assertEqual('exit', [i for i in pipe][-1].get('state'))
- with file('resolve') as f:
- deps = [pickle.load(f),
- pickle.load(f),
- ]
- self.assertRaises(EOFError, pickle.load, f)
- self.assertEqual(
- sorted([['dep1.bin'], ['dep2.bin']]),
- sorted(deps))
- with file('install') as f:
- self.assertEqual(['dep2.bin'], pickle.load(f))
- self.assertRaises(EOFError, pickle.load, f)
-
- def test_SolutionsCache_Set(self):
- solution = [{'name': 'name', 'context': 'context', 'id': 'id', 'version': 'version'}]
- self.override(client, 'IPCConnection', lambda: _FakeConnection(True))
- self.override(solver, 'solve', lambda *args: solution)
-
- self.assertEqual((solution, ['stable']), injector._solve('context'))
-
- def test_SolutionsCache_InvalidateByAPIUrl(self):
- solution = [{'name': 'name', 'context': 'context', 'id': 'id', 'version': 'version'}]
- self.override(client, 'IPCConnection', lambda: _FakeConnection(True))
- self.override(solver, 'solve', lambda *args: solution)
- cached_path = 'cache/solutions/co/context'
-
- solution2 = [{'name': 'name2', 'context': 'context2', 'id': 'id2', 'version': 'version2'}]
- self.touch((cached_path, json.dumps([client.api_url.value, ['stable'], solution2])))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- client.api_url.value = 'fake'
- self.assertEqual((solution, ['stable']), injector._solve('context'))
-
- def test_SolutionsCache_InvalidateByMtime(self):
- solution = [{'name': 'name', 'context': 'context', 'id': 'id', 'version': 'version'}]
- self.override(client, 'IPCConnection', lambda: _FakeConnection(True))
- self.override(solver, 'solve', lambda *args: solution)
- cached_path = 'cache/solutions/co/context'
-
- solution2 = [{'name': 'name2', 'context': 'context2', 'id': 'id2', 'version': 'version2'}]
- injector.invalidate_solutions(1)
- self.touch((cached_path, json.dumps([client.api_url.value, ['stable'], solution2])))
- os.utime(cached_path, (1, 1))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- os.utime(cached_path, (2, 2))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- injector.invalidate_solutions(3)
- self.assertEqual((solution, ['stable']), injector._solve('context'))
-
- def test_SolutionsCache_InvalidateByPMSMtime(self):
- solution = [{'name': 'name', 'context': 'context', 'id': 'id', 'version': 'version'}]
- self.override(client, 'IPCConnection', lambda: _FakeConnection(True))
- self.override(solver, 'solve', lambda *args: solution)
- cached_path = 'cache/solutions/co/context'
-
- injector._pms_path = 'pms'
- self.touch('pms')
- os.utime('pms', (1, 1))
- solution2 = [{'name': 'name2', 'context': 'context2', 'id': 'id2', 'version': 'version2'}]
- self.touch((cached_path, json.dumps([client.api_url.value, ['stable'], solution2])))
- os.utime(cached_path, (1, 1))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- os.utime(cached_path, (2, 2))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- os.utime('pms', (3, 3))
- self.assertEqual((solution, ['stable']), injector._solve('context'))
-
- def test_SolutionsCache_DeliberateReuseInOffline(self):
- solution1 = [{'name': 'name', 'context': 'context', 'id': 'id', 'version': 'version'}]
- solution2 = [{'name': 'name2', 'context': 'context2', 'id': 'id2', 'version': 'version2'}]
- self.override(solver, 'solve', lambda *args: solution1)
- cached_path = 'cache/solutions/co/context'
-
- self.override(client, 'IPCConnection', lambda: _FakeConnection(True))
- self.touch((cached_path, json.dumps([client.api_url.value, ['stable'], solution2])))
- os.utime(cached_path, (1, 1))
- injector.invalidate_solutions(2)
- self.assertEqual((solution1, ['stable']), injector._solve('context'))
-
- self.override(client, 'IPCConnection', lambda: _FakeConnection(False))
- self.touch((cached_path, json.dumps([client.api_url.value, ['stable'], solution2])))
- os.utime(cached_path, (1, 1))
- injector.invalidate_solutions(2)
- self.assertEqual((solution2, None), injector._solve('context'))
-
-
- def test_SolutionsCache_InvalidateBySpecMtime(self):
- solution = [{'name': 'name', 'context': 'context', 'id': 'id', 'version': 'version'}]
- self.override(client, 'IPCConnection', lambda: _FakeConnection(True))
- self.override(solver, 'solve', lambda *args: solution)
- cached_path = 'cache/solutions/co/context'
-
- solution2 = [{'spec': 'spec', 'name': 'name2', 'context': 'context2', 'id': 'id2', 'version': 'version2'}]
- self.touch('spec')
- os.utime('spec', (1, 1))
- self.touch((cached_path, json.dumps([client.api_url.value, ['stable'], solution2])))
- os.utime(cached_path, (1, 1))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- os.utime(cached_path, (2, 2))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- os.utime('spec', (3, 3))
- self.assertEqual((solution, ['stable']), injector._solve('context'))
-
- def test_clone_SetExecPermissionsForActivities(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'extract': 'topdir',
- },
- },
- 'blob': StringIO(self.zips(
- 'topdir/activity/foo',
- 'topdir/bin/bar',
- 'topdir/bin/probe',
- 'topdir/file1',
- 'topdir/test/file2',
- )),
- }})
-
- pipe = injector.clone(context)
- log_path = tests.tmpdir + '/.sugar/default/logs/%s_2.log' % context
- self.assertEqual('exit', [i for i in pipe][-1]['state'])
- assert os.access('Activities/topdir/activity/foo', os.X_OK)
- assert os.access('Activities/topdir/bin/bar', os.X_OK)
- assert os.access('Activities/topdir/bin/probe', os.X_OK)
- assert not os.access('Activities/topdir/file1', os.X_OK)
- assert not os.access('Activities/topdir/test/file2', os.X_OK)
-
- def test_clone_InvalidateSolutionByAbsentImpls(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'extract': 'topdir',
- },
- },
- 'blob': StringIO(self.zips(['topdir/probe', [
- 'probe',
- ]])),
- }})
-
- for event in injector.clone(context):
- pass
- self.assertEqual('exit', event['state'])
- shutil.rmtree('Activities/topdir')
-
- for event in injector.clone(context):
- pass
- self.assertEqual('exit', event['state'])
- assert exists('Activities/topdir')
-
- def test_launch_Arguments(self):
- forks = []
- self.override(pipe_, 'fork', lambda callback, log_path, session, args=None, **kwargs: forks.append(args))
- self.override(journal, 'create_activity_id', lambda: 'new_activity_id')
-
- injector.launch('app')
- injector.launch('app', ['foo'])
- injector.launch('app', ['foo'], activity_id='activity_id', object_id='object_id', uri='uri')
-
- self.assertEqual([
- ['-b', 'app', '-a', 'new_activity_id'],
- ['foo', '-b', 'app', '-a', 'new_activity_id'],
- ['foo', '-b', 'app', '-a', 'activity_id', '-o', 'object_id', '-u', 'uri'],
- ],
- forks)
-
- def test_ProcessCommonDependencies(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'dependencies': ['dep1', 'dep2'],
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'dep2': {'restrictions': [['1', '2']]},
- 'dep3': {},
- },
- },
- },
- }})
- conn.post(['context'], {
- 'guid': 'dep1',
- 'type': 'package',
- 'title': 'title1',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep1.bin']],
- },
- },
- })
- conn.post(['context'], {
- 'guid': 'dep2',
- 'type': 'package',
- 'title': 'title2',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep2.bin']],
- },
- },
- })
- conn.post(['context'], {
- 'guid': 'dep3',
- 'type': 'package',
- 'title': 'title3',
- 'summary': 'summary',
- 'description': 'description',
- 'aliases': {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['dep3.bin']],
- },
- },
- })
-
- def resolve(names):
- return dict([(i, {'name': i, 'pk_id': i, 'version': '1', 'arch': '*', 'installed': True}) for i in names])
-
- self.override(packagekit, 'resolve', resolve)
-
- self.assertEqual(
- sorted([
- {'version': '1', 'id': 'dep1', 'context': 'dep1', 'name': 'title1', 'stability': 'packaged'},
- {'version': '1', 'id': 'dep2', 'context': 'dep2', 'name': 'title2', 'stability': 'packaged'},
- {'version': '1', 'id': 'dep3', 'context': 'dep3', 'name': 'title3', 'stability': 'packaged'},
- {'name': 'title', 'version': '1', 'command': ['echo'], 'context': context, 'id': impl, 'stability': 'stable'},
- ]),
- sorted(solver.solve(conn, context, ['stable'])))
-
- def test_LoadFeed_SetPackages(self):
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'dep': {},
- },
- },
- },
- }})
- conn.post(['context'], {
- 'guid': 'dep',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- def resolve(names):
- return dict([(i, {'name': i, 'pk_id': i, 'version': '1', 'arch': '*', 'installed': True}) for i in names])
- self.override(packagekit, 'resolve', resolve)
-
- self.assertRaises(RuntimeError, solver.solve, conn, context, ['stable'])
-
- conn.put(['context', 'dep', 'aliases'], {
- lsb_release.distributor_id(): {
- 'status': 'success',
- 'binary': [['bin']],
- },
- })
- self.assertEqual('dep', solver.solve(conn, context, ['stable'])[-1]['context'])
-
- conn.put(['context', 'dep', 'aliases'], {
- 'foo': {
- 'status': 'success',
- 'binary': [['bin']],
- },
- })
- self.assertRaises(RuntimeError, solver.solve, conn, context, ['stable'])
-
- conn.put(['context', 'dep', 'aliases'], {
- lsb_release.distributor_id(): {
- 'binary': [['bin']],
- },
- })
- self.assertEqual('dep', solver.solve(conn, context, ['stable'])[-1]['context'])
-
- def test_SolveSugar(self):
- self.touch(('__init__.py', ''))
- self.touch(('jarabe.py', 'class config: version = "0.94"'))
- file_, pathname_, description_ = imp.find_module('jarabe', ['.'])
- imp.load_module('jarabe', file_, pathname_, description_)
-
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.post(['context'], {
- 'guid': 'sugar',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'sugar': {},
- },
- },
- },
- }})
- self.assertEqual([
- {'name': 'title', 'version': '1', 'command': ['echo'], 'context': context, 'id': impl, 'stability': 'stable'},
- {'name': 'sugar', 'version': '0.94', 'context': 'sugar', 'path': '/', 'id': 'sugar-0.94', 'stability': 'packaged'},
- ],
- solver.solve(conn, context, ['stable']))
-
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'sugar': {'restrictions': [['0.80', '0.87']]},
- },
- },
- },
- }})
- self.assertEqual([
- {'name': 'title', 'version': '1', 'command': ['echo'], 'context': context, 'id': impl, 'stability': 'stable'},
- {'name': 'sugar', 'version': '0.86', 'context': 'sugar', 'path': '/', 'id': 'sugar-0.86', 'stability': 'packaged'},
- ],
- solver.solve(conn, context, ['stable']))
-
- def test_StripSugarVersion(self):
- self.touch(('__init__.py', ''))
- self.touch(('jarabe.py', 'class config: version = "0.94.1"'))
- file_, pathname_, description_ = imp.find_module('jarabe', ['.'])
- imp.load_module('jarabe', file_, pathname_, description_)
-
- self.start_online_client([User, Context, Implementation])
- conn = IPCConnection()
-
- context = conn.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- conn.post(['context'], {
- 'guid': 'sugar',
- 'type': 'package',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- impl = conn.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'echo',
- },
- },
- 'requires': {
- 'sugar': {},
- },
- },
- },
- }})
- self.assertEqual([
- {'name': 'title', 'version': '1', 'command': ['echo'], 'context': context, 'id': impl, 'stability': 'stable'},
- {'name': 'sugar', 'version': '0.94', 'context': 'sugar', 'path': '/', 'id': 'sugar-0.94', 'stability': 'packaged'},
- ],
- solver.solve(conn, context, ['stable']))
-
- def test_PopupServiceUnavailableInOffline(self):
- self.touch(('Activities/Activity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = false',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- 'requires = dep',
- ]))
-
- home_volume = self.start_client()
- clones.populate(home_volume['context'], ['Activities'])
- ipc = IPCConnection()
-
- self.assertEqual([
- {'context': 'context', 'state': 'fork'},
- {'context': 'context', 'state': 'analyze'},
- {'context': 'context', 'state': 'failure',
- 'error': '', 'error_type': 'ServiceUnavailable', 'trace': None,
- 'log_path': tests.tmpdir + '/.sugar/default/logs/context.log'},
- ],
- [i for i in injector.make('context')])
-
- def test_StabilityPreferences(self):
- self.start_online_client()
- ipc = IPCConnection()
- data = {'spec': {'*-*': {'commands': {'activity': {'exec': 'echo'}}, 'extract': 'topdir'}}}
-
- context = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl1 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl1, {'data': data})
- impl2 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '2',
- 'stability': 'testing',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl2, {'data': data})
- impl3 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '3',
- 'stability': 'buggy',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl3, {'data': data})
- impl4 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '4',
- 'stability': 'insecure',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl4, {'data': data})
-
- self.assertEqual('1', injector._solve(context)[0][0]['version'])
-
- self.touch(('config', [
- '[stabilities]',
- '%s = testing' % context,
- ]))
- Option.load(['config'])
- self.assertEqual('2', injector._solve(context)[0][0]['version'])
-
- self.touch(('config', [
- '[stabilities]',
- '%s = testing buggy' % context,
- ]))
- Option.load(['config'])
- self.assertEqual('3', injector._solve(context)[0][0]['version'])
-
- self.touch(('config', [
- '[stabilities]',
- 'default = insecure',
- '%s = stable' % context,
- ]))
- Option.load(['config'])
- self.assertEqual('1', injector._solve(context)[0][0]['version'])
-
- self.touch(('config', [
- '[stabilities]',
- 'default = insecure',
- ]))
- Option.load(['config'])
- self.assertEqual('4', injector._solve(context)[0][0]['version'])
-
- def test_SolutionsCache_InvalidateByStabilityPreferences(self):
- solution = [{'name': 'name', 'context': 'context', 'id': 'id', 'version': 'version'}]
- self.override(client, 'IPCConnection', lambda: _FakeConnection(True))
- self.override(solver, 'solve', lambda *args: solution)
- cached_path = 'cache/solutions/co/context'
-
- solution2 = [{'name': 'name2', 'context': 'context2', 'id': 'id2', 'version': 'version2'}]
- self.touch((cached_path, json.dumps([client.api_url.value, ['stable'], solution2])))
- self.assertEqual((solution2, None), injector._solve('context'))
-
- self.touch(('config', [
- '[stabilities]',
- 'context = buggy',
- ]))
- Option.load(['config'])
- self.assertEqual((solution, ['buggy']), injector._solve('context'))
-
-
-class _FakeConnection(object):
-
- def __init__(self, inline):
- self.inline = inline
-
- def get(self, cmd=None, *args, **kwargs):
- if cmd == 'status':
- return {'route': 'proxy' if self.inline else 'offline'}
-
-
-if __name__ == '__main__':
- tests.main()
diff --git a/tests/units/client/offline_routes.py b/tests/units/client/offline_routes.py
index 961bbb5..2a8692b 100755
--- a/tests/units/client/offline_routes.py
+++ b/tests/units/client/offline_routes.py
@@ -1,108 +1,34 @@
#!/usr/bin/env python
# sugar-lint: disable
+import json
+from cStringIO import StringIO
from os.path import exists
from __init__ import tests, src_root
from sugar_network import client, model
-from sugar_network.client import IPCConnection, clones
+from sugar_network.client import IPCConnection, implementations, packagekit
from sugar_network.client.routes import ClientRoutes
-from sugar_network.db import Volume
from sugar_network.toolkit.router import Router
-from sugar_network.toolkit import coroutine, http
+from sugar_network.toolkit import coroutine, http, lsb_release
class OfflineRoutes(tests.Test):
- def setUp(self):
- tests.Test.setUp(self)
- self.home_volume = Volume('db', model.RESOURCES)
- commands = ClientRoutes(self.home_volume)
- server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(commands))
- coroutine.spawn(server.serve_forever)
- coroutine.dispatch()
-
- def test_NoAuthors(self):
- ipc = IPCConnection()
-
- guid = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- self.assertEqual(
- {},
- self.home_volume['context'].get(guid)['author'])
- self.assertEqual(
- [],
- ipc.get(['context', guid, 'author']))
-
- def test_HandleDeletes(self):
- ipc = IPCConnection()
-
- guid = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- guid_path = 'db/context/%s/%s' % (guid[:2], guid)
- assert exists(guid_path)
-
- ipc.delete(['context', guid])
- self.assertRaises(http.NotFound, ipc.get, ['context', guid])
- assert not exists(guid_path)
+ def setUp(self, fork_num=0):
+ tests.Test.setUp(self, fork_num)
+ self.override(implementations, '_activity_id_new', lambda: 'activity_id')
def test_whoami(self):
- ipc = IPCConnection()
+ ipc = self.start_offline_client()
self.assertEqual(
{'guid': tests.UID, 'roles': []},
ipc.get(cmd='whoami'))
- def test_clone(self):
- ipc = IPCConnection()
-
- context = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- self.assertRaises(RuntimeError, ipc.put, ['context', context], 1, cmd='clone')
-
- def test_favorite(self):
- ipc = IPCConnection()
-
- context = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- self.assertEqual(
- {'favorite': False},
- ipc.get(['context', context], reply=['favorite']))
-
- ipc.put(['context', context], True, cmd='favorite')
-
- self.assertEqual(
- {'favorite': True},
- ipc.get(['context', context], reply=['favorite']))
-
- ipc.put(['context', context], False, cmd='favorite')
-
- self.assertEqual(
- {'favorite': False},
- ipc.get(['context', context], reply=['favorite']))
-
- def test_subscribe(self):
- ipc = IPCConnection()
+ def test_Events(self):
+ ipc = self.start_offline_client()
events = []
def read_events():
@@ -117,13 +43,11 @@ class OfflineRoutes(tests.Test):
'summary': 'summary',
'description': 'description',
})
- coroutine.dispatch()
ipc.put(['context', guid], {
'title': 'title_2',
})
- coroutine.dispatch()
ipc.delete(['context', guid])
- coroutine.sleep(.5)
+ coroutine.sleep(.1)
job.kill()
self.assertEqual([
@@ -133,8 +57,71 @@ class OfflineRoutes(tests.Test):
],
events)
+ def test_Feeds(self):
+ ipc = self.start_offline_client()
+
+ context = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ impl1 = ipc.post(['implementation'], {
+ 'context': context,
+ 'license': 'GPLv3+',
+ 'version': '1',
+ 'stability': 'stable',
+ 'notes': '',
+ })
+ self.home_volume['implementation'].update(impl1, {'data': {
+ 'spec': {'*-*': {}},
+ }})
+ impl2 = ipc.post(['implementation'], {
+ 'context': context,
+ 'license': 'GPLv3+',
+ 'version': '2',
+ 'stability': 'stable',
+ 'notes': '',
+ })
+ self.home_volume['implementation'].update(impl2, {'data': {
+ 'spec': {'*-*': {
+ 'requires': {
+ 'dep1': {},
+ 'dep2': {'restrictions': [['1', '2']]},
+ 'dep3': {'restrictions': [[None, '2']]},
+ 'dep4': {'restrictions': [['3', None]]},
+ },
+ }},
+ }})
+
+ self.assertEqual({
+ 'implementations': [
+ {
+ 'version': '1',
+ 'arch': '*-*',
+ 'stability': 'stable',
+ 'guid': impl1,
+ 'license': ['GPLv3+'],
+ },
+ {
+ 'version': '2',
+ 'arch': '*-*',
+ 'stability': 'stable',
+ 'guid': impl2,
+ 'requires': {
+ 'dep1': {},
+ 'dep2': {'restrictions': [['1', '2']]},
+ 'dep3': {'restrictions': [[None, '2']]},
+ 'dep4': {'restrictions': [['3', None]]},
+ },
+ 'license': ['GPLv3+'],
+ },
+ ],
+ },
+ ipc.get(['context', context], cmd='feed'))
+
def test_BLOBs(self):
- ipc = IPCConnection()
+ ipc = self.start_offline_client()
guid = ipc.post(['context'], {
'type': 'activity',
@@ -164,71 +151,365 @@ class OfflineRoutes(tests.Test):
[{'icon': 'http://127.0.0.1:5555/static/images/missing.png'}],
ipc.get(['context'], reply=['icon'])['result'])
- def test_Feeds(self):
- self.touch(('Activities/activity-1/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = bundle_id',
- 'exec = false',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]))
- self.touch(('Activities/activity-2/activity/activity.info', [
+ def test_favorite(self):
+ ipc = self.start_offline_client()
+ events = []
+
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
+
+ context1 = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title1',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ context2 = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title2',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ self.assertEqual(
+ sorted([]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='local')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['local']}, {'guid': context2, 'layer': ['local']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['local']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['local'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['local']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['local'], ipc.get(['context', context2, 'layer']))
+
+ del events[:]
+ ipc.put(['context', context1], True, cmd='favorite')
+ coroutine.sleep(.1)
+
+ self.assertEqual(
+ {'guid': context1, 'resource': 'context', 'event': 'update'},
+ events[-1])
+ self.assertEqual(
+ sorted([{'guid': context1}]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='local')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['favorite', 'local']}, {'guid': context2, 'layer': ['local']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['favorite', 'local']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['favorite', 'local'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['local']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['local'], ipc.get(['context', context2, 'layer']))
+
+ del events[:]
+ ipc.put(['context', context2], True, cmd='favorite')
+ coroutine.sleep(.1)
+
+ self.assertEqual(
+ {'guid': context2, 'resource': 'context', 'event': 'update'},
+ events[-1])
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='local')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['favorite', 'local']}, {'guid': context2, 'layer': ['favorite', 'local']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['favorite', 'local']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['favorite', 'local'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['favorite', 'local']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['favorite', 'local'], ipc.get(['context', context2, 'layer']))
+
+ del events[:]
+ ipc.put(['context', context1], False, cmd='favorite')
+ coroutine.sleep(.1)
+
+ self.assertEqual(
+ {'guid': context1, 'resource': 'context', 'event': 'update'},
+ events[-1])
+ self.assertEqual(
+ sorted([{'guid': context2}]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='local')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['local']}, {'guid': context2, 'layer': ['favorite', 'local']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['local']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['local'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['favorite', 'local']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['favorite', 'local'], ipc.get(['context', context2, 'layer']))
+
+ def test_launch_Activity(self):
+ local = self.start_online_client()
+ ipc = IPCConnection()
+
+ blob = self.zips(['TestActivity/activity/activity.info', [
'[Activity]',
'name = TestActivity',
'bundle_id = bundle_id',
'exec = true',
'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- 'requires = dep1; dep2 = 1; dep3 < 2; dep4 >= 3',
- ]))
+ 'activity_version = 1',
+ 'license=Public Domain',
+ ]])
+ impl = ipc.upload(['implementation'], StringIO(blob), cmd='release', initial=True)
+
+ ipc.put(['context', 'bundle_id'], True, cmd='clone')
+ solution = [{
+ 'guid': impl,
+ 'context': 'bundle_id',
+ 'extract': 'TestActivity',
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'command': ['true'],
+ 'path': tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl),
+ }]
+ assert local['implementation'].exists(impl)
+ self.assertEqual(
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
- ipc = IPCConnection()
- monitor = coroutine.spawn(clones.monitor, self.home_volume['context'], ['Activities'])
+ self.node.stop()
+ coroutine.sleep(.1)
+
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ events = []
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
+
+ ipc.get(['context', 'bundle_id'], cmd='launch', foo='bar')
+ coroutine.sleep(.1)
+
+ log_path = tests.tmpdir + '/.sugar/default/logs/bundle_id.log'
+ self.assertEqual([
+ {'event': 'exec', 'cmd': 'launch', 'guid': 'bundle_id', 'foo': 'bar', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ {'event': 'exit', 'cmd': 'launch', 'guid': 'bundle_id', 'foo': 'bar', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ ],
+ events)
+ assert local['implementation'].exists(impl)
+ self.assertEqual(
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
+
+ def test_ServiceUnavailableWhileSolving(self):
+ ipc = self.start_offline_client()
+
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ events = []
+ coroutine.spawn(read_events)
coroutine.dispatch()
+ self.assertRaises(http.ServiceUnavailable, ipc.get, ['context', 'foo'], cmd='launch')
+ coroutine.dispatch()
self.assertEqual({
- 'name': 'TestActivity',
- 'implementations': [
- {
- 'version': '1',
- 'arch': '*-*',
- 'commands': {
- 'activity': {
- 'exec': 'false',
- },
- },
- 'stability': 'stable',
- 'guid': tests.tmpdir + '/Activities/activity-1',
- 'requires': {},
+ 'event': 'failure',
+ 'method': 'GET',
+ 'guid': 'foo',
+ 'cmd': 'launch',
+ 'resource': 'context',
+ 'prop': None,
+ 'exception': 'ServiceUnavailable',
+ 'error': "Resource 'foo' does not exist in 'context'",
+ },
+ events[-1])
+
+ context = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.assertRaises(http.ServiceUnavailable, ipc.get, ['context', context], cmd='launch')
+ coroutine.dispatch()
+ self.assertEqual({
+ 'event': 'failure',
+ 'method': 'GET',
+ 'guid': context,
+ 'cmd': 'launch',
+ 'resource': 'context',
+ 'prop': None,
+ 'exception': 'ServiceUnavailable',
+ 'error': """\
+Can't find all required implementations:
+- %s -> (problem)
+ No known implementations at all""" % context,
+ },
+ events[-1])
+
+ impl = ipc.post(['implementation'], {
+ 'context': context,
+ 'license': 'GPLv3+',
+ 'version': '1',
+ 'stability': 'stable',
+ })
+ self.home_volume['implementation'].update(impl, {'data': {
+ 'spec': {
+ '*-*': {
+ 'commands': {'activity': {'exec': 'true'}},
+ 'requires': {'dep': {}},
},
- {
- 'version': '2',
- 'arch': '*-*',
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
+ },
+ }})
+
+ self.assertRaises(http.ServiceUnavailable, ipc.get, ['context', context], cmd='launch')
+ coroutine.dispatch()
+ self.assertEqual({
+ 'event': 'failure',
+ 'method': 'GET',
+ 'guid': context,
+ 'cmd': 'launch',
+ 'resource': 'context',
+ 'prop': None,
+ 'exception': 'ServiceUnavailable',
+ 'error': """\
+Can't find all required implementations:
+- %s -> 1 (%s)
+- dep -> (problem)
+ No known implementations at all""" % (context, impl),
+ },
+ events[-1])
+
+ assert not exists('cache/solutions/%s/%s' % (context[:2], context))
+
+ def test_ServiceUnavailableWhileInstalling(self):
+ ipc = self.start_offline_client()
+
+ context = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ impl = ipc.post(['implementation'], {
+ 'context': context,
+ 'license': 'GPLv3+',
+ 'version': '1',
+ 'stability': 'stable',
+ })
+ self.home_volume['implementation'].update(impl, {'data': {
+ 'spec': {
+ '*-*': {
+ 'commands': {'activity': {'exec': 'true'}},
+ 'requires': {'dep': {}},
+ },
+ },
+ }})
+ ipc.post(['context'], {
+ 'guid': 'dep',
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'aliases': {
+ lsb_release.distributor_id(): {
+ 'status': 'success',
+ 'binary': [['dep.bin']],
+ },
+ },
+ })
+
+ def resolve(names):
+ return dict([(i, {'name': i, 'pk_id': i, 'version': '0', 'arch': '*', 'installed': False}) for i in names])
+ self.override(packagekit, 'resolve', resolve)
+
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ events = []
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
+
+ self.assertRaises(http.ServiceUnavailable, ipc.get, ['context', context], cmd='launch')
+ coroutine.dispatch()
+ self.assertEqual({
+ 'event': 'failure',
+ 'method': 'GET',
+ 'guid': context,
+ 'cmd': 'launch',
+ 'resource': 'context',
+ 'prop': None,
+ 'exception': 'ServiceUnavailable',
+ 'error': 'Installation is not available in offline',
+ 'solution': [
+ { 'guid': impl,
+ 'context': context,
+ 'license': ['GPLv3+'],
'stability': 'stable',
- 'guid': tests.tmpdir + '/Activities/activity-2',
- 'requires': {
- 'dep1': {},
- 'dep2': {'restrictions': [['1', '2']]},
- 'dep3': {'restrictions': [[None, '2']]},
- 'dep4': {'restrictions': [['3', None]]},
- },
+ 'version': '1',
+ 'command': ['true'],
+ },
+ { 'guid': 'dep',
+ 'context': 'dep',
+ 'install': [{'arch': '*', 'installed': False, 'name': 'dep.bin', 'pk_id': 'dep.bin', 'version': '0'}],
+ 'license': None,
+ 'stability': 'packaged',
+ 'version': '0',
},
],
},
- ipc.get(['context', 'bundle_id'], cmd='feed'))
+ events[-1])
- def test_LocalAPIShouldDuplicateNodeButWith503Response(self):
- ipc = IPCConnection()
- self.assertRaises(http.ServiceUnavailable, ipc.get, ['context', 'foo'], cmd='feed')
- self.assertRaises(http.ServiceUnavailable, ipc.get, ['packages', 'foo', 'bar'])
+ def test_NoAuthors(self):
+ ipc = self.start_offline_client()
+
+ guid = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.assertEqual(
+ {},
+ self.home_volume['context'].get(guid)['author'])
+ self.assertEqual(
+ [],
+ ipc.get(['context', guid, 'author']))
+
+ def test_HandleDeletes(self):
+ ipc = self.start_offline_client()
+
+ guid = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ guid_path = 'db/context/%s/%s' % (guid[:2], guid)
+ assert exists(guid_path)
+
+ ipc.delete(['context', guid])
+ self.assertRaises(http.NotFound, ipc.get, ['context', guid])
+ assert not exists(guid_path)
if __name__ == '__main__':
diff --git a/tests/units/client/online_routes.py b/tests/units/client/online_routes.py
index 891b239..79b01cb 100755
--- a/tests/units/client/online_routes.py
+++ b/tests/units/client/online_routes.py
@@ -7,12 +7,12 @@ import time
import shutil
import zipfile
from cStringIO import StringIO
-from os.path import exists
+from os.path import exists, lexists, basename
from __init__ import tests, src_root
from sugar_network import client, db, model
-from sugar_network.client import IPCConnection, journal, clones, injector, routes
+from sugar_network.client import IPCConnection, journal, routes, implementations
from sugar_network.toolkit import coroutine, http
from sugar_network.toolkit.spec import Spec
from sugar_network.client.routes import ClientRoutes, Request, Response
@@ -30,26 +30,9 @@ import requests
class OnlineRoutes(tests.Test):
- def test_inline(self):
- cp = ClientRoutes(Volume('client', model.RESOURCES), client.api_url.value)
- assert not cp.inline()
-
- trigger = self.wait_for_events(cp, event='inline', state='online')
- coroutine.sleep(1)
- self.start_master()
- trigger.wait(1)
- assert trigger.value is None
- assert not cp.inline()
-
- request = Request(method='GET', cmd='whoami')
- cp.whoami(request, Response())
- trigger.wait()
- assert cp.inline()
-
- trigger = self.wait_for_events(cp, event='inline', state='offline')
- self.node.stop()
- trigger.wait()
- assert not cp.inline()
+ def setUp(self, fork_num=0):
+ tests.Test.setUp(self, fork_num)
+ self.override(implementations, '_activity_id_new', lambda: 'activity_id')
def test_whoami(self):
self.start_online_client()
@@ -59,109 +42,102 @@ class OnlineRoutes(tests.Test):
{'guid': tests.UID, 'roles': []},
ipc.get(cmd='whoami'))
- def test_clone_Activities(self):
- self.home_volume = self.start_online_client()
+ def test_Events(self):
+ local_volume = self.start_online_client()
ipc = IPCConnection()
- coroutine.spawn(clones.monitor, self.home_volume['context'], ['Activities'])
+ events = []
- context = ipc.post(['context'], {
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
+
+ guid = ipc.post(['context'], {
'type': 'activity',
'title': 'title',
'summary': 'summary',
'description': 'description',
})
- impl = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
+ ipc.put(['context', guid], {
+ 'title': 'title_2',
})
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'extract': 'TestActivitry',
- },
- },
- 'blob': StringIO(self.zips(['TestActivitry/activity/activity.info', [
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context,
- 'exec = false',
- 'icon = icon',
- 'activity_version = 1',
- 'license=Public Domain',
- ]])),
- }})
-
- assert not exists('Activities/TestActivitry/activity/activity.info')
- assert not exists('Activities/TestActivitry_1/activity/activity.info')
- self.assertEqual(
- {'clone': 0, 'type': ['activity']},
- ipc.get(['context', context], reply=['clone']))
-
- ipc.put(['context', context], 2, cmd='clone')
- coroutine.sleep(.5)
-
- assert exists('Activities/TestActivitry/activity/activity.info')
- assert not exists('Activities/TestActivitry_1/activity/activity.info')
- self.assertEqual(
- {'clone': 2},
- ipc.get(['context', context], reply=['clone']))
-
- ipc.put(['context', context], 2, cmd='clone')
- coroutine.sleep(.5)
-
- assert exists('Activities/TestActivitry/activity/activity.info')
- assert not exists('Activities/TestActivitry_1/activity/activity.info')
- self.assertEqual(
- {'clone': 2},
- ipc.get(['context', context], reply=['clone']))
-
- ipc.put(['context', context], 1, cmd='clone', force=1)
- coroutine.sleep(.5)
+ coroutine.sleep(.1)
+ ipc.delete(['context', guid])
+ coroutine.sleep(.1)
- assert exists('Activities/TestActivitry/activity/activity.info')
- assert exists('Activities/TestActivitry_1/activity/activity.info')
- self.assertEqual(
- {'clone': 2},
- ipc.get(['context', context], reply=['clone']))
+ self.assertEqual([
+ {'guid': guid, 'resource': 'context', 'event': 'create'},
+ {'guid': guid, 'resource': 'context', 'event': 'update'},
+ {'guid': guid, 'event': 'delete', 'resource': 'context'},
+ ],
+ events)
+ del events[:]
- ipc.put(['context', context], 0, cmd='clone')
- coroutine.sleep(.5)
+ guid = self.node_volume['context'].create({
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.node_volume['context'].update(guid, {
+ 'title': 'title_2',
+ })
+ coroutine.sleep(.1)
+ self.node_volume['context'].delete(guid)
+ coroutine.sleep(.1)
- assert not exists('Activities/TestActivitry/activity/activity.info')
- assert not exists('Activities/TestActivitry_1/activity/activity.info')
- self.assertEqual(
- {'clone': 0},
- ipc.get(['context', context], reply=['clone']))
+ self.assertEqual([
+ {'guid': guid, 'resource': 'context', 'event': 'create'},
+ {'guid': guid, 'resource': 'context', 'event': 'update'},
+ {'guid': guid, 'event': 'delete', 'resource': 'context'},
+ ],
+ events)
+ del events[:]
- ipc.put(['context', context], 1, cmd='clone')
- coroutine.sleep(.5)
+ guid = local_volume['context'].create({
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ local_volume['context'].update(guid, {
+ 'title': 'title_2',
+ })
+ coroutine.sleep(.1)
+ local_volume['context'].delete(guid)
+ coroutine.sleep(.1)
- assert exists('Activities/TestActivitry/activity/activity.info')
- self.assertEqual(
- {'clone': 2},
- ipc.get(['context', context], reply=['clone']))
+ self.assertEqual([], events)
- trigger = self.wait_for_events(ipc, event='inline', state='offline')
self.node.stop()
- trigger.wait()
- assert ipc.get(cmd='status')['route'] == 'offline'
+ coroutine.sleep(.1)
+ del events[:]
- self.assertEqual(
- {'clone': 2},
- ipc.get(['context', context], reply=['clone']))
+ guid = local_volume['context'].create({
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ local_volume['context'].update(guid, {
+ 'title': 'title_2',
+ })
+ coroutine.sleep(.1)
+ local_volume['context'].delete(guid)
+ coroutine.sleep(.1)
- def test_clone_ActivitiesWithStabilityPreferences(self):
- self.home_volume = self.start_online_client()
+ self.assertEqual([
+ {'guid': guid, 'resource': 'context', 'event': 'create'},
+ {'guid': guid, 'resource': 'context', 'event': 'update'},
+ {'guid': guid, 'event': 'delete', 'resource': 'context'},
+ ],
+ events)
+ del events[:]
+
+ def test_Feeds(self):
+ self.start_online_client()
ipc = IPCConnection()
- coroutine.spawn(clones.monitor, self.home_volume['context'], ['Activities'])
context = ipc.post(['context'], {
'type': 'activity',
@@ -169,7 +145,6 @@ class OnlineRoutes(tests.Test):
'summary': 'summary',
'description': 'description',
})
-
impl1 = ipc.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
@@ -177,216 +152,306 @@ class OnlineRoutes(tests.Test):
'stability': 'stable',
'notes': '',
})
- info1 = '\n'.join([
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context,
- 'exec = false',
- 'icon = icon',
- 'activity_version = 1',
- 'license=Public Domain',
- ])
self.node_volume['implementation'].update(impl1, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'extract': 'TestActivitry',
- },
- },
- 'blob': StringIO(self.zips(['TestActivitry/activity/activity.info', info1])),
+ 'spec': {'*-*': {}},
}})
-
impl2 = ipc.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
'version': '2',
- 'stability': 'testing',
+ 'stability': 'stable',
'notes': '',
})
- info2 = '\n'.join([
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context,
- 'exec = false',
- 'icon = icon',
- 'activity_version = 1',
- 'license=Public Domain',
- ])
self.node_volume['implementation'].update(impl2, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'extract': 'TestActivitry2',
+ 'spec': {'*-*': {
+ 'requires': {
+ 'dep1': {},
+ 'dep2': {'restrictions': [['1', '2']]},
+ 'dep3': {'restrictions': [[None, '2']]},
+ 'dep4': {'restrictions': [['3', None]]},
},
- },
- 'blob': StringIO(self.zips(['TestActivitry2/activity/activity.info', info2])),
+ }},
}})
- ipc.put(['context', context], 2, cmd='clone')
- coroutine.sleep(.5)
- not exists('Activities/TestActivitry2/activity/activity.info')
- self.assertEqual(info1, file('Activities/TestActivitry/activity/activity.info').read())
-
- self.touch(('config', [
- '[stabilities]',
- '%s = testing stable' % context,
- ]))
- Option.load(['config'])
-
- shutil.rmtree('cache/solutions')
- ipc.put(['context', context], 2, cmd='clone', force=1)
- coroutine.sleep(.5)
- self.assertEqual(info2, file('Activities/TestActivitry2/activity/activity.info').read())
+ self.assertEqual({
+ 'implementations': [
+ {
+ 'version': '1',
+ 'arch': '*-*',
+ 'stability': 'stable',
+ 'guid': impl1,
+ 'license': ['GPLv3+'],
+ },
+ {
+ 'version': '2',
+ 'arch': '*-*',
+ 'stability': 'stable',
+ 'guid': impl2,
+ 'requires': {
+ 'dep1': {},
+ 'dep2': {'restrictions': [['1', '2']]},
+ 'dep3': {'restrictions': [[None, '2']]},
+ 'dep4': {'restrictions': [['3', None]]},
+ },
+ 'license': ['GPLv3+'],
+ },
+ ],
+ },
+ ipc.get(['context', context], cmd='feed'))
- def test_clone_ActivityImpl(self):
- self.home_volume = self.start_online_client()
+ def test_BLOBs(self):
+ self.start_online_client()
ipc = IPCConnection()
- coroutine.spawn(clones.monitor, self.home_volume['context'], ['Activities'])
- context = ipc.post(['context'], {
+ guid = ipc.post(['context'], {
'type': 'activity',
'title': 'title',
'summary': 'summary',
'description': 'description',
})
+ ipc.request('PUT', ['context', guid, 'preview'], 'image')
- impl1 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
+ self.assertEqual(
+ 'image',
+ ipc.request('GET', ['context', guid, 'preview']).content)
+ self.assertEqual(
+ {'preview': 'http://127.0.0.1:8888/context/%s/preview' % guid},
+ ipc.get(['context', guid], reply=['preview']))
+ self.assertEqual(
+ [{'preview': 'http://127.0.0.1:8888/context/%s/preview' % guid}],
+ ipc.get(['context'], reply=['preview'])['result'])
+
+ self.assertEqual(
+ file(src_root + '/sugar_network/static/httpdocs/images/missing.png').read(),
+ ipc.request('GET', ['context', guid, 'icon']).content)
+ self.assertEqual(
+ {'icon': 'http://127.0.0.1:8888/static/images/missing.png'},
+ ipc.get(['context', guid], reply=['icon']))
+ self.assertEqual(
+ [{'icon': 'http://127.0.0.1:8888/static/images/missing.png'}],
+ ipc.get(['context'], reply=['icon'])['result'])
+
+ def test_favorite(self):
+ local = self.start_online_client()
+ ipc = IPCConnection()
+ events = []
+
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
+
+ context1 = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title1',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'layer': ['foo'],
+ })
+ context2 = ipc.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title2',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'layer': ['foo'],
})
- self.node_volume['implementation'].update(impl1, {'data': {
- 'blob': StringIO(self.zips(('TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license=GPLv3+',
- ]))),
- 'spec': {
- '*-*': {
- 'extract': 'TestActivity',
- 'commands': {'activity': {'exec': 'true'}},
- 'requires': {
- 'dep1': {},
- },
- },
- },
- }})
- impl2 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '2',
- 'stability': 'stable',
- 'notes': '',
+ self.assertEqual(
+ sorted([]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='foo')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['foo']}, {'guid': context2, 'layer': ['foo']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['foo']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['foo'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['foo']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['foo'], ipc.get(['context', context2, 'layer']))
+ self.assertEqual(
+ sorted([]),
+ sorted([i['layer'] for i in local['context'].find(reply='layer')[0]]))
+
+ del events[:]
+ ipc.put(['context', context1], True, cmd='favorite')
+ coroutine.sleep(.1)
+
+ self.assertEqual([
+ {'guid': context1, 'resource': 'context', 'event': 'update'},
+ ],
+ events)
+ self.assertEqual(
+ sorted([{'guid': context1}]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='foo')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['foo', 'favorite']}, {'guid': context2, 'layer': ['foo']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['foo', 'favorite']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['foo', 'favorite'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['foo']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['foo'], ipc.get(['context', context2, 'layer']))
+ self.assertEqual(
+ sorted([['foo', 'favorite']]),
+ sorted([i['layer'] for i in local['context'].find(reply='layer')[0]]))
+
+ del events[:]
+ ipc.put(['context', context2], True, cmd='favorite')
+ coroutine.sleep(.1)
+
+ self.assertEqual([
+ {'guid': context2, 'resource': 'context', 'event': 'update'},
+ ],
+ events)
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='foo')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['foo', 'favorite']}, {'guid': context2, 'layer': ['foo', 'favorite']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['foo', 'favorite']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['foo', 'favorite'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['foo', 'favorite']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['foo', 'favorite'], ipc.get(['context', context2, 'layer']))
+ self.assertEqual(
+ sorted([(context1, ['foo', 'favorite']), (context2, ['foo', 'favorite'])]),
+ sorted([(i.guid, i['layer']) for i in local['context'].find(reply='layer')[0]]))
+
+ del events[:]
+ ipc.put(['context', context1], False, cmd='favorite')
+ coroutine.sleep(.1)
+
+ self.assertEqual([
+ {'guid': context1, 'resource': 'context', 'event': 'update'},
+ ],
+ events)
+ self.assertEqual(
+ sorted([{'guid': context2}]),
+ sorted(ipc.get(['context'], layer='favorite')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'], layer='foo')['result']))
+ self.assertEqual(
+ sorted([{'guid': context1}, {'guid': context2}]),
+ sorted(ipc.get(['context'])['result']))
+ self.assertEqual(
+ sorted([{'guid': context1, 'layer': ['foo']}, {'guid': context2, 'layer': ['foo', 'favorite']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['foo']}, ipc.get(['context', context1], reply='layer'))
+ self.assertEqual(['foo'], ipc.get(['context', context1, 'layer']))
+ self.assertEqual({'layer': ['foo', 'favorite']}, ipc.get(['context', context2], reply='layer'))
+ self.assertEqual(['foo', 'favorite'], ipc.get(['context', context2, 'layer']))
+ self.assertEqual(
+ sorted([(context1, ['foo']), (context2, ['foo', 'favorite'])]),
+ sorted([(i.guid, i['layer']) for i in local['context'].find(reply='layer')[0]]))
+
+ def test_clone_Fails(self):
+ self.start_online_client([User, Context, Implementation])
+ conn = IPCConnection()
+ events = []
+
+ def read_events():
+ for event in conn.subscribe(event='!commit'):
+ events.append(event)
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
})
- self.node_volume['implementation'].update(impl2, {'data': {
- 'blob': StringIO(self.zips(('TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 2',
- 'license=GPLv3+',
- ]))),
- 'spec': {
- '*-*': {
- 'extract': 'TestActivity',
- 'commands': {'activity': {'exec': 'true'}},
- 'requires': {
- 'dep2': {},
- },
- },
- },
- }})
- impl3 = ipc.post(['implementation'], {
+ self.assertRaises(http.NotFound, conn.put, ['context', context], True, cmd='clone')
+ coroutine.dispatch()
+ self.assertEqual({
+ 'event': 'failure',
+ 'method': 'PUT',
+ 'cmd': 'clone',
+ 'resource': 'context',
+ 'guid': context,
+ 'prop': None,
+ 'exception': 'NotFound',
+ 'error': """\
+Can't find all required implementations:
+- %s -> (problem)
+ No known implementations at all""" % context,
+ },
+ events[-1])
+ assert not exists('cache/solutions/%s/%s' % (context[:2], context))
+
+ impl = conn.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
- 'version': '3',
- 'stability': 'developer',
+ 'version': '1',
+ 'stability': 'stable',
'notes': '',
})
- self.node_volume['implementation'].update(impl3, {'data': {
- 'blob': StringIO(self.zips(('TestActivity/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = %s' % context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 3',
- 'license=GPLv3+',
- ]))),
+ self.node_volume['implementation'].update(impl, {'data': {
+ 'extract': 'topdir',
'spec': {
'*-*': {
- 'extract': 'TestActivity',
- 'commands': {'activity': {'exec': 'true'}},
- 'requires': {
- 'dep3': {},
+ 'commands': {
+ 'activity': {
+ 'exec': 'echo',
+ },
},
},
},
}})
- self.assertRaises(RuntimeError, ipc.put, ['context', context], 2, cmd='clone', nodeps=1, requires='foo')
- coroutine.sleep(.1)
- self.assertEqual({'clone': 0}, ipc.get(['context', context], reply=['clone']))
- assert not exists('Activities/TestActivity/activity/activity.info')
-
- ipc.put(['context', context], 2, cmd='clone', nodeps=1)
-        # XXX seems to be an ugly low level bug, removing the following sleep means not releasing HTTP response for the next request
- coroutine.sleep(.1)
- self.assertEqual({'clone': 2}, ipc.get(['context', context], reply=['clone']))
- self.assertEqual('2', Spec('Activities/TestActivity/activity/activity.info')['version'])
-
- ipc.put(['context', context], 0, cmd='clone')
- coroutine.sleep(.1)
- self.assertEqual({'clone': 0}, ipc.get(['context', context], reply=['clone']))
- assert not exists('Activities/TestActivity/activity/activity.info')
-
- ipc.put(['context', context], 2, cmd='clone', nodeps=1, stability='developer')
- coroutine.sleep(.1)
- self.assertEqual({'clone': 2}, ipc.get(['context', context], reply=['clone']))
- self.assertEqual('3', Spec('Activities/TestActivity/activity/activity.info')['version'])
-
- ipc.put(['context', context], 0, cmd='clone')
- coroutine.sleep(.1)
- self.assertEqual({'clone': 0}, ipc.get(['context', context], reply=['clone']))
- assert not exists('Activities/TestActivity/activity/activity.info')
-
- ipc.put(['context', context], 2, cmd='clone', nodeps=1, requires='dep1')
- coroutine.sleep(.1)
- self.assertEqual({'clone': 2}, ipc.get(['context', context], reply=['clone']))
- self.assertEqual('1', Spec('Activities/TestActivity/activity/activity.info')['version'])
+ self.assertRaises(http.NotFound, conn.put, ['context', context], True, cmd='clone')
+ coroutine.dispatch()
+ self.assertEqual({
+ 'event': 'failure',
+ 'method': 'PUT',
+ 'cmd': 'clone',
+ 'resource': 'context',
+ 'guid': context,
+ 'prop': None,
+ 'exception': 'NotFound',
+ 'error': 'BLOB does not exist',
+ 'solution': [{
+ 'command': ['echo'],
+ 'context': context,
+ 'guid': impl,
+ 'license': ['GPLv3+'],
+ 'extract': 'topdir',
+ 'stability': 'stable',
+ 'version': '1',
+ 'path': tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl),
+ }],
+ },
+ events[-1])
+ assert not exists('cache/solutions/%s/%s' % (context[:2], context))
def test_clone_Content(self):
- self.start_online_client()
- updates = []
-
- def journal_update(self, guid, data=None, preview=None, **kwargs):
- if data is not None:
- kwargs['data'] = data.read()
- updates.append((guid, kwargs))
-
- self.override(journal.Routes, '__init__', lambda *args: None)
- self.override(journal.Routes, 'journal_update', journal_update)
- self.override(journal.Routes, 'journal_delete', lambda self, guid: updates.append((guid,)))
-
+ local = self.start_online_client([User, Context, Implementation])
ipc = IPCConnection()
+ events = []
+
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
context = ipc.post(['context'], {
'type': 'content',
@@ -401,260 +466,540 @@ class OnlineRoutes(tests.Test):
'stability': 'stable',
'notes': '',
})
- ipc.request('PUT', ['implementation', impl, 'data'], 'version_1')
+ blob = 'content'
+ self.node_volume['implementation'].update(impl, {'data': {'blob': StringIO(blob), 'foo': 'bar'}})
+ clone_path = 'client/context/%s/%s/.clone' % (context[:2], context)
- self.assertEqual({'clone': 0, 'type': ['content']}, ipc.get(['context', context], reply=['clone']))
-
- ipc.put(['context', context], 2, cmd='clone')
- self.touch('datastore/%s/%s/metadata/uid' % (context[:2], context))
+ ipc.put(['context', context], True, cmd='clone')
+ coroutine.dispatch()
- self.assertEqual([
- (context, {'activity_id': impl, 'data': 'version_1', 'description': 'description', 'title': 'title', 'mime_type': 'application/octet-stream'}),
- ],
- updates)
+ self.assertEqual({
+ 'event': 'update',
+ 'guid': context,
+ 'resource': 'context',
+ },
+ events[-1])
self.assertEqual(
- {'clone': 2, 'type': ['content']},
- ipc.get(['context', context], reply=['clone']))
- del updates[:]
-
- ipc.request('PUT', ['implementation', impl, 'data'], 'version_2',
- headers={'Content-Type': 'foo/bar'})
- ipc.put(['context', context], 2, cmd='clone')
-
+ sorted([{'guid': context}]),
+ sorted(ipc.get(['context'], layer='clone')['result']))
self.assertEqual(
- [],
- updates)
+ sorted([{'guid': context}]),
+ sorted(ipc.get(['context'])['result']))
self.assertEqual(
- {'clone': 2, 'type': ['content']},
- ipc.get(['context', context], reply=['clone']))
-
- ipc.put(['context', context], 1, cmd='clone', force=1)
-
- self.assertEqual([
- (context, {'activity_id': impl, 'data': 'version_2', 'description': 'description', 'title': 'title', 'mime_type': 'foo/bar'}),
- ],
- updates)
+ sorted([{'guid': context, 'layer': ['clone']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['clone']}, ipc.get(['context', context], reply='layer'))
+ self.assertEqual(['clone'], ipc.get(['context', context, 'layer']))
self.assertEqual(
- {'clone': 2, 'type': ['content']},
- ipc.get(['context', context], reply=['clone']))
- del updates[:]
+ [(context, ['clone'])],
+ [(i.guid, i['layer']) for i in local['context'].find(reply='layer')[0]])
+ self.assertEqual({
+ 'layer': ['clone'],
+ 'type': ['content'],
+ 'author': {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
+ 'title': {'en-us': 'title'},
+ },
+ local['context'].get(context).properties(['layer', 'type', 'author', 'title']))
+ self.assertEqual({
+ 'context': context,
+ 'license': ['GPLv3+'],
+ 'version': '1',
+ 'stability': 'stable',
+ },
+ local['implementation'].get(impl).properties(['context', 'license', 'version', 'stability']))
+ blob_path = 'client/implementation/%s/%s/data.blob' % (impl[:2], impl)
+ self.assertEqual({
+ 'seqno': 5,
+ 'blob_size': len(blob),
+ 'blob': tests.tmpdir + '/' + blob_path,
+ 'mtime': int(os.stat(blob_path[:-5]).st_mtime),
+ 'foo': 'bar',
+ },
+ local['implementation'].get(impl).meta('data'))
+ self.assertEqual('content', file(blob_path).read())
+ assert exists(clone_path + '/data.blob')
+ assert not exists('cache/solutions/%s/%s' % (context[:2], context))
- ipc.put(['context', context], 0, cmd='clone')
- shutil.rmtree('datastore/%s/%s' % (context[:2], context))
+ ipc.put(['context', context], False, cmd='clone')
+ coroutine.dispatch()
- self.assertEqual([
- (context,),
- ],
- updates)
+ self.assertEqual({
+ 'event': 'update',
+ 'guid': context,
+ 'resource': 'context',
+ },
+ events[-1])
self.assertEqual(
- {'clone': 0, 'type': ['content']},
- ipc.get(['context', context], reply=['clone']))
- del updates[:]
-
- def test_clone_Artifacts(self):
- self.start_online_client([User, Context, Implementation, Artifact])
- updates = []
+ sorted([{'guid': context, 'layer': []}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': []}, ipc.get(['context', context], reply='layer'))
+ self.assertEqual([], ipc.get(['context', context, 'layer']))
+ self.assertEqual({
+ 'layer': [],
+ 'type': ['content'],
+ 'author': {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
+ 'title': {'en-us': 'title'},
+ },
+ local['context'].get(context).properties(['layer', 'type', 'author', 'title']))
+ blob_path = 'client/implementation/%s/%s/data.blob' % (impl[:2], impl)
+ self.assertEqual({
+ 'seqno': 5,
+ 'blob_size': len(blob),
+ 'blob': tests.tmpdir + '/' + blob_path,
+ 'mtime': int(os.stat(blob_path[:-5]).st_mtime),
+ 'foo': 'bar',
+ },
+ local['implementation'].get(impl).meta('data'))
+ self.assertEqual('content', file(blob_path).read())
+ assert not lexists(clone_path)
+ assert not exists('cache/solutions/%s/%s' % (context[:2], context))
- def journal_update(self, guid, data=None, preview=None, **kwargs):
- if data is not None:
- kwargs['data'] = data.read()
- updates.append((guid, kwargs))
+ ipc.put(['context', context], True, cmd='clone')
+ coroutine.dispatch()
- self.override(journal.Routes, '__init__', lambda *args: None)
- self.override(journal.Routes, 'journal_update', journal_update)
- self.override(journal.Routes, 'journal_delete', lambda self, guid: updates.append((guid,)))
+ self.assertEqual({
+ 'event': 'update',
+ 'guid': context,
+ 'resource': 'context',
+ },
+ events[-1])
+ self.assertEqual(
+ sorted([{'guid': context, 'layer': ['clone']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ assert exists(clone_path + '/data.blob')
+ assert not exists('cache/solutions/%s/%s' % (context[:2], context))
+ def test_clone_Activity(self):
+ local = self.start_online_client([User, Context, Implementation])
ipc = IPCConnection()
+ events = []
- artifact = ipc.post(['artifact'], {
- 'context': 'context',
- 'type': 'instance',
- 'title': 'title',
- 'description': 'description',
- })
- ipc.request('PUT', ['artifact', artifact, 'data'], 'data')
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
- self.assertEqual({'clone': 0}, ipc.get(['artifact', artifact], reply=['clone']))
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license=Public Domain',
+ ])
+ blob = self.zips(['TestActivity/activity/activity.info', activity_info])
+ impl = ipc.upload(['implementation'], StringIO(blob), cmd='release', initial=True)
+ clone_path = 'client/context/bu/bundle_id/.clone'
+ blob_path = tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl)
+ solution = [{
+ 'guid': impl,
+ 'context': 'bundle_id',
+ 'extract': 'TestActivity',
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'command': ['true'],
+ 'path': blob_path,
+ }]
- ipc.put(['artifact', artifact], 2, cmd='clone')
- self.touch('datastore/%s/%s/metadata/uid' % (artifact[:2], artifact))
+ ipc.put(['context', 'bundle_id'], True, cmd='clone')
+ coroutine.dispatch()
- self.assertEqual([
- (artifact, {'data': 'data', 'description': 'description', 'title': 'title', 'activity': 'context'}),
- ],
- updates)
+ self.assertEqual({
+ 'event': 'update',
+ 'guid': 'bundle_id',
+ 'resource': 'context',
+ },
+ events[-1])
self.assertEqual(
- {'clone': 2},
- ipc.get(['artifact', artifact], reply=['clone']))
- del updates[:]
-
- ipc.put(['artifact', artifact], 2, cmd='clone')
-
+ sorted([{'guid': 'bundle_id'}]),
+ sorted(ipc.get(['context'], layer='clone')['result']))
self.assertEqual(
- [],
- updates)
+ sorted([{'guid': 'bundle_id'}]),
+ sorted(ipc.get(['context'])['result']))
self.assertEqual(
- {'clone': 2},
- ipc.get(['artifact', artifact], reply=['clone']))
+ sorted([{'guid': 'bundle_id', 'layer': ['clone']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': ['clone']}, ipc.get(['context', 'bundle_id'], reply='layer'))
+ self.assertEqual(['clone'], ipc.get(['context', 'bundle_id', 'layer']))
+ self.assertEqual({
+ 'layer': ['clone'],
+ 'type': ['activity'],
+ 'author': {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
+ 'title': {'en-us': 'TestActivity'},
+ },
+ local['context'].get('bundle_id').properties(['layer', 'type', 'author', 'title']))
+ self.assertEqual({
+ 'context': 'bundle_id',
+ 'license': ['Public Domain'],
+ 'version': '1',
+ 'stability': 'stable',
+ },
+ local['implementation'].get(impl).properties(['context', 'license', 'version', 'stability']))
+ self.assertEqual({
+ 'seqno': 5,
+ 'unpack_size': len(activity_info),
+ 'blob_size': len(blob),
+ 'blob': blob_path,
+ 'mtime': int(os.stat(blob_path[:-5]).st_mtime),
+ 'extract': 'TestActivity',
+ 'mime_type': 'application/vnd.olpc-sugar',
+ 'spec': {
+ '*-*': {
+ 'requires': {},
+ 'commands': {'activity': {'exec': 'true'}},
+ },
+ },
+ },
+ local['implementation'].get(impl).meta('data'))
+ self.assertEqual(activity_info, file(blob_path + '/activity/activity.info').read())
+ assert exists(clone_path + '/data.blob/activity/activity.info')
+ self.assertEqual(
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
- ipc.request('PUT', ['artifact', artifact, 'data'], 'data_2')
- ipc.put(['artifact', artifact], 1, cmd='clone', force=1)
+ ipc.put(['context', 'bundle_id'], False, cmd='clone')
+ coroutine.dispatch()
- self.assertEqual([
- (artifact, {'data': 'data_2', 'description': 'description', 'title': 'title', 'activity': 'context'}),
- ],
- updates)
+ self.assertEqual({
+ 'event': 'update',
+ 'guid': 'bundle_id',
+ 'resource': 'context',
+ },
+ events[-1])
+ self.assertEqual(
+ sorted([{'guid': 'bundle_id', 'layer': []}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ self.assertEqual({'layer': []}, ipc.get(['context', 'bundle_id'], reply='layer'))
+ self.assertEqual([], ipc.get(['context', 'bundle_id', 'layer']))
+ self.assertEqual({
+ 'layer': [],
+ 'type': ['activity'],
+ 'author': {tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
+ 'title': {'en-us': 'TestActivity'},
+ },
+ local['context'].get('bundle_id').properties(['layer', 'type', 'author', 'title']))
+ self.assertEqual({
+ 'seqno': 5,
+ 'unpack_size': len(activity_info),
+ 'blob_size': len(blob),
+ 'blob': blob_path,
+ 'mtime': int(os.stat(blob_path[:-5]).st_mtime),
+ 'extract': 'TestActivity',
+ 'mime_type': 'application/vnd.olpc-sugar',
+ 'spec': {
+ '*-*': {
+ 'requires': {},
+ 'commands': {'activity': {'exec': 'true'}},
+ },
+ },
+ },
+ local['implementation'].get(impl).meta('data'))
+ self.assertEqual(activity_info, file(blob_path + '/activity/activity.info').read())
+ assert not exists(clone_path)
self.assertEqual(
- {'clone': 2},
- ipc.get(['artifact', artifact], reply=['clone']))
- del updates[:]
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
- ipc.put(['artifact', artifact], 0, cmd='clone')
- shutil.rmtree('datastore/%s/%s' % (artifact[:2], artifact))
+ ipc.put(['context', 'bundle_id'], True, cmd='clone')
+ coroutine.dispatch()
- self.assertEqual([
- (artifact,),
- ],
- updates)
+ self.assertEqual({
+ 'event': 'update',
+ 'guid': 'bundle_id',
+ 'resource': 'context',
+ },
+ events[-1])
self.assertEqual(
- {'clone': 0},
- ipc.get(['artifact', artifact], reply=['clone']))
- del updates[:]
+ sorted([{'guid': 'bundle_id', 'layer': ['clone']}]),
+ sorted(ipc.get(['context'], reply='layer')['result']))
+ assert exists(clone_path + '/data.blob/activity/activity.info')
+ self.assertEqual(
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
- def test_favorite(self):
- self.start_online_client()
+ def test_clone_ActivityWithStabilityPreferences(self):
+ local = self.start_online_client([User, Context, Implementation])
ipc = IPCConnection()
- context = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
+ activity_info1 = '\n'.join([
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ blob1 = self.zips(['TestActivity/activity/activity.info', activity_info1])
+ impl1 = ipc.upload(['implementation'], StringIO(blob1), cmd='release', initial=True)
- self.assertEqual(
- {'favorite': 0, 'type': ['activity']},
- ipc.get(['context', context], reply=['favorite']))
+ activity_info2 = '\n'.join([
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license = Public Domain',
+ 'stability = buggy',
+ ])
+ blob2 = self.zips(['TestActivity/activity/activity.info', activity_info2])
+ impl2 = ipc.upload(['implementation'], StringIO(blob2), cmd='release', initial=True)
- ipc.put(['context', context], True, cmd='favorite')
- coroutine.sleep(.5)
- self.assertEqual(
- {'favorite': True},
- ipc.get(['context', context], reply=['favorite']))
+ ipc.put(['context', 'bundle_id'], True, cmd='clone')
+ coroutine.dispatch()
+ self.assertEqual({'layer': ['clone']}, ipc.get(['context', 'bundle_id'], reply='layer'))
+ self.assertEqual([impl1], [i.guid for i in local['implementation'].find()[0]])
+ self.assertEqual(impl1, basename(os.readlink('client/context/bu/bundle_id/.clone')))
- ipc.put(['context', context], False, cmd='favorite')
- self.assertEqual(
- {'favorite': False},
- ipc.get(['context', context], reply=['favorite']))
+ self.touch(('config', [
+ '[stabilities]',
+ 'bundle_id = buggy stable',
+ ]))
+ Option.load(['config'])
- ipc.put(['context', context], True, cmd='favorite')
- coroutine.sleep(.5)
- self.assertEqual(
- {'favorite': True},
- ipc.get(['context', context], reply=['favorite']))
+ ipc.put(['context', 'bundle_id'], False, cmd='clone')
+ ipc.put(['context', 'bundle_id'], True, cmd='clone')
+ coroutine.dispatch()
+ self.assertEqual({'layer': ['clone']}, ipc.get(['context', 'bundle_id'], reply='layer'))
+ self.assertEqual([impl1, impl2], [i.guid for i in local['implementation'].find()[0]])
+ self.assertEqual(impl2, basename(os.readlink('client/context/bu/bundle_id/.clone')))
- trigger = self.wait_for_events(ipc, event='inline', state='offline')
- self.node.stop()
- trigger.wait()
- assert ipc.get(cmd='status')['route'] == 'offline'
+ def test_clone_Head(self):
+ local = self.start_online_client([User, Context, Implementation])
+ ipc = IPCConnection()
- self.assertEqual(
- {'favorite': True},
- ipc.get(['context', context], reply=['favorite']))
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ ])
+ blob = self.zips(['TestActivity/activity/activity.info', activity_info])
+ impl = ipc.upload(['implementation'], StringIO(blob), cmd='release', initial=True)
+ blob_path = 'master/implementation/%s/%s/data.blob' % (impl[:2], impl)
- def test_subscribe(self):
- self.start_online_client()
+ self.assertEqual({
+ 'guid': impl,
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'context': 'bundle_id',
+ 'data': {
+ 'blob_size': len(blob),
+ 'extract': 'TestActivity',
+ 'mime_type': 'application/vnd.olpc-sugar',
+ 'mtime': int(os.stat(blob_path[:-5]).st_mtime),
+ 'seqno': 3,
+ 'spec': {'*-*': {'commands': {'activity': {'exec': 'true'}}, 'requires': {}}},
+ 'unpack_size': len(activity_info),
+ },
+ },
+ ipc.head(['context', 'bundle_id'], cmd='clone'))
+
+ ipc.put(['context', 'bundle_id'], True, cmd='clone')
+ coroutine.dispatch()
+ blob_path = tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl)
+
+ self.assertEqual({
+ 'guid': impl,
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'context': 'bundle_id',
+ 'data': {
+ 'blob': blob_path,
+ 'blob_size': len(blob),
+ 'extract': 'TestActivity',
+ 'mime_type': 'application/vnd.olpc-sugar',
+ 'mtime': int(os.stat(blob_path[:-5]).st_mtime),
+ 'seqno': 5,
+ 'spec': {'*-*': {'commands': {'activity': {'exec': 'true'}}, 'requires': {}}},
+ 'unpack_size': len(activity_info),
+ },
+ },
+ ipc.head(['context', 'bundle_id'], cmd='clone'))
+
+ def test_launch_Activity(self):
+ local = self.start_online_client([User, Context, Implementation])
ipc = IPCConnection()
- events = []
+
+ blob = self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license=Public Domain',
+ ]])
+ impl = ipc.upload(['implementation'], StringIO(blob), cmd='release', initial=True)
+ coroutine.sleep(.1)
def read_events():
for event in ipc.subscribe(event='!commit'):
events.append(event)
- job = coroutine.spawn(read_events)
- coroutine.dispatch(.1)
+ events = []
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
- guid = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- coroutine.dispatch(.1)
- ipc.put(['context', guid], {
- 'title': 'title_2',
- })
- coroutine.dispatch(.1)
- ipc.delete(['context', guid])
- coroutine.sleep(.5)
- job.kill()
+ ipc.get(['context', 'bundle_id'], cmd='launch', foo='bar')
+ coroutine.sleep(.1)
+ solution = [{
+ 'guid': impl,
+ 'context': 'bundle_id',
+ 'extract': 'TestActivity',
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'command': ['true'],
+ 'path': tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl),
+ }]
+ log_path = tests.tmpdir + '/.sugar/default/logs/bundle_id.log'
self.assertEqual([
- {'guid': guid, 'resource': 'context', 'event': 'create'},
- {'guid': guid, 'resource': 'context', 'event': 'update'},
- {'guid': guid, 'event': 'delete', 'resource': 'context'},
+ {'event': 'exec', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ {'event': 'exit', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
],
events)
- del events[:]
+ assert local['implementation'].exists(impl)
+ self.assertEqual(
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
- job = coroutine.spawn(read_events)
- coroutine.dispatch(.1)
- guid = self.node_volume['context'].create({
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- coroutine.dispatch(.1)
- self.node_volume['context'].update(guid, {
- 'title': 'title_2',
- })
- coroutine.dispatch(.1)
- self.node_volume['context'].delete(guid)
- coroutine.dispatch(.1)
- job.kill()
+ blob = self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 2',
+ 'license=Public Domain',
+ ]])
+ impl = ipc.upload(['implementation'], StringIO(blob), cmd='release')
+ coroutine.sleep(.1)
+
+ shutil.rmtree('cache/solutions')
+ del events[:]
+ ipc.get(['context', 'bundle_id'], cmd='launch', foo='bar')
+ coroutine.sleep(.1)
+ solution = [{
+ 'guid': impl,
+ 'context': 'bundle_id',
+ 'extract': 'TestActivity',
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '2',
+ 'command': ['true'],
+ 'path': tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl),
+ }]
+ log_path = tests.tmpdir + '/.sugar/default/logs/bundle_id_1.log'
self.assertEqual([
- {'guid': guid, 'resource': 'context', 'event': 'create'},
- {'guid': guid, 'resource': 'context', 'event': 'update'},
- {'guid': guid, 'event': 'delete', 'resource': 'context'},
+ {'event': 'exec', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ {'event': 'exit', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
],
events)
+ assert local['implementation'].exists(impl)
+ self.assertEqual(
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
- def test_BLOBs(self):
- self.start_online_client()
- ipc = IPCConnection()
+ self.node.stop()
+ coroutine.sleep(.1)
- guid = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- ipc.request('PUT', ['context', guid, 'preview'], 'image')
+ del events[:]
+ ipc.get(['context', 'bundle_id'], cmd='launch', foo='bar')
+ coroutine.sleep(.1)
+ log_path = tests.tmpdir + '/.sugar/default/logs/bundle_id_2.log'
+ self.assertEqual([
+ {'event': 'exec', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ {'event': 'exit', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ ],
+ events)
+ assert local['implementation'].exists(impl)
self.assertEqual(
- 'image',
- ipc.request('GET', ['context', guid, 'preview']).content)
- self.assertEqual(
- {'preview': 'http://127.0.0.1:8888/context/%s/preview' % guid},
- ipc.get(['context', guid], reply=['preview']))
- self.assertEqual(
- [{'preview': 'http://127.0.0.1:8888/context/%s/preview' % guid}],
- ipc.get(['context'], reply=['preview'])['result'])
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
+ shutil.rmtree('cache/solutions')
+ del events[:]
+ ipc.get(['context', 'bundle_id'], cmd='launch', foo='bar')
+ coroutine.sleep(.1)
+
+ log_path = tests.tmpdir + '/.sugar/default/logs/bundle_id_3.log'
+ self.assertEqual([
+ {'event': 'exec', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ {'event': 'exit', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['true', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ ],
+ events)
+ assert local['implementation'].exists(impl)
self.assertEqual(
- file(src_root + '/sugar_network/static/httpdocs/images/missing.png').read(),
- ipc.request('GET', ['context', guid, 'icon']).content)
- self.assertEqual(
- {'icon': 'http://127.0.0.1:8888/static/images/missing.png'},
- ipc.get(['context', guid], reply=['icon']))
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
+
+ def test_launch_ActivityFailed(self):
+ local = self.start_online_client([User, Context, Implementation])
+ ipc = IPCConnection()
+
+ activity_info = '\n'.join([
+ '[Activity]',
+ 'name = TestActivity',
+ 'bundle_id = bundle_id',
+ 'exec = false',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license=Public Domain',
+ ])
+ blob = self.zips(['TestActivity/activity/activity.info', activity_info])
+ impl = ipc.upload(['implementation'], StringIO(blob), cmd='release', initial=True)
+ coroutine.sleep(.1)
+
+ def read_events():
+ for event in ipc.subscribe(event='!commit'):
+ events.append(event)
+ events = []
+ coroutine.spawn(read_events)
+ coroutine.dispatch()
+
+ ipc.get(['context', 'bundle_id'], cmd='launch', foo='bar')
+ coroutine.sleep(.1)
+
+ solution = [{
+ 'guid': impl,
+ 'context': 'bundle_id',
+ 'extract': 'TestActivity',
+ 'license': ['Public Domain'],
+ 'stability': 'stable',
+ 'version': '1',
+ 'command': ['false'],
+ 'path': tests.tmpdir + '/client/implementation/%s/%s/data.blob' % (impl[:2], impl),
+ }]
+ log_path = tests.tmpdir + '/.sugar/default/logs/bundle_id.log'
+ self.assertEqual([
+ {'event': 'exec', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['false', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ {'event': 'failure', 'error': 'Process exited with 1 status', 'cmd': 'launch', 'guid': 'bundle_id', 'args': ['false', '-b', 'bundle_id', '-a', 'activity_id'], 'foo': 'bar', 'activity_id': 'activity_id', 'log_path': log_path, 'solution': solution},
+ ],
+ events)
+ assert local['implementation'].exists(impl)
self.assertEqual(
- [{'icon': 'http://127.0.0.1:8888/static/images/missing.png'}],
- ipc.get(['context'], reply=['icon'])['result'])
+ [client.api_url.value, ['stable'], solution],
+ json.load(file('cache/solutions/bu/bundle_id')))
- def test_Feeds(self):
+ def test_InvalidateSolutions(self):
self.start_online_client()
ipc = IPCConnection()
+ self.assertNotEqual(None, self.client_routes._node_mtime)
+
+ mtime = self.client_routes._node_mtime
+ coroutine.sleep(1.1)
context = ipc.post(['context'], {
'type': 'activity',
@@ -662,6 +1007,10 @@ class OnlineRoutes(tests.Test):
'summary': 'summary',
'description': 'description',
})
+ assert self.client_routes._node_mtime == mtime
+
+ coroutine.sleep(1.1)
+
impl1 = ipc.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
@@ -672,6 +1021,11 @@ class OnlineRoutes(tests.Test):
self.node_volume['implementation'].update(impl1, {'data': {
'spec': {'*-*': {}},
}})
+ assert self.client_routes._node_mtime > mtime
+
+ mtime = self.client_routes._node_mtime
+ coroutine.sleep(1.1)
+
impl2 = ipc.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
@@ -689,37 +1043,34 @@ class OnlineRoutes(tests.Test):
},
}},
}})
+ assert self.client_routes._node_mtime > mtime
- self.assertEqual({
- 'name': 'title',
- 'implementations': [
- {
- 'version': '1',
- 'arch': '*-*',
- 'stability': 'stable',
- 'guid': impl1,
- 'unpack_size': None,
- 'blob_size': None,
- },
- {
- 'version': '2',
- 'arch': '*-*',
- 'stability': 'stable',
- 'guid': impl2,
- 'requires': {
- 'dep1': {},
- 'dep2': {'restrictions': [['1', '2']]},
- 'dep3': {'restrictions': [[None, '2']]},
- 'dep4': {'restrictions': [['3', None]]},
- },
- 'unpack_size': None,
- 'blob_size': None,
- },
- ],
- },
- ipc.get(['context', context], cmd='feed'))
+ def test_NoNeedlessRemoteRequests(self):
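+        # Once the same guid exists in the home volume, property GETs should be
+        # answered locally instead of being proxied to the node.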
+ home_volume = self.start_online_client()
+ ipc = IPCConnection()
+
+ guid = ipc.post(['context'], {
+ 'type': 'content',
+ 'title': 'remote',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.assertEqual(
+ {'title': 'remote'},
+ ipc.get(['context', guid], reply=['title']))
+
+ home_volume['context'].create({
+ 'guid': guid,
+ 'type': 'activity',
+ 'title': 'local',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.assertEqual(
+ {'title': 'local'},
+ ipc.get(['context', guid], reply=['title']))
- def test_Feeds_RestrictLayers(self):
+ def test_RestrictLayers(self):
self.start_online_client([User, Context, Implementation, Artifact])
ipc = IPCConnection()
@@ -741,70 +1092,48 @@ class OnlineRoutes(tests.Test):
self.node_volume['implementation'].update(impl, {'data': {
'spec': {'*-*': {}},
}})
- artifact = ipc.post(['artifact'], {
- 'type': 'instance',
- 'context': 'context',
- 'title': 'title',
- 'description': 'description',
- 'layer': 'public',
- })
-
- self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['context'], reply='layer')['result'])
- self.assertEqual(
- [],
- ipc.get(['context'], reply='layer', layer='foo')['result'])
- self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['context'], reply='layer', layer='public')['result'])
self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['implementation'], reply='layer')['result'])
+ [{'guid': context, 'layer': ['public']}],
+ ipc.get(['context'], reply=['guid', 'layer'])['result'])
self.assertEqual(
[],
- ipc.get(['implementation'], reply='layer', layer='foo')['result'])
+ ipc.get(['context'], reply=['guid', 'layer'], layer='foo')['result'])
self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['implementation'], reply='layer', layer='public')['result'])
+ [{'guid': context, 'layer': ['public']}],
+ ipc.get(['context'], reply=['guid', 'layer'], layer='public')['result'])
self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['artifact'], reply='layer')['result'])
+ [{'guid': impl, 'layer': ['public']}],
+ ipc.get(['implementation'], reply=['guid', 'layer'])['result'])
self.assertEqual(
[],
- ipc.get(['artifact'], reply='layer', layer='foo')['result'])
+ ipc.get(['implementation'], reply=['guid', 'layer'], layer='foo')['result'])
self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['artifact'], reply='layer', layer='public')['result'])
+ [{'guid': impl, 'layer': ['public']}],
+ ipc.get(['implementation'], reply=['guid', 'layer'], layer='public')['result'])
self.assertEqual({
- 'name': 'title',
'implementations': [{
'stability': 'stable',
'guid': impl,
'arch': '*-*',
'version': '1',
- 'unpack_size': None,
- 'blob_size': None,
+ 'license': ['GPLv3+'],
}],
},
ipc.get(['context', context], cmd='feed'))
self.assertEqual({
- 'name': 'title',
'implementations': [],
},
ipc.get(['context', context], cmd='feed', layer='foo'))
self.assertEqual({
- 'name': 'title',
'implementations': [{
'stability': 'stable',
'guid': impl,
'arch': '*-*',
'version': '1',
- 'unpack_size': None,
- 'blob_size': None,
+ 'license': ['GPLv3+'],
}],
},
ipc.get(['context', context], cmd='feed', layer='public'))
@@ -813,192 +1142,43 @@ class OnlineRoutes(tests.Test):
self.assertEqual(
[],
- ipc.get(['context'], reply='layer')['result'])
+ ipc.get(['context'], reply=['guid', 'layer'])['result'])
self.assertEqual(
[],
- ipc.get(['context'], reply='layer', layer='foo')['result'])
+ ipc.get(['context'], reply=['guid', 'layer'], layer='foo')['result'])
self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['context'], reply='layer', layer='public')['result'])
+ [{'guid': context, 'layer': ['public']}],
+ ipc.get(['context'], reply=['guid', 'layer'], layer='public')['result'])
self.assertEqual(
[],
- ipc.get(['implementation'], reply='layer')['result'])
+ ipc.get(['implementation'], reply=['guid', 'layer'])['result'])
self.assertEqual(
[],
- ipc.get(['implementation'], reply='layer', layer='foo')['result'])
+ ipc.get(['implementation'], reply=['guid', 'layer'], layer='foo')['result'])
self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['implementation'], reply='layer', layer='public')['result'])
-
- self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['artifact'], reply='layer')['result'])
- self.assertEqual(
- [],
- ipc.get(['artifact'], reply='layer', layer='foo')['result'])
- self.assertEqual(
- [{'layer': ['public']}],
- ipc.get(['artifact'], reply='layer', layer='public')['result'])
+ [{'guid': impl, 'layer': ['public']}],
+ ipc.get(['implementation'], reply=['guid', 'layer'], layer='public')['result'])
self.assertEqual({
- 'name': 'title',
'implementations': [],
},
ipc.get(['context', context], cmd='feed'))
self.assertEqual({
- 'name': 'title',
'implementations': [],
},
ipc.get(['context', context], cmd='feed', layer='foo'))
self.assertEqual({
- 'name': 'title',
'implementations': [{
'stability': 'stable',
'guid': impl,
'arch': '*-*',
'version': '1',
- 'unpack_size': None,
- 'blob_size': None,
+ 'license': ['GPLv3+'],
}],
},
ipc.get(['context', context], cmd='feed', layer='public'))
- def test_Feeds_PreferLocalFeeds(self):
- home_volume = self.start_online_client()
- ipc = IPCConnection()
-
- context = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '2',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {'*-*': {}},
- }})
-
- self.assertEqual({
- 'name': 'title',
- 'implementations': [
- {
- 'version': '2',
- 'arch': '*-*',
- 'stability': 'stable',
- 'guid': impl,
- 'unpack_size': None,
- 'blob_size': None,
- },
- ],
- },
- ipc.get(['context', context], cmd='feed'))
-
- self.touch(('Activities/activity-1/activity/activity.info', [
- '[Activity]',
- 'name = TestActivity',
- 'bundle_id = ' + context,
- 'exec = true',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]))
- monitor = coroutine.spawn(clones.monitor, home_volume['context'], ['Activities'])
- coroutine.dispatch()
-
- self.assertEqual({
- 'name': 'TestActivity',
- 'implementations': [
- {
- 'version': '1',
- 'arch': '*-*',
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'stability': 'stable',
- 'guid': tests.tmpdir + '/Activities/activity-1',
- 'requires': {},
- },
- ],
- },
- ipc.get(['context', context], cmd='feed'))
-
- def test_InvalidateSolutions(self):
- self.start_online_client()
- ipc = IPCConnection()
- self.assertNotEqual(None, injector._mtime)
-
- mtime = injector._mtime
- coroutine.sleep(1.5)
-
- context = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- assert injector._mtime == mtime
-
- impl1 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl1, {'data': {
- 'spec': {'*-*': {}},
- }})
- coroutine.sleep(.5)
- assert injector._mtime > mtime
-
- mtime = injector._mtime
- coroutine.sleep(1)
-
- impl2 = ipc.post(['implementation'], {
- 'context': context,
- 'license': 'GPLv3+',
- 'version': '2',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl2, {'data': {
- 'spec': {'*-*': {
- 'requires': {
- 'dep1': {},
- 'dep2': {'restrictions': [['1', '2']]},
- 'dep3': {'restrictions': [[None, '2']]},
- 'dep4': {'restrictions': [['3', None]]},
- },
- }},
- }})
- assert injector._mtime > mtime
-
- def test_ContentDisposition(self):
- self.start_online_client([User, Context, Implementation, Artifact])
- ipc = IPCConnection()
-
- artifact = ipc.post(['artifact'], {
- 'type': 'instance',
- 'context': 'context',
- 'title': 'title',
- 'description': 'description',
- })
- ipc.request('PUT', ['artifact', artifact, 'data'], 'blob', headers={'Content-Type': 'image/png'})
-
- response = ipc.request('GET', ['artifact', artifact, 'data'])
- self.assertEqual(
- 'attachment; filename="Title.png"',
- response.headers.get('Content-Disposition'))
-
def test_Redirects(self):
URL = 'http://sugarlabs.org'
@@ -1016,201 +1196,22 @@ class OnlineRoutes(tests.Test):
self.assertEqual(303, response.status_code)
self.assertEqual(URL, response.headers['Location'])
- def test_Proxy_Activities(self):
- home_volume = self.start_online_client()
- ipc = IPCConnection()
-
- context = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- self.assertEqual(
- [{'guid': context, 'favorite': False, 'clone': 0, 'type': ['activity']}],
- ipc.get(['context'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': False, 'clone': 0, 'type': ['activity']},
- ipc.get(['context', context], reply=['favorite', 'clone']))
-
- home_volume['context'].create({
- 'guid': context,
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- 'favorite': True,
- 'clone': 2,
- })
-
- self.assertEqual(
- [{'guid': context, 'favorite': True, 'clone': 2, 'type': ['activity']}],
- ipc.get(['context'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': True, 'clone': 2},
- ipc.get(['context', context], reply=['favorite', 'clone']))
-
- def test_Proxy_Content(self):
- self.start_online_client([User, Context, Implementation, Artifact])
- ipc = IPCConnection()
-
- guid = ipc.post(['context'], {
- 'type': 'content',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
-
- self.assertEqual(
- [{'guid': guid, 'favorite': False, 'clone': 0, 'type': ['content']}],
- ipc.get(['context'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': False, 'clone': 0, 'type': ['content']},
- ipc.get(['context', guid], reply=['favorite', 'clone']))
-
- self.touch(('datastore/%s/%s/metadata/keep' % (guid[:2], guid), '0'))
-
- self.assertEqual(
- [{'guid': guid, 'favorite': False, 'clone': 2, 'type': ['content']}],
- ipc.get(['context'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': False, 'clone': 2, 'type': ['content']},
- ipc.get(['context', guid], reply=['favorite', 'clone']))
-
- self.touch(('datastore/%s/%s/metadata/keep' % (guid[:2], guid), '1'))
-
- self.assertEqual(
- [{'guid': guid, 'favorite': True, 'clone': 2, 'type': ['content']}],
- ipc.get(['context'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': True, 'clone': 2, 'type': ['content']},
- ipc.get(['context', guid], reply=['favorite', 'clone']))
-
- def test_Proxy_Artifacts(self):
+ def test_ContentDisposition(self):
self.start_online_client([User, Context, Implementation, Artifact])
ipc = IPCConnection()
- guid = ipc.post(['artifact'], {
+ artifact = ipc.post(['artifact'], {
'type': 'instance',
'context': 'context',
'title': 'title',
'description': 'description',
})
+ ipc.request('PUT', ['artifact', artifact, 'data'], 'blob', headers={'Content-Type': 'image/png'})
+ response = ipc.request('GET', ['artifact', artifact, 'data'])
self.assertEqual(
- [{'guid': guid, 'favorite': False, 'clone': 0}],
- ipc.get(['artifact'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': False, 'clone': 0},
- ipc.get(['artifact', guid], reply=['favorite', 'clone']))
-
- self.touch(('datastore/%s/%s/metadata/keep' % (guid[:2], guid), '0'))
-
- self.assertEqual(
- [{'guid': guid, 'favorite': False, 'clone': 2}],
- ipc.get(['artifact'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': False, 'clone': 2},
- ipc.get(['artifact', guid], reply=['favorite', 'clone']))
-
- self.touch(('datastore/%s/%s/metadata/keep' % (guid[:2], guid), '1'))
-
- self.assertEqual(
- [{'guid': guid, 'favorite': True, 'clone': 2}],
- ipc.get(['artifact'], reply=['favorite', 'clone'])['result'])
- self.assertEqual(
- {'favorite': True, 'clone': 2},
- ipc.get(['artifact', guid], reply=['favorite', 'clone']))
-
- def test_Proxy_NoNeedlessRemoteRequests(self):
- home_volume = self.start_online_client()
- ipc = IPCConnection()
-
- guid = ipc.post(['context'], {
- 'type': 'content',
- 'title': 'remote',
- 'summary': 'summary',
- 'description': 'description',
- })
- self.assertEqual(
- {'title': 'remote'},
- ipc.get(['context', guid], reply=['title']))
-
- home_volume['context'].create({
- 'guid': guid,
- 'type': 'activity',
- 'title': 'local',
- 'summary': 'summary',
- 'description': 'description',
- 'favorite': True,
- })
- self.assertEqual(
- {'title': 'local'},
- ipc.get(['context', guid], reply=['title']))
-
- def test_HomeVolumeEvents(self):
- self.home_volume = self.start_online_client()
- ipc = IPCConnection()
- coroutine.spawn(clones.monitor, self.home_volume['context'], ['Activities'])
-
- context1 = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- impl = ipc.post(['implementation'], {
- 'context': context1,
- 'license': 'GPLv3+',
- 'version': '1',
- 'stability': 'stable',
- 'notes': '',
- })
- self.node_volume['implementation'].update(impl, {'data': {
- 'spec': {
- '*-*': {
- 'commands': {
- 'activity': {
- 'exec': 'true',
- },
- },
- 'stability': 'stable',
- 'size': 0,
- 'extract': 'TestActivitry',
- },
- },
- 'blob': StringIO(self.zips(['TestActivitry/activity/activity.info', [
- '[Activity]',
- 'name = TestActivitry',
- 'bundle_id = %s' % context1,
- 'exec = false',
- 'icon = icon',
- 'activity_version = 1',
- 'license=Public Domain',
- ]])),
-
- }})
-
- trigger = self.wait_for_events(ipc, event='update', resource='context', guid=context1)
- ipc.put(['context', context1], 2, cmd='clone')
- trigger.wait()
- self.assertEqual(
- {'clone': 2},
- ipc.get(['context', context1], reply=['clone']))
-
- context2 = ipc.post(['context'], {
- 'type': 'activity',
- 'title': 'title',
- 'summary': 'summary',
- 'description': 'description',
- })
- trigger = self.wait_for_events(ipc, event='create', resource='context', guid=context2)
- ipc.put(['context', context2], True, cmd='favorite')
- trigger.wait()
- self.assertEqual(
- {'favorite': True},
- ipc.get(['context', context2], reply=['favorite']))
+ 'attachment; filename="Title.png"',
+ response.headers.get('Content-Disposition'))
def test_FallbackToLocalSNOnRemoteTransportFails(self):
@@ -1313,6 +1314,27 @@ class OnlineRoutes(tests.Test):
self.fork_master([User])
self.wait_for_events(ipc, event='inline', state='online').wait()
+ def test_inline(self):
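+        # inline() should stay False until a real request reaches the node; merely
+        # starting the master is not enough, and stopping the node goes offline again.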
+ cp = ClientRoutes(Volume('client', model.RESOURCES), client.api_url.value)
+ assert not cp.inline()
+
+ trigger = self.wait_for_events(cp, event='inline', state='online')
+ coroutine.sleep(1)
+ self.start_master()
+ trigger.wait(1)
+ assert trigger.value is None
+ assert not cp.inline()
+
+ request = Request(method='GET', cmd='whoami')
+ cp.whoami(request, Response())
+ trigger.wait()
+ assert cp.inline()
+
+ trigger = self.wait_for_events(cp, event='inline', state='offline')
+ self.node.stop()
+ trigger.wait()
+ assert not cp.inline()
+
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/client/routes.py b/tests/units/client/routes.py
index fcc355f..6450e81 100755
--- a/tests/units/client/routes.py
+++ b/tests/units/client/routes.py
@@ -3,11 +3,12 @@
# sugar-lint: disable
import json
+from cStringIO import StringIO
from __init__ import tests
from sugar_network import db, client, model
-from sugar_network.client import journal, injector, IPCConnection
+from sugar_network.client import journal, IPCConnection
from sugar_network.client.routes import ClientRoutes, CachedClientRoutes
from sugar_network.model.user import User
from sugar_network.model.report import Report
@@ -43,124 +44,86 @@ class RoutesTest(tests.Test):
response = requests.request('GET', url + '/hub/', allow_redirects=False)
self.assertEqual(index_html, response.content)
- def test_launch(self):
- self.override(injector, 'launch', lambda *args, **kwargs: [{'args': args, 'kwargs': kwargs}])
- volume = db.Volume('db', model.RESOURCES)
- cp = ClientRoutes(volume)
-
- trigger = self.wait_for_events(cp, event='launch')
- cp.launch(Request(path=['context', 'app']), [])
- self.assertEqual(
- {'event': 'launch', 'args': ['app', []], 'kwargs': {'color': None, 'activity_id': None, 'uri': None, 'object_id': None}},
- trigger.wait())
-
- def test_launch_ResumeJobject(self):
- self.override(injector, 'launch', lambda *args, **kwargs: [{'args': args, 'kwargs': kwargs}])
- self.override(journal, 'exists', lambda *args: True)
- volume = db.Volume('db', model.RESOURCES)
- cp = ClientRoutes(volume)
-
- trigger = self.wait_for_events(cp, event='launch')
- cp.launch(Request(path=['context', 'app']), [], object_id='object_id')
- self.assertEqual(
- {'event': 'launch', 'args': ['app', []], 'kwargs': {'color': None, 'activity_id': None, 'uri': None, 'object_id': 'object_id'}},
- trigger.wait())
-
- def test_InlineSwitchInFind(self):
+ def test_LocalLayers(self):
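+        # 'favorite' and 'clone' act as virtual layers: they are merged into the
+        # 'layer' property of online results, while filtering by them returns the
+        # locally stored copies from the home volume.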
self.home_volume = self.start_online_client()
ipc = IPCConnection()
guid1 = ipc.post(['context'], {
+ 'guid': 'context1',
'type': 'activity',
'title': '1',
'summary': 'summary',
'description': 'description',
})
- guid2 = ipc.post(['context'], {
- 'type': 'activity',
- 'title': '2',
- 'summary': 'summary',
- 'description': 'description',
- })
- guid3 = ipc.post(['context'], {
+ ipc.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = 2',
+ 'bundle_id = context2',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+ guid2 = 'context2'
+ ipc.upload(['implementation'], StringIO(self.zips(['TestActivity/activity/activity.info', [
+ '[Activity]',
+ 'name = 3',
+ 'bundle_id = context3',
+ 'exec = true',
+ 'icon = icon',
+ 'activity_version = 1',
+ 'license = Public Domain',
+ 'stability = stable',
+ ]])), cmd='release', initial=True)
+ guid3 = 'context3'
+ guid4 = ipc.post(['context'], {
+ 'guid': 'context4',
'type': 'activity',
- 'title': '3',
+ 'title': '4',
'summary': 'summary',
'description': 'description',
})
self.assertEqual([
- {'guid': guid1, 'title': '1'},
- {'guid': guid2, 'title': '2'},
- {'guid': guid3, 'title': '3'},
+ {'guid': guid1, 'title': '1', 'layer': []},
+ {'guid': guid2, 'title': '2', 'layer': []},
+ {'guid': guid3, 'title': '3', 'layer': []},
+ {'guid': guid4, 'title': '4', 'layer': []},
],
- ipc.get(['context'], reply=['guid', 'title'])['result'])
+ ipc.get(['context'], reply=['guid', 'title', 'layer'])['result'])
self.assertEqual([
- {'guid': guid1, 'title': '1'},
- {'guid': guid2, 'title': '2'},
- {'guid': guid3, 'title': '3'},
],
- ipc.get(['context'], reply=['guid', 'title'], clone=0)['result'])
+ ipc.get(['context'], reply=['guid', 'title'], layer='favorite')['result'])
self.assertEqual([
- {'guid': guid1, 'title': '1'},
- {'guid': guid2, 'title': '2'},
- {'guid': guid3, 'title': '3'},
],
- ipc.get(['context'], reply=['guid', 'title'], favorite=False)['result'])
- self.assertEqual([
- ],
- ipc.get(['context'], reply=['guid', 'title'], favorite=True)['result'])
- self.assertEqual([
- ],
- ipc.get(['context'], reply=['guid', 'title'], clone=2)['result'])
+ ipc.get(['context'], reply=['guid', 'title'], layer='clone')['result'])
+ ipc.put(['context', guid1], True, cmd='favorite')
ipc.put(['context', guid2], True, cmd='favorite')
+ ipc.put(['context', guid2], True, cmd='clone')
+ ipc.put(['context', guid3], True, cmd='clone')
+ self.home_volume['context'].update(guid1, {'title': '1_'})
self.home_volume['context'].update(guid2, {'title': '2_'})
- self.assertEqual([
- {'guid': guid2, 'title': '2_'},
- ],
- ipc.get(['context'], reply=['guid', 'title'], favorite=True)['result'])
- self.assertEqual([
- ],
- ipc.get(['context'], reply=['guid', 'title'], clone=2)['result'])
+ self.home_volume['context'].update(guid3, {'title': '3_'})
- ipc.put(['context', guid1], True, cmd='favorite')
- ipc.put(['context', guid3], True, cmd='favorite')
- self.home_volume['context'].update(guid1, {'clone': 1, 'title': '1_'})
- self.home_volume['context'].update(guid3, {'clone': 2, 'title': '3_'})
self.assertEqual([
- {'guid': guid1, 'title': '1_'},
- {'guid': guid2, 'title': '2_'},
- {'guid': guid3, 'title': '3_'},
+ {'guid': guid1, 'title': '1', 'layer': ['favorite']},
+ {'guid': guid2, 'title': '2', 'layer': ['clone', 'favorite']},
+ {'guid': guid3, 'title': '3', 'layer': ['clone']},
+ {'guid': guid4, 'title': '4', 'layer': []},
],
- ipc.get(['context'], reply=['guid', 'title'], favorite=True)['result'])
+ ipc.get(['context'], reply=['guid', 'title', 'layer'])['result'])
self.assertEqual([
{'guid': guid1, 'title': '1_'},
+ {'guid': guid2, 'title': '2_'},
],
- ipc.get(['context'], reply=['guid', 'title'], clone=1)['result'])
+ ipc.get(['context'], reply=['guid', 'title'], layer='favorite')['result'])
self.assertEqual([
+ {'guid': guid2, 'title': '2_'},
{'guid': guid3, 'title': '3_'},
],
- ipc.get(['context'], reply=['guid', 'title'], clone=2)['result'])
-
- self.assertEqual([
- {'guid': guid1, 'title': '1'},
- {'guid': guid2, 'title': '2'},
- {'guid': guid3, 'title': '3'},
- ],
- ipc.get(['context'], reply=['guid', 'title'])['result'])
- self.assertEqual([
- {'guid': guid1, 'title': '1'},
- {'guid': guid2, 'title': '2'},
- {'guid': guid3, 'title': '3'},
- ],
- ipc.get(['context'], reply=['guid', 'title'], clone=0)['result'])
- self.assertEqual([
- {'guid': guid1, 'title': '1'},
- {'guid': guid2, 'title': '2'},
- {'guid': guid3, 'title': '3'},
- ],
- ipc.get(['context'], reply=['guid', 'title'], favorite=False)['result'])
+ ipc.get(['context'], reply=['guid', 'title'], layer='clone')['result'])
def test_SetLocalLayerInOffline(self):
volume = db.Volume('client', model.RESOURCES)
@@ -185,8 +148,8 @@ class RoutesTest(tests.Test):
guid = call(cp, post)
self.assertEqual([], call(cp, Request(method='GET', path=['context', guid, 'layer'])))
- def test_CachedClientCommands(self):
- volume = db.Volume('client', model.RESOURCES)
+ def test_CachedClientRoutes(self):
+ volume = db.Volume('client', model.RESOURCES, lazy_open=True)
cp = CachedClientRoutes(volume, client.api_url.value)
post = Request(method='POST', path=['context'])
@@ -196,6 +159,7 @@ class RoutesTest(tests.Test):
'title': 'title',
'summary': 'summary',
'description': 'description',
+ 'layer': ['foo', 'clone', 'favorite'],
}
guid1 = call(cp, post)
guid2 = call(cp, post)
@@ -207,12 +171,16 @@ class RoutesTest(tests.Test):
self.assertEqual([[3, None]], json.load(file('client/push.sequence')))
self.assertEqual({'en-us': 'title'}, volume['context'].get(guid1)['title'])
+ self.assertEqual(['foo', 'clone', 'favorite', 'local'], volume['context'].get(guid1)['layer'])
self.assertEqual({'en-us': 'title'}, self.node_volume['context'].get(guid1)['title'])
+ self.assertEqual(['foo'], self.node_volume['context'].get(guid1)['layer'])
self.assertEqual(
{tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
self.node_volume['context'].get(guid1)['author'])
self.assertEqual({'en-us': 'title'}, volume['context'].get(guid2)['title'])
+ self.assertEqual(['foo', 'clone', 'favorite', 'local'], volume['context'].get(guid2)['layer'])
self.assertEqual({'en-us': 'title'}, self.node_volume['context'].get(guid2)['title'])
+ self.assertEqual(['foo'], self.node_volume['context'].get(guid2)['layer'])
self.assertEqual(
{tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
self.node_volume['context'].get(guid2)['author'])
@@ -242,8 +210,8 @@ class RoutesTest(tests.Test):
{tests.UID: {'role': 3, 'name': 'test', 'order': 0}},
self.node_volume['context'].get(guid2)['author'])
- def test_CachedClientCommands_WipeReports(self):
- volume = db.Volume('client', model.RESOURCES)
+ def test_CachedClientRoutes_WipeReports(self):
+ volume = db.Volume('client', model.RESOURCES, lazy_open=True)
cp = CachedClientRoutes(volume, client.api_url.value)
post = Request(method='POST', path=['report'])
@@ -262,6 +230,31 @@ class RoutesTest(tests.Test):
assert not volume['report'].exists(guid)
assert self.node_volume['report'].exists(guid)
+ def test_CachedClientRoutes_OpenOnlyChangedResources(self):
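+        # After reopening a lazy volume, pushing offline changes should only open the
+        # resource directories that actually have something to push ('context' here).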
+ volume = db.Volume('client', model.RESOURCES, lazy_open=True)
+ cp = CachedClientRoutes(volume, client.api_url.value)
+ guid = call(cp, Request(method='POST', path=['context'], content_type='application/json', content={
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'layer': ['foo', 'clone', 'favorite'],
+ }))
+ cp.close()
+
+ volume = db.Volume('client', model.RESOURCES, lazy_open=True)
+ cp = CachedClientRoutes(volume, client.api_url.value)
+
+ trigger = self.wait_for_events(cp, event='push')
+ self.start_master()
+ call(cp, Request(method='GET', cmd='inline'))
+ trigger.wait()
+
+ self.assertEqual([[2, None]], json.load(file('client/push.sequence')))
+ assert self.node_volume['context'].exists(guid)
+ self.assertEqual(['context'], volume.keys())
+
+
def test_SwitchToOfflineForAbsentOnlineProps(self):
volume = db.Volume('client', model.RESOURCES)
cp = ClientRoutes(volume, client.api_url.value)
diff --git a/tests/units/client/server_routes.py b/tests/units/client/server_routes.py
index d1ffda0..6b36061 100755
--- a/tests/units/client/server_routes.py
+++ b/tests/units/client/server_routes.py
@@ -15,53 +15,7 @@ from sugar_network.toolkit.router import Router
from sugar_network.toolkit import mountpoints, coroutine
-class ServerCommandsTest(tests.Test):
-
- def start_node(self):
- os.makedirs('disk/sugar-network')
- self.node_volume = Volume('db', model.RESOURCES)
- cp = ClientRoutes(self.node_volume)
- trigger = self.wait_for_events(cp, event='inline', state='online')
- coroutine.spawn(mountpoints.monitor, tests.tmpdir)
- trigger.wait()
- server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(cp))
- coroutine.spawn(server.serve_forever)
- coroutine.dispatch()
- return cp
-
- def test_PopulateNode(self):
- os.makedirs('disk/sugar-network')
- volume = Volume('db', model.RESOURCES)
- cp = ClientRoutes(volume)
-
- assert not cp.inline()
- trigger = self.wait_for_events(cp, event='inline', state='online')
- mountpoints.populate('.')
- assert trigger.value is not None
- assert cp.inline()
-
- def test_MountNode(self):
- volume = Volume('db', model.RESOURCES)
- cp = ClientRoutes(volume)
-
- trigger = self.wait_for_events(cp, event='inline', state='online')
- mountpoints.populate('.')
- assert not cp.inline()
- assert trigger.value is None
-
- coroutine.spawn(mountpoints.monitor, '.')
- coroutine.dispatch()
- os.makedirs('disk/sugar-network')
- trigger.wait()
- assert cp.inline()
-
- def test_UnmountNode(self):
- cp = self.start_node()
- assert cp.inline()
- trigger = self.wait_for_events(cp, event='inline', state='offline')
- shutil.rmtree('disk')
- trigger.wait()
- assert not cp.inline()
+class ServerRoutesTest(tests.Test):
def test_whoami(self):
self.start_node()
@@ -71,7 +25,7 @@ class ServerCommandsTest(tests.Test):
{'guid': tests.UID, 'roles': []},
ipc.get(cmd='whoami'))
- def test_subscribe(self):
+ def test_Events(self):
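+        # Events triggered via IPC calls and via direct node volume changes are
+        # broadcast to subscribers; home volume changes made while online are not.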
self.start_node()
ipc = IPCConnection()
events = []
@@ -79,7 +33,7 @@ class ServerCommandsTest(tests.Test):
def read_events():
for event in ipc.subscribe(event='!commit'):
events.append(event)
- job = coroutine.spawn(read_events)
+ coroutine.spawn(read_events)
coroutine.dispatch()
guid = ipc.post(['context'], {
@@ -88,19 +42,93 @@ class ServerCommandsTest(tests.Test):
'summary': 'summary',
'description': 'description',
})
- coroutine.dispatch()
ipc.put(['context', guid], {
'title': 'title_2',
})
- coroutine.dispatch()
- coroutine.sleep(.5)
- job.kill()
+ coroutine.sleep(.1)
+ ipc.delete(['context', guid])
+ coroutine.sleep(.1)
+
+ self.assertEqual([
+ {'guid': guid, 'resource': 'context', 'event': 'create'},
+ {'guid': guid, 'resource': 'context', 'event': 'update'},
+ {'guid': guid, 'event': 'delete', 'resource': 'context'},
+ ],
+ events)
+ del events[:]
+
+ guid = self.node_volume['context'].create({
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.node_volume['context'].update(guid, {
+ 'title': 'title_2',
+ })
+ coroutine.sleep(.1)
+ self.node_volume['context'].delete(guid)
+ coroutine.sleep(.1)
+
+ self.assertEqual([
+ {'guid': guid, 'resource': 'context', 'event': 'create'},
+ {'guid': guid, 'resource': 'context', 'event': 'update'},
+ {'guid': guid, 'event': 'delete', 'resource': 'context'},
+ ],
+ events)
+ del events[:]
+
+ guid = self.home_volume['context'].create({
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.home_volume['context'].update(guid, {
+ 'title': 'title_2',
+ })
+ coroutine.sleep(.1)
+ self.home_volume['context'].delete(guid)
+ coroutine.sleep(.1)
+
+ self.assertEqual([], events)
+ return
+
+ self.node.stop()
+ coroutine.sleep(.1)
+ del events[:]
+
+ guid = self.home_volume['context'].create({
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ self.home_volume['context'].update(guid, {
+ 'title': 'title_2',
+ })
+ coroutine.sleep(.1)
+ self.home_volume['context'].delete(guid)
+ coroutine.sleep(.1)
self.assertEqual([
{'guid': guid, 'resource': 'context', 'event': 'create'},
{'guid': guid, 'resource': 'context', 'event': 'update'},
+ {'guid': guid, 'event': 'delete', 'resource': 'context'},
],
events)
+ del events[:]
+
def test_BLOBs(self):
self.start_node()
@@ -134,6 +162,54 @@ class ServerCommandsTest(tests.Test):
[{'icon': 'http://127.0.0.1:5555/static/images/missing.png'}],
ipc.get(['context'], reply=['icon'])['result'])
+ def test_PopulateNode(self):
+ os.makedirs('disk/sugar-network')
+ volume = Volume('db', model.RESOURCES)
+ cp = ClientRoutes(volume)
+
+ assert not cp.inline()
+ trigger = self.wait_for_events(cp, event='inline', state='online')
+ mountpoints.populate('.')
+ coroutine.dispatch()
+ assert trigger.value is not None
+ assert cp.inline()
+
+ def test_MountNode(self):
+ volume = Volume('db', model.RESOURCES)
+ cp = ClientRoutes(volume)
+
+ trigger = self.wait_for_events(cp, event='inline', state='online')
+ mountpoints.populate('.')
+ assert not cp.inline()
+ assert trigger.value is None
+
+ coroutine.spawn(mountpoints.monitor, '.')
+ coroutine.dispatch()
+ os.makedirs('disk/sugar-network')
+ trigger.wait()
+ assert cp.inline()
+
+ def test_UnmountNode(self):
+ cp = self.start_node()
+ assert cp.inline()
+ trigger = self.wait_for_events(cp, event='inline', state='offline')
+ shutil.rmtree('disk')
+ trigger.wait()
+ assert not cp.inline()
+
+ def start_node(self):
+ os.makedirs('disk/sugar-network')
+ self.home_volume = Volume('db', model.RESOURCES)
+ cp = ClientRoutes(self.home_volume)
+ trigger = self.wait_for_events(cp, event='inline', state='online')
+ coroutine.spawn(mountpoints.monitor, tests.tmpdir)
+ trigger.wait()
+ self.node_volume = cp._node.volume
+ server = coroutine.WSGIServer(('127.0.0.1', client.ipc_port.value), Router(cp))
+ coroutine.spawn(server.serve_forever)
+ coroutine.dispatch()
+ return cp
+
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/client/solver.py b/tests/units/client/solver.py
index 6e35a50..b3d9666 100755
--- a/tests/units/client/solver.py
+++ b/tests/units/client/solver.py
@@ -2,10 +2,11 @@
# sugar-lint: disable
import os
+import imp
from __init__ import tests
-from sugar_network.client import IPCConnection, packagekit, solver, clones
+from sugar_network.client import IPCConnection, packagekit, solver
from sugar_network.toolkit import lsb_release
@@ -22,57 +23,212 @@ class SolverTest(tests.Test):
self.assertEqual(host_arch, solver.select_architecture([host_arch]))
self.assertEqual(host_arch, solver.select_architecture(['foo', host_arch, 'bar']))
- def test_FirstSuccessfulSolveMighMissImplsDueToPackageDeps(self):
- self.override(packagekit, 'resolve', lambda names:
- dict([(i, {'name': i, 'pk_id': i, 'version': '0', 'arch': '*', 'installed': True}) for i in names]))
-
- self.touch(('Activities/1/activity/activity.info', [
- '[Activity]',
- 'name = name',
- 'bundle_id = bundle_id',
- 'exec = false',
- 'icon = icon',
- 'activity_version = 1',
- 'license = Public Domain',
- ]))
- self.touch(('Activities/2/activity/activity.info', [
- '[Activity]',
- 'name = name',
- 'bundle_id = bundle_id',
- 'exec = false',
- 'icon = icon',
- 'activity_version = 2',
- 'license = Public Domain',
- 'requires = dep',
- ]))
-
- home_volume = self.start_online_client()
- clones.populate(home_volume['context'], ['Activities'])
- ipc = IPCConnection()
-
- ipc.post(['context'], {
- 'guid': 'dep',
- 'type': 'package',
+ def test_ProcessCommonDependencies(self):
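+        # Context-level 'dependencies' are merged with the spec-level 'requires', so
+        # the solution should contain the implementation plus all three packages.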
+ self.start_online_client()
+ conn = IPCConnection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
'title': 'title',
'summary': 'summary',
'description': 'description',
+ 'dependencies': ['dep1', 'dep2'],
+ })
+ impl = conn.post(['implementation'], {
+ 'context': context,
+ 'license': 'GPLv3+',
+ 'version': '1',
+ 'stability': 'stable',
+ 'notes': '',
+ })
+ self.node_volume['implementation'].update(impl, {'data': {
+ 'spec': {
+ '*-*': {
+ 'commands': {
+ 'activity': {
+ 'exec': 'echo',
+ },
+ },
+ 'requires': {
+ 'dep2': {'restrictions': [['1', '2']]},
+ 'dep3': {},
+ },
+ },
+ },
+ }})
+ conn.post(['context'], {
+ 'guid': 'dep1',
+ 'type': 'package',
+ 'title': 'title1',
+ 'summary': 'summary',
+ 'description': 'description',
'aliases': {
lsb_release.distributor_id(): {
'status': 'success',
- 'binary': [['dep.bin']],
+ 'binary': [['dep1.bin']],
+ },
+ },
+ })
+ conn.post(['context'], {
+ 'guid': 'dep2',
+ 'type': 'package',
+ 'title': 'title2',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'aliases': {
+ lsb_release.distributor_id(): {
+ 'status': 'success',
+ 'binary': [['dep2.bin']],
+ },
+ },
+ })
+ conn.post(['context'], {
+ 'guid': 'dep3',
+ 'type': 'package',
+ 'title': 'title3',
+ 'summary': 'summary',
+ 'description': 'description',
+ 'aliases': {
+ lsb_release.distributor_id(): {
+ 'status': 'success',
+ 'binary': [['dep3.bin']],
},
},
})
- solution = solver.solve(ipc, 'bundle_id', ['stable'])
- self.assertEqual(
- 2, len(solution))
- self.assertEqual(
- ('bundle_id', '2'),
- (solution[0]['context'], solution[0]['version']))
+ def resolve(names):
+ return dict([(i, {'name': i, 'pk_id': i, 'version': '1', 'arch': '*', 'installed': True}) for i in names])
+
+ self.override(packagekit, 'resolve', resolve)
+
self.assertEqual(
- ('dep', '0'),
- (solution[1]['context'], solution[1]['version']))
+ sorted([
+ {'version': '1', 'guid': 'dep1', 'context': 'dep1', 'stability': 'packaged', 'license': None},
+ {'version': '1', 'guid': 'dep2', 'context': 'dep2', 'stability': 'packaged', 'license': None},
+ {'version': '1', 'guid': 'dep3', 'context': 'dep3', 'stability': 'packaged', 'license': None},
+ {'version': '1', 'command': ['echo'], 'context': context, 'guid': impl, 'stability': 'stable', 'license': ['GPLv3+']},
+ ]),
+ sorted(solver.solve(self.client_routes.fallback, context, ['stable'])))
+
+ def test_SolveSugar(self):
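+        # The 'sugar' package is resolved against the Sugar version reported by
+        # jarabe.config, honouring version restrictions from the implementation spec.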
+ self.touch(('__init__.py', ''))
+ self.touch(('jarabe.py', 'class config: version = "0.94"'))
+ file_, pathname_, description_ = imp.find_module('jarabe', ['.'])
+ imp.load_module('jarabe', file_, pathname_, description_)
+
+ self.start_online_client()
+ conn = IPCConnection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ conn.post(['context'], {
+ 'guid': 'sugar',
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ impl = conn.post(['implementation'], {
+ 'context': context,
+ 'license': 'GPLv3+',
+ 'version': '1',
+ 'stability': 'stable',
+ 'notes': '',
+ })
+ self.node_volume['implementation'].update(impl, {'data': {
+ 'spec': {
+ '*-*': {
+ 'commands': {
+ 'activity': {
+ 'exec': 'echo',
+ },
+ },
+ 'requires': {
+ 'sugar': {},
+ },
+ },
+ },
+ }})
+ self.assertEqual([
+ {'version': '1', 'command': ['echo'], 'context': context, 'guid': impl, 'stability': 'stable', 'license': ['GPLv3+']},
+ {'version': '0.94', 'context': 'sugar', 'guid': 'sugar-0.94', 'stability': 'packaged', 'license': None},
+ ],
+ solver.solve(self.client_routes.fallback, context, ['stable']))
+
+ self.node_volume['implementation'].update(impl, {'data': {
+ 'spec': {
+ '*-*': {
+ 'commands': {
+ 'activity': {
+ 'exec': 'echo',
+ },
+ },
+ 'requires': {
+ 'sugar': {'restrictions': [['0.80', '0.87']]},
+ },
+ },
+ },
+ }})
+ self.assertEqual([
+ {'version': '1', 'command': ['echo'], 'context': context, 'guid': impl, 'stability': 'stable', 'license': ['GPLv3+']},
+ {'version': '0.86', 'context': 'sugar', 'guid': 'sugar-0.86', 'stability': 'packaged', 'license': None},
+ ],
+ solver.solve(self.client_routes.fallback, context, ['stable']))
+
+ def test_StripSugarVersion(self):
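+        # The micro part of jarabe.config.version should be stripped, so "0.94.1"
+        # resolves the 'sugar' package as "0.94".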
+ self.touch(('__init__.py', ''))
+ self.touch(('jarabe.py', 'class config: version = "0.94.1"'))
+ file_, pathname_, description_ = imp.find_module('jarabe', ['.'])
+ imp.load_module('jarabe', file_, pathname_, description_)
+
+ self.start_online_client()
+ conn = IPCConnection()
+
+ context = conn.post(['context'], {
+ 'type': 'activity',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+ conn.post(['context'], {
+ 'guid': 'sugar',
+ 'type': 'package',
+ 'title': 'title',
+ 'summary': 'summary',
+ 'description': 'description',
+ })
+
+ impl = conn.post(['implementation'], {
+ 'context': context,
+ 'license': 'GPLv3+',
+ 'version': '1',
+ 'stability': 'stable',
+ 'notes': '',
+ })
+ self.node_volume['implementation'].update(impl, {'data': {
+ 'spec': {
+ '*-*': {
+ 'commands': {
+ 'activity': {
+ 'exec': 'echo',
+ },
+ },
+ 'requires': {
+ 'sugar': {},
+ },
+ },
+ },
+ }})
+ self.assertEqual([
+ {'version': '1', 'command': ['echo'], 'context': context, 'guid': impl, 'stability': 'stable', 'license': ['GPLv3+']},
+ {'version': '0.94', 'context': 'sugar', 'guid': 'sugar-0.94', 'stability': 'packaged', 'license': None},
+ ],
+ solver.solve(self.client_routes.fallback, context, ['stable']))
if __name__ == '__main__':
diff --git a/tests/units/db/routes.py b/tests/units/db/routes.py
index 1ea43ed..b1d1afd 100755
--- a/tests/units/db/routes.py
+++ b/tests/units/db/routes.py
@@ -186,16 +186,10 @@ class RoutesTest(tests.Test):
content={}, content_type='application/json')
self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob'])
- self.touch('file')
- self.assertRaises(RuntimeError, self.call, 'PUT', path=['testdocument', guid, 'blob'],
- content={'blob': 'file'}, content_type='application/json')
- self.assertRaises(http.NotFound, self.call, 'GET', path=['testdocument', guid, 'blob'])
-
self.call('PUT', path=['testdocument', guid, 'blob'],
content={'url': 'foo', 'bar': 'probe'}, content_type='application/json')
blob = self.call('GET', path=['testdocument', guid, 'blob'])
self.assertEqual('foo', blob['url'])
- assert 'bar' not in blob
def test_RemoveBLOBs(self):
@@ -1150,7 +1144,6 @@ class RoutesTest(tests.Test):
assert self.call('HEAD', ['testdocument', guid, 'blob1'], host='localhost') is None
meta = self.volume['testdocument'].get(guid).meta('blob1')
meta.pop('blob')
- meta['url'] = 'http://localhost/testdocument/%s/blob1' % guid
self.assertEqual(meta, self.response.meta)
self.assertEqual(len('blob'), self.response.content_length)
self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), self.response.last_modified)
@@ -1161,6 +1154,11 @@ class RoutesTest(tests.Test):
self.assertEqual(100, self.response.content_length)
self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), self.response.last_modified)
+ assert self.call('GET', ['testdocument', guid, 'blob2']) is not None
+ meta = self.volume['testdocument'].get(guid).meta('blob2')
+ self.assertEqual(meta, self.response.meta)
+ self.assertEqual(formatdate(meta['mtime'], localtime=False, usegmt=True), self.response.last_modified)
+
def test_DefaultAuthor(self):
class User(db.Resource):
@@ -1316,6 +1314,29 @@ class RoutesTest(tests.Test):
},
self.volume['document'].get(guid)['author'])
+    def test_CopyAuthors(self):
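+        # The author structure returned by GET .../author can be fed back into
+        # create() with setters=True and ends up stored in the same internal form.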
+
+ class User(db.Resource):
+
+ @db.indexed_property(slot=1)
+ def name(self, value):
+ return value
+
+ class Document(db.Resource):
+ pass
+
+ self.volume = db.Volume('db', [User, Document])
+ self.volume['user'].create({'guid': 'user', 'color': '', 'pubkey': '', 'name': 'User'})
+
+ guid1 = self.call('POST', ['document'], content={}, principal='user')
+ self.assertEqual({'user': {'name': 'User', 'role': 3, 'order': 0}}, self.volume['document'].get(guid1)['author'])
+ author = self.call('GET', ['document', guid1, 'author'])
+ self.assertEqual([{'guid': 'user', 'role': 3, 'name': 'User'}], author)
+
+ guid2 = self.volume['document'].create({'author': author}, setters=True)
+ author = self.call('GET', ['document', guid1, 'author'])
+ self.assertEqual({'user': {'name': 'User', 'role': 3, 'order': 0}}, self.volume['document'].get(guid2)['author'])
+
def test_AddUser(self):
class User(db.Resource):
@@ -1595,23 +1616,23 @@ class RoutesTest(tests.Test):
content_type=None, host=None, request=None, routes=db.Routes, principal=None,
**kwargs):
if request is None:
- request = Request({
- 'REQUEST_METHOD': method,
- 'PATH_INFO': '/'.join([''] + path),
- 'HTTP_ACCEPT_LANGUAGE': ','.join(accept_language or []),
- 'HTTP_HOST': host,
- 'wsgi.input': content_stream,
- })
- request.cmd = cmd
- request.content = content
- request.content_type = content_type
+ environ = {
+ 'REQUEST_METHOD': method,
+ 'PATH_INFO': '/'.join([''] + path),
+ 'HTTP_ACCEPT_LANGUAGE': ','.join(accept_language or []),
+ 'HTTP_HOST': host,
+ 'wsgi.input': content_stream,
+ }
+ if content_type:
+ environ['CONTENT_TYPE'] = content_type
if content_stream is not None:
- request.content_length = len(content_stream.getvalue())
+ environ['CONTENT_LENGTH'] = str(len(content_stream.getvalue()))
+ request = Request(environ, cmd=cmd, content=content)
request.update(kwargs)
request.principal = principal
router = Router(routes(self.volume))
self.response = Response()
- return router._call(request, self.response)
+ return router._call_route(request, self.response)
if __name__ == '__main__':
diff --git a/tests/units/model/routes.py b/tests/units/model/routes.py
index dd5bcb3..04491ee 100755
--- a/tests/units/model/routes.py
+++ b/tests/units/model/routes.py
@@ -18,7 +18,7 @@ from sugar_network.toolkit import coroutine
class RoutesTest(tests.Test):
def test_StaticFiles(self):
- router = Router(model.Routes())
+ router = Router(model.FrontRoutes())
local_path = src_root + '/sugar_network/static/httpdocs/images/missing.png'
response = []
@@ -48,7 +48,7 @@ class RoutesTest(tests.Test):
def prop(self, value):
return value
- routes = model.Routes()
+ routes = model.FrontRoutes()
volume = db.Volume('db', [Document], routes.broadcast)
events = []
@@ -81,7 +81,7 @@ class RoutesTest(tests.Test):
events)
def test_SubscribeWithPong(self):
- routes = model.Routes()
+ routes = model.FrontRoutes()
for event in routes.subscribe(ping=True):
break
self.assertEqual('data: {"event": "pong"}\n\n', event)
diff --git a/tests/units/node/node.py b/tests/units/node/node.py
index b56a61f..e163912 100755
--- a/tests/units/node/node.py
+++ b/tests/units/node/node.py
@@ -556,7 +556,7 @@ class NodeTest(tests.Test):
'summary': 'summary',
'description': 'description',
})
- impl = client.post(['implementation'], {
+ impl1 = client.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
'version': '1',
@@ -564,7 +564,7 @@ class NodeTest(tests.Test):
'notes': '',
})
blob1 = self.zips(('topdir/probe', 'probe1'))
- volume['implementation'].update(impl, {'data': {
+ volume['implementation'].update(impl1, {'data': {
'blob': StringIO(blob1),
'spec': {
'*-*': {
@@ -574,7 +574,7 @@ class NodeTest(tests.Test):
},
},
}})
- impl = client.post(['implementation'], {
+ impl2 = client.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
'version': '2',
@@ -582,7 +582,7 @@ class NodeTest(tests.Test):
'notes': '',
})
blob2 = self.zips(('topdir/probe', 'probe2'))
- volume['implementation'].update(impl, {'data': {
+ volume['implementation'].update(impl2, {'data': {
'blob': StringIO(blob2),
'spec': {
'*-*': {
@@ -593,7 +593,7 @@ class NodeTest(tests.Test):
},
},
}})
- impl = client.post(['implementation'], {
+ impl3 = client.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
'version': '3',
@@ -601,7 +601,7 @@ class NodeTest(tests.Test):
'notes': '',
})
blob3 = self.zips(('topdir/probe', 'probe3'))
- volume['implementation'].update(impl, {'data': {
+ volume['implementation'].update(impl3, {'data': {
'blob': StringIO(blob3),
'spec': {
'*-*': {
@@ -611,7 +611,7 @@ class NodeTest(tests.Test):
},
},
}})
- impl = client.post(['implementation'], {
+ impl4 = client.post(['implementation'], {
'context': context,
'license': 'GPLv3+',
'version': '4',
@@ -619,7 +619,7 @@ class NodeTest(tests.Test):
'notes': '',
})
blob4 = self.zips(('topdir/probe', 'probe4'))
- volume['implementation'].update(impl, {'data': {
+ volume['implementation'].update(impl4, {'data': {
'blob': StringIO(blob4),
'spec': {
'*-*': {
@@ -641,6 +641,31 @@ class NodeTest(tests.Test):
self.assertRaises(http.NotFound, client.get, ['context', context], cmd='clone', requires='dep4')
self.assertRaises(http.NotFound, client.get, ['context', context], cmd='clone', stability='foo')
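+        # A successful clone should also expose the chosen implementation's metadata
+        # (guid, version, license, data) through response.meta.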
+ response = Response()
+ client.call(Request(method='GET', path=['context', context], cmd='clone'), response)
+ self.assertEqual({
+ 'context': context,
+ 'stability': 'stable',
+ 'guid': impl3,
+ 'version': '3',
+ 'license': ['GPLv3+'],
+ 'data': {
+ 'seqno': 8,
+ 'mtime': int(os.stat('master/implementation/%s/%s/data.blob' % (impl3[:2], impl3)).st_mtime),
+ 'blob_size': len(blob3),
+ 'spec': {
+ '*-*': {
+ 'requires': {
+ 'dep2': {
+ 'restrictions': [['2', None]],
+ },
+ },
+ },
+ },
+ },
+ },
+ response.meta)
+
def test_release(self):
volume = self.start_master()
conn = Connection()
@@ -679,7 +704,6 @@ class NodeTest(tests.Test):
data = impl.meta('data')
self.assertEqual({
'*-*': {
- 'extract': 'topdir',
'commands': {'activity': {'exec': 'true'}},
'requires': {'dep': {}, 'sugar': {'restrictions': [['0.88', None]]}},
},
@@ -901,13 +925,12 @@ def call(routes, method, document=None, guid=None, prop=None, principal=None, cm
path.append(guid)
if prop:
path.append(prop)
- request = Request(method=method, path=path)
+ request = Request(method=method, path=path, cmd=cmd, content=content)
request.update(kwargs)
- request.cmd = cmd
- request.content = content
- request.environ = {'HTTP_HOST': '127.0.0.1'}
+ request.environ['HTTP_HOST'] = '127.0.0.1'
if principal:
request.environ['HTTP_X_SN_LOGIN'] = principal
+ request.principal = principal
router = Router(routes)
return router.call(request, Response())
diff --git a/tests/units/node/volume.py b/tests/units/node/volume.py
index f6b2105..77bf5ba 100755
--- a/tests/units/node/volume.py
+++ b/tests/units/node/volume.py
@@ -682,11 +682,9 @@ def call(routes, method, document=None, guid=None, prop=None, cmd=None, content=
path.append(guid)
if prop:
path.append(prop)
- request = Request(method=method, path=path)
+ request = Request(method=method, path=path, cmd=cmd, content=content)
request.update(kwargs)
- request.cmd = cmd
- request.content = content
- request.environ = {'HTTP_HOST': '127.0.0.1'}
+ request.environ['HTTP_HOST'] = '127.0.0.1'
router = Router(routes)
return router.call(request, Response())
diff --git a/tests/units/toolkit/__main__.py b/tests/units/toolkit/__main__.py
index 841711e..79b0e5b 100644
--- a/tests/units/toolkit/__main__.py
+++ b/tests/units/toolkit/__main__.py
@@ -10,6 +10,7 @@ from toolkit import *
from options import *
from spec import *
from router import *
+from gbus import *
if __name__ == '__main__':
tests.main()
diff --git a/tests/units/toolkit/gbus.py b/tests/units/toolkit/gbus.py
new file mode 100755
index 0000000..e5ef530
--- /dev/null
+++ b/tests/units/toolkit/gbus.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# sugar-lint: disable
+
+from __init__ import tests
+
+from sugar_network.toolkit import gbus
+
+
+class GbusTest(tests.Test):
+
+ def test_call(self):
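+        # gbus.call() should run `op` and return whatever the op sets on the passed
+        # result object.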
+
+ def op(result, arg):
+ result.set(arg)
+
+ self.assertEqual('probe', gbus.call(op, 'probe'))
+
+ def test_pipe(self):
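+        # gbus.pipe() should yield every value fed to the pipe callback until the op
+        # calls it with no argument.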
+
+ def op(pipe, args):
+ for i in args:
+ pipe(i)
+ pipe()
+
+ self.assertEqual(
+ [1, 2, 3],
+ [i for i in gbus.pipe(op, [1, 2, 3])])
+
+
+if __name__ == '__main__':
+ tests.main()
diff --git a/tests/units/toolkit/http.py b/tests/units/toolkit/http.py
index 7a63176..d21af37 100755
--- a/tests/units/toolkit/http.py
+++ b/tests/units/toolkit/http.py
@@ -7,7 +7,7 @@ import select
from __init__ import tests
from sugar_network import client as local
-from sugar_network.toolkit.router import route, Router, Request
+from sugar_network.toolkit.router import route, Router, Request, Response
from sugar_network.toolkit import coroutine, http
@@ -54,6 +54,28 @@ class HTTPTest(tests.Test):
[None, None, None, None, -1, {'foo': 'bar'}],
events)
+ def test_call(self):
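+        # Connection.call() should accept a Request built from a raw WSGI environ as
+        # well as one constructed from keyword arguments.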
+
+ class Commands(object):
+
+ @route('FOO', [None, None], cmd='f1', mime_type='application/json')
+ def f1(self, request):
+ return request.path
+
+ self.server = coroutine.WSGIServer(('127.0.0.1', local.ipc_port.value), Router(Commands()))
+ coroutine.spawn(self.server.serve_forever)
+ coroutine.dispatch()
+ conn = http.Connection('http://127.0.0.1:%s' % local.ipc_port.value)
+
+ request = Request({
+ 'REQUEST_METHOD': 'FOO',
+ 'PATH_INFO': '/foo/bar',
+ 'QUERY_STRING': 'cmd=f1',
+ })
+ self.assertEqual(['foo', 'bar'], conn.call(request))
+
+ self.assertEqual(['foo', 'bar'], conn.call(Request(method='FOO', path=['foo', 'bar'], cmd='f1')))
+
def test_call_ReturnStream(self):
class Commands(object):
@@ -85,6 +107,89 @@ class HTTPTest(tests.Test):
})
self.assertEqual('result', json.load(client.call(request)))
+ def test_call_ReturnMeta(self):
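+        # Values set on response.meta by route handlers should be carried back to the
+        # caller's Response object with their types preserved.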
+
+ class Commands(object):
+
+ @route('HEAD')
+ def f1(self, response):
+ response.meta['str'] = 'str'
+ response.meta['bool'] = True
+ response.meta['int'] = -1
+
+ @route('POST')
+ def f2(self):
+ response.meta['str'] = 'STR'
+ response.meta['bool'] = False
+ response.meta['int'] = 1
+
+ @route('GET')
+            def f3(self, response):
+ response.meta['str'] = 'FOO'
+ response.meta['bool'] = True
+ response.meta['int'] = 10
+
+ server = coroutine.WSGIServer(('127.0.0.1', local.ipc_port.value), Router(Commands()))
+ coroutine.spawn(server.serve_forever)
+ coroutine.dispatch()
+ conn = http.Connection('http://127.0.0.1:%s' % local.ipc_port.value)
+
+ request = Request({
+ 'REQUEST_METHOD': 'HEAD',
+ 'PATH_INFO': '/',
+ })
+ response = Response()
+ conn.call(request, response)
+ self.assertEqual({
+ 'int': -1,
+ 'bool': True,
+ 'str': 'str',
+ }, response.meta)
+
+ response = Response()
+ conn.call(Request(method='POST'), response)
+ self.assertEqual({
+ 'int': 1,
+ 'bool': False,
+ 'str': 'STR',
+ }, response.meta)
+
+ response = Response()
+ conn.call(Request(method='GET'), response)
+ self.assertEqual({
+ 'int': 10,
+ 'bool': True,
+ 'str': 'FOO',
+ }, response.meta)
+
+ def test_call_ReturnMetaOnRedirects(self):
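+        # On redirects, meta set by the redirecting server and by the redirect target
+        # should both end up merged into the caller's Response.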
+
+ class Front(object):
+
+ @route('GET')
+ def get(self, response):
+ response.meta['front'] = 'value1'
+ raise http.Redirect('http://127.0.0.1:%s' % (local.ipc_port.value + 1))
+
+ server = coroutine.WSGIServer(('127.0.0.1', local.ipc_port.value), Router(Front()))
+ coroutine.spawn(server.serve_forever)
+
+ class Back(object):
+
+ @route('GET')
+ def get(self, response):
+ response.meta['back'] = 'value2'
+
+ server = coroutine.WSGIServer(('127.0.0.1', local.ipc_port.value + 1), Router(Back()))
+ coroutine.spawn(server.serve_forever)
+
+ conn = http.Connection('http://127.0.0.1:%s' % local.ipc_port.value)
+ coroutine.dispatch()
+
+ response = Response()
+ stream = conn.call(Request(method='GET'), response)
+ self.assertEqual({'front': 'value1', 'back': 'value2'}, response.meta)
+
if __name__ == '__main__':
tests.main()