1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
|
# Copyright (C) 2009 One Laptop Per Child
# Licensed under the terms of the GNU GPL v2 or later; see COPYING for details.
from __future__ import with_statement
from __future__ import division
import sys
import os.path
import urllib
import urllib2
import urlparse
import time
import pickle
from bitfrost.update import microformat
import ooblib
# Local cache directory for downloaded group pages and activity bundles.
cache = os.path.join(ooblib.cachedir, 'activities')
if not os.path.exists(cache):
    os.makedirs(cache)

# Base URL of the activity-group page, plus build-configuration flags:
# whether to actually install the activities, and whether the group URL
# should be recorded system-wide or per-user.
baseurl = ooblib.read_config('sugar_activity_group', 'url')
install_activities = ooblib.read_config_bool('sugar_activity_group',
                                             'install_activities')
systemwide = ooblib.read_config_bool('sugar_activity_group',
                                     'activity_group_systemwide')
if install_activities:
vmaj = int(ooblib.read_config('global', 'olpc_version_major'))
vmin = int(ooblib.read_config('global', 'olpc_version_minor'))
vrel = int(ooblib.read_config('global', 'olpc_version_release'))
suffixes = ["%d.%d.%d" % (vmaj, vmin, vrel), "%d.%d" % (vmaj, vmin), ""]
for suffix in suffixes:
if len(suffix) > 0:
grpurl = urlparse.urljoin(baseurl + "/", urllib.quote(suffix))
grpurlcache = os.path.join(cache, os.path.basename(baseurl)
+ '-' + suffix + ".html")
else:
grpurl = baseurl
grpurlcache = os.path.join(cache, os.path.basename(baseurl)
+ ".html")
if ooblib.cacheonly:
print >>sys.stderr, "Trying group URL cache file", grpurlcache
if os.path.exists(grpurlcache):
name, desc, results = pickle.load(open(grpurlcache))
else:
continue
else:
print >>sys.stderr, "Trying group URL", grpurl
try:
name, desc, results = microformat.parse_url(grpurl)
except urllib2.HTTPError, e:
if e.code == 404:
continue
raise e
if len(results) == 0 or (name is None and desc is None):
continue
print >>sys.stderr, "Found activity group:", name
pickle.dump([name, desc, results], open(grpurlcache, 'w'))
if results:
break #process only the first URL (or cached file)
if not results:
print >>sys.stderr, "No Activity Group URL found"
sys.exit(1)
for name, info in results.items():
(version, url) = microformat.only_best_update(info)
print >>sys.stderr, "Examining %s v%s: %s" % (name, version, url)
if ooblib.cacheonly:
path = urlparse.urlsplit(url)[2]
path = os.path.basename(path)
localpath = os.path.join(cache, path)
if os.path.exists(localpath):
print >>sys.stderr, "Using: ", localpath
ooblib.install_sugar_bundle(localpath)
continue
else:
print >>sys.stderr, "Cannot find cache for ", url
sys.exit(1)
fd = None
for attempts in range(5):
if attempts > 0:
print >>sys.stderr, 'Retrying.'
time.sleep(1)
try:
fd = urllib2.urlopen(url)
break
except urllib2.HTTPError, e:
print >>sys.stderr, 'HTTP error: ', e.code
except urllib2.URLError, e:
print >>sys.stderr, 'Network or server error: ', e.reason
if not fd:
print >>sys.stderr, 'Could not reach ', url
sys.exit(1)
headers = fd.info()
if not 'Content-length' in headers:
raise Exception("No content length for %s" % url)
length = int(headers['Content-length'])
path = urlparse.urlsplit(fd.geturl())[2]
path = os.path.basename(path)
localpath = os.path.join(cache, path)
if os.path.exists(localpath):
localsize = os.stat(localpath).st_size
if localsize == length:
print >>sys.stderr, "Not downloading, already in cache."
ooblib.install_sugar_bundle(localpath)
continue
print >>sys.stderr, "Downloading (%dkB)..." % (length/1024)
localfd = open(localpath, 'w')
localfd.write(fd.read())
fd.close()
localfd.close()
ooblib.install_sugar_bundle(localpath)
# Emit shell code (to stdout) recording which activity-group URL this
# image was built from — system-wide under /etc/olpc-update, or in the
# olpc user's Activities directory otherwise.  NOTE(review): presumably
# consumed later by the update machinery; verify against the consumer.
if systemwide:
    print "mkdir -p $INSTALL_ROOT/etc/olpc-update"
    print "echo '%s' > $INSTALL_ROOT/etc/olpc-update/activity-groups" % baseurl
else:
    print "echo '%s' > $INSTALL_ROOT/home/olpc/Activities/.groups" % baseurl
|