# Copyright (C) 2009 One Laptop Per Child
# Licensed under the terms of the GNU GPL v2 or later; see COPYING for details.

import os
import sys
import shutil
import hashlib
import urllib2
from xml.etree.ElementTree import ElementTree

libdir = os.environ['OOB__libdir']
bindir = os.environ['OOB__bindir']
builddir = os.environ['OOB__builddir']
cachedir = os.environ['OOB__cachedir']
intermediatesdir = os.environ['OOB__intermediatesdir']
outputdir = os.environ['OOB__outputdir']
statedir = os.environ['OOB__statedir']
fsmount = os.environ['OOB__fsmount']

METADATA_NS = "http://linux.duke.edu/metadata/common"

cacheonly = 'OOB__cacheonly' in os.environ

def read_config(module, option):
    # Configuration options arrive as CFG_<module>__<option> environment
    # variables.
    vname = "CFG_%s__%s" % (module, option)
    if vname not in os.environ:
        return None
    return os.environ[vname]

def read_config_bool(module, option):
    vname = "CFG_%s__%s" % (module, option)
    if vname not in os.environ:
        return None
    return bool(int(os.environ[vname]))

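# Illustrative example for the two helpers above (option names and values are
# for illustration only): with CFG_global__customization_tag=xo in the
# environment, read_config('global', 'customization_tag') returns 'xo', and
# read_config_bool('global', 'some_option') maps a value of "1" to True;
# both return None when the variable is unset.
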
def read_buildnr():
    buildnr_path = os.path.join(intermediatesdir, 'buildnr')
    if not os.path.isfile(buildnr_path):
        return "0"
    return open(buildnr_path, "r").readline().strip()

def read_laptop_model_number():
    path = os.path.join(intermediatesdir, 'laptop_model_number')
    if not os.path.isfile(path):
        return "0"
    return open(path, "r").readline().strip()

def image_name():
    major_ver = read_config('global', 'olpc_version_major')
    minor_ver = read_config('global', 'olpc_version_minor')
    cust_tag = read_config('global', 'customization_tag')
    buildnr = int(read_buildnr())
    modelnr = read_laptop_model_number()

    return "%s%s%03d%s%s" % (major_ver, minor_ver, buildnr, cust_tag, modelnr)
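
# For illustration only (these config values are hypothetical): with
# olpc_version_major='13', olpc_version_minor='2.0', customization_tag='',
# a build number of 7 and laptop model number '0', the format string above
# yields '13' + '2.0' + '007' + '' + '0', i.e. "132.00070".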

def arch_matches(myarch, arch):
    # Figure out whether a package built for 'arch' is suitable for 'myarch'.
    # myarch is one of 'i386', 'arm' or 'armhfp', but 'arch' can be i386,
    # i586, i686, armv5tel, armv7hl, and so on.

    # noarch is always suitable
    if arch == 'noarch':
        return True

    if myarch.startswith('arm'):
        return arch.startswith('arm')
    elif myarch == 'i386':
        return arch in ['i386', 'i486', 'i586', 'i686']
    else:
        return False
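
# A few illustrative cases of the matching rules above:
#   arch_matches('i386', 'i686')      -> True   (i686 packages suit an i386 build)
#   arch_matches('armhfp', 'armv7hl') -> True   (any arm* package matches arm*)
#   arch_matches('arm', 'noarch')     -> True   (noarch always matches)
#   arch_matches('i386', 'x86_64')    -> False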

def add_packages_from_xml(fd, pkglist, myarch):
    # Parse repository primary.xml metadata from fd and add the name of each
    # package suitable for myarch to pkglist.
    et = ElementTree(file=fd)
    root = et.getroot()
    for i in root:
        if not i.tag.endswith("}package"):
            continue
        arch = i.find("{%s}arch" % METADATA_NS)
        name = i.find("{%s}name" % METADATA_NS)

        # Only add packages that are suitable for myarch.
        if myarch and arch is not None:
            if not arch_matches(myarch, arch.text):
                continue

        if name is not None:
            pkglist.add(name.text)
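
# The primary.xml metadata parsed above looks roughly like this, trimmed to
# the elements this function reads (package details are illustrative):
#   <metadata xmlns="http://linux.duke.edu/metadata/common" ...>
#     <package type="rpm">
#       <name>sugar</name>
#       <arch>noarch</arch>
#       ...
#     </package>
#   </metadata>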

def get_repomd(baseurl):

    # Defaults, used when repomd.xml cannot be fetched or does not list a
    # given metadata type.
    md = {
        'primary'      : 'repodata/primary.xml.gz',
        'primary_db'   : 'repodata/primary.sqlite.bz2',
        'group'        : 'repodata/comps.xml',
        'group_gz'     : 'repodata/comps.xml.gz',
        'filelists'    : 'repodata/filelists.xml.gz',
        'filelists_db' : 'repodata/filelists.sqlite.bz2',
        'other'        : 'repodata/other.xml.gz',
        'other_db'     : 'repodata/other.sqlite.bz2'
        }

    url = "%s/repodata/repomd.xml" % baseurl
    try:
        fd = cachedurlopen(url)
        et = ElementTree(file=fd)
        root = et.getroot()
        # iterate over data tags
        for data in root.findall('{http://linux.duke.edu/metadata/repo}data'):
            mdtype = data.attrib['type']
            location = data.find('{http://linux.duke.edu/metadata/repo}location')
            md[mdtype] = location.attrib['href']
    except urllib2.HTTPError:
        pass
    return md
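
# repomd.xml, fetched above, describes each metadata file with a <data>
# element; the loop copies each type's location href into md. A trimmed,
# illustrative fragment:
#   <repomd xmlns="http://linux.duke.edu/metadata/repo">
#     <data type="primary">
#       <location href="repodata/primary.xml.gz"/>
#     </data>
#     ...
#   </repomd>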

def ln_or_cp(src, dest):
    # Hard-link src into dest if both live on the same filesystem, otherwise
    # fall back to a copy. dest must already exist (as a file or directory).
    src_dev = os.stat(src).st_dev
    dest_dev = os.stat(dest).st_dev

    if src_dev == dest_dev:
        if os.path.isdir(dest):
            dest = os.path.join(dest, os.path.basename(src))
        os.link(src, dest)
    else:
        shutil.copy(src, dest)

def install_sugar_bundle(path):
    bundlesdir = os.path.join(intermediatesdir, "shared", "sugar-bundles")
    if not os.path.exists(bundlesdir):
        os.makedirs(bundlesdir)
    ln_or_cp(path, bundlesdir)

def cachedurlopen(url):
    """A wrapper around urllib2.urlopen() that stores responses in the
    cache. When cacheonly is set, it works offline, never hitting the
    network.
    """
    class CachedURLException(Exception):
        def __init__(self, value):
            self.value = value

    cachedfpath = os.path.join(cachedir, 'simplecache', hashlib.sha1(url).hexdigest())
    if cacheonly:
        if os.path.exists(cachedfpath):
            return open(cachedfpath)
        else:
            print >>sys.stderr, "ERROR: No cached file for %s" % url
            raise CachedURLException("No cached file for %s" % url)

    ourcachedir = os.path.join(cachedir, 'simplecache')
    if not os.path.exists(ourcachedir):
        os.makedirs(ourcachedir)

    urlfd = urllib2.urlopen(url)
    fd = open(cachedfpath, 'w')
    fd.write(urlfd.read())
    urlfd.close()
    fd.close()

    return open(cachedfpath, 'r')
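
# Sketch of typical use (the URL is hypothetical): responses are cached under
# <cachedir>/simplecache, keyed by the SHA-1 of the URL, so a later run with
# OOB__cacheonly set in the environment can work without network access.
#   fd = cachedurlopen('http://example.com/repo/repodata/repomd.xml')
#   data = fd.read()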