Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMartin Langhoff <martin@laptop.org>2012-07-23 18:45:56 (GMT)
committer Daniel Drake <dsd@laptop.org>2012-08-20 20:32:31 (GMT)
commitdc3aa94150215ea49ec849ab880385222f98bf3c (patch)
treeba96a19cc558269617b9cfad85485be61acd9b09
parent402c9b9b1aace372d55792aa4b1c69883387fd53 (diff)
ooblib: introduce cachedurlopen(), use it in get_repomd()
cachedurlopen() allows for a trivial caching scheme to support offline usage.
-rw-r--r--lib/ooblib.py34
1 file changed, 32 insertions(+), 2 deletions(-)
diff --git a/lib/ooblib.py b/lib/ooblib.py
index 13babbc..93cf7d8 100644
--- a/lib/ooblib.py
+++ b/lib/ooblib.py
@@ -2,7 +2,9 @@
# Licensed under the terms of the GNU GPL v2 or later; see COPYING for details.
import os
+import sys
import shutil
+import hashlib
import urllib2
from xml.etree.ElementTree import ElementTree
@@ -101,7 +103,7 @@ def get_repomd(baseurl):
url = "%s/repodata/repomd.xml" % baseurl
try:
- fd = urllib2.urlopen(url)
+ fd = cachedurlopen(url)
et = ElementTree(file=fd)
root = et.getroot()
# iterate over data tags
@@ -109,7 +111,7 @@ def get_repomd(baseurl):
type = data.attrib['type']
location = data.find('{http://linux.duke.edu/metadata/repo}location')
md[type] = location.attrib['href']
- except:
+ except urllib2.HTTPError:
pass
return md
@@ -130,3 +132,31 @@ def install_sugar_bundle(path):
os.makedirs(bundlesdir)
ln_or_cp(path, bundlesdir)
class CachedURLException(Exception):
    """Raised by cachedurlopen() when cacheonly is set and no cached
    copy of the requested URL exists.

    NOTE(review): this class used to be defined inside cachedurlopen(),
    which made it impossible for callers to catch by name.  Hoisted to
    module level so callers can catch it -- e.g. get_repomd() currently
    catches only urllib2.HTTPError and would crash on a cache miss in
    cacheonly mode; confirm callers are updated.
    """
    def __init__(self, value):
        Exception.__init__(self, value)
        self.value = value


def cachedurlopen(url):
    """Open *url* like urllib2.urlopen(), caching the response on disk.

    The response body is stored under <cachedir>/simplecache/<sha1(url)>
    (cachedir and cacheonly are module-level settings).  When cacheonly
    is true, never touch the network: serve the cached copy if present,
    otherwise raise CachedURLException.

    Returns an open file object positioned at the start of the response
    body.  Raises whatever urllib2.urlopen() raises on network failure,
    or CachedURLException on a cacheonly miss.
    """
    simplecache = os.path.join(cachedir, 'simplecache')
    # Key the cache by the SHA1 of the URL string itself.
    cachedfpath = os.path.join(simplecache, hashlib.sha1(url).hexdigest())

    if cacheonly:
        if os.path.exists(cachedfpath):
            return open(cachedfpath, 'rb')
        sys.stderr.write("ERROR: No cached file for %s\n" % url)
        raise CachedURLException("No cached file for %s" % url)

    if not os.path.exists(simplecache):
        os.makedirs(simplecache)

    # Download fully before touching the cache, so a network error
    # cannot leave a truncated file behind.
    urlfd = urllib2.urlopen(url)
    try:
        data = urlfd.read()
    finally:
        urlfd.close()

    # Write to a temp file and rename into place: on POSIX, rename()
    # within the same directory is atomic, so readers never see a
    # half-written cache entry.
    tmpfpath = cachedfpath + '.tmp'
    fd = open(tmpfpath, 'wb')
    try:
        fd.write(data)
    finally:
        fd.close()
    os.rename(tmpfpath, cachedfpath)

    # Binary mode: the cached payload is arbitrary bytes, not text.
    return open(cachedfpath, 'rb')