author     Aleksey Lim <alsroot@sugarlabs.org>    2014-04-20 11:24:21 (GMT)
committer  Aleksey Lim <alsroot@sugarlabs.org>    2014-04-20 11:24:21 (GMT)
commit     046073b04229021ec53833a353ffd069d0a5b561 (patch)
tree       1930c720a4391daeaf3e8540b2b027f9cd1ab97f /sugar_network/toolkit
parent     71391e654f497234fac0a4602bba769820aa521c (diff)
Pull node updates for checked-in resources
Diffstat (limited to 'sugar_network/toolkit')
-rw-r--r--  sugar_network/toolkit/__init__.py    65
-rw-r--r--  sugar_network/toolkit/http.py         1
-rw-r--r--  sugar_network/toolkit/parcel.py     113
-rw-r--r--  sugar_network/toolkit/router.py      70
4 files changed, 179 insertions(+), 70 deletions(-)
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index 675c25f..7585e29 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -464,6 +464,35 @@ class NamedTemporaryFile(object):
return getattr(self._file, name)
+class Variable(list):
+
+ def __init__(self, default=None):
+ list.__init__(self, [default])
+
+ @property
+ def value(self):
+ return self[0]
+
+ @value.setter
+ def value(self, value):
+ self[0] = value
+
+ def __contains__(self, key):
+ return key in self[0]
+
+ def __getitem__(self, key):
+ return self[0].get(key)
+
+ def __setitem__(self, key, value):
+ self[0][key] = value
+
+ def __delitem__(self, key):
+ del self[0][key]
+
+ def __getattr__(self, name):
+ return getattr(self[0], name)
+
+
class Bin(object):
"""Store variable in a file."""
@@ -471,10 +500,7 @@ class Bin(object):
self._path = abspath(path)
self.value = default_value
- if exists(self._path):
- with file(self._path) as f:
- self.value = json.load(f)
- else:
+ if not self.reset():
self.commit()
@property
@@ -491,6 +517,13 @@ class Bin(object):
f.flush()
os.fsync(f.fileno())
+ def reset(self):
+ if not exists(self._path):
+ return False
+ with file(self._path) as f:
+ self.value = json.load(f)
+ return True
+
def __enter__(self):
return self.value
@@ -535,6 +568,30 @@ class Seqno(Bin):
return self.value
+class CaseInsensitiveDict(dict):
+
+ def __contains__(self, key):
+ return dict.__contains__(self, key.lower())
+
+ def __getitem__(self, key):
+ return self.get(key.lower())
+
+ def __setitem__(self, key, value):
+ return self.set(key.lower(), value)
+
+ def __delitem__(self, key):
+ self.remove(key.lower())
+
+ def get(self, key, default=None):
+ return dict.get(self, key, default)
+
+ def set(self, key, value):
+ dict.__setitem__(self, key, value)
+
+ def remove(self, key):
+ dict.__delitem__(self, key)
+
+
class Pool(object):
"""Stack that keeps its iterators correct after changing content."""
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index 0ebee86..0cbd535 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -267,6 +267,7 @@ class Connection(object):
value = request.environ.get(env_key)
if value is not None:
headers[key] = value
+ headers.update(request.headers)
path = request.path
while True:
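
The one-line http.py change merges request.headers into the headers of the outgoing call, so values set through the new Request.headers wrapper (added in router.py below) survive when the request is replayed against an upstream node. A rough illustration with plain dicts standing in for the real objects; the 'from' header name is made up:

import json

outgoing = {'content-type': 'application/json'}         # assembled from request.environ
request_headers = {'x-from': json.dumps('checkin')}     # what Request.headers['from'] = 'checkin' would store
outgoing.update(request_headers)                        # the added line
assert outgoing['x-from'] == '"checkin"'
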
diff --git a/sugar_network/toolkit/parcel.py b/sugar_network/toolkit/parcel.py
index 9d583cd..edbbf02 100644
--- a/sugar_network/toolkit/parcel.py
+++ b/sugar_network/toolkit/parcel.py
@@ -46,7 +46,14 @@ _logger = logging.getLogger('parcel')
def decode(stream, limit=None):
_logger.debug('Decode %r stream limit=%r', stream, limit)
- stream = _UnzipStream(stream, limit)
+ if limit is not None:
+ limit -= 2
+ magic = stream.read(2)
+ enforce(len(magic) == 2, http.BadRequest, 'Malformed parcel')
+ if magic == '\037\213':
+ stream = _ZippedDecoder(stream, limit)
+ else:
+ stream = _Decoder(magic, stream, limit)
header = stream.read_record()
packet = _DecodeIterator(stream)
@@ -63,7 +70,11 @@ def encode(packets, limit=None, header=None, compresslevel=None,
_logger.debug('Encode %r packets limit=%r header=%r',
packets, limit, header)
- ostream = _ZipStream(compresslevel)
+ if compresslevel is 0:
+ ostream = _Encoder()
+ else:
+ ostream = _ZippedEncoder(compresslevel)
+
# In case of downloading blobs
# (?) reuse current `this.http`
this.http = http.Connection()
@@ -242,16 +253,10 @@ class _DecodeIterator(object):
pass
-class _ZipStream(object):
+class _Encoder(object):
- def __init__(self, compresslevel=None):
- if compresslevel is None:
- compresslevel = DEFAULT_COMPRESSLEVEL
- self._zipper = zlib.compressobj(compresslevel,
- zlib.DEFLATED, -_ZLIB_WBITS, zlib.DEF_MEM_LEVEL, 0)
+ def __init__(self):
self._offset = 0
- self._size = 0
- self._crc = zlib.crc32('') & 0xffffffffL
def write_record(self, record, limit=None):
chunk = json.dumps(record) + '\n'
@@ -260,49 +265,58 @@ class _ZipStream(object):
return self.write(chunk)
def write(self, chunk):
+ chunk = self._encode(chunk)
+ if chunk:
+ self._offset += len(chunk)
+ return chunk
+
+ def flush(self):
+ chunk = self._flush()
+ self._offset += len(chunk)
+ return chunk
+
+ def _encode(self, chunk):
+ return chunk
+
+ def _flush(self):
+ return ''
+
+
+class _ZippedEncoder(_Encoder):
+
+ def __init__(self, compresslevel=None):
+ _Encoder.__init__(self)
+ if compresslevel is None:
+ compresslevel = DEFAULT_COMPRESSLEVEL
+ self._zipper = zlib.compressobj(compresslevel,
+ zlib.DEFLATED, -_ZLIB_WBITS, zlib.DEF_MEM_LEVEL, 0)
+ self._size = 0
+ self._crc = zlib.crc32('') & 0xffffffffL
+
+ def _encode(self, chunk):
self._size += len(chunk)
self._crc = zlib.crc32(chunk, self._crc) & 0xffffffffL
chunk = self._zipper.compress(chunk)
-
if self._offset == 0:
chunk = '\037\213' + '\010' + chr(0) + \
struct.pack('<L', long(time.time())) + \
'\002' + '\377' + \
chunk
self._offset = _ZLIB_WBITS_SIZE
- if chunk:
- self._offset += len(chunk)
-
return chunk
- def flush(self):
- chunk = self._zipper.flush() + \
+ def _flush(self):
+ return self._zipper.flush() + \
struct.pack('<L', self._crc) + \
struct.pack('<L', self._size & 0xffffffffL)
- self._offset += len(chunk)
- return chunk
-class _UnzipStream(object):
+class _Decoder(object):
- def __init__(self, stream, limit):
+ def __init__(self, prefix, stream, limit):
+ self._buffer = prefix
self._stream = stream
self._limit = limit
- self._unzipper = zlib.decompressobj(-_ZLIB_WBITS)
- self._crc = zlib.crc32('') & 0xffffffffL
- self._size = 0
- self._buffer = ''
-
- if self._limit is not None:
- self._limit -= 10
- magic = stream.read(2)
- enforce(magic == '\037\213', http.BadRequest,
- 'Not a gzipped file')
- enforce(ord(stream.read(1)) == 8, http.BadRequest,
- 'Unknown compression method')
- enforce(ord(stream.read(1)) == 0, http.BadRequest,
- 'Gzip flags should be empty')
- stream.read(6) # Ignore the rest of header
def read_record(self):
while True:
@@ -328,20 +342,41 @@ class _UnzipStream(object):
if self._limit is not None:
size = min(size, self._limit)
chunk = self._stream.read(size)
+ if chunk and self._limit is not None:
+ self._limit -= len(chunk)
+ return self._decode(chunk)
+
+ def _decode(self, chunk):
+ self._buffer += chunk
+ return bool(self._buffer)
+
+class _ZippedDecoder(_Decoder):
+
+ def __init__(self, stream, limit):
+ _Decoder.__init__(self, '', stream, limit)
+ self._unzipper = zlib.decompressobj(-_ZLIB_WBITS)
+ self._crc = zlib.crc32('') & 0xffffffffL
+ self._size = 0
+
+ if self._limit is not None:
+ self._limit -= 8
+ enforce(ord(stream.read(1)) == 8, http.BadRequest,
+ 'Unknown compression method')
+ enforce(ord(stream.read(1)) == 0, http.BadRequest,
+ 'Gzip flags should be empty')
+ stream.read(6) # Ignore the rest of header
+
+ def _decode(self, chunk):
if chunk:
- if self._limit is not None:
- self._limit -= len(chunk)
self._add_to_buffer(self._unzipper.decompress(chunk))
return True
-
enforce(len(self._unzipper.unused_data) >= 8, http.BadRequest,
'Malformed gzipped file')
crc = struct.unpack('<I', self._unzipper.unused_data[:4])[0]
enforce(crc == self._crc, http.BadRequest, 'CRC check failed')
size = struct.unpack('<I', self._unzipper.unused_data[4:8])[0]
enforce(size == self._size, http.BadRequest, 'Incorrect length')
-
return self._add_to_buffer(self._unzipper.flush())
def _add_to_buffer(self, chunk):
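
The parcel.py rework splits the old _ZipStream/_UnzipStream pair into plain and zipped variants: decode() now peeks at the first two bytes and only routes through zlib when it sees the gzip magic, while encode() emits an uncompressed parcel when compresslevel is 0. Because the plain _Decoder keeps the two sniffed bytes as its initial buffer, a non-gzipped stream loses nothing when the magic check fails. A standalone sketch of the gzip framing the zipped encoder writes and the trailer check the zipped decoder performs (10-byte header, raw-deflate body, CRC32 and size trailer); it re-implements the idea rather than importing parcel.py:

import struct
import time
import zlib

def zip_chunk(payload, compresslevel=6):
    # Raw-deflate window (-15), analogous to parcel.py's -_ZLIB_WBITS.
    zipper = zlib.compressobj(compresslevel, zlib.DEFLATED, -15)
    header = b'\037\213\010\000' + struct.pack('<L', int(time.time())) + b'\002\377'
    body = zipper.compress(payload) + zipper.flush()
    trailer = struct.pack('<L', zlib.crc32(payload) & 0xffffffff) + \
            struct.pack('<L', len(payload) & 0xffffffff)
    return header + body + trailer

def unzip_chunk(blob):
    unzipper = zlib.decompressobj(-15)
    payload = unzipper.decompress(blob[10:])     # skip the 10-byte gzip header
    crc, size = struct.unpack('<II', unzipper.unused_data[:8])
    assert crc == (zlib.crc32(payload) & 0xffffffff), 'CRC check failed'
    assert size == len(payload), 'Incorrect length'
    return payload

assert unzip_chunk(zip_chunk(b'{"packet": "last"}\n')) == b'{"packet": "last"}\n'
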
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index e9e91fd..f4b23ce 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -140,6 +140,7 @@ class Request(dict):
else:
dict.__setitem__(self, key, value)
self.environ = environ
+ self.headers = _RequestHeaders(self.environ)
if method:
self.environ['REQUEST_METHOD'] = method
@@ -312,35 +313,15 @@ class Request(dict):
(self.method, self.path, self.cmd, dict(self))
-class CaseInsensitiveDict(dict):
-
- def __contains__(self, key):
- return dict.__contains__(self, key.lower())
-
- def __getitem__(self, key):
- return self.get(key.lower())
-
- def __setitem__(self, key, value):
- return self.set(key.lower(), value)
-
- def __delitem__(self, key):
- self.remove(key.lower())
-
- def get(self, key, default=None):
- return dict.get(self, key, default)
-
- def set(self, key, value):
- dict.__setitem__(self, key, value)
-
- def remove(self, key):
- dict.__delitem__(self, key)
-
-
-class Response(CaseInsensitiveDict):
+class Response(toolkit.CaseInsensitiveDict):
status = '200 OK'
relocations = 0
+ def __init__(self):
+ toolkit.CaseInsensitiveDict.__init__(self)
+ self.headers = _ResponseHeaders(self)
+
@property
def content_length(self):
return int(self.get('content-length') or '0')
@@ -392,7 +373,7 @@ class File(str):
pass
def __new__(cls, path=None, digest=None, meta=None):
- meta = CaseInsensitiveDict(meta or [])
+ meta = toolkit.CaseInsensitiveDict(meta or [])
url = ''
if meta:
@@ -568,7 +549,7 @@ class Router(object):
raise
finally:
for i in self._postroutes:
- i(result, exception)
+ result = i(result, exception)
return result
@@ -915,4 +896,39 @@ class _Route(object):
return '%s /%s (%s)' % (self.method, path, self.callback.__name__)
+class _RequestHeaders(dict):
+
+ def __init__(self, environ):
+ dict.__init__(self)
+ self._environ = environ
+
+ def __contains__(self, key):
+ return 'HTTP_X_%s' % key.upper() in self._environ
+
+ def __getitem__(self, key):
+ value = self._environ.get('HTTP_X_%s' % key.upper())
+ if value is not None:
+ return json.loads(value)
+
+ def __setitem__(self, key, value):
+ dict.__setitem__(self, 'x-%s' % key, json.dumps(value))
+
+
+class _ResponseHeaders(object):
+
+ def __init__(self, headers):
+ self._headers = headers
+
+ def __contains__(self, key):
+ return 'x-%s' % key.lower() in self._headers
+
+ def __getitem__(self, key):
+ value = self._headers.get('x-%s' % key.lower())
+ if value is not None:
+ return json.loads(value)
+
+ def __setitem__(self, key, value):
+ self._headers.set('x-%s' % key.lower(), json.dumps(value))
+
+
File.AWAY = File(None)
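
The router.py hunks wire the new header plumbing in: Request gains a headers wrapper that reads JSON values out of the WSGI environ's HTTP_X_* keys, Response gains one that stores them as x-* entries, and postroutes may now replace the call result since their return value is fed back into result. A standalone sketch of the X-* JSON header convention, using stand-in classes rather than importing the private ones:

import json

class RequestHeaders(object):
    """Read JSON values out of WSGI's HTTP_X_* keys (mirrors _RequestHeaders)."""

    def __init__(self, environ):
        self._environ = environ

    def __getitem__(self, key):
        value = self._environ.get('HTTP_X_%s' % key.upper())
        if value is not None:
            return json.loads(value)

class ResponseHeaders(object):
    """Store JSON values as x-* keys on the response mapping (mirrors _ResponseHeaders)."""

    def __init__(self, headers):
        self._headers = headers

    def __setitem__(self, key, value):
        self._headers['x-%s' % key.lower()] = json.dumps(value)

environ = {'HTTP_X_SEQNO': json.dumps(42)}      # i.e. an "X-Seqno: 42" request header
assert RequestHeaders(environ)['seqno'] == 42

response = {}
ResponseHeaders(response)['ranges'] = [[1, 5]]
assert response['x-ranges'] == '[[1, 5]]'
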