# Copyright (C) 2012 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
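
"""Sneakernet packet toolkit.

A packet is a tar archive (optionally gzip- or bzip2-compressed) carrying
a JSON `header` member plus any number of records and blobs, so data can
be exchanged between nodes without a network connection.

A minimal usage sketch; the spool directory, `src` value and record fields
below are purely illustrative, not part of the API:

    with OutFilePacket('/mnt/usb/sugar', src='my-node') as packet:
        packet.push([{'guid': '1', 'payload': 'hello'}], document='demo')

    for packet in walk('/mnt/usb/sugar'):
        for record in packet.records(document='demo'):
            print record

"""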

import os
import json
import time
import gzip
import tarfile
import logging
import tempfile
from cStringIO import StringIO
from contextlib import contextmanager
from os.path import join, exists

import active_document as ad
from active_toolkit.sockets import BUFFER_SIZE
from active_toolkit import util, enforce


TMPDIR = None

_RESERVED_SIZE = 1024 * 1024
_MAX_PACKET_SIZE = 1024 * 1024 * 100
_PACKET_COMPRESS_MODE = 'gz'
_RECORD_SUFFIX = '.record'
_PACKET_SUFFIX = '.packet'

_logger = logging.getLogger('sneakernet')


def walk(path):
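    """Yield an `InPacket` for every `*.packet` file found under `path`.

    Each yielded packet is closed as soon as the caller advances to the
    next one.
    """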
    for root, __, files in os.walk(path):
        for filename in files:
            if not filename.endswith(_PACKET_SUFFIX):
                continue
            with InPacket(join(root, filename)) as packet:
                yield packet


class DiskFull(Exception):
    """Raised when a packet has no room left for more content."""


class InPacket(object):
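    """Read-only view of a packet.

    A packet is a (possibly compressed) tar archive with a JSON `header`
    member plus records, blobs and, optionally, nested sub packets.
    """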

    def __init__(self, path=None, stream=None):
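        """Open a packet either from a file `path` or from a `stream`;
        non-seekable streams are first spooled to a temporary file.
        """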
        self._file = None
        self._tarball = None
        self.header = {}
        self.content_type = None

        try:
            if stream is None:
                self._file = stream = file(path, 'rb')
            elif not hasattr(stream, 'seek'):
                # tarfile/gzip/zip might require seeking
                self._file = tempfile.TemporaryFile(dir=TMPDIR)

                if hasattr(stream, 'read'):
                    while True:
                        chunk = stream.read(BUFFER_SIZE)
                        if not chunk:
                            break
                        self._file.write(chunk)
                else:
                    for chunk in stream:
                        self._file.write(chunk)

                self._file.flush()
                self._file.seek(0)
                stream = self._file

            self._tarball = tarfile.open('r', fileobj=stream)
            with self._extract('header') as f:
                self.header = json.load(f)
            enforce(type(self.header) is dict, 'Incorrect header')

            if isinstance(self._tarball.fileobj, file):
                self.content_type = 'application/x-tar'
            elif isinstance(self._tarball.fileobj, gzip.GzipFile):
                self.content_type = 'application/x-compressed-tar'
            else:
                self.content_type = 'application/x-bzip-compressed-tar'

        except Exception, error:
            self.close()
            util.exception()
            raise RuntimeError('Malformed %r packet: %s' % (self, error))

        _logger.debug('Reading %r input packet', self)

    @property
    def path(self):
        if self._file is not None:
            return self._file.name

    def records(self, **filters):
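        """Iterate over all records in the packet.

        Keyword arguments act as filters: a record is yielded only if its
        metadata (merged with the packet header) matches every filter
        value. Nested sub packets are traversed recursively.
        """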
        for info in self._tarball:
            if not info.isfile():
                continue

            if info.name.endswith(_PACKET_SUFFIX):
                _logger.debug('Reading %r sub packet from %r', info.name, self)
                with self._extract(info) as f:
                    with InPacket(stream=f) as sub_packet:
                        for sub_record in sub_packet.records(**filters):
                            yield sub_record
                continue
            elif not info.name.endswith(_RECORD_SUFFIX):
                continue

            with self._extract(info) as f:
                meta = json.load(f)
            meta.update(self.header)

            skip = False
            for key, value in filters.items():
                if meta.get(key) != value:
                    skip = True
                    break
            if skip:
                continue

            if meta.get('content_type') == 'records':
                _logger.debug('Reading %r records from %r', info.name, self)
                with self._extract(info.name[:-len(_RECORD_SUFFIX)]) as f:
                    for line in f:
                        item = json.loads(line)
                        item.update(meta)
                        yield item
            elif meta.get('content_type') == 'blob':
                _logger.debug('Reading %r blob from %r', info.name, self)
                with self._extract(info.name[:-len(_RECORD_SUFFIX)]) as f:
                    meta['blob'] = f
                    yield meta
            else:
                yield meta

    def close(self):
        if self._tarball is not None:
            self._tarball.close()
            self._tarball = None
        if self._file is not None:
            self._file.close()
            self._file = None

    def __repr__(self):
        header = ['%s=%r' % i for i in self.header.items()]
        return '<InPacket %s>' % (' '.join(['path=%r' % self.path] + header))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def __iter__(self):
        return self.records()

    @contextmanager
    def _extract(self, arcname):
        f = self._tarball.extractfile(arcname)
        try:
            yield f
        finally:
            f.close()


class OutPacket(object):
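    """Write a packet to a directory, a file-like stream, or a buffer.

    Keyword arguments become the packet header. Pushing content raises
    `DiskFull` once the size limit, or the free space on the target
    filesystem, is about to be exhausted.
    """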

    def __init__(self, root=None, stream=None, limit=None, compress_mode=None,
            seqno=None, filename=None, **kwargs):
        if compress_mode is None:
            compress_mode = _PACKET_COMPRESS_MODE

        self._stream = None
        self._file = None
        self._path = None
        self._tarball = None
        self.header = kwargs
        self._size_to_flush = 0
        self._file_num = 0
        self._empty = True

        if filename:
            self._basename = filename
        else:
            if 'src' in kwargs:
                self._basename = kwargs['src']
                if seqno is not None:
                    self._basename += '-%s' % seqno
            else:
                self._basename = ad.uuid()
            self._basename += _PACKET_SUFFIX
        kwargs['filename'] = self._basename

        if root is not None:
            if not exists(root):
                os.makedirs(root)
            self._path = util.unique_filename(root, self._basename)
            self._file = stream = file(self._path, 'wb+')
        elif hasattr(stream, 'fileno'):
            self._file = stream
            self._path = stream.name
        else:
            limit = limit or _MAX_PACKET_SIZE
        self._limit = limit

        enforce(stream is not None)
        self._tarball = tarfile.open(mode='w:' + compress_mode, fileobj=stream)
        self._stream = stream

        if compress_mode == 'gz':
            self.content_type = 'application/x-compressed-tar'
        elif compress_mode == 'bz2':
            self.content_type = 'application/x-bzip-compressed-tar'
        else:
            self.content_type = 'application/x-tar'

        _logger.debug('Writing %r output packet', self)

    @property
    def basename(self):
        return self._basename

    @property
    def path(self):
        return self._path or self._basename

    @property
    def closed(self):
        return self._tarball is None

    @property
    def empty(self):
        return self._empty

    def __repr__(self):
        header = ['%s=%r' % i for i in self.header.items()]
        return '<OutPacket %s>' % (' '.join(['path=%r' % self.path] + header))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is not None and exc_type is not DiskFull:
            self.clear()
        self.close()

    def close(self, clear=False):
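        """Finalize the packet and close the underlying file.

        With `clear=True`, or if nothing was pushed, the packet file is
        discarded instead of being committed.
        """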
        if self.empty:
            clear = True
        self._commit(clear)
        if self._file is not None:
            if not clear:
                self._file.flush()
                os.fsync(self._file.fileno())
            self._file.close()
            if clear:
                if exists(self._file.name):
                    os.unlink(self._file.name)
            self._file = None

    def clear(self):
        self.close(clear=True)

    def pop(self):
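        """Commit the packet and return its underlying stream, rewound
        to the beginning; the packet gives up ownership of the stream.
        """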
        self._commit(False)
        if self._file is not None:
            stream = self._file
            self._file = None
        else:
            stream = self._stream
        self._stream = None
        if stream is not None:
            stream.seek(0)
        return stream

    def push(self, data=None, arcname=None, force=False, **meta):
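        """Add content to the packet.

        `data` may be another `OutPacket` (embedded as a sub packet), an
        object with a `fileno()` (stored as a blob), an iterable of
        JSON-serializable items (stored as records), or `None` (store the
        keyword metadata only). Extra keyword arguments become per-record
        metadata.
        """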
        if isinstance(data, OutPacket):
            _logger.debug('Writing %r sub packet to %r', data.path, self)
            if not arcname:
                arcname = data.basename
            f = data.pop()
            try:
                self._addfile(arcname, f, force)
            finally:
                f.close()
            return
        elif data is None:
            _logger.debug('Writing %r record to %r', meta, self)
            self._add(arcname, None, meta)
            return
        elif hasattr(data, 'fileno'):
            _logger.debug('Writing %r blob to %r', data.name, self)
            meta['content_type'] = 'blob'
            self._add(arcname, data, meta)
            return

        _logger.debug('Writing %r records to %r', data, self)

        if not hasattr(data, 'next'):
            data = iter(data)
        try:
            chunk = json.dumps(data.next())
        except StopIteration:
            return

        meta['content_type'] = 'records'

        while chunk is not None:
            self._flush(0, True)
            limit = self._enforce_limit()

            with tempfile.TemporaryFile(dir=TMPDIR) as arcfile:
                while True:
                    limit -= len(chunk)
                    if limit <= 0:
                        break
                    arcfile.write(chunk)
                    arcfile.write('\n')

                    try:
                        chunk = json.dumps(data.next())
                    except StopIteration:
                        chunk = None
                        break

                if not arcfile.tell():
                    if chunk is not None:
                        _logger.debug('Reached size limit for %r packet', self)
                        raise DiskFull()
                    break

                arcfile.seek(0)
                self._add(arcname, arcfile, meta)

    def push_file(self, path_, **kwargs):
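        """Push the file at `path_` into the packet as a blob."""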
        with file(path_, 'rb') as f:
            self.push(f, **kwargs)

    def _add(self, arcname, data, meta):
        if not arcname:
            self._file_num += 1
            arcname = '%08d' % self._file_num
        if data is not None:
            self._addfile(arcname, data, False)
        self._addfile(arcname + _RECORD_SUFFIX, meta, True)

    def _addfile(self, arcname, data, force):
        info = tarfile.TarInfo(arcname)
        info.mtime = time.time()

        if hasattr(data, 'fileno'):
            info.size = os.fstat(data.fileno()).st_size
            fileobj = data
        elif hasattr(data, 'seek'):
            data.seek(0, 2)
            info.size = data.tell()
            data.seek(0)
            fileobj = data
        else:
            data = json.dumps(data)
            info.size = len(data)
            fileobj = StringIO(data)

        self._flush(info.size, False)
        if not force:
            self._enforce_limit(info.size)

        self._tarball.addfile(info, fileobj=fileobj)
        self._empty = False

    def _flush(self, size, force):
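        # Flush the underlying stream once roughly _RESERVED_SIZE bytes
        # have accumulated since the last flush (or unconditionally when
        # `force` is set) so that _enforce_limit() sees up-to-date sizes.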
        if force or self._size_to_flush >= _RESERVED_SIZE:
            self._tarball.fileobj.flush()
            self._size_to_flush = 0
        self._size_to_flush += size

    def _enforce_limit(self, size=0):
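        """Return the remaining room in bytes, keeping a reserve, and
        raise `DiskFull` if `size` more bytes would not fit.
        """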
        if self._limit is None:
            stat = os.statvfs(self.path)
            free = stat.f_bfree * stat.f_frsize
        else:
            free = self._limit - self._stream.tell()
        free -= _RESERVED_SIZE
        if free - size <= 0:
            _logger.debug('Reached size limit for %r packet', self)
            raise DiskFull()
        return free

    def _commit(self, clear):
        if self._tarball is None:
            return
        _logger.debug('Closing %r output packet, clear=%r', self, clear)
        if not clear:
            self._addfile('header', self.header, True)
        self._tarball.close()
        self._tarball = None
        self._empty = True


class OutFilePacket(OutPacket):
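    """`OutPacket` written to a file under `root`, or to a named
    temporary file when no `root` is given.
    """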

    def __init__(self, root=None, **kwargs):
        stream = None
        if root is None:
            stream = tempfile.NamedTemporaryFile(dir=TMPDIR)
        OutPacket.__init__(self, root=root, stream=stream, **kwargs)


class OutBufferPacket(OutPacket):
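    """`OutPacket` accumulated in an in-memory buffer."""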

    def __init__(self, **kwargs):
        OutPacket.__init__(self, root=None, stream=StringIO(), **kwargs)