author     florent <florent@toopy.org>   2011-12-21 00:06:52 (GMT)
committer  florent <florent@toopy.org>   2011-12-21 00:06:52 (GMT)
commit     f98205a8b043e52a5839d019c91aad8b7198060b
tree       bf74a22f1dbd0c8c0af653e862d5baa2d0ce1534
parent     cac09853d93df3c026c4473b75288f3312dde03f
implement the extract method + finalize the moulinette loop
 datastore_reader/_datastore_reader.py | 52
 1 file changed, 40 insertions(+), 12 deletions(-)
diff --git a/datastore_reader/_datastore_reader.py b/datastore_reader/_datastore_reader.py
index 35cde1e..9417581 100644
--- a/datastore_reader/_datastore_reader.py
+++ b/datastore_reader/_datastore_reader.py
@@ -1,11 +1,21 @@
 # -*- coding: utf-8 -*-
 # python import
-import json, os
+import json, os, tarfile
 from datetime import datetime
 # ds reader import
 from datastore_reader.utils import _csv, config as c
+# common relative store path
+STORE_PATH = os.path.join(
+    'home',
+    'olpc',
+    '.sugar',
+    'default',
+    'datastore',
+    'store'
+    )
+

 def parse_meta_file(meta_path):
     """Returns an info dict parsed from the metadata content.
@@ -41,18 +51,28 @@ def list_meta_files(datastore_path):
         yield os.path.join(datastore_path, file_name)


-def extract_backup(backup_path):
+def extract_backup(backup_serial, backup_path):
     """Extracts the backup archive into the tmp folder and returns the
     corresponding datastore path.

     :param backup_serial: a serial, ex.: 'demo'
     :param backup_path: a path, ex.: '/somewhere/here/demo.tar.bz2'
     :return: a datastore path, ex.: '/somewhere/here/tmp/demo/home/.../store'

-    >>> extract_backup('data/demo.bz2')
-    './data/tmp/demo/home/olpc/.sugar/default/datastore/store'
+    >>> extract_backup('demo', 'data/demo.tar.bz2')
+    'data/tmp/demo/home/olpc/.sugar/default/datastore/store'
     """
-    pass
+    tmp_path = os.path.join(c.main.working_dir, 'tmp', backup_serial)
+    with tarfile.open(backup_path) as t:
+        t.extractall(tmp_path)
+    # datastore should be at the following relative path
+    datastore_path = os.path.join(tmp_path, STORE_PATH)
+    # check the path or raise an error
+    if os.path.exists(datastore_path):
+        return datastore_path
+    # oops
+    else:
+        raise Exception('bad data store path for serial: %s!' % backup_serial)


 def list_backups(working_dir):
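Note: one caveat with the new implementation is that tarfile.extractall() follows whatever paths the archive members declare, so a hostile backup archive could escape tmp_path via '../' entries. A minimal defensive sketch (the helper name and the check are assumptions, not part of this commit):

    import os, tarfile

    def safe_extractall(archive_path, dest):
        # hypothetical guard: refuse members that would land outside dest
        dest = os.path.abspath(dest)
        with tarfile.open(archive_path) as t:
            for member in t.getmembers():
                target = os.path.abspath(os.path.join(dest, member.name))
                if not (target == dest or target.startswith(dest + os.sep)):
                    raise Exception('unsafe member path: %s' % member.name)
            t.extractall(dest)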
@@ -79,21 +99,25 @@ def read_backups():
     # working dir check
     if not os.path.exists(c.main.working_dir):
         raise Exception('no working_dir found!')
+    # out dir
+    csv_dir = os.path.join(c.main.working_dir, 'out')
+    if not os.path.exists(csv_dir):
+        os.mkdir(csv_dir)
     # csv path
     csv_name = datetime.now().strftime('%Y-%m-%d_%H:%M_moulinette_result.csv')
-    csv_path = os.path.join(c.main.working_dir, 'out', csv_name)
-    if not os.path.exists(csv_path):
-        os.mkdir(csv_path)
+    csv_path = os.path.join(csv_dir, csv_name)
+    # columns shortcut
+    columns = c.moulinette.columns.as_list()
     # init the csv writer
     with open(csv_path, 'wb') as f:
         # init csv writer
-        writer = csv.Writer(f, delimiter=';')
+        writer = _csv.Writer(f, delimiter=';')
         # header line
-        writer.writerow(c.moulinette.columns)
+        writer.writerow(columns)
         # list backups
         for backup_serial, backup_path in list_backups(c.main.working_dir):
             # extract backup
-            datastore_path = extract_backup(backup_path)
+            datastore_path = extract_backup(backup_serial, backup_path)
             # list metadata
             for meta_path in list_meta_files(datastore_path):
@@ -101,7 +125,11 @@ def read_backups():
                 # add serial to dict
                 meta_dict['serial'] = backup_serial
                 # write meta info in a new csv row according to the config
-                writer.writerow([meta_dict[k] for k in c.moulinette.columns])
+                row = list()
+                for k in columns:
+                    row.append(meta_dict[k] if k in meta_dict else '')
+                # write the row
+                writer.writerow(row)


 if __name__ == "__main__":
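Note: the rewritten row construction makes missing metadata keys harmless. Any configured column absent from a meta_dict becomes an empty cell, whereas the old list comprehension would have raised KeyError. A quick illustration with made-up values:

    # made-up values, just to show the defaulting behaviour
    meta_dict = {'serial': 'demo', 'title': 'My Activity'}
    columns = ['serial', 'title', 'mime_type']
    row = [meta_dict[k] if k in meta_dict else '' for k in columns]
    # row == ['demo', 'My Activity', '']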