diff options
author | Florent Pigout <florent.pigout@gmail.com> | 2011-12-17 16:14:18 (GMT) |
---|---|---|
committer | Florent Pigout <florent.pigout@gmail.com> | 2011-12-17 16:14:18 (GMT) |
commit | 67c46db80189655090f1be7c9812faea6714fd9b (patch) | |
tree | b15cd604465c5f8369129708fd5e07efb8d53b83 | |
parent | 601a1a0986ace7b60b53782847a6ec5fb501e1e8 (diff) |
fix import problem + add moulinette doctests to be implemented now!
-rw-r--r-- | datastore_reader/__init__.py | 1 | ||||
-rw-r--r-- | datastore_reader/_datastore_reader.py | 98 | ||||
-rw-r--r-- | datastore_reader/datastore_reader.py | 31 |
3 files changed, 99 insertions, 31 deletions
# python import
import os
from datetime import datetime

# ds reader import
# NOTE(review): ``csv`` here is the project's own wrapper (it exposes
# ``csv.Writer``), not the stdlib ``csv`` module -- confirm against
# datastore_reader/utils before renaming anything.
from datastore_reader.utils import csv, config as c


def parse_meta_file(meta_path):
    """Returns an info dict from metadata content parsed.

    :param meta_path: '/somewhere/here/tmp/.../store/file.metadata'
    :return meta_dict: {'activity': 'paint', 'timestamp': '1324136292', ...}

    >>> parse_meta_file('data/.../../.metadata')
    {'activity': 'paint', 'timestamp': '1324136292'}

    """
    # TODO: not implemented yet -- the doctest above documents the
    # expected contract.
    pass


def list_meta_files(datastore_path):
    """Iters the path for the metadata files found in the given datastore
    folder.

    :param datastore_path: ex.: '/somewhere/here/tmp/home/.../store'
    :return meta_path: ex.: ['/somewhere/here/tmp/.../store/file.metadata', ..]

    >>> path = './data/tmp/demo/home/olpc/.sugar/default/datastore/store'
    >>> res = [meta_path for meta_path in list_meta_files(path)]
    >>> res[0] == os.path.join(path, 'demo.metadata')
    True

    """
    # TODO: not implemented yet.
    pass


def extract_backup(backup_path):
    """Extracts backup archive in the tmp folder and returns the
    corresponding datastore path.

    :param backup_path: a path, ex.: '/somewhere/here'
    :return datastore_path: ex.: '/somewhere/here/tmp/home/.../store'

    >>> extract_backup('data/demo.bz2')
    './data/tmp/demo/home/olpc/.sugar/default/datastore/store'

    """
    # TODO: not implemented yet.
    pass


def list_backups(working_dir=None):
    """Iters serial nb and path tuples according the backup files of the
    working dir, ex.: [('serial_1', '/somewhere/here')]

    Returns None if no working dir.

    :param working_dir: for testing issue otherwise use the value from
        the config file.

    >>> [info for info in list_backups(working_dir='data')]
    [('demo', './data/demo.bz2')]

    """
    # TODO: not implemented yet.
    pass


def read_backups():
    """Reads every backup found in the configured working dir and dumps the
    metadata of all their datastore entries into a timestamped csv file
    placed in the working dir's 'out' folder.

    :raise Exception: if the configured working dir does not exist.
    """
    # working dir check
    if not os.path.exists(c.main.working_dir):
        raise Exception('no working_dir found!')
    # output dir check: create the 'out' *folder*, not the csv file path
    # (the previous code did ``os.mkdir(csv_path)`` which created a
    # directory named after the csv file and made the open() below fail)
    out_dir = os.path.join(c.main.working_dir, 'out')
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    # csv path
    # NOTE(review): ':' in the name is invalid on Windows file systems --
    # consider '%H-%M' if portability matters.
    csv_name = datetime.now().strftime('%Y-%m-%d_%H:%M_moulinette_result.csv')
    csv_path = os.path.join(out_dir, csv_name)
    # init the csv writer
    with open(csv_path, 'wb') as f:
        # init csv writer
        writer = csv.Writer(f, delimiter=';')
        # header line
        writer.writerow(c.moulinette.columns)
        # list backups
        for backup_serial, backup_path in list_backups():
            # extract backup
            datastore_path = extract_backup(backup_path)
            # list metadata
            for meta_path in list_meta_files(datastore_path):
                # parse the metadata file
                meta_dict = parse_meta_file(meta_path)
                # add serial to dict
                meta_dict['serial'] = backup_serial
                # write meta info in a new csv row according config
                writer.writerow([meta_dict[k] for k in c.moulinette.columns])


if __name__ == "__main__":
    import doctest
    doctest.testmod()
- - >>> list_datastores() - - """ - return None - - -if __name__ == "__main__": - import doctest - doctest.testmod() |