# python import
import logging, os, shutil, tarfile, time
from gettext import gettext as _

# png import from pypng
from lib import png

# sugar import
try:
    from sugar.activity import activity
    from sugar.datastore import datastore
    import dbus, gtk
    ROOT = activity.get_activity_root()
    BUND = activity.get_bundle_path()
# not running under sugar (e.g. local tests): no journal/gtk, local paths
except Exception:
    datastore, dbus, gtk = None, None, None
    ROOT = os.path.join(os.path.dirname(__file__), '..', '..', '..',
            'atdj_root')
    BUND = os.path.join(os.path.dirname(__file__), '..', '..')

# get application logger
logger = logging.getLogger('atoidejouer')

# bundle ids of the activities producing each kind of media
ACTIVITY_NAMES = {
    'paint': 'org.laptop.Oficina',
    'record': 'org.laptop.RecordActivity',
}

ACTIVITY_PER_MIME_TYPES = {
    'image/png': 'org.laptop.Oficina',
    'audio/ogg': 'org.laptop.RecordActivity',
}


def get_tmp_path(ext='png'):
    """Return the path of the shared temp file for the given extension."""
    return os.path.join(ROOT, 'tmp.%s' % ext)


def get_config_path():
    """Return the path of the bundled config.ini file."""
    # get bundle path
    _bundle_path = BUND
    # return path
    return os.path.join(_bundle_path, 'static', 'data', 'config',
            'config.ini')


def get_db_path(filename, dir_='db'):
    """Return the path of a db file in the activity data dir."""
    return os.path.join(ROOT, 'data', dir_, '%s.db' % filename)


def get_sound_path(filename, dir_='sounds'):
    """Return the path of an ogg sound file in the activity data dir."""
    return os.path.join(ROOT, 'data', dir_, '%s.ogg' % filename)


def get_icon_path(stock_id):
    """Return the path of a bundled png icon for the given stock id."""
    return os.path.join(BUND, 'static', 'data', 'icons', '%s.png' % stock_id)


def get_image_path(filename, dir_='graphics'):
    """Return the path of a png image.

    Default images ship inside the bundle; user images live in the
    activity data dir.
    """
    if filename in ['background_default', 'mask_default']\
            or dir_ == 'data':
        return os.path.join(BUND, 'static', 'data', 'graphics',
                '%s.png' % filename)
    else:
        return os.path.join(ROOT, 'data', dir_, '%s.png' % filename)


def get_html_path(page):
    """Return the path of a bundled html page."""
    return os.path.join(BUND, 'static', 'data', 'html', '%s.html' % page)


def get_pixbuf_from_data(data, image_type=None, size=None):
    """Load raw image bytes into a gtk.gdk.Pixbuf.

    `size` is an optional (max_width, max_height) pair; when given, the
    image is scaled keeping its aspect ratio. Returns None when gtk is
    not available.
    """
    # load it
    if gtk is None:
        return
    elif image_type:
        _loader = gtk.gdk.PixbufLoader(image_type=image_type)
    else:
        _loader = gtk.gdk.PixbufLoader()
    # size check
    if size is None:
        pass
    else:
        # avoid inter-dependance loop
        from atoidejouer.tools import image
        # get image size
        _info, _w, _h = image.get_image_info(data=data)
        # parse size
        _max_w, _max_h = size
        # compute target size to keep ratio
        _w, _h = image.compute_width_height(_w, _h, _max_w, _max_h)
        # set loader size
        _loader.set_size(_w, _h)
    # load data
    _loader.write(data)
    # close loader
    _loader.close()
    # pix it
    return _loader.get_pixbuf()


def __remove_inner_true(a_list):
    """Return a copy of `a_list` where any True that lies between two
    False values is replaced by False.

    Leading and trailing runs of True are preserved; used to keep only
    the transparent border rows/cols (not inner gaps) in png cropping.
    """
    _new_list = list()
    # True once at least one False has been seen
    _has_false = False
    for _i, _v in enumerate(a_list):
        if _v is False:
            _has_false = True
        else:
            _sub = a_list[_i + 1:]
            # True surrounded by False on both sides: force it to False
            if _has_false is True\
                    and _sub.count(False) != 0:
                _new_list.append(False)
                continue
            else:
                pass
        # ..
        _new_list.append(_v)
    # ..
    del a_list
    # ..
    return _new_list


def png_from_pixbuf(filename, timestamp):
    """Import a png from the journal, making its background transparent.

    The first pixel color is taken as the background color; matching
    pixels become fully transparent, and fully transparent border rows
    and columns are cropped away. Does nothing when the target file
    already exists.
    """
    # prepare outpath
    _out_path = get_image_path(filename)
    if os.path.exists(_out_path):
        return
    else:
        pass
    # prepare inpath
    _in_path = get_path_from_journal(timestamp, 'image/png')
    # init png reader
    _reader = png.Reader(filename=_in_path)
    # read the file
    _w, _h, _pixels, _metadata = _reader.read()
    # init working vars
    _new_pixels = list()
    _first_color = None
    _remove_row_list = [True for _dummy in range(_h)]
    _remove_col_list = [True for _dummy in range(_w)]
    # number of values per pixel (3 = RGB, 4 = RGBA)
    _planes = _metadata['planes']
    # first pass: add alpha channel and record transparent rows/cols
    for _i, _row in enumerate(_pixels):
        # init new row
        _new_row = list()
        for _j, _col in enumerate(_row):
            # update rgb accumulator
            if _j % _planes == 0:
                _rgb = [_col]
                continue
            else:
                _rgb.append(_col)
            # full pixel gathered?
            if _j % _planes == (_planes - 1):
                # keep the first color as the background reference
                if _first_color is None:
                    _first_color = _rgb
                else:
                    pass
                # background color: make it fully transparent
                if _rgb == _first_color:
                    _new_row.extend([0, 0, 0, 0])
                else:
                    _remove_row_list[_i] = False
                    # // keeps int semantics in py2 and py3
                    _remove_col_list[(_j // _planes) - 1] = False
                    # small hack: append opaque alpha for RGB input
                    if _planes == 3:
                        _rgb.append(255)
                    else:
                        pass
                    _new_row.extend(_rgb)
            else:
                continue
        # add new row
        _new_pixels.append(_new_row)
    # cleaning
    del _reader
    del _pixels
    # remove inner True in cols or rows
    _remove_row_list = __remove_inner_true(_remove_row_list)
    _remove_col_list = __remove_inner_true(_remove_col_list)
    # init working vars
    _new_new_pixels = list()
    # 2cd pass: drop fully transparent border rows and columns
    for _i, _row in enumerate(_new_pixels):
        # transparent row
        if _remove_row_list[_i] is True:
            continue
        else:
            # init new row
            _new_new_row = list()
            # ..
            for _j, _col in enumerate(_row):
                # update rgb accumulator (now always 4 planes)
                if _j % 4 == 0:
                    _rgb = [_col]
                    continue
                else:
                    _rgb.append(_col)
                # full pixel gathered?
                if _j % 4 == 3:
                    # transparent col
                    if _remove_col_list[(_j // 4) - 1] is True:
                        continue
                    else:
                        _new_new_row.extend(_rgb)
                else:
                    continue
            # sorry for that!
            _new_new_pixels.append(_new_new_row)
    # cleaning
    del _new_pixels
    del _remove_row_list
    del _remove_col_list
    # update h and w from the cropped pixel grid
    _w = len(_new_new_pixels[0]) // 4
    _h = len(_new_new_pixels)
    # update alpha meta
    _metadata['alpha'] = True
    _metadata['planes'] = 4
    # 'size' is not a valid png.Writer keyword
    del _metadata['size']
    # write the new image with alpha
    _new_png = open(_out_path, 'wb')
    _writer = png.Writer(_w, _h, **_metadata)
    _writer.write(_new_png, _new_new_pixels)
    _new_png.close()
    # just in case
    del _writer
    del _new_new_pixels


def journal_query(query):
    """Yield datastore entries matching `query`, sorted by timestamp.

    Titles are filtered manually for an exact match (datastore title
    queries look unreliable - cf. is_in_journal). Yields nothing when
    the datastore is not available.
    """
    if not datastore:
        return
    # find in ds
    _results, _count = datastore.find(query, sorting='timestamp')
    for _r in _results:
        # exact title filter, done client-side
        if 'title' in query\
                and query['title'] != str(_r.metadata['title']):
            continue
        yield _r


def get_journal_objects(activity_name=None, mime_type=None):
    """Return journal objects by mime type or, failing that, by the
    producing activity name; [] when neither filter is given."""
    if activity_name is None\
            and mime_type is None:
        return []
    elif mime_type is None:
        return journal_query({'activity': ACTIVITY_NAMES[activity_name]})
    else:
        return journal_query({'mime_type': mime_type})


def list_info_from_journal(mime_type=None):
    """Yield an info dict per journal entry of the given mime type.

    Duplicate titles are disambiguated by appending a counter, e.g.
    "name (2)".
    """
    # make unique titles
    _titles = {}
    # return infos
    for _o in journal_query({'mime_type': mime_type}):
        # get title
        _t = _o.metadata['title']
        # ensure description
        _d = _o.metadata['description'] if 'description' in _o.metadata else ''
        _p = _o.metadata['preview'] if 'preview' in _o.metadata else None
        # little check
        if _t in _titles:
            # update reg
            _titles[_t] += 1
            # update value to show
            _t = '%s (%s)' % (_t, _titles[_t])
        # init title reg
        else:
            _titles[_t] = 1
        # ensure info
        yield {
            'activity_id' : _o.metadata['activity_id'],
            'description' : _d,
            'timestamp'   : _o.metadata['timestamp'],
            'preview'     : _p,
            'title'       : _t,
            'file_path'   : _o.file_path,
            'mime_type'   : mime_type
            }


def list_files_from_journal(activity_name=None, mime_type=None):
    """Yield the file path of each matching journal object."""
    # get objects first
    _objs = get_journal_objects(activity_name=activity_name,
            mime_type=mime_type)
    # return paths
    for _o in _objs:
        # file_path is an attribute, not a callable
        # (cf. list_info_from_journal and get_path_from_journal)
        yield _o.file_path


def get_path_from_journal(timestamp, mime_type):
    """Return the file path of the single journal entry matching the
    given timestamp and mime type, or None."""
    if not datastore:
        return
    # prepare datastore query
    _query = {
        'timestamp': int(timestamp),
        'mime_type': mime_type
        }
    # find in ds
    _results, _count = datastore.find(_query)
    if _count == 1:
        return _results[0].file_path
    else:
        return None


def __check_dir(dir_name, parent='data'):
    """Ensure `dir_name` exists under ROOT (or ROOT/parent)."""
    # get activity path
    if parent is None:
        _dir = os.path.join(ROOT, dir_name)
    else:
        _dir = os.path.join(ROOT, parent, dir_name)
    # ensure activity path
    if os.path.exists(_dir):
        pass
    else:
        os.mkdir(_dir)


def __check_file_in_bundle(sub_path, file_name, mime_type=None,
        in_journal=False):
    """Copy a bundled resource into the activity data dir when missing."""
    # ensure dir exist in bundle
    __check_dir(sub_path)
    # file path
    _path = os.path.join(ROOT, 'data', sub_path, file_name)
    # ensure file
    if os.path.exists(_path):
        pass
    else:
        # get bundle path
        _p = os.path.join(BUND, 'static', 'ext', sub_path, file_name)
        # copy to the bundle
        shutil.copy(_p, _path)


def is_in_journal(file_name, mime_type):
    """Tell whether an entry titled `file_name` of the given mime type
    already exists in the journal. Returns None when no datastore."""
    if not datastore:
        return
    # prepare query
    _query = {
        'activity': ACTIVITY_PER_MIME_TYPES[mime_type],
        'mime_type': mime_type
        }
    # has records ?
    _res, _count = datastore.find(_query)
    # manual check because query with title doesnt work ?
    return file_name in [o.metadata['title'] for o in _res]


def __get_preview(file_path):
    """Creates preview from file path for ds metadata.

    cf.: http://en.flossmanuals.net/make-your-own-sugar-activities/fun-with-the-journal
    """
    if dbus is None:
        return None
    # avoid inter-dependance loop
    from atoidejouer.tools import image
    # prepare pixbuf (journal preview size)
    _pixbuf = image.get_pixbuf(file_path, 128, 96)
    # data cb
    def _save_func(buf, data):
        data.append(buf)
    # save data
    _data = []
    _pixbuf.save_to_callback(_save_func, 'png', user_data=_data)
    # done
    return dbus.ByteArray(''.join(_data))


def add_file_to_journal(file_name, file_path, mime_type):
    """Create a journal (datastore) entry for the given file."""
    if not datastore:
        return
    _props = {
        'activity': ACTIVITY_PER_MIME_TYPES[mime_type],
        'mime_type': mime_type,
        'title': file_name,
        'preview': __get_preview(file_path)
        }
    # prepare meta
    _meta = datastore.DSMetadata(_props)
    # prepare ds object
    _dsobject = datastore.create()
    _dsobject.metadata = _meta
    _dsobject.file_path = file_path
    # write it
    datastore.write(_dsobject)


def __check_file_in_journal(sub_path, file_name, mime_type=None):
    """Add a data-dir file to the journal unless already registered."""
    # is already in the journal
    if is_in_journal(file_name, mime_type):
        pass
    else:
        # file path
        _path = os.path.join(ROOT, 'data', sub_path, file_name)
        # ensure dir exist in bundle
        add_file_to_journal(file_name, _path, mime_type)


def __check_dir_files(sub_path, mime_type=None, in_journal=False):
    """Register every bundled resource of `sub_path` either in the
    journal (in_journal=True) or in the activity data dir."""
    # get bundle path
    _path = os.path.join(BUND, 'static', 'ext', sub_path)
    # file by file
    for _f in os.listdir(_path):
        # full path
        _p = os.path.join(_path, _f)
        # little check
        if os.path.isdir(_p) or _f in ['blank']:
            pass
        elif in_journal is True:
            __check_file_in_journal(sub_path, _f, mime_type=mime_type)
        else:
            __check_file_in_bundle(sub_path, _f)


def init_activity_files():
    """Prepare the activity's local files and journal entries."""
    __check_dir_files('db')
    __check_dir_files('stories')
    # add embedded resources to the journal for common usage
    __check_dir_files('graphics', mime_type='image/png', in_journal=True)
    __check_dir_files('sounds', mime_type='audio/ogg', in_journal=True)


def __show_in_out_result_message(label, message):
    """Show an import/export result message in the given gtk label."""
    # ..
    label.set_markup('%s' % message)
    label.show()


def __merge_dir(project_name, dir_name, exist_list=None):
    """Copy files from the unpacked tmp project dir into the activity
    data dir, without overwriting.

    Already-existing file names are appended to `exist_list`. Returns
    True when the source dir exists, False otherwise.
    """
    # archive path
    _path_src = os.path.join(ROOT, 'tmp', project_name, dir_name)
    # little check
    if os.path.exists(_path_src):
        # project path
        _path_dst = os.path.join(ROOT, 'data', dir_name)
        # init existing list
        exist_list = list() if exist_list is None else exist_list
        for _f in os.listdir(_path_src):
            # ..
            _p_src = os.path.join(_path_src, _f)
            _p_dst = os.path.join(_path_dst, _f)
            # little check
            if os.path.isdir(_p_src):
                continue
            # do not replace
            elif os.path.exists(_p_dst):
                # update exist list
                exist_list.append(os.path.join(dir_name, _f))
            # do copy
            else:
                shutil.copy(_p_src, _path_dst)
        # OK!
        return True
    else:
        # Oops!
        return False


def __import_keys(activity_, project_name):
    """Disabled: merge the imported project's story.keys into the
    activity. The previous implementation is kept below for reference."""
    pass
    """
    # ..
    _path_data = os.path.join(ROOT, 'tmp', project_name, 'story.keys')
    # init content
    _data = None
    # little check
    if os.path.exists(_path_data):
        # read file
        _file = open(_path_data, 'r')
        try:
            _data = _file.read()
        finally:
            _file.close()
        # parse json data
        _exist_graphic_keys = activity_.graphic_keys.loads(_data, clear=False)
        _exist_sound_keys = activity_.sound_keys.loads(_data, clear=False)
        # set activity new number of keys
        activity_.update_max_time()
        # ..
        return {
            'graphics': _exist_graphic_keys,
            'sounds': _exist_sound_keys,
            }
    # ?? invalid archive
    else:
        return None
    """


def import_project(activity_, file_path, msg_label):
    """Disabled: import a project archive (tar.bz2) into the activity.
    The previous implementation is kept below for reference."""
    pass
    """
    # clean tmp dir
    __remove_dir('tmp', parent=None)
    __check_dir('tmp', parent=None)
    # ..
    _tmp_root = os.path.join(ROOT, 'tmp')
    try:
        # copy file to tmp
        _tar_path = os.path.join(_tmp_root, '__tmp.tar.bz2')
        shutil.copy(file_path, _tar_path)
        # change dir for unzipping
        os.chdir(_tmp_root)
        # extract files in tmp dir
        _tar = tarfile.open(file_path)
        _p_name = _tar.getnames()[0]
        _tar.extractall()
        _tar.close()
    except Exception, e:
        # prepare message
        _msg = _('Project import failed!')
        _msg += _('\n\n[Error] Can not read archive file!')
        # remove tmp structure
        __remove_dir('tmp', parent=None)
        # quit!
        return __show_in_out_result_message(msg_label, _msg)
    # merge dirs
    _exist_list = list()
    if __merge_dir(_p_name, 'graphics', exist_list=_exist_list)\
            and __merge_dir(_p_name, 'sounds', exist_list=_exist_list):
        # init result message
        _msg = _('Project sucessfully imported')
    else:
        # prepare message
        _msg = _('Project import failed!')
        _msg += _('\n\n[Error] Can not load files!')
        # remove tmp structure
        __remove_dir('tmp', parent=None)
        # quit!
        return __show_in_out_result_message(msg_label, _msg)
    # existing files stop
    if len(_exist_list) == 0:
        pass
    else:
        # prepare message
        _msg += _('\n\n[Warning] Following files already exist:\n')
        for _f in _exist_list:
            _msg = '%s - %s\n' % (_msg, _f)
    # merge keys
    _existing_dict = __import_keys(activity_, _p_name)
    if _existing_dict is None:
        # prepare message
        _msg = _('Project import failed!')
        _msg += _('\n\n[Error] Can not load keys!')
        # remove tmp structure
        __remove_dir('tmp', parent=None)
        # quit!
        return __show_in_out_result_message(msg_label, _msg)
    if len(_existing_dict['graphics']) == 0\
            or len(_existing_dict['sounds']) == 0:
        pass
    else:
        # prepare message
        _msg += _('\n\n[Warning] Following sequences already exist:\n')
        for _s in _existing_dict['graphics']:
            _msg = '%s - graphics.%s\n' % (_msg, _s)
        _msg = '%s\n' % _msg
        for _s in _existing_dict['sounds']:
            _msg = '%s - sounds.%s\n' % (_msg, _s)
    # remove tmp structure
    __remove_dir('tmp', parent=None)
    # show result
    __show_in_out_result_message(msg_label, _msg)
    """


def __remove_dir(dir_name, parent=None):
    """Recursively remove `dir_name` (under ROOT or ROOT/parent)."""
    # get activity path
    if parent is None:
        _dir = os.path.join(ROOT, dir_name)
        _next_parent = dir_name
    else:
        _dir = os.path.join(ROOT, parent, dir_name)
        _next_parent = os.path.join(parent, dir_name)
    # remove files and dir recursively
    if os.path.exists(_dir):
        for _f in os.listdir(_dir):
            _p = os.path.join(_dir, _f)
            if os.path.isdir(_p):
                __remove_dir(_f, parent=_next_parent)
            else:
                os.remove(_p)
        # and remove the dir
        if os.path.exists(_dir):
            os.removedirs(_dir)
        else:
            pass
    # nothing to do
    else:
        pass


def export_project(activity_, msg_label, media):
    """Disabled: export the current project as a tar.bz2 on `media`.
    The previous implementation is kept below for reference."""
    pass
    """
    # get the toolbar
    _toolbar = activity_._toolbox.get_activity_toolbar()
    # get the projet name
    _name = _toolbar.title.get_text()
    # clean tmp dir first
    __remove_dir('tmp', parent=None)
    __check_dir('tmp', parent=None)
    # create a tmp stucture
    __check_dir(_name, parent='tmp')
    __check_dir(os.path.join(_name, 'graphics'), parent='tmp')
    __check_dir(os.path.join(_name, 'sounds'), parent='tmp')
    # ..
    _tmp_root = os.path.join(ROOT, 'tmp')
    _out_root = os.path.join(_tmp_root, _name)
    # copy keys
    _keys_path = os.path.join(_out_root, 'story.keys')
    activity_.write_file(_keys_path)
    # copy sequences and resources
    # __export_seq_and_res(activity_, _out_root, type_='graphics')
    # __export_seq_and_res(activity_, _out_root, type_='sounds')
    # change dir for zipping
    os.chdir(_tmp_root)
    # zip all
    _tar_name = '%s.tar.bz2' % _name
    # ..
    _tar = tarfile.open(_tar_name, "w:bz2")
    _tar.add(_name)
    _tar.close()
    # try to copy
    try:
        if os.path.exists(os.path.join('/media', media, _tar_name)):
            # ..
            _msg = _('Project') + ' "' + _name + '" '
            _msg += _('already exported to') + ' "' + media + '" '
        else:
            # ..
            shutil.copy(os.path.join(_tmp_root, _tar_name),
                    os.path.join('/media', media))
            # ..
            _msg = _('Project') + ' "' + _name + '" '
            _msg += _('sucessfully exported to') + ' "' + media + '" '
    except Exception, e:
        # ERROR
        logger.error('[storage] export_project - e: %s' % e)
        # ERROR
        # ..
        # ..
        _msg = _('Project') + ' "' + _name + '" '
        _msg += _('export to') + ' "' + media + '" ' + _('failed!')
    # remove tmp structure
    __remove_dir('tmp', parent=None)
    # tmp message
    __show_in_out_result_message(msg_label, _msg)
    """