diff options
Diffstat (limited to 'tutorius')
38 files changed, 6394 insertions, 1500 deletions
diff --git a/tutorius/TProbe.py b/tutorius/TProbe.py index ec0f9a3..f55547c 100644 --- a/tutorius/TProbe.py +++ b/tutorius/TProbe.py @@ -1,4 +1,5 @@ import logging +LOGGER = logging.getLogger("sugar.tutorius.TProbe") import os import gobject @@ -7,10 +8,12 @@ import dbus import dbus.service import cPickle as pickle -import sugar.tutorius.addon as addon -from sugar.tutorius.services import ObjectStore +from . import addon +from .services import ObjectStore +from .properties import TPropContainer +from .dbustools import remote_call, save_args import copy """ @@ -24,25 +27,15 @@ import copy -------------------- ---------- """ +#TODO Add stub error handling for remote calls in the classes so that it will +# be clearer how errors can be handled in the future. + class TProbe(dbus.service.Object): """ Tutorius Probe Defines an entry point for Tutorius into activities that allows performing actions and registering events onto an activity via a DBUS Interface. - - Exposes the following dbus methods: - void registered(string service) - string ping() -> status - string install(string action) -> address - void update(string address, string action_props) - void uninstall(string address) - string subscribe(string pickled_event) -> address - void unsubscribe(string address) - - Exposes the following dbus Events: - eventOccured(event): - """ def __init__(self, activity_name, activity): @@ -52,9 +45,9 @@ class TProbe(dbus.service.Object): @param activity_name unique activity_id @param activity activity reference, must be a gtk container """ - logging.debug("TProbe :: Creating TProbe for %s (%d)", activity_name, os.getpid()) - logging.debug("TProbe :: Current gobject context: %s", str(gobject.main_context_default())) - logging.debug("TProbe :: Current gobject depth: %s", str(gobject.main_depth())) + LOGGER.debug("TProbe :: Creating TProbe for %s (%d)", activity_name, os.getpid()) + LOGGER.debug("TProbe :: Current gobject context: %s", str(gobject.main_context_default())) + 
LOGGER.debug("TProbe :: Current gobject depth: %s", str(gobject.main_depth())) # Moving the ObjectStore assignment here, in the meantime # the reference to the activity shouldn't be share as a # global variable but passed by the Probe to the objects @@ -157,38 +150,24 @@ class TProbe(dbus.service.Object): in_signature='s', out_signature='s') def subscribe(self, pickled_event): """ - Subscribe to a Gtk Widget Event - @param pickled_event string pickled Event + Subscribe to an Event + @param pickled_event string pickled EventFilter @return string unique name of registered event """ - event = pickle.loads(str(pickled_event)) - - # TODO elavoie 2009-07-25 Move to a reference counting implementation - # to avoid duplicating eventfilters when the event signature is the - # same - - # For now we will assume every probe is inserted in a GTK activity, - # however, in the future this should be moved in a subclass - eventfilter = addon.create("GtkWidgetEventFilter") - - # There might be a validation of the Address in source in the future - # and a partial resolution to extract the object_id from the address - eventfilter.object_id = event.source - - # TODO elavoie 2009-07-19 - # There should be a type translation from a tutorius type - # to a GTK type here - eventfilter.event_name = event.type + #TODO Perform event unmapping once Tutorials use abstract events + # instead of concrete EventFilters that are tied to their + # implementation. 
+ eventfilter = pickle.loads(str(pickled_event)) # The callback uses the event defined previously and each # successive call to subscribe will register a different # callback that references a different event def callback(*args): - self.notify(event) + self.notify(eventfilter) eventfilter.install_handlers(callback, activity=self._activity) - name = self._generate_event_reference(event) + name = self._generate_event_reference(eventfilter) self._subscribedEvents[name] = eventfilter return name @@ -215,7 +194,12 @@ class TProbe(dbus.service.Object): # The actual method we will call on the probe to send events def notify(self, event): - self.eventOccured(pickle.dumps(event)) + LOGGER.debug("TProbe :: notify event %s", str(event)) + #Check that this event is even allowed + if event in self._subscribedEvents.values(): + self.eventOccured(pickle.dumps(event)) + else: + raise RuntimeWarning("Attempted to raise an unregistered event") # Return a unique name for this action def _generate_action_reference(self, action): @@ -232,11 +216,14 @@ class TProbe(dbus.service.Object): # Return a unique name for this event def _generate_event_reference(self, event): # TODO elavoie 2009-07-25 Should return a universal address - name = event.type - suffix = 1 + name = event.__class__.__name__ + #Keep the counter to avoid looping all the time + suffix = getattr(self, '_event_ref_suffix', 0 ) + 1 while self._subscribedEvents.has_key(name+str(suffix)): suffix += 1 + + #setattr(self, '_event_ref_suffix', suffix) return name + str(suffix) @@ -246,108 +233,98 @@ class ProbeProxy: It provides an object interface to the TProbe, which requires pickled strings, across a DBus communication. 
- - Public Methods: - ProbeProxy(string activityName) :: Constructor - string install(Action action) - void update(Action action) - void uninstall(Action action) - void uninstall_all() - string subscribe(Event event, callable callback) - void unsubscribe(Event event, callable callback) - void unsubscribe_all() """ def __init__(self, activityName): """ Constructor - @param activityName unique activity id + @param activityName unique activity id. Must be a valid dbus bus name. """ - logging.debug("ProbeProxy :: Creating ProbeProxy for %s (%d)", activityName, os.getpid()) - logging.debug("ProbeProxy :: Current gobject context: %s", str(gobject.main_context_default())) - logging.debug("ProbeProxy :: Current gobject depth: %s", str(gobject.main_depth())) + LOGGER.debug("ProbeProxy :: Creating ProbeProxy for %s (%d)", activityName, os.getpid()) + LOGGER.debug("ProbeProxy :: Current gobject context: %s", str(gobject.main_context_default())) + LOGGER.debug("ProbeProxy :: Current gobject depth: %s", str(gobject.main_depth())) bus = dbus.SessionBus() self._object = bus.get_object(activityName, "/tutorius/Probe") self._probe = dbus.Interface(self._object, "org.tutorius.ProbeInterface") self._actions = {} - self._events = {} # We keep those two data structures to be able to have multiple callbacks # for the same event and be able to remove them independently + # _subscribedEvents holds a list of callback addresses's for each event + # _registeredCallbacks holds the functions to call for each address self._subscribedEvents = {} self._registeredCallbacks = {} - def _handle_signal(pickled_event): - event = pickle.loads(str(pickled_event)) - if self._registeredCallbacks.has_key(event): - for callback in self._registeredCallbacks[event].itervalues(): - callback(event) - - self._object.connect_to_signal("eventOccured", _handle_signal, dbus_interface="org.tutorius.ProbeInterface") - + + self._object.connect_to_signal("eventOccured", self._handle_signal, 
dbus_interface="org.tutorius.ProbeInterface") + + def _handle_signal(self, pickled_event): + event = pickle.loads(str(pickled_event)) + LOGGER.debug("ProbeProxy :: Received Event : %s %s", str(event), str(event._props.items())) + + LOGGER.debug("ProbeProxy :: Currently %d events registered", len(self._registeredCallbacks)) + if self._registeredCallbacks.has_key(event): + for callback in self._registeredCallbacks[event].values(): + callback(event) + else: + for event in self._registeredCallbacks.keys(): + LOGGER.debug("==== %s", str(event._props.items())) + LOGGER.debug("ProbeProxy :: Event does not appear to be registered") + def isAlive(self): try: return self._probe.ping() == "alive" except: return False - def install(self, action): + def __update_action(self, action, address): + LOGGER.debug("ProbeProxy :: Updating action %s with address %s", str(action), str(address)) + self._actions[action] = str(address) + + def __clear_action(self, action): + self._actions.pop(action, None) + + def install(self, action, block=False): """ Install an action on the TProbe's activity @param action Action to install + @param block Force a synchroneous dbus call if True @return None """ - address = str(self._probe.install(pickle.dumps(action))) - self._actions[action] = address + return remote_call(self._probe.install, (pickle.dumps(action),), + save_args(self.__update_action, action), + block=block) - def update(self, action): + def update(self, action, newaction, block=False): """ Update an already installed action's properties and run it again @param action Action to update + @param newaction Action to update it with + @param block Force a synchroneous dbus call if True @return None """ + #TODO review how to make this work well if not action in self._actions: raise RuntimeWarning("Action not installed") - return - self._probe.update(self._actions[action], pickle.dumps(action._props)) + #TODO Check error handling + return remote_call(self._probe.update, (self._actions[action], 
pickle.dumps(newaction._props)), block=block) - def uninstall(self, action): + def uninstall(self, action, block=False): """ Uninstall an installed action @param action Action to uninstall + @param block Force a synchroneous dbus call if True """ if action in self._actions: - self._probe.uninstall(self._actions.pop(action)) - - def uninstall_all(self): - """ - Uninstall all installed actions - @return None - """ - for action in self._actions.keys(): - self.uninstall(action) - - def subscribe(self, event, callback): - """ - Register an event listener - @param event Event to listen for - @param callback callable that will be called when the event occurs - @return address identifier used for unsubscribing - """ - # TODO elavoie 2009-07-25 When we will allow for patterns both - # for event types and sources, we will need to revise the lookup - # mecanism for which callback function to call - if (event, callback) in self._events: - raise RuntimeError("event already registered for callback") - return + remote_call(self._probe.uninstall,(self._actions.pop(action),), block=block) + def __update_event(self, event, callback, address): + LOGGER.debug("ProbeProxy :: Registered event %s with address %s", str(hash(event)), str(address)) # Since multiple callbacks could be associated to the same # event signature, we will store multiple callbacks # in a dictionary indexed by the unique address # given for this subscribtion and access this # dictionary from another one indexed by event - address = str(self._probe.subscribe(pickle.dumps(event))) - - self._events[(event, callback)] = address + address = str(address) # We use the event object as a key if not self._registeredCallbacks.has_key(event): @@ -371,19 +348,8 @@ class ProbeProxy: return address - def unsubscribe(self, event, callback): - """ - Unregister an event listener - @param address identifier given by subscribe() - @return None - """ - if not (event, callback) in self._events: - raise RuntimeWarning("callback/event not 
subscribed") - return - - address = self._events.pop((event, callback)) - self._probe.unsubscribe() - + def __clear_event(self, address): + LOGGER.debug("ProbeProxy :: Unregistering adress %s", str(address)) # Cleanup everything if self._subscribedEvents.has_key(address): event = self._subscribedEvents[address] @@ -396,22 +362,69 @@ class ProbeProxy: self._registeredCallbacks.pop(event) self._subscribedEvents.pop(address) + else: + LOGGER.debug("ProbeProxy :: unsubsribe address %s inconsistency : not registered", address) + + def subscribe(self, event, callback, block=True): + """ + Register an event listener + @param event Event to listen for + @param callback callable that will be called when the event occurs + @param block Force a synchroneous dbus call if True (Not allowed yet) + @return address identifier used for unsubscribing + """ + LOGGER.debug("ProbeProxy :: Registering event %s", str(hash(event))) + if not block: + raise RuntimeError("This function does not allow non-blocking mode yet") - def unsubscribe_all(self): + # TODO elavoie 2009-07-25 When we will allow for patterns both + # for event types and sources, we will need to revise the lookup + # mecanism for which callback function to call + return remote_call(self._probe.subscribe, (pickle.dumps(event),), + save_args(self.__update_event, event, callback), + block=block) + + def unsubscribe(self, address, block=True): """ - Unregister all event listeners + Unregister an event listener + @param address identifier given by subscribe() + @param block Force a synchroneous dbus call if True @return None """ - for event, callback in self._events.keys(): - self.unsubscribe(event, callback) + LOGGER.debug("ProbeProxy :: Unregister adress %s issued", str(address)) + if address in self._subscribedEvents.keys(): + remote_call(self._probe.unsubscribe, (address,), + return_cb=save_args(self.__clear_event, address), + block=block) + else: + LOGGER.debug("ProbeProxy :: unsubsribe address %s failed : not registered", 
address) + + def detach(self, block=False): + """ + Detach the ProbeProxy from it's TProbe. All installed actions and + subscribed events should be removed. + """ + for action_addr in self._actions.keys(): + self.uninstall(action_addr, block) + + for address in self._subscribedEvents.keys(): + self.unsubscribe(address, block) + class ProbeManager(object): """ The ProbeManager provides multiplexing across multiple activity ProbeProxies For now, it only handles one at a time, though. + Actually it doesn't do much at all. But it keeps your encapsulation happy """ - def __init__(self): + def __init__(self, proxy_class=ProbeProxy): + """Constructor + @param proxy_class Class to use for creating Proxies to activities. + The class should support the same interface as ProbeProxy. Exists + to make this class unit-testable by replacing the Proxy with a mock + """ + self._ProxyClass = proxy_class self._probes = {} self._current_activity = None @@ -427,9 +440,9 @@ class ProbeManager(object): def attach(self, activity_id): if activity_id in self._probes: raise RuntimeWarning("Activity already attached") - return - self._probes[activity_id] = ProbeProxy(activity_id) + self._probes[activity_id] = self._ProxyClass(activity_id) + #TODO what do we do with this? Raise something? if self._probes[activity_id].isAlive(): print "Alive!" 
else: @@ -438,48 +451,66 @@ class ProbeManager(object): def detach(self, activity_id): if activity_id in self._probes: probe = self._probes.pop(activity_id) - probe.unsubscribe_all() - probe.uninstall_all() - - def install(self, action): - if self.currentActivity: - return self._probes[self.currentActivity].install(action) - else: - raise RuntimeWarning("No activity attached") + probe.detach() + if self._current_activity == activity_id: + self._current_activity = None - def update(self, action): + def install(self, action, block=False): + """ + Install an action on the current activity + @param action Action to install + @param block Force a synchroneous dbus call if True + @return None + """ if self.currentActivity: - return self._probes[self.currentActivity].update(action) + return self._probes[self.currentActivity].install(action, block) else: raise RuntimeWarning("No activity attached") - def uninstall(self, action): + def update(self, action, newaction, block=False): + """ + Update an already installed action's properties and run it again + @param action Action to update + @param newaction Action to update it with + @param block Force a synchroneous dbus call if True + @return None + """ if self.currentActivity: - return self._probes[self.currentActivity].uninstall(action) + return self._probes[self.currentActivity].update(action, newaction, block) else: raise RuntimeWarning("No activity attached") - def uninstall_all(self): + def uninstall(self, action, block=False): + """ + Uninstall an installed action + @param action Action to uninstall + @param block Force a synchroneous dbus call if True + """ if self.currentActivity: - return self._probes[self.currentActivity].uninstall_all() + return self._probes[self.currentActivity].uninstall(action, block) else: raise RuntimeWarning("No activity attached") def subscribe(self, event, callback): + """ + Register an event listener + @param event Event to listen for + @param callback callable that will be called when 
the event occurs + @return address identifier used for unsubscribing + """ if self.currentActivity: return self._probes[self.currentActivity].subscribe(event, callback) else: raise RuntimeWarning("No activity attached") - def unsubscribe(self, event, callback): - if self.currentActivity: - return self._probes[self.currentActivity].unsubscribe(event, callback) - else: - raise RuntimeWarning("No activity attached") - - def unsubscribe_all(self): + def unsubscribe(self, address): + """ + Unregister an event listener + @param address identifier given by subscribe() + @return None + """ if self.currentActivity: - return self._probes[self.currentActivity].unsubscribe_all() + return self._probes[self.currentActivity].unsubscribe(address) else: raise RuntimeWarning("No activity attached") diff --git a/tutorius/actions.py b/tutorius/actions.py index 7e0d65e..d5a8641 100644 --- a/tutorius/actions.py +++ b/tutorius/actions.py @@ -16,16 +16,15 @@ """ This module defines Actions that can be done and undone on a state """ +import gtk + from gettext import gettext as _ -from sugar.tutorius import gtkutils, addon -from dialog import TutoriusDialog -import overlayer -from sugar.tutorius.editor import WidgetIdentifier -from sugar.tutorius.services import ObjectStore -from sugar.tutorius.properties import * from sugar.graphics import icon -import gtk.gdk + +from . 
import addon +from .services import ObjectStore +from .properties import * class DragWrapper(object): """Wrapper to allow gtk widgets to be dragged around""" @@ -178,148 +177,3 @@ class Action(TPropContainer): self.position = [int(x), int(y)] self.__edit_img.destroy() -##class OnceWrapper(Action): -## """ -## Wraps a class to perform an action once only -## -## This ConcreteActions's do() method will only be called on the first do() -## and the undo() will be callable after do() has been called -## """ -## -## _action = TAddonProperty() -## -## def __init__(self, action): -## Action.__init__(self) -## self._called = False -## self._need_undo = False -## self._action = action -## -## def do(self): -## """ -## Do the action only on the first time -## """ -## if not self._called: -## self._called = True -## self._action.do() -## self._need_undo = True -## -## def undo(self): -## """ -## Undo the action if it's been done -## """ -## if self._need_undo: -## self._action.undo() -## self._need_undo = False -## -##class WidgetIdentifyAction(Action): -## def __init__(self): -## Action.__init__(self) -## self.activity = None -## self._dialog = None - -## def do(self): -## os = ObjectStore() -## if os.activity: -## self.activity = os.activity - -## self._dialog = WidgetIdentifier(self.activity) -## self._dialog.show() - - -## def undo(self): -## if self._dialog: -## self._dialog.destroy() - -##class ChainAction(Action): -## """Utility class to allow executing actions in a specific order""" -## def __init__(self, *actions): -## """ChainAction(action1, ... 
) builds a chain of actions""" -## Action.__init__(self) -## self._actions = actions -## -## def do(self,**kwargs): -## """do() each action in the chain""" -## for act in self._actions: -## act.do(**kwargs) -## -## def undo(self): -## """undo() each action in the chain, starting with the last""" -## for act in reversed(self._actions): -## act.undo() - -##class DisableWidgetAction(Action): -## def __init__(self, target): -## """Constructor -## @param target target treeish -## """ -## Action.__init__(self) -## self._target = target -## self._widget = None - -## def do(self): -## """Action do""" -## os = ObjectStore() -## if os.activity: -## self._widget = gtkutils.find_widget(os.activity, self._target) -## if self._widget: -## self._widget.set_sensitive(False) - -## def undo(self): -## """Action undo""" -## if self._widget: -## self._widget.set_sensitive(True) - - -##class TypeTextAction(Action): -## """ -## Simulate a user typing text in a widget -## Work on any widget that implements a insert_text method -## -## @param widget The treehish representation of the widget -## @param text the text that is typed -## """ -## def __init__(self, widget, text): -## Action.__init__(self) -## -## self._widget = widget -## self._text = text -## -## def do(self, **kwargs): -## """ -## Type the text -## """ -## widget = gtkutils.find_widget(ObjectStore().activity, self._widget) -## if hasattr(widget, "insert_text"): -## widget.insert_text(self._text, -1) -## -## def undo(self): -## """ -## no undo -## """ -## pass -## -##class ClickAction(Action): -## """ -## Action that simulate a click on a widget -## Work on any widget that implements a clicked() method -## -## @param widget The threehish representation of the widget -## """ -## def __init__(self, widget): -## Action.__init__(self) -## self._widget = widget -## -## def do(self): -## """ -## click the widget -## """ -## widget = gtkutils.find_widget(ObjectStore().activity, self._widget) -## if hasattr(widget, "clicked"): -## 
widget.clicked() -## -## def undo(self): -## """ -## No undo -## """ -## pass - diff --git a/tutorius/addon.py b/tutorius/addon.py index e311a65..7ac68f7 100644 --- a/tutorius/addon.py +++ b/tutorius/addon.py @@ -38,6 +38,9 @@ import logging PREFIX = __name__+"s" PATH = re.sub("addon\\.py[c]$", "", __file__)+"addons" +TYPE_ACTION = 'action' +TYPE_EVENT = 'event' + _cache = None def _reload_addons(): @@ -47,9 +50,11 @@ def _reload_addons(): mod = __import__(PREFIX+'.'+re.sub("\\.py$", "", addon), {}, {}, [""]) if hasattr(mod, "__action__"): _cache[mod.__action__['name']] = mod.__action__ + mod.__action__['type'] = TYPE_ACTION continue if hasattr(mod, "__event__"): _cache[mod.__event__['name']] = mod.__event__ + mod.__event__['type'] = TYPE_EVENT def create(name, *args, **kwargs): global _cache @@ -62,7 +67,6 @@ def create(name, *args, **kwargs): except: logging.error("Could not instantiate %s with parameters %s, %s"%(comp_metadata['name'],str(args), str(kwargs))) return None - return _cache[name]['class'](*args, **kwargs) except KeyError: logging.error("Addon not found for class '%s'", name) return None @@ -79,4 +83,13 @@ def get_addon_meta(name): _reload_addons() return _cache[name] +def get_name_from_type(typ): + global _cache + if not _cache: + _reload_addons() + for addon in _cache.keys(): + if typ == _cache[addon]['class']: + return addon + return None + # vim:set ts=4 sts=4 sw=4 et: diff --git a/tutorius/apilib/__init__.py b/tutorius/apilib/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/tutorius/apilib/__init__.py diff --git a/tutorius/apilib/__init__.pyc b/tutorius/apilib/__init__.pyc Binary files differnew file mode 100644 index 0000000..bd4346b --- /dev/null +++ b/tutorius/apilib/__init__.pyc diff --git a/tutorius/apilib/httplib2/.svn/all-wcprops b/tutorius/apilib/httplib2/.svn/all-wcprops new file mode 100644 index 0000000..601feb8 --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/all-wcprops @@ -0,0 +1,29 @@ +K 25 
+svn:wc:ra_dav:version-url +V 30 +/svn/!svn/ver/2/trunk/httplib2 +END +iri2uri.pyc +K 25 +svn:wc:ra_dav:version-url +V 42 +/svn/!svn/ver/2/trunk/httplib2/iri2uri.pyc +END +__init__.py +K 25 +svn:wc:ra_dav:version-url +V 42 +/svn/!svn/ver/2/trunk/httplib2/__init__.py +END +__init__.pyc +K 25 +svn:wc:ra_dav:version-url +V 43 +/svn/!svn/ver/2/trunk/httplib2/__init__.pyc +END +iri2uri.py +K 25 +svn:wc:ra_dav:version-url +V 41 +/svn/!svn/ver/2/trunk/httplib2/iri2uri.py +END diff --git a/tutorius/apilib/httplib2/.svn/entries b/tutorius/apilib/httplib2/.svn/entries new file mode 100644 index 0000000..1a3c5d2 --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/entries @@ -0,0 +1,66 @@ +8 + +dir +2 +https://python-rest-client.googlecode.com/svn/trunk/httplib2 +https://python-rest-client.googlecode.com/svn + + + +2008-05-14T17:00:19.245332Z +2 +bosteen + + +svn:special svn:externals svn:needs-lock + +iri2uri.pyc +file + + + + +2008-05-14T14:48:03.000000Z +6f9a0833a6dc59c42b7aec0dfdf39dd0 +2008-05-14T17:00:19.245332Z +2 +bosteen +has-props + +__init__.py +file + + + + +2007-10-23T15:25:46.000000Z +00c607566b698248d5a5c40508843cd7 +2008-05-14T17:00:19.245332Z +2 +bosteen + +__init__.pyc +file + + + + +2008-05-14T14:48:03.000000Z +bdf8607edad61c67d890de558db8006c +2008-05-14T17:00:19.245332Z +2 +bosteen +has-props + +iri2uri.py +file + + + + +2007-09-04T04:02:06.000000Z +c0f9c5cb229a22e21575322b4ba77741 +2008-05-14T17:00:19.245332Z +2 +bosteen + diff --git a/tutorius/apilib/httplib2/.svn/format b/tutorius/apilib/httplib2/.svn/format new file mode 100644 index 0000000..45a4fb7 --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/format @@ -0,0 +1 @@ +8 diff --git a/tutorius/apilib/httplib2/.svn/prop-base/__init__.pyc.svn-base b/tutorius/apilib/httplib2/.svn/prop-base/__init__.pyc.svn-base new file mode 100644 index 0000000..5e9587e --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/prop-base/__init__.pyc.svn-base @@ -0,0 +1,5 @@ +K 13 +svn:mime-type +V 24 +application/octet-stream 
+END diff --git a/tutorius/apilib/httplib2/.svn/prop-base/iri2uri.pyc.svn-base b/tutorius/apilib/httplib2/.svn/prop-base/iri2uri.pyc.svn-base new file mode 100644 index 0000000..5e9587e --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/prop-base/iri2uri.pyc.svn-base @@ -0,0 +1,5 @@ +K 13 +svn:mime-type +V 24 +application/octet-stream +END diff --git a/tutorius/apilib/httplib2/.svn/text-base/__init__.py.svn-base b/tutorius/apilib/httplib2/.svn/text-base/__init__.py.svn-base new file mode 100644 index 0000000..982bf8a --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/text-base/__init__.py.svn-base @@ -0,0 +1,1123 @@ +from __future__ import generators +""" +httplib2 + +A caching http interface that supports ETags and gzip +to conserve bandwidth. + +Requires Python 2.3 or later + +Changelog: +2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. + +""" + +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", + "James Antill", + "Xavier Verges Farrero", + "Jonathan Feinberg", + "Blair Zajac", + "Sam Ruby", + "Louis Nyffenegger"] +__license__ = "MIT" +__version__ = "$Rev: 259 $" + +import re +import sys +import md5 +import email +import email.Utils +import email.Message +import StringIO +import gzip +import zlib +import httplib +import urlparse +import base64 +import os +import copy +import calendar +import time +import random +import sha +import hmac +from gettext import gettext as _ +import socket + +try: + import socks +except ImportError: + socks = None + +if sys.version_info >= (2,3): + from iri2uri import iri2uri +else: + def iri2uri(uri): + return uri + +__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error', + 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', + 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', + 'debuglevel'] + + +# The httplib debug level, set to a non-zero value to get 
debug output +debuglevel = 0 + +# Python 2.3 support +if sys.version_info < (2,4): + def sorted(seq): + seq.sort() + return seq + +# Python 2.3 support +def HTTPResponse__getheaders(self): + """Return list of (header, value) tuples.""" + if self.msg is None: + raise httplib.ResponseNotReady() + return self.msg.items() + +if not hasattr(httplib.HTTPResponse, 'getheaders'): + httplib.HTTPResponse.getheaders = HTTPResponse__getheaders + +# All exceptions raised here derive from HttpLib2Error +class HttpLib2Error(Exception): pass + +# Some exceptions can be caught and optionally +# be turned back into responses. +class HttpLib2ErrorWithResponse(HttpLib2Error): + def __init__(self, desc, response, content): + self.response = response + self.content = content + HttpLib2Error.__init__(self, desc) + +class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass +class RedirectLimit(HttpLib2ErrorWithResponse): pass +class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass +class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass +class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass + +class RelativeURIError(HttpLib2Error): pass +class ServerNotFoundError(HttpLib2Error): pass + +# Open Items: +# ----------- +# Proxy support + +# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) + +# Pluggable cache storage (supports storing the cache in +# flat files by default. We need a plug-in architecture +# that can support Berkeley DB and Squid) + +# == Known Issues == +# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. +# Does not handle Cache-Control: max-stale +# Does not use Age: headers when calculating cache freshness. + + +# The number of redirections to follow before giving up. +# Note that only GET redirects are automatically followed. +# Will also honor 301 requests by saving that info and never +# requesting that URI again. 
+DEFAULT_MAX_REDIRECTS = 5 + +# Which headers are hop-by-hop headers by default +HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] + +def _get_end2end_headers(response): + hopbyhop = list(HOP_BY_HOP) + hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')]) + return [header for header in response.keys() if header not in hopbyhop] + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + +def urlnorm(uri): + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) + authority = authority.lower() + scheme = scheme.lower() + if not path: + path = "/" + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + scheme = scheme.lower() + defrag_uri = scheme + "://" + authority + request_uri + return scheme, authority, request_uri, defrag_uri + + +# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) +re_url_scheme = re.compile(r'^\w+://') +re_slash = re.compile(r'[?/:|]+') + +def safename(filename): + """Return a filename suitable for the cache. + + Strips dangerous and common characters to create a filename we + can use to store the cache in. 
+ """ + + try: + if re_url_scheme.match(filename): + if isinstance(filename,str): + filename = filename.decode('utf-8') + filename = filename.encode('idna') + else: + filename = filename.encode('idna') + except UnicodeError: + pass + if isinstance(filename,unicode): + filename=filename.encode('utf-8') + filemd5 = md5.new(filename).hexdigest() + filename = re_url_scheme.sub("", filename) + filename = re_slash.sub(",", filename) + + # limit length of filename + if len(filename)>200: + filename=filename[:200] + return ",".join((filename, filemd5)) + +NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') +def _normalize_headers(headers): + return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) + +def _parse_cache_control(headers): + retval = {} + if headers.has_key('cache-control'): + parts = headers['cache-control'].split(',') + parts_with_args = [tuple([x.strip() for x in part.split("=")]) for part in parts if -1 != part.find("=")] + parts_wo_args = [(name.strip(), 1) for name in parts if -1 == name.find("=")] + retval = dict(parts_with_args + parts_wo_args) + return retval + +# Whether to use a strict mode to parse WWW-Authenticate headers +# Might lead to bad results in case of ill-formed header value, +# so disabled by default, falling back to relaxed parsing. +# Set to true to turn on, usefull for testing servers. +USE_WWW_AUTH_STRICT_PARSING = 0 + +# In regex below: +# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP +# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space +# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: +# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"? 
+WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$") +WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$") +UNQUOTE_PAIRS = re.compile(r'\\(.)') +def _parse_www_authenticate(headers, headername='www-authenticate'): + """Returns a dictionary of dictionaries, one dict + per auth_scheme.""" + retval = {} + if headers.has_key(headername): + authenticate = headers[headername].strip() + www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED + while authenticate: + # Break off the scheme at the beginning of the line + if headername == 'authentication-info': + (auth_scheme, the_rest) = ('digest', authenticate) + else: + (auth_scheme, the_rest) = authenticate.split(" ", 1) + # Now loop over all the key value pairs that come after the scheme, + # being careful not to roll into the next scheme + match = www_auth.search(the_rest) + auth_params = {} + while match: + if match and len(match.groups()) == 3: + (key, value, the_rest) = match.groups() + auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')]) + match = www_auth.search(the_rest) + retval[auth_scheme.lower()] = auth_params + authenticate = the_rest.strip() + return retval + + +def _entry_disposition(response_headers, request_headers): + """Determine freshness from the Date, Expires and Cache-Control headers. + + We don't handle the following: + + 1. Cache-Control: max-stale + 2. Age: headers are not used in the calculations. + + Not that this algorithm is simpler than you might think + because we are operating as a private (non-shared) cache. + This lets us ignore 's-maxage'. We can also ignore + 'proxy-invalidate' since we aren't a proxy. 
+ We will never return a stale document as + fresh as a design decision, and thus the non-implementation + of 'max-stale'. This also lets us safely ignore 'must-revalidate' + since we operate as if every server has sent 'must-revalidate'. + Since we are private we get to ignore both 'public' and + 'private' parameters. We also ignore 'no-transform' since + we don't do any transformations. + The 'no-store' parameter is handled at a higher level. + So the only Cache-Control parameters we look at are: + + no-cache + only-if-cached + max-age + min-fresh + """ + + retval = "STALE" + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + + if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1: + retval = "TRANSPARENT" + if 'cache-control' not in request_headers: + request_headers['cache-control'] = 'no-cache' + elif cc.has_key('no-cache'): + retval = "TRANSPARENT" + elif cc_response.has_key('no-cache'): + retval = "STALE" + elif cc.has_key('only-if-cached'): + retval = "FRESH" + elif response_headers.has_key('date'): + date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date'])) + now = time.time() + current_age = max(0, now - date) + if cc_response.has_key('max-age'): + try: + freshness_lifetime = int(cc_response['max-age']) + except ValueError: + freshness_lifetime = 0 + elif response_headers.has_key('expires'): + expires = email.Utils.parsedate_tz(response_headers['expires']) + if None == expires: + freshness_lifetime = 0 + else: + freshness_lifetime = max(0, calendar.timegm(expires) - date) + else: + freshness_lifetime = 0 + if cc.has_key('max-age'): + try: + freshness_lifetime = int(cc['max-age']) + except ValueError: + freshness_lifetime = 0 + if cc.has_key('min-fresh'): + try: + min_fresh = int(cc['min-fresh']) + except ValueError: + min_fresh = 0 + current_age += min_fresh + if freshness_lifetime > current_age: + retval = "FRESH" + return retval + +def 
_decompressContent(response, new_content): + content = new_content + try: + encoding = response.get('content-encoding', None) + if encoding in ['gzip', 'deflate']: + if encoding == 'gzip': + content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() + if encoding == 'deflate': + content = zlib.decompress(content) + response['content-length'] = str(len(content)) + del response['content-encoding'] + except IOError: + content = "" + raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) + return content + +def _updateCache(request_headers, response_headers, content, cache, cachekey): + if cachekey: + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + if cc.has_key('no-store') or cc_response.has_key('no-store'): + cache.delete(cachekey) + else: + info = email.Message.Message() + for key, value in response_headers.iteritems(): + if key not in ['status','content-encoding','transfer-encoding']: + info[key] = value + + status = response_headers.status + if status == 304: + status = 200 + + status_header = 'status: %d\r\n' % response_headers.status + + header_str = info.as_string() + + header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str) + text = "".join([status_header, header_str, content]) + + cache.set(cachekey, text) + +def _cnonce(): + dig = md5.new("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest() + return dig[:16] + +def _wsse_username_token(cnonce, iso_now, password): + return base64.encodestring(sha.new("%s%s%s" % (cnonce, iso_now, password)).digest()).strip() + + +# For credentials we need two things, first +# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.) 
+# Then we also need a list of URIs that have already demanded authentication +# That list is tricky since sub-URIs can take the same auth, or the +# auth scheme may change as you descend the tree. +# So we also need each Auth instance to be able to tell us +# how close to the 'top' it is. + +class Authentication(object): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + self.path = path + self.host = host + self.credentials = credentials + self.http = http + + def depth(self, request_uri): + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + return request_uri[len(self.path):].count("/") + + def inscope(self, host, request_uri): + # XXX Should we normalize the request_uri? + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + return (host == self.host) and path.startswith(self.path) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header. Over-rise this in sub-classes.""" + pass + + def response(self, response, content): + """Gives us a chance to update with new nonces + or such returned from the last authorized response. + Over-rise this in sub-classes if necessary. + + Return TRUE is the request is to be retried, for + example Digest may return stale=true. 
+ """ + return False + + + +class BasicAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'Basic ' + base64.encodestring("%s:%s" % self.credentials).strip() + + +class DigestAuthentication(Authentication): + """Only do qop='auth' and MD5, since that + is all Apache currently implements""" + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + challenge = _parse_www_authenticate(response, 'www-authenticate') + self.challenge = challenge['digest'] + qop = self.challenge.get('qop') + self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None + if self.challenge['qop'] is None: + raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop)) + self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5') + if self.challenge['algorithm'] != 'MD5': + raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." 
% self.challenge['algorithm'])) + self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]]) + self.challenge['nc'] = 1 + + def request(self, method, request_uri, headers, content, cnonce = None): + """Modify the request headers""" + H = lambda x: md5.new(x).hexdigest() + KD = lambda s, d: H("%s:%s" % (s, d)) + A2 = "".join([method, ":", request_uri]) + self.challenge['cnonce'] = cnonce or _cnonce() + request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'], + '%08x' % self.challenge['nc'], + self.challenge['cnonce'], + self.challenge['qop'], H(A2) + )) + headers['Authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % ( + self.credentials[0], + self.challenge['realm'], + self.challenge['nonce'], + request_uri, + self.challenge['algorithm'], + request_digest, + self.challenge['qop'], + self.challenge['nc'], + self.challenge['cnonce'], + ) + self.challenge['nc'] += 1 + + def response(self, response, content): + if not response.has_key('authentication-info'): + challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {}) + if 'true' == challenge.get('stale'): + self.challenge['nonce'] = challenge['nonce'] + self.challenge['nc'] = 1 + return True + else: + updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {}) + + if updated_challenge.has_key('nextnonce'): + self.challenge['nonce'] = updated_challenge['nextnonce'] + self.challenge['nc'] = 1 + return False + + +class HmacDigestAuthentication(Authentication): + """Adapted from Robert Sayre's code and DigestAuthentication above.""" + __author__ = "Thomas Broyer (t.broyer@ltgt.net)" + + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + challenge = _parse_www_authenticate(response, 
'www-authenticate') + self.challenge = challenge['hmacdigest'] + # TODO: self.challenge['domain'] + self.challenge['reason'] = self.challenge.get('reason', 'unauthorized') + if self.challenge['reason'] not in ['unauthorized', 'integrity']: + self.challenge['reason'] = 'unauthorized' + self.challenge['salt'] = self.challenge.get('salt', '') + if not self.challenge.get('snonce'): + raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty.")) + self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1') + if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']: + raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm'])) + self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1') + if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']: + raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm'])) + if self.challenge['algorithm'] == 'HMAC-MD5': + self.hashmod = md5 + else: + self.hashmod = sha + if self.challenge['pw-algorithm'] == 'MD5': + self.pwhashmod = md5 + else: + self.pwhashmod = sha + self.key = "".join([self.credentials[0], ":", + self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(), + ":", self.challenge['realm'] + ]) + self.key = self.pwhashmod.new(self.key).hexdigest().lower() + + def request(self, method, request_uri, headers, content): + """Modify the request headers""" + keys = _get_end2end_headers(headers) + keylist = "".join(["%s " % k for k in keys]) + headers_val = "".join([headers[k] for k in keys]) + created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime()) + cnonce = _cnonce() + request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val) + request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower() + 
headers['Authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % ( + self.credentials[0], + self.challenge['realm'], + self.challenge['snonce'], + cnonce, + request_uri, + created, + request_digest, + keylist, + ) + + def response(self, response, content): + challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {}) + if challenge.get('reason') in ['integrity', 'stale']: + return True + return False + + +class WsseAuthentication(Authentication): + """This is thinly tested and should not be relied upon. + At this time there isn't any third party server to test against. + Blogger and TypePad implemented this algorithm at one point + but Blogger has since switched to Basic over HTTPS and + TypePad has implemented it wrong, by never issuing a 401 + challenge but instead requiring your client to telepathically know that + their endpoint is expecting WSSE profile="UsernameToken".""" + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['Authorization'] = 'WSSE profile="UsernameToken"' + iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + cnonce = _cnonce() + password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1]) + headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % ( + self.credentials[0], + password_digest, + cnonce, + iso_now) + +class GoogleLoginAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + from urllib import urlencode + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + 
challenge = _parse_www_authenticate(response, 'www-authenticate') + service = challenge['googlelogin'].get('service', 'xapi') + # Bloggger actually returns the service in the challenge + # For the rest we guess based on the URI + if service == 'xapi' and request_uri.find("calendar") > 0: + service = "cl" + # No point in guessing Base or Spreadsheet + #elif request_uri.find("spreadsheets") > 0: + # service = "wise" + + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) + resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) + lines = content.split('\n') + d = dict([tuple(line.split("=", 1)) for line in lines if line]) + if resp.status == 403: + self.Auth = "" + else: + self.Auth = d['Auth'] + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'GoogleLogin Auth=' + self.Auth + + +AUTH_SCHEME_CLASSES = { + "basic": BasicAuthentication, + "wsse": WsseAuthentication, + "digest": DigestAuthentication, + "hmacdigest": HmacDigestAuthentication, + "googlelogin": GoogleLoginAuthentication +} + +AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] + +def _md5(s): + return + +class FileCache(object): + """Uses a local directory as a store for cached files. + Not really safe to use if multiple threads or processes are going to + be running on the same cache. 
+ """ + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + self.cache = cache + self.safe = safe + if not os.path.exists(cache): + os.makedirs(self.cache) + + def get(self, key): + retval = None + cacheFullPath = os.path.join(self.cache, self.safe(key)) + try: + f = file(cacheFullPath, "r") + retval = f.read() + f.close() + except IOError: + pass + return retval + + def set(self, key, value): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + f = file(cacheFullPath, "w") + f.write(value) + f.close() + + def delete(self, key): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + if os.path.exists(cacheFullPath): + os.remove(cacheFullPath) + +class Credentials(object): + def __init__(self): + self.credentials = [] + + def add(self, name, password, domain=""): + self.credentials.append((domain.lower(), name, password)) + + def clear(self): + self.credentials = [] + + def iter(self, domain): + for (cdomain, name, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (name, password) + +class KeyCerts(Credentials): + """Identical to Credentials except that + name/password are mapped to key/cert.""" + pass + + +class ProxyInfo(object): + """Collect information required to use a proxy.""" + def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None): + """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX + constants. 
For example: + +p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000) + """ + self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass + + def astuple(self): + return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, + self.proxy_user, self.proxy_pass) + + def isgood(self): + return socks and (self.proxy_host != None) and (self.proxy_port != None) + + +class HTTPConnectionWithTimeout(httplib.HTTPConnection): + """HTTPConnection subclass that supports timeouts""" + + def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): + httplib.HTTPConnection.__init__(self, host, port, strict) + self.timeout = timeout + self.proxy_info = proxy_info + + def connect(self): + """Connect to the host and port specified in __init__.""" + # Mostly verbatim from httplib.py. + msg = "getaddrinfo returns an empty list" + for res in socket.getaddrinfo(self.host, self.port, 0, + socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + if self.proxy_info and self.proxy_info.isgood(): + self.sock = socks.socksocket(af, socktype, proto) + self.sock.setproxy(*self.proxy_info.astuple()) + else: + self.sock = socket.socket(af, socktype, proto) + # Different from httplib: support timeouts. + if self.timeout is not None: + self.sock.settimeout(self.timeout) + # End of difference from httplib. + if self.debuglevel > 0: + print "connect: (%s, %s)" % (self.host, self.port) + self.sock.connect(sa) + except socket.error, msg: + if self.debuglevel > 0: + print 'connect fail:', (self.host, self.port) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): + "This class allows communication via SSL." 
+ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None): + self.timeout = timeout + self.proxy_info = proxy_info + httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, + cert_file=cert_file, strict=strict) + + def connect(self): + "Connect to a host on a given (SSL) port." + + if self.proxy_info and self.proxy_info.isgood(): + self.sock.setproxy(*self.proxy_info.astuple()) + sock.setproxy(*self.proxy_info.astuple()) + else: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + if self.timeout is not None: + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + ssl = socket.ssl(sock, self.key_file, self.cert_file) + self.sock = httplib.FakeSocket(sock, ssl) + + + +class Http(object): + """An HTTP client that handles: +- all methods +- caching +- ETags +- compression, +- HTTPS +- Basic +- Digest +- WSSE + +and more. + """ + def __init__(self, cache=None, timeout=None, proxy_info=None): + """The value of proxy_info is a ProxyInfo instance. + +If 'cache' is a string then it is used as a directory name +for a disk cache. Otherwise it must be an object that supports +the same interface as FileCache.""" + self.proxy_info = proxy_info + # Map domain name to an httplib connection + self.connections = {} + # The location of the cache, for now a directory + # where cached responses are held. + if cache and isinstance(cache, str): + self.cache = FileCache(cache) + else: + self.cache = cache + + # Name/password + self.credentials = Credentials() + + # Key/cert + self.certificates = KeyCerts() + + # authorization objects + self.authorizations = [] + + # If set to False then no redirects are followed, even safe ones. + self.follow_redirects = True + + # If 'follow_redirects' is True, and this is set to True then + # all redirecs are followed, including unsafe ones. 
+ self.follow_all_redirects = False + + self.ignore_etag = False + + self.force_exception_to_status_code = False + + self.timeout = timeout + + def _auth_from_challenge(self, host, request_uri, headers, response, content): + """A generator that creates Authorization objects + that can be applied to requests. + """ + challenges = _parse_www_authenticate(response, 'www-authenticate') + for cred in self.credentials.iter(host): + for scheme in AUTH_SCHEME_ORDER: + if challenges.has_key(scheme): + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) + + def add_credentials(self, name, password, domain=""): + """Add a name and password that will be used + any time a request requires authentication.""" + self.credentials.add(name, password, domain) + + def add_certificate(self, key, cert, domain): + """Add a key and cert that will be used + any time a request requires authentication.""" + self.certificates.add(key, cert, domain) + + def clear_credentials(self): + """Remove all the names and passwords + that are used for authentication""" + self.credentials.clear() + self.authorizations = [] + + def _conn_request(self, conn, request_uri, method, body, headers): + for i in range(2): + try: + conn.request(method, request_uri, body, headers) + response = conn.getresponse() + except socket.gaierror: + conn.close() + raise ServerNotFoundError("Unable to find the server at %s" % conn.host) + except httplib.HTTPException, e: + if i == 0: + conn.close() + conn.connect() + continue + else: + raise + else: + content = response.read() + response = Response(response) + if method != "HEAD": + content = _decompressContent(response, content) + + break; + return (response, content) + + + def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): + """Do the actual request using the connection object + and also follow one level of redirects if necessary""" + + auths = [(auth.depth(request_uri), auth) for 
auth in self.authorizations if auth.inscope(host, request_uri)] + auth = auths and sorted(auths)[0][1] or None + if auth: + auth.request(method, request_uri, headers, body) + + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + + if auth: + if auth.response(response, body): + auth.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers ) + response._stale_digest = 1 + + if response.status == 401: + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): + authorization.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) + if response.status != 401: + self.authorizations.append(authorization) + authorization.response(response, body) + break + + if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): + if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: + # Pick out the location header and basically start from the beginning + # remembering first to strip the ETag header and decrement our 'depth' + if redirections: + if not response.has_key('location') and response.status != 300: + raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) + # Fix-up relative redirects (which violate an RFC 2616 MUST) + if response.has_key('location'): + location = response['location'] + (scheme, authority, path, query, fragment) = parse_uri(location) + if authority == None: + response['location'] = urlparse.urljoin(absolute_uri, location) + if response.status == 301 and method in ["GET", "HEAD"]: + response['-x-permanent-redirect-url'] = response['location'] + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + if headers.has_key('if-none-match'): + 
del headers['if-none-match'] + if headers.has_key('if-modified-since'): + del headers['if-modified-since'] + if response.has_key('location'): + location = response['location'] + old_response = copy.deepcopy(response) + if not old_response.has_key('content-location'): + old_response['content-location'] = absolute_uri + redirect_method = ((response.status == 303) and (method not in ["GET", "HEAD"])) and "GET" or method + (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) + response.previous = old_response + else: + raise RedirectLimit( _("Redirected more times than rediection_limit allows."), response, content) + elif response.status in [200, 203] and method == "GET": + # Don't cache 206's since we aren't going to handle byte range requests + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + + return (response, content) + + +# Need to catch and rebrand some exceptions +# Then need to optionally turn all exceptions into status codes +# including all socket.* and httplib.* exceptions. + + + def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): + """ Performs a single HTTP request. +The 'uri' is the URI of the HTTP resource and can begin +with either 'http' or 'https'. The value of 'uri' must be an absolute URI. + +The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. +There is no restriction on the methods allowed. + +The 'body' is the entity body to be sent with the request. It is a string +object. + +Any extra headers that are to be sent with the request should be provided in the +'headers' dictionary. + +The maximum number of redirect to follow before raising an +exception is 'redirections. The default is 5. 
+ +The return value is a tuple of (response, content), the first +being and instance of the 'Response' class, the second being +a string that contains the response entity body. + """ + try: + if headers is None: + headers = {} + else: + headers = _normalize_headers(headers) + + if not headers.has_key('user-agent'): + headers['user-agent'] = "Python-httplib2/%s" % __version__ + + uri = iri2uri(uri) + + (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) + + conn_key = scheme+":"+authority + if conn_key in self.connections: + conn = self.connections[conn_key] + else: + if not connection_type: + connection_type = (scheme == 'https') and HTTPSConnectionWithTimeout or HTTPConnectionWithTimeout + certs = list(self.certificates.iter(authority)) + if scheme == 'https' and certs: + conn = self.connections[conn_key] = connection_type(authority, key_file=certs[0][0], + cert_file=certs[0][1], timeout=self.timeout, proxy_info=self.proxy_info) + else: + conn = self.connections[conn_key] = connection_type(authority, timeout=self.timeout, proxy_info=self.proxy_info) + conn.set_debuglevel(debuglevel) + + if method in ["GET", "HEAD"] and 'range' not in headers: + headers['accept-encoding'] = 'compress, gzip' + + info = email.Message.Message() + cached_value = None + if self.cache: + cachekey = defrag_uri + cached_value = self.cache.get(cachekey) + if cached_value: + info = email.message_from_string(cached_value) + try: + content = cached_value.split('\r\n\r\n', 1)[1] + except IndexError: + self.cache.delete(cachekey) + cachekey = None + cached_value = None + else: + cachekey = None + + if method in ["PUT"] and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: + # http://www.w3.org/1999/04/Editing/ + headers['if-match'] = info['etag'] + + if method not in ["GET", "HEAD"] and self.cache and cachekey: + # RFC 2616 Section 13.10 + self.cache.delete(cachekey) + + if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not 
in headers: + if info.has_key('-x-permanent-redirect-url'): + # Should cached permanent redirects be counted in our redirection count? For now, yes. + (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) + response.previous = Response(info) + response.previous.fromcache = True + else: + # Determine our course of action: + # Is the cached entry fresh or stale? + # Has the client requested a non-cached response? + # + # There seems to be three possible answers: + # 1. [FRESH] Return the cache entry w/o doing a GET + # 2. [STALE] Do the GET (but add in cache validators if available) + # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request + entry_disposition = _entry_disposition(info, headers) + + if entry_disposition == "FRESH": + if not cached_value: + info['status'] = '504' + content = "" + response = Response(info) + if cached_value: + response.fromcache = True + return (response, content) + + if entry_disposition == "STALE": + if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: + headers['if-none-match'] = info['etag'] + if info.has_key('last-modified') and not 'last-modified' in headers: + headers['if-modified-since'] = info['last-modified'] + elif entry_disposition == "TRANSPARENT": + pass + + (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + + if response.status == 304 and method == "GET": + # Rewrite the cache entry with the new end-to-end headers + # Take all headers that are in response + # and overwrite their values in info. + # unless they are hop-by-hop, or are listed in the connection header. 
+ + for key in _get_end2end_headers(response): + info[key] = response[key] + merged_response = Response(info) + if hasattr(response, "_stale_digest"): + merged_response._stale_digest = response._stale_digest + _updateCache(headers, merged_response, content, self.cache, cachekey) + response = merged_response + response.status = 200 + response.fromcache = True + + elif response.status == 200: + content = new_content + else: + self.cache.delete(cachekey) + content = new_content + else: + (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + except Exception, e: + if self.force_exception_to_status_code: + if isinstance(e, HttpLib2ErrorWithResponse): + response = e.response + content = e.content + response.status = 500 + response.reason = str(e) + elif isinstance(e, socket.timeout): + content = "Request Timeout" + response = Response( { + "content-type": "text/plain", + "status": "408", + "content-length": len(content) + }) + response.reason = "Request Timeout" + else: + content = str(e) + response = Response( { + "content-type": "text/plain", + "status": "400", + "content-length": len(content) + }) + response.reason = "Bad Request" + else: + raise + + + return (response, content) + + + +class Response(dict): + """An object more like email.Message than httplib.HTTPResponse.""" + + """Is this response from our local cache""" + fromcache = False + + """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ + version = 11 + + "Status code returned by server. " + status = 200 + + """Reason phrase returned by server.""" + reason = "Ok" + + previous = None + + def __init__(self, info): + # info is either an email.Message or + # an httplib.HTTPResponse object. 
+ if isinstance(info, httplib.HTTPResponse): + for key, value in info.getheaders(): + self[key] = value + self.status = info.status + self['status'] = str(self.status) + self.reason = info.reason + self.version = info.version + elif isinstance(info, email.Message.Message): + for key, value in info.items(): + self[key] = value + self.status = int(self['status']) + else: + for key, value in info.iteritems(): + self[key] = value + self.status = int(self.get('status', self.status)) + + + def __getattr__(self, name): + if name == 'dict': + return self + else: + raise AttributeError, name diff --git a/tutorius/apilib/httplib2/.svn/text-base/__init__.pyc.svn-base b/tutorius/apilib/httplib2/.svn/text-base/__init__.pyc.svn-base Binary files differnew file mode 100644 index 0000000..f092204 --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/text-base/__init__.pyc.svn-base diff --git a/tutorius/apilib/httplib2/.svn/text-base/iri2uri.py.svn-base b/tutorius/apilib/httplib2/.svn/text-base/iri2uri.py.svn-base new file mode 100644 index 0000000..70667ed --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/text-base/iri2uri.py.svn-base @@ -0,0 +1,110 @@ +""" +iri2uri + +Converts an IRI to a URI. 
+ +""" +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [] +__version__ = "1.0.0" +__license__ = "MIT" +__history__ = """ +""" + +import urlparse + + +# Convert an IRI to a URI following the rules in RFC 3987 +# +# The characters we need to enocde and escape are defined in the spec: +# +# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD +# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF +# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD +# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD +# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD +# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD +# / %xD0000-DFFFD / %xE1000-EFFFD + +escape_range = [ + (0xA0, 0xD7FF ), + (0xE000, 0xF8FF ), + (0xF900, 0xFDCF ), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD ), + (0x20000, 0x2FFFD ), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD ), + (0x50000, 0x5FFFD ), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD ), + (0x80000, 0x8FFFD ), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD ), + (0xB0000, 0xBFFFD ), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD ), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD ), + (0x100000, 0x10FFFD) +] + +def encode(c): + retval = c + i = ord(c) + for low, high in escape_range: + if i < low: + break + if i >= low and i <= high: + retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) + break + return retval + + +def iri2uri(uri): + """Convert an IRI to a URI. Note that IRIs must be + passed in a unicode strings. That is, do not utf-8 encode + the IRI before passing it into the function.""" + if isinstance(uri ,unicode): + (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) + authority = authority.encode('idna') + # For each character in 'ucschar' or 'iprivate' + # 1. encode as utf-8 + # 2. 
then %-encode each octet of that utf-8 + uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) + uri = "".join([encode(c) for c in uri]) + return uri + +if __name__ == "__main__": + import unittest + + class Test(unittest.TestCase): + + def test_uris(self): + """Test that URIs are invariant under the transformation.""" + invariant = [ + u"ftp://ftp.is.co.za/rfc/rfc1808.txt", + u"http://www.ietf.org/rfc/rfc2396.txt", + u"ldap://[2001:db8::7]/c=GB?objectClass?one", + u"mailto:John.Doe@example.com", + u"news:comp.infosystems.www.servers.unix", + u"tel:+1-816-555-1212", + u"telnet://192.0.2.16:80/", + u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] + for uri in invariant: + self.assertEqual(uri, iri2uri(uri)) + + def test_iri(self): + """ Test that the right type of escaping is done for each part of the URI.""" + self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) + self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) + self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) + self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) + self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) + + unittest.main() + + diff --git a/tutorius/apilib/httplib2/.svn/text-base/iri2uri.pyc.svn-base b/tutorius/apilib/httplib2/.svn/text-base/iri2uri.pyc.svn-base Binary files differnew file mode 100644 index 0000000..e16a3db --- /dev/null +++ b/tutorius/apilib/httplib2/.svn/text-base/iri2uri.pyc.svn-base diff --git a/tutorius/apilib/httplib2/__init__.py b/tutorius/apilib/httplib2/__init__.py new file mode 100644 
index 0000000..982bf8a --- /dev/null +++ b/tutorius/apilib/httplib2/__init__.py @@ -0,0 +1,1123 @@ +from __future__ import generators +""" +httplib2 + +A caching http interface that supports ETags and gzip +to conserve bandwidth. + +Requires Python 2.3 or later + +Changelog: +2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. + +""" + +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", + "James Antill", + "Xavier Verges Farrero", + "Jonathan Feinberg", + "Blair Zajac", + "Sam Ruby", + "Louis Nyffenegger"] +__license__ = "MIT" +__version__ = "$Rev: 259 $" + +import re +import sys +import md5 +import email +import email.Utils +import email.Message +import StringIO +import gzip +import zlib +import httplib +import urlparse +import base64 +import os +import copy +import calendar +import time +import random +import sha +import hmac +from gettext import gettext as _ +import socket + +try: + import socks +except ImportError: + socks = None + +if sys.version_info >= (2,3): + from iri2uri import iri2uri +else: + def iri2uri(uri): + return uri + +__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error', + 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', + 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', + 'debuglevel'] + + +# The httplib debug level, set to a non-zero value to get debug output +debuglevel = 0 + +# Python 2.3 support +if sys.version_info < (2,4): + def sorted(seq): + seq.sort() + return seq + +# Python 2.3 support +def HTTPResponse__getheaders(self): + """Return list of (header, value) tuples.""" + if self.msg is None: + raise httplib.ResponseNotReady() + return self.msg.items() + +if not hasattr(httplib.HTTPResponse, 'getheaders'): + httplib.HTTPResponse.getheaders = HTTPResponse__getheaders + +# All exceptions raised here derive from HttpLib2Error +class 
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception):
    """Root of the httplib2 exception hierarchy."""
    pass


# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
    """Failure that carries the offending response and content along, so
    callers may choose to treat it as an ordinary (error) response."""
    def __init__(self, desc, response, content):
        self.response = response
        self.content = content
        HttpLib2Error.__init__(self, desc)


class RedirectMissingLocation(HttpLib2ErrorWithResponse):
    pass


class RedirectLimit(HttpLib2ErrorWithResponse):
    pass


class FailedToDecompressContent(HttpLib2ErrorWithResponse):
    pass


class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse):
    pass


class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse):
    pass


class RelativeURIError(HttpLib2Error):
    pass


class ServerNotFoundError(HttpLib2Error):
    pass


# Open Items:
# -----------
# Proxy support

# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)

# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)

# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.


# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
+DEFAULT_MAX_REDIRECTS = 5 + +# Which headers are hop-by-hop headers by default +HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] + +def _get_end2end_headers(response): + hopbyhop = list(HOP_BY_HOP) + hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')]) + return [header for header in response.keys() if header not in hopbyhop] + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + +def urlnorm(uri): + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) + authority = authority.lower() + scheme = scheme.lower() + if not path: + path = "/" + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + scheme = scheme.lower() + defrag_uri = scheme + "://" + authority + request_uri + return scheme, authority, request_uri, defrag_uri + + +# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) +re_url_scheme = re.compile(r'^\w+://') +re_slash = re.compile(r'[?/:|]+') + +def safename(filename): + """Return a filename suitable for the cache. + + Strips dangerous and common characters to create a filename we + can use to store the cache in. 
+ """ + + try: + if re_url_scheme.match(filename): + if isinstance(filename,str): + filename = filename.decode('utf-8') + filename = filename.encode('idna') + else: + filename = filename.encode('idna') + except UnicodeError: + pass + if isinstance(filename,unicode): + filename=filename.encode('utf-8') + filemd5 = md5.new(filename).hexdigest() + filename = re_url_scheme.sub("", filename) + filename = re_slash.sub(",", filename) + + # limit length of filename + if len(filename)>200: + filename=filename[:200] + return ",".join((filename, filemd5)) + +NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') +def _normalize_headers(headers): + return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) + +def _parse_cache_control(headers): + retval = {} + if headers.has_key('cache-control'): + parts = headers['cache-control'].split(',') + parts_with_args = [tuple([x.strip() for x in part.split("=")]) for part in parts if -1 != part.find("=")] + parts_wo_args = [(name.strip(), 1) for name in parts if -1 == name.find("=")] + retval = dict(parts_with_args + parts_wo_args) + return retval + +# Whether to use a strict mode to parse WWW-Authenticate headers +# Might lead to bad results in case of ill-formed header value, +# so disabled by default, falling back to relaxed parsing. +# Set to true to turn on, usefull for testing servers. +USE_WWW_AUTH_STRICT_PARSING = 0 + +# In regex below: +# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP +# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space +# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: +# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"? 
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')

def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict
    per auth_scheme."""
    retval = {}
    if headername in headers:
        remainder = headers[headername].strip()
        # Choose the strict or relaxed auth-param grammar once, up front.
        param_re = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
        while remainder:
            # Split the scheme token off the front of the header value.
            # Authentication-Info carries no scheme name: treat it as digest.
            if headername == 'authentication-info':
                (auth_scheme, the_rest) = ('digest', remainder)
            else:
                (auth_scheme, the_rest) = remainder.split(" ", 1)
            # Collect key=value pairs until the text no longer matches the
            # grammar — that is where the next scheme (if any) begins.
            auth_params = {}
            match = param_re.search(the_rest)
            while match:
                if match and len(match.groups()) == 3:
                    (key, value, the_rest) = match.groups()
                    # Undo quoted-pair escapes in quoted-string values.
                    auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value)
                    match = param_re.search(the_rest)
            retval[auth_scheme.lower()] = auth_params
            remainder = the_rest.strip()
    return retval


def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the Date, Expires and Cache-Control headers.

    We don't handle the following:

    1. Cache-Control: max-stale
    2. Age: headers are not used in the calculations.

    Not that this algorithm is simpler than you might think
    because we are operating as a private (non-shared) cache.
    This lets us ignore 's-maxage'. We can also ignore
    'proxy-invalidate' since we aren't a proxy.
    We will never return a stale document as
    fresh as a design decision, and thus the non-implementation
    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
    since we operate as if every server has sent 'must-revalidate'.
    Since we are private we get to ignore both 'public' and
    'private' parameters. We also ignore 'no-transform' since
    we don't do any transformations.
    The 'no-store' parameter is handled at a higher level.
    So the only Cache-Control parameters we look at are:

    no-cache
    only-if-cached
    max-age
    min-fresh

    Returns one of "FRESH", "STALE" or "TRANSPARENT".
    """
    verdict = "STALE"
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)

    if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
        # HTTP/1.0-style no-cache: bypass the cache and record the intent
        # as a Cache-Control header for downstream code.
        verdict = "TRANSPARENT"
        if 'cache-control' not in request_headers:
            request_headers['cache-control'] = 'no-cache'
    elif 'no-cache' in cc:
        verdict = "TRANSPARENT"
    elif 'no-cache' in cc_response:
        verdict = "STALE"
    elif 'only-if-cached' in cc:
        verdict = "FRESH"
    elif 'date' in response_headers:
        # Age-based freshness: compare current age against the freshness
        # lifetime derived from max-age (response), Expires, then max-age
        # (request), in that order of precedence.
        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        if 'max-age' in cc_response:
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif 'expires' in response_headers:
            expires = email.Utils.parsedate_tz(response_headers['expires'])
            if expires is None:
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        if 'max-age' in cc:
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        if 'min-fresh' in cc:
            # The client wants the entry to still be fresh min-fresh
            # seconds from now; age the entry accordingly.
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            current_age += min_fresh
        if freshness_lifetime > current_age:
            verdict = "FRESH"
    return verdict
_decompressContent(response, new_content): + content = new_content + try: + encoding = response.get('content-encoding', None) + if encoding in ['gzip', 'deflate']: + if encoding == 'gzip': + content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() + if encoding == 'deflate': + content = zlib.decompress(content) + response['content-length'] = str(len(content)) + del response['content-encoding'] + except IOError: + content = "" + raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) + return content + +def _updateCache(request_headers, response_headers, content, cache, cachekey): + if cachekey: + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + if cc.has_key('no-store') or cc_response.has_key('no-store'): + cache.delete(cachekey) + else: + info = email.Message.Message() + for key, value in response_headers.iteritems(): + if key not in ['status','content-encoding','transfer-encoding']: + info[key] = value + + status = response_headers.status + if status == 304: + status = 200 + + status_header = 'status: %d\r\n' % response_headers.status + + header_str = info.as_string() + + header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str) + text = "".join([status_header, header_str, content]) + + cache.set(cachekey, text) + +def _cnonce(): + dig = md5.new("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest() + return dig[:16] + +def _wsse_username_token(cnonce, iso_now, password): + return base64.encodestring(sha.new("%s%s%s" % (cnonce, iso_now, password)).digest()).strip() + + +# For credentials we need two things, first +# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.) 
+# Then we also need a list of URIs that have already demanded authentication +# That list is tricky since sub-URIs can take the same auth, or the +# auth scheme may change as you descend the tree. +# So we also need each Auth instance to be able to tell us +# how close to the 'top' it is. + +class Authentication(object): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + self.path = path + self.host = host + self.credentials = credentials + self.http = http + + def depth(self, request_uri): + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + return request_uri[len(self.path):].count("/") + + def inscope(self, host, request_uri): + # XXX Should we normalize the request_uri? + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + return (host == self.host) and path.startswith(self.path) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header. Over-rise this in sub-classes.""" + pass + + def response(self, response, content): + """Gives us a chance to update with new nonces + or such returned from the last authorized response. + Over-rise this in sub-classes if necessary. + + Return TRUE is the request is to be retried, for + example Digest may return stale=true. 
+ """ + return False + + + +class BasicAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'Basic ' + base64.encodestring("%s:%s" % self.credentials).strip() + + +class DigestAuthentication(Authentication): + """Only do qop='auth' and MD5, since that + is all Apache currently implements""" + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + challenge = _parse_www_authenticate(response, 'www-authenticate') + self.challenge = challenge['digest'] + qop = self.challenge.get('qop') + self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None + if self.challenge['qop'] is None: + raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop)) + self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5') + if self.challenge['algorithm'] != 'MD5': + raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." 
% self.challenge['algorithm'])) + self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]]) + self.challenge['nc'] = 1 + + def request(self, method, request_uri, headers, content, cnonce = None): + """Modify the request headers""" + H = lambda x: md5.new(x).hexdigest() + KD = lambda s, d: H("%s:%s" % (s, d)) + A2 = "".join([method, ":", request_uri]) + self.challenge['cnonce'] = cnonce or _cnonce() + request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'], + '%08x' % self.challenge['nc'], + self.challenge['cnonce'], + self.challenge['qop'], H(A2) + )) + headers['Authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % ( + self.credentials[0], + self.challenge['realm'], + self.challenge['nonce'], + request_uri, + self.challenge['algorithm'], + request_digest, + self.challenge['qop'], + self.challenge['nc'], + self.challenge['cnonce'], + ) + self.challenge['nc'] += 1 + + def response(self, response, content): + if not response.has_key('authentication-info'): + challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {}) + if 'true' == challenge.get('stale'): + self.challenge['nonce'] = challenge['nonce'] + self.challenge['nc'] = 1 + return True + else: + updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {}) + + if updated_challenge.has_key('nextnonce'): + self.challenge['nonce'] = updated_challenge['nextnonce'] + self.challenge['nc'] = 1 + return False + + +class HmacDigestAuthentication(Authentication): + """Adapted from Robert Sayre's code and DigestAuthentication above.""" + __author__ = "Thomas Broyer (t.broyer@ltgt.net)" + + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + challenge = _parse_www_authenticate(response, 
'www-authenticate') + self.challenge = challenge['hmacdigest'] + # TODO: self.challenge['domain'] + self.challenge['reason'] = self.challenge.get('reason', 'unauthorized') + if self.challenge['reason'] not in ['unauthorized', 'integrity']: + self.challenge['reason'] = 'unauthorized' + self.challenge['salt'] = self.challenge.get('salt', '') + if not self.challenge.get('snonce'): + raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty.")) + self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1') + if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']: + raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm'])) + self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1') + if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']: + raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm'])) + if self.challenge['algorithm'] == 'HMAC-MD5': + self.hashmod = md5 + else: + self.hashmod = sha + if self.challenge['pw-algorithm'] == 'MD5': + self.pwhashmod = md5 + else: + self.pwhashmod = sha + self.key = "".join([self.credentials[0], ":", + self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(), + ":", self.challenge['realm'] + ]) + self.key = self.pwhashmod.new(self.key).hexdigest().lower() + + def request(self, method, request_uri, headers, content): + """Modify the request headers""" + keys = _get_end2end_headers(headers) + keylist = "".join(["%s " % k for k in keys]) + headers_val = "".join([headers[k] for k in keys]) + created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime()) + cnonce = _cnonce() + request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val) + request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower() + 
headers['Authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % ( + self.credentials[0], + self.challenge['realm'], + self.challenge['snonce'], + cnonce, + request_uri, + created, + request_digest, + keylist, + ) + + def response(self, response, content): + challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {}) + if challenge.get('reason') in ['integrity', 'stale']: + return True + return False + + +class WsseAuthentication(Authentication): + """This is thinly tested and should not be relied upon. + At this time there isn't any third party server to test against. + Blogger and TypePad implemented this algorithm at one point + but Blogger has since switched to Basic over HTTPS and + TypePad has implemented it wrong, by never issuing a 401 + challenge but instead requiring your client to telepathically know that + their endpoint is expecting WSSE profile="UsernameToken".""" + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['Authorization'] = 'WSSE profile="UsernameToken"' + iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + cnonce = _cnonce() + password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1]) + headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % ( + self.credentials[0], + password_digest, + cnonce, + iso_now) + +class GoogleLoginAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + from urllib import urlencode + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + 
challenge = _parse_www_authenticate(response, 'www-authenticate') + service = challenge['googlelogin'].get('service', 'xapi') + # Bloggger actually returns the service in the challenge + # For the rest we guess based on the URI + if service == 'xapi' and request_uri.find("calendar") > 0: + service = "cl" + # No point in guessing Base or Spreadsheet + #elif request_uri.find("spreadsheets") > 0: + # service = "wise" + + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) + resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) + lines = content.split('\n') + d = dict([tuple(line.split("=", 1)) for line in lines if line]) + if resp.status == 403: + self.Auth = "" + else: + self.Auth = d['Auth'] + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'GoogleLogin Auth=' + self.Auth + + +AUTH_SCHEME_CLASSES = { + "basic": BasicAuthentication, + "wsse": WsseAuthentication, + "digest": DigestAuthentication, + "hmacdigest": HmacDigestAuthentication, + "googlelogin": GoogleLoginAuthentication +} + +AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] + +def _md5(s): + return + +class FileCache(object): + """Uses a local directory as a store for cached files. + Not really safe to use if multiple threads or processes are going to + be running on the same cache. 
+ """ + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + self.cache = cache + self.safe = safe + if not os.path.exists(cache): + os.makedirs(self.cache) + + def get(self, key): + retval = None + cacheFullPath = os.path.join(self.cache, self.safe(key)) + try: + f = file(cacheFullPath, "r") + retval = f.read() + f.close() + except IOError: + pass + return retval + + def set(self, key, value): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + f = file(cacheFullPath, "w") + f.write(value) + f.close() + + def delete(self, key): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + if os.path.exists(cacheFullPath): + os.remove(cacheFullPath) + +class Credentials(object): + def __init__(self): + self.credentials = [] + + def add(self, name, password, domain=""): + self.credentials.append((domain.lower(), name, password)) + + def clear(self): + self.credentials = [] + + def iter(self, domain): + for (cdomain, name, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (name, password) + +class KeyCerts(Credentials): + """Identical to Credentials except that + name/password are mapped to key/cert.""" + pass + + +class ProxyInfo(object): + """Collect information required to use a proxy.""" + def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None): + """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX + constants. 
For example: + +p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000) + """ + self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass + + def astuple(self): + return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, + self.proxy_user, self.proxy_pass) + + def isgood(self): + return socks and (self.proxy_host != None) and (self.proxy_port != None) + + +class HTTPConnectionWithTimeout(httplib.HTTPConnection): + """HTTPConnection subclass that supports timeouts""" + + def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): + httplib.HTTPConnection.__init__(self, host, port, strict) + self.timeout = timeout + self.proxy_info = proxy_info + + def connect(self): + """Connect to the host and port specified in __init__.""" + # Mostly verbatim from httplib.py. + msg = "getaddrinfo returns an empty list" + for res in socket.getaddrinfo(self.host, self.port, 0, + socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + if self.proxy_info and self.proxy_info.isgood(): + self.sock = socks.socksocket(af, socktype, proto) + self.sock.setproxy(*self.proxy_info.astuple()) + else: + self.sock = socket.socket(af, socktype, proto) + # Different from httplib: support timeouts. + if self.timeout is not None: + self.sock.settimeout(self.timeout) + # End of difference from httplib. + if self.debuglevel > 0: + print "connect: (%s, %s)" % (self.host, self.port) + self.sock.connect(sa) + except socket.error, msg: + if self.debuglevel > 0: + print 'connect fail:', (self.host, self.port) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): + "This class allows communication via SSL." 
+ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None): + self.timeout = timeout + self.proxy_info = proxy_info + httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, + cert_file=cert_file, strict=strict) + + def connect(self): + "Connect to a host on a given (SSL) port." + + if self.proxy_info and self.proxy_info.isgood(): + self.sock.setproxy(*self.proxy_info.astuple()) + sock.setproxy(*self.proxy_info.astuple()) + else: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + if self.timeout is not None: + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + ssl = socket.ssl(sock, self.key_file, self.cert_file) + self.sock = httplib.FakeSocket(sock, ssl) + + + +class Http(object): + """An HTTP client that handles: +- all methods +- caching +- ETags +- compression, +- HTTPS +- Basic +- Digest +- WSSE + +and more. + """ + def __init__(self, cache=None, timeout=None, proxy_info=None): + """The value of proxy_info is a ProxyInfo instance. + +If 'cache' is a string then it is used as a directory name +for a disk cache. Otherwise it must be an object that supports +the same interface as FileCache.""" + self.proxy_info = proxy_info + # Map domain name to an httplib connection + self.connections = {} + # The location of the cache, for now a directory + # where cached responses are held. + if cache and isinstance(cache, str): + self.cache = FileCache(cache) + else: + self.cache = cache + + # Name/password + self.credentials = Credentials() + + # Key/cert + self.certificates = KeyCerts() + + # authorization objects + self.authorizations = [] + + # If set to False then no redirects are followed, even safe ones. + self.follow_redirects = True + + # If 'follow_redirects' is True, and this is set to True then + # all redirecs are followed, including unsafe ones. 
+ self.follow_all_redirects = False + + self.ignore_etag = False + + self.force_exception_to_status_code = False + + self.timeout = timeout + + def _auth_from_challenge(self, host, request_uri, headers, response, content): + """A generator that creates Authorization objects + that can be applied to requests. + """ + challenges = _parse_www_authenticate(response, 'www-authenticate') + for cred in self.credentials.iter(host): + for scheme in AUTH_SCHEME_ORDER: + if challenges.has_key(scheme): + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) + + def add_credentials(self, name, password, domain=""): + """Add a name and password that will be used + any time a request requires authentication.""" + self.credentials.add(name, password, domain) + + def add_certificate(self, key, cert, domain): + """Add a key and cert that will be used + any time a request requires authentication.""" + self.certificates.add(key, cert, domain) + + def clear_credentials(self): + """Remove all the names and passwords + that are used for authentication""" + self.credentials.clear() + self.authorizations = [] + + def _conn_request(self, conn, request_uri, method, body, headers): + for i in range(2): + try: + conn.request(method, request_uri, body, headers) + response = conn.getresponse() + except socket.gaierror: + conn.close() + raise ServerNotFoundError("Unable to find the server at %s" % conn.host) + except httplib.HTTPException, e: + if i == 0: + conn.close() + conn.connect() + continue + else: + raise + else: + content = response.read() + response = Response(response) + if method != "HEAD": + content = _decompressContent(response, content) + + break; + return (response, content) + + + def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): + """Do the actual request using the connection object + and also follow one level of redirects if necessary""" + + auths = [(auth.depth(request_uri), auth) for 
auth in self.authorizations if auth.inscope(host, request_uri)] + auth = auths and sorted(auths)[0][1] or None + if auth: + auth.request(method, request_uri, headers, body) + + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + + if auth: + if auth.response(response, body): + auth.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers ) + response._stale_digest = 1 + + if response.status == 401: + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): + authorization.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) + if response.status != 401: + self.authorizations.append(authorization) + authorization.response(response, body) + break + + if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): + if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: + # Pick out the location header and basically start from the beginning + # remembering first to strip the ETag header and decrement our 'depth' + if redirections: + if not response.has_key('location') and response.status != 300: + raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) + # Fix-up relative redirects (which violate an RFC 2616 MUST) + if response.has_key('location'): + location = response['location'] + (scheme, authority, path, query, fragment) = parse_uri(location) + if authority == None: + response['location'] = urlparse.urljoin(absolute_uri, location) + if response.status == 301 and method in ["GET", "HEAD"]: + response['-x-permanent-redirect-url'] = response['location'] + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + if headers.has_key('if-none-match'): + 
del headers['if-none-match'] + if headers.has_key('if-modified-since'): + del headers['if-modified-since'] + if response.has_key('location'): + location = response['location'] + old_response = copy.deepcopy(response) + if not old_response.has_key('content-location'): + old_response['content-location'] = absolute_uri + redirect_method = ((response.status == 303) and (method not in ["GET", "HEAD"])) and "GET" or method + (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) + response.previous = old_response + else: + raise RedirectLimit( _("Redirected more times than rediection_limit allows."), response, content) + elif response.status in [200, 203] and method == "GET": + # Don't cache 206's since we aren't going to handle byte range requests + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + + return (response, content) + + +# Need to catch and rebrand some exceptions +# Then need to optionally turn all exceptions into status codes +# including all socket.* and httplib.* exceptions. + + + def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): + """ Performs a single HTTP request. +The 'uri' is the URI of the HTTP resource and can begin +with either 'http' or 'https'. The value of 'uri' must be an absolute URI. + +The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. +There is no restriction on the methods allowed. + +The 'body' is the entity body to be sent with the request. It is a string +object. + +Any extra headers that are to be sent with the request should be provided in the +'headers' dictionary. + +The maximum number of redirect to follow before raising an +exception is 'redirections. The default is 5. 
+ +The return value is a tuple of (response, content), the first +being and instance of the 'Response' class, the second being +a string that contains the response entity body. + """ + try: + if headers is None: + headers = {} + else: + headers = _normalize_headers(headers) + + if not headers.has_key('user-agent'): + headers['user-agent'] = "Python-httplib2/%s" % __version__ + + uri = iri2uri(uri) + + (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) + + conn_key = scheme+":"+authority + if conn_key in self.connections: + conn = self.connections[conn_key] + else: + if not connection_type: + connection_type = (scheme == 'https') and HTTPSConnectionWithTimeout or HTTPConnectionWithTimeout + certs = list(self.certificates.iter(authority)) + if scheme == 'https' and certs: + conn = self.connections[conn_key] = connection_type(authority, key_file=certs[0][0], + cert_file=certs[0][1], timeout=self.timeout, proxy_info=self.proxy_info) + else: + conn = self.connections[conn_key] = connection_type(authority, timeout=self.timeout, proxy_info=self.proxy_info) + conn.set_debuglevel(debuglevel) + + if method in ["GET", "HEAD"] and 'range' not in headers: + headers['accept-encoding'] = 'compress, gzip' + + info = email.Message.Message() + cached_value = None + if self.cache: + cachekey = defrag_uri + cached_value = self.cache.get(cachekey) + if cached_value: + info = email.message_from_string(cached_value) + try: + content = cached_value.split('\r\n\r\n', 1)[1] + except IndexError: + self.cache.delete(cachekey) + cachekey = None + cached_value = None + else: + cachekey = None + + if method in ["PUT"] and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: + # http://www.w3.org/1999/04/Editing/ + headers['if-match'] = info['etag'] + + if method not in ["GET", "HEAD"] and self.cache and cachekey: + # RFC 2616 Section 13.10 + self.cache.delete(cachekey) + + if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not 
in headers: + if info.has_key('-x-permanent-redirect-url'): + # Should cached permanent redirects be counted in our redirection count? For now, yes. + (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) + response.previous = Response(info) + response.previous.fromcache = True + else: + # Determine our course of action: + # Is the cached entry fresh or stale? + # Has the client requested a non-cached response? + # + # There seems to be three possible answers: + # 1. [FRESH] Return the cache entry w/o doing a GET + # 2. [STALE] Do the GET (but add in cache validators if available) + # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request + entry_disposition = _entry_disposition(info, headers) + + if entry_disposition == "FRESH": + if not cached_value: + info['status'] = '504' + content = "" + response = Response(info) + if cached_value: + response.fromcache = True + return (response, content) + + if entry_disposition == "STALE": + if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: + headers['if-none-match'] = info['etag'] + if info.has_key('last-modified') and not 'last-modified' in headers: + headers['if-modified-since'] = info['last-modified'] + elif entry_disposition == "TRANSPARENT": + pass + + (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + + if response.status == 304 and method == "GET": + # Rewrite the cache entry with the new end-to-end headers + # Take all headers that are in response + # and overwrite their values in info. + # unless they are hop-by-hop, or are listed in the connection header. 
+ + for key in _get_end2end_headers(response): + info[key] = response[key] + merged_response = Response(info) + if hasattr(response, "_stale_digest"): + merged_response._stale_digest = response._stale_digest + _updateCache(headers, merged_response, content, self.cache, cachekey) + response = merged_response + response.status = 200 + response.fromcache = True + + elif response.status == 200: + content = new_content + else: + self.cache.delete(cachekey) + content = new_content + else: + (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + except Exception, e: + if self.force_exception_to_status_code: + if isinstance(e, HttpLib2ErrorWithResponse): + response = e.response + content = e.content + response.status = 500 + response.reason = str(e) + elif isinstance(e, socket.timeout): + content = "Request Timeout" + response = Response( { + "content-type": "text/plain", + "status": "408", + "content-length": len(content) + }) + response.reason = "Request Timeout" + else: + content = str(e) + response = Response( { + "content-type": "text/plain", + "status": "400", + "content-length": len(content) + }) + response.reason = "Bad Request" + else: + raise + + + return (response, content) + + + +class Response(dict): + """An object more like email.Message than httplib.HTTPResponse.""" + + """Is this response from our local cache""" + fromcache = False + + """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ + version = 11 + + "Status code returned by server. " + status = 200 + + """Reason phrase returned by server.""" + reason = "Ok" + + previous = None + + def __init__(self, info): + # info is either an email.Message or + # an httplib.HTTPResponse object. 
+ if isinstance(info, httplib.HTTPResponse): + for key, value in info.getheaders(): + self[key] = value + self.status = info.status + self['status'] = str(self.status) + self.reason = info.reason + self.version = info.version + elif isinstance(info, email.Message.Message): + for key, value in info.items(): + self[key] = value + self.status = int(self['status']) + else: + for key, value in info.iteritems(): + self[key] = value + self.status = int(self.get('status', self.status)) + + + def __getattr__(self, name): + if name == 'dict': + return self + else: + raise AttributeError, name diff --git a/tutorius/apilib/httplib2/__init__.pyc b/tutorius/apilib/httplib2/__init__.pyc Binary files differnew file mode 100644 index 0000000..e5f8ebe --- /dev/null +++ b/tutorius/apilib/httplib2/__init__.pyc diff --git a/tutorius/apilib/httplib2/iri2uri.py b/tutorius/apilib/httplib2/iri2uri.py new file mode 100644 index 0000000..70667ed --- /dev/null +++ b/tutorius/apilib/httplib2/iri2uri.py @@ -0,0 +1,110 @@ +""" +iri2uri + +Converts an IRI to a URI. 
+ +""" +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [] +__version__ = "1.0.0" +__license__ = "MIT" +__history__ = """ +""" + +import urlparse + + +# Convert an IRI to a URI following the rules in RFC 3987 +# +# The characters we need to enocde and escape are defined in the spec: +# +# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD +# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF +# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD +# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD +# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD +# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD +# / %xD0000-DFFFD / %xE1000-EFFFD + +escape_range = [ + (0xA0, 0xD7FF ), + (0xE000, 0xF8FF ), + (0xF900, 0xFDCF ), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD ), + (0x20000, 0x2FFFD ), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD ), + (0x50000, 0x5FFFD ), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD ), + (0x80000, 0x8FFFD ), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD ), + (0xB0000, 0xBFFFD ), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD ), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD ), + (0x100000, 0x10FFFD) +] + +def encode(c): + retval = c + i = ord(c) + for low, high in escape_range: + if i < low: + break + if i >= low and i <= high: + retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) + break + return retval + + +def iri2uri(uri): + """Convert an IRI to a URI. Note that IRIs must be + passed in a unicode strings. That is, do not utf-8 encode + the IRI before passing it into the function.""" + if isinstance(uri ,unicode): + (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) + authority = authority.encode('idna') + # For each character in 'ucschar' or 'iprivate' + # 1. encode as utf-8 + # 2. 
then %-encode each octet of that utf-8 + uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) + uri = "".join([encode(c) for c in uri]) + return uri + +if __name__ == "__main__": + import unittest + + class Test(unittest.TestCase): + + def test_uris(self): + """Test that URIs are invariant under the transformation.""" + invariant = [ + u"ftp://ftp.is.co.za/rfc/rfc1808.txt", + u"http://www.ietf.org/rfc/rfc2396.txt", + u"ldap://[2001:db8::7]/c=GB?objectClass?one", + u"mailto:John.Doe@example.com", + u"news:comp.infosystems.www.servers.unix", + u"tel:+1-816-555-1212", + u"telnet://192.0.2.16:80/", + u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] + for uri in invariant: + self.assertEqual(uri, iri2uri(uri)) + + def test_iri(self): + """ Test that the right type of escaping is done for each part of the URI.""" + self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) + self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) + self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) + self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) + self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) + + unittest.main() + + diff --git a/tutorius/apilib/httplib2/iri2uri.pyc b/tutorius/apilib/httplib2/iri2uri.pyc Binary files differnew file mode 100644 index 0000000..879e719 --- /dev/null +++ b/tutorius/apilib/httplib2/iri2uri.pyc diff --git a/tutorius/apilib/mimeTypes.py b/tutorius/apilib/mimeTypes.py new file mode 100644 index 0000000..ff8f641 --- /dev/null +++ b/tutorius/apilib/mimeTypes.py @@ -0,0 +1,57 @@ 
+""" + Copyright (C) 2008 Benjamin O'Steen + + This file is part of python-fedoracommons. + + python-fedoracommons is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + python-fedoracommons is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with python-fedoracommons. If not, see <http://www.gnu.org/licenses/>. +""" + +__license__ = 'GPL http://www.gnu.org/licenses/gpl.txt' +__author__ = "Benjamin O'Steen <bosteen@gmail.com>" +__version__ = '0.1' + +class mimeTypes(object): + def getDictionary(self): + mimetype_to_extension = {} + extension_to_mimetype = {} + mimetype_to_extension['text/plain'] = 'txt' + mimetype_to_extension['text/xml'] = 'xml' + mimetype_to_extension['text/css'] = 'css' + mimetype_to_extension['text/javascript'] = 'js' + mimetype_to_extension['text/rtf'] = 'rtf' + mimetype_to_extension['text/calendar'] = 'ics' + mimetype_to_extension['application/msword'] = 'doc' + mimetype_to_extension['application/msexcel'] = 'xls' + mimetype_to_extension['application/x-msword'] = 'doc' + mimetype_to_extension['application/vnd.ms-excel'] = 'xls' + mimetype_to_extension['application/vnd.ms-powerpoint'] = 'ppt' + mimetype_to_extension['application/pdf'] = 'pdf' + mimetype_to_extension['text/comma-separated-values'] = 'csv' + + + mimetype_to_extension['image/jpeg'] = 'jpg' + mimetype_to_extension['image/gif'] = 'gif' + mimetype_to_extension['image/jpg'] = 'jpg' + mimetype_to_extension['image/tiff'] = 'tiff' + mimetype_to_extension['image/png'] = 'png' + + # And hacky reverse lookups + for mimetype in mimetype_to_extension: + 
extension_to_mimetype[mimetype_to_extension[mimetype]] = mimetype + + mimetype_extension_mapping = {} + mimetype_extension_mapping.update(mimetype_to_extension) + mimetype_extension_mapping.update(extension_to_mimetype) + + return mimetype_extension_mapping diff --git a/tutorius/apilib/mimeTypes.pyc b/tutorius/apilib/mimeTypes.pyc Binary files differnew file mode 100644 index 0000000..35ef2b2 --- /dev/null +++ b/tutorius/apilib/mimeTypes.pyc diff --git a/tutorius/apilib/restful_lib.py b/tutorius/apilib/restful_lib.py new file mode 100644 index 0000000..e1ee0af --- /dev/null +++ b/tutorius/apilib/restful_lib.py @@ -0,0 +1,129 @@ +""" + Copyright (C) 2008 Benjamin O'Steen + + This file is part of python-fedoracommons. + + python-fedoracommons is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + python-fedoracommons is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with python-fedoracommons. If not, see <http://www.gnu.org/licenses/>. 
+""" + +__license__ = 'GPL http://www.gnu.org/licenses/gpl.txt' +__author__ = "Benjamin O'Steen <bosteen@gmail.com>" +__version__ = '0.1' + +import httplib2 +import urlparse +import urllib +import base64 +from base64 import encodestring + +from mimeTypes import * + +import mimetypes + +from cStringIO import StringIO + +class Connection: + def __init__(self, base_url, username=None, password=None): + self.base_url = base_url + self.username = username + m = mimeTypes() + self.mimetypes = m.getDictionary() + + self.url = urlparse.urlparse(base_url) + + (scheme, netloc, path, query, fragment) = urlparse.urlsplit(base_url) + + self.scheme = scheme + self.host = netloc + self.path = path + + # Create Http class with support for Digest HTTP Authentication, if necessary + self.h = httplib2.Http(".cache") + self.h.follow_all_redirects = True + if username and password: + self.h.add_credentials(username, password) + + def request_get(self, resource, args = None, headers={}): + return self.request(resource, "get", args, headers=headers) + + def request_delete(self, resource, args = None, headers={}): + return self.request(resource, "delete", args, headers=headers) + + def request_head(self, resource, args = None, headers={}): + return self.request(resource, "head", args, headers=headers) + + def request_post(self, resource, args = None, body = None, filename=None, headers={}): + return self.request(resource, "post", args , body = body, filename=filename, headers=headers) + + def request_put(self, resource, args = None, body = None, filename=None, headers={}): + return self.request(resource, "put", args , body = body, filename=filename, headers=headers) + + def get_content_type(self, filename): + extension = filename.split('.')[-1] + guessed_mimetype = self.mimetypes.get(extension, mimetypes.guess_type(filename)[0]) + return guessed_mimetype or 'application/octet-stream' + + def request(self, resource, method = "get", args = None, body = None, filename=None, headers={}): + 
params = None + path = resource + headers['User-Agent'] = 'Basic Agent' + + BOUNDARY = u'00hoYUXOnLD5RQ8SKGYVgLLt64jejnMwtO7q8XE1' + CRLF = u'\r\n' + + if filename and body: + #fn = open(filename ,'r') + #chunks = fn.read() + #fn.close() + + # Attempt to find the Mimetype + content_type = self.get_content_type(filename) + headers['Content-Type']='multipart/form-data; boundary='+BOUNDARY + encode_string = StringIO() + encode_string.write(CRLF) + encode_string.write(u'--' + BOUNDARY + CRLF) + encode_string.write(u'Content-Disposition: form-data; name="file"; filename="%s"' % filename) + encode_string.write(CRLF) + encode_string.write(u'Content-Type: %s' % content_type + CRLF) + encode_string.write(CRLF) + encode_string.write(body) + encode_string.write(CRLF) + encode_string.write(u'--' + BOUNDARY + u'--' + CRLF) + + body = encode_string.getvalue() + headers['Content-Length'] = str(len(body)) + elif body: + if not headers.get('Content-Type', None): + headers['Content-Type']='text/xml' + headers['Content-Length'] = str(len(body)) + else: + headers['Content-Type']='text/xml' + + if args: + path += u"?" 
+ urllib.urlencode(args) + + request_path = [] + if self.path != "/": + if self.path.endswith('/'): + request_path.append(self.path[:-1]) + else: + request_path.append(self.path) + if path.startswith('/'): + request_path.append(path[1:]) + else: + request_path.append(path) + + resp, content = self.h.request(u"%s://%s%s" % (self.scheme, self.host, u'/'.join(request_path)), method.upper(), body=body, headers=headers ) + + return {u'headers':resp, u'body':content.decode('UTF-8')} diff --git a/tutorius/apilib/restful_lib.pyc b/tutorius/apilib/restful_lib.pyc Binary files differnew file mode 100644 index 0000000..5b06765 --- /dev/null +++ b/tutorius/apilib/restful_lib.pyc diff --git a/tutorius/bundler.py b/tutorius/bundler.py deleted file mode 100644 index 734c679..0000000 --- a/tutorius/bundler.py +++ /dev/null @@ -1,723 +0,0 @@ -# Copyright (C) 2009, Tutorius.org -# Copyright (C) 2009, Jean-Christophe Savard <savard.jean.christophe@gmail.com> -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA - - -""" -This module contains all the data handling class of Tutorius -""" - -import logging -import os -import uuid -import xml.dom.minidom - -from sugar.tutorius import addon -from sugar.tutorius.core import Tutorial, State, FiniteStateMachine -from sugar.tutorius.filters import * -from sugar.tutorius.actions import * -from ConfigParser import SafeConfigParser - -# this is where user installed/generated tutorials will go -def _get_store_root(): - profile_name = os.getenv("SUGAR_PROFILE") or "default" - return os.path.join(os.getenv("HOME"), - ".sugar",profile_name,"tutorius","data") -# this is where activity bundled tutorials should be, under the activity bundle -def _get_bundle_root(): - return os.path.join(os.getenv("SUGAR_BUNDLE_PATH"),"data","tutorius","data") - -INI_ACTIVITY_SECTION = "RELATED_ACTIVITIES" -INI_METADATA_SECTION = "GENERAL_METADATA" -INI_GUID_PROPERTY = "GUID" -INI_NAME_PROPERTY = "NAME" -INI_XML_FSM_PROPERTY = "FSM_FILENAME" -INI_FILENAME = "meta.ini" -TUTORIAL_FILENAME = "tutorial.xml" -NODE_COMPONENT = "Component" -NODE_SUBCOMPONENT = "SubComponent" -NODE_SUBCOMPONENTLIST = "SubComponentList" - -class Vault(object): - """ - The Vault is the primary interface for the storage and installation of tutorials - on the machine. It needs to accomplish the following tasks : - - query() : Lists the - - installTutorial() : - - deleteTutorial() : - - readTutorial() : - - saveTutorial() : - """ - def query(keyword="", category="", start_index=0, num_results=10): - """ - Returns a list of tutorial meta-data corresponding to the keywords - and category mentionned. - - @param keyword The keyword to look for in the tutorial title and description. 
- @param category The category in which to look for tutorials - @param start_index The first result to be shown (e.g. ) - @param num_results The number of results to show - @return The list of tutorial metadata that corresponds to the query parameters. - """ - raise NotImplementedError("The query function on the Vault is not implemented") - - def installTutorial(path ,force_install=False): - """ - Inserts the tutorial inside the Vault. Once installed, it will show up - """ - raise NotImplementedError("Installation in the Vault not supported yet") - - def deleteTutorial(tutorial_id): - raise NotImplementedError("") - - def readTutorial(tutorial_id): - raise NotImplementedError("") - - def saveTutorial(tutorial, metadata, resource_list): - raise NotImplementedError("") - -class TutorialStore(object): - - def list_available_tutorials(self, activity_name, activity_vers): - """ - Generate the list of all tutorials present on disk for a - given activity. - - @returns a map of tutorial {names : GUID}. - """ - # check both under the activity data and user installed folders - paths = [_get_store_root(), _get_bundle_root()] - - tutoGuidName = {} - - for repository in paths: - # (our) convention dictates that tutorial folders are named - # with their GUID (for unicity) but this is not enforced. - try: - for tuto in os.listdir(repository): - parser = SafeConfigParser() - parser.read(os.path.join(repository, tuto, INI_FILENAME)) - guid = parser.get(INI_METADATA_SECTION, INI_GUID_PROPERTY) - name = parser.get(INI_METADATA_SECTION, INI_NAME_PROPERTY) - activities = parser.options(INI_ACTIVITY_SECTION) - # enforce matching activity name AND version, as UI changes - # break tutorials. We may lower this requirement when the - # UAM gets less dependent on the widget order. - # Also note property names are always stored lowercase. 
- if activity_name.lower() in activities: - version = parser.get(INI_ACTIVITY_SECTION, activity_name) - if activity_vers == version: - tutoGuidName[guid] = name - except OSError: - # the repository may not exist. Continue scanning - pass - - return tutoGuidName - - def load_tutorial(self, Guid): - """ - Rebuilds a tutorial object from it's serialized state. - Common storing paths will be scanned. - - @param Guid the generic identifier of the tutorial - @returns a Tutorial object containing an FSM - """ - bundle = TutorialBundler(Guid) - bundle_path = bundle.get_tutorial_path() - config = SafeConfigParser() - config.read(os.path.join(bundle_path, INI_FILENAME)) - - serializer = XMLSerializer() - - name = config.get(INI_METADATA_SECTION, INI_NAME_PROPERTY) - fsm = serializer.load_fsm(Guid) - - tuto = Tutorial(name, fsm) - return tuto - - -class Serializer(object): - """ - Interface that provide serializing and deserializing of the FSM - used in the tutorials to/from disk. Must be inherited. - """ - - def save_fsm(self,fsm): - """ - Save fsm to disk. If a GUID parameter is provided, the existing GUID is - located in the .ini files in the store root and bundle root and - the corresponding FSM is/are overwritten. If the GUId is not found, an - exception occur. If no GUID is provided, FSM is written in a new file - in the store root. - """ - NotImplementedError - - def load_fsm(self): - """ - Load fsm from disk. - """ - NotImplementedError - -class XMLSerializer(Serializer): - """ - Class that provide serializing and deserializing of the FSM - used in the tutorials to/from a .xml file. Inherit from Serializer - """ - - def _create_state_dict_node(self, state_dict, doc): - """ - Create and return a xml Node from a State dictionnary. 
- """ - statesList = doc.createElement("States") - for state_name, state in state_dict.items(): - stateNode = doc.createElement("State") - statesList.appendChild(stateNode) - stateNode.setAttribute("Name", state_name) - actionsList = stateNode.appendChild(self._create_action_list_node(state.get_action_list(), doc)) - eventfiltersList = stateNode.appendChild(self._create_event_filters_node(state.get_event_filter_list(), doc)) - return statesList - - def _create_addon_component_node(self, parent_attr_name, comp, doc): - """ - Takes a component that is embedded in another component (e.g. the content - of a OnceWrapper) and encapsulate it in a node with the property name. - - e.g. - <Component Class="OnceWrapper"> - <SubComponent property="addon"> - <Component Class="BubbleMessage" message="'Hi!'" position="[12,32]"/> - </SubComponent> - </Component> - - When reloading this node, we should look up the property name for the parent - in the attribute of the node, then examine the subnode to create the addon - object itself. - - @param parent_attr_name The name of the parent's attribute for this addon - e.g. the OnceWrapper has the action attribute, which corresponds to a - sub-action it must execute once. - @param comp The component node itself - @param doc The XML document root (only used to create the nodes) - @returns A NODE_SUBCOMPONENT node, with the property attribute and a sub node - that represents another component. - """ - subCompNode = doc.createElement(NODE_SUBCOMPONENT) - subCompNode.setAttribute("property", parent_attr_name) - - subNode = self._create_component_node(comp, doc) - - subCompNode.appendChild(subNode) - - return subCompNode - - def _create_addonlist_component_node(self, parent_attr_name, comp_list, doc): - """ - Takes a list of components that are embedded in another component (ex. the - content of a ChainAction) and encapsulate them in a node with the property - name. - - e.g. 
- <Component Class="ChainAction"> - <SubComponentList property="actions"> - <Component Class="BubbleMessage" message="'Hi!'" position="[15,35]"/> - <Component Class="DialogMessage" message="'Multi-action!'" position="[45,10]"/> - </SubComponentList> - </Component> - - When reloading this node, we should look up the property name for the parent - in the the attribute of the node, then rebuild the list by appending the - content of all the subnodes. - - @param parent_attr_name The name of the parent component's property - @param comp_list A list of components that comprise the property - @param doc The XML document root (only for creating new nodes) - @returns A NODE_SUBCOMPONENTLIST node with the property attribute - """ - subCompListNode = doc.createElement(NODE_SUBCOMPONENTLIST) - subCompListNode.setAttribute("property", parent_attr_name) - - for comp in comp_list: - compNode = self._create_component_node(comp, doc) - subCompListNode.appendChild(compNode) - - return subCompListNode - - def _create_component_node(self, comp, doc): - """ - Takes a single component (action or eventfilter) and transforms it - into a xml node. 
- - @param comp A single component - @param doc The XML document root (used to create nodes only - @return A XML Node object with the component tag name - """ - compNode = doc.createElement(NODE_COMPONENT) - - # Write down just the name of the Action class as the Class - # property -- - compNode.setAttribute("Class",type(comp).__name__) - - # serialize all tutorius properties - for propname in comp.get_properties(): - propval = getattr(comp, propname) - if getattr(type(comp), propname).type == "addonlist": - compNode.appendChild(self._create_addonlist_component_node(propname, propval, doc)) - elif getattr(type(comp), propname).type == "addon": - #import rpdb2; rpdb2.start_embedded_debugger('pass') - compNode.appendChild(self._create_addon_component_node(propname, propval, doc)) - else: - # repr instead of str, as we want to be able to eval() it into a - # valid object. - compNode.setAttribute(propname, repr(propval)) - - return compNode - - def _create_action_list_node(self, action_list, doc): - """ - Create and return a xml Node from a Action list. - - @param action_list A list of actions - @param doc The XML document root (used to create new nodes only) - @return A XML Node object with the Actions tag name and a serie of - Action children - """ - actionsList = doc.createElement("Actions") - for action in action_list: - # Create the action node - actionNode = self._create_component_node(action, doc) - # Append it to the list - actionsList.appendChild(actionNode) - - return actionsList - - def _create_event_filters_node(self, event_filters, doc): - """ - Create and return a xml Node from a event filters. 
- """ - eventFiltersList = doc.createElement("EventFiltersList") - for event_f in event_filters: - eventFilterNode = self._create_component_node(event_f, doc) - eventFiltersList.appendChild(eventFilterNode) - - return eventFiltersList - - def save_fsm(self, fsm, xml_filename, path): - """ - Save fsm to disk, in the xml file specified by "xml_filename", in the - "path" folder. If the specified file doesn't exist, it will be created. - """ - self.doc = doc = xml.dom.minidom.Document() - fsm_element = doc.createElement("FSM") - doc.appendChild(fsm_element) - fsm_element.setAttribute("Name", fsm.name) - fsm_element.setAttribute("StartStateName", fsm.start_state_name) - statesDict = fsm_element.appendChild(self._create_state_dict_node(fsm._states, doc)) - - fsm_actions_node = self._create_action_list_node(fsm.actions, doc) - fsm_actions_node.tagName = "FSMActions" - actionsList = fsm_element.appendChild(fsm_actions_node) - - file_object = open(os.path.join(path, xml_filename), "w") - file_object.write(doc.toprettyxml()) - file_object.close() - - - def _find_tutorial_dir_with_guid(self, guid): - """ - Finds the tutorial with the associated GUID. If it is found, return - the path to the tutorial's directory. If it doesn't exist, raise an - IOError. - - A note : if there are two tutorials with this GUID in the folders, - they will both be inspected and the one with the highest version - number will be returned. If they have the same version number, the one - from the global store will be returned. - - @param guid The GUID of the tutorial that is to be loaded. 
- """ - # Attempt to find the tutorial's directory in the global directory - global_dir = os.path.join(_get_store_root(), guid) - # Then in the activty's bundle path - activity_dir = os.path.join(_get_bundle_root(), guid) - - # If they both exist - if os.path.isdir(global_dir) and os.path.isdir(activity_dir): - # Inspect both metadata files - global_meta = os.path.join(global_dir, "meta.ini") - activity_meta = os.path.join(activity_dir, "meta.ini") - - # Open both config files - global_parser = SafeConfigParser() - global_parser.read(global_meta) - - activity_parser = SafeConfigParser() - activity_parser.read(activity_meta) - - # Get the version number for each tutorial - global_version = global_parser.get(INI_METADATA_SECTION, "version") - activity_version = activity_parser.get(INI_METADATA_SECTION, "version") - - # If the global version is higher or equal, we'll take it - if global_version >= activity_version: - return global_dir - else: - return activity_dir - - # Do we just have the global directory? - if os.path.isdir(global_dir): - return global_dir - - # Or just the activity's bundle directory? - if os.path.isdir(activity_dir): - return activity_dir - - # Error : none of these directories contain the tutorial - raise IOError(2, "Neither the global nor the bundle directory contained the tutorial with GUID %s"%guid) - - def _get_direct_descendants_by_tag_name(self, node, name): - """ - Searches in the list of direct descendants of a node to find all the node - that have the given name. - - This is used because the Document.getElementsByTagName() function returns the - list of all the descendants (whatever their distance to the start node) that - have that name. In the case of complex components, we absolutely need to inspect - a single layer of the tree at the time. 
- - @param node The node from which we want the direct descendants with a particular - name - @param name The name of the node - @returns A list, possibly empty, of direct descendants of node that have this name - """ - return_list = [] - for childNode in node.childNodes: - if childNode.nodeName == name: - return_list.append(childNode) - return return_list - - def _load_xml_properties(self, properties_elem): - """ - Changes a list of properties into fully instanciated properties. - - @param properties_elem An XML element reprensenting a list of - properties - """ - return [] - - def _load_xml_event_filters(self, filters_elem): - """ - Loads up a list of Event Filters. - - @param filters_elem An XML Element representing a list of event filters - """ - reformed_event_filters_list = [] - event_filter_element_list = self._get_direct_descendants_by_tag_name(filters_elem, NODE_COMPONENT) - new_event_filter = None - - for event_filter in event_filter_element_list: - new_event_filter = self._load_xml_component(event_filter) - - if new_event_filter is not None: - reformed_event_filters_list.append(new_event_filter) - - return reformed_event_filters_list - - def _load_xml_subcomponents(self, node, properties): - """ - Loads all the subcomponent node below the given node and inserts them with - the right property name inside the properties dictionnary. - - @param node The parent node that contains one or many SubComponent nodes. - @param properties A dictionnary where the subcomponent property names - and the instantiated components will be stored - @returns Nothing. The properties dict will contain the property->comp mapping. 
- """ - subCompList = self._get_direct_descendants_by_tag_name(node, NODE_SUBCOMPONENT) - - for subComp in subCompList: - property_name = subComp.getAttribute("property") - internal_comp_node = self._get_direct_descendants_by_tag_name(subComp, NODE_COMPONENT)[0] - internal_comp = self._load_xml_component(internal_comp_node) - properties[str(property_name)] = internal_comp - - def _load_xml_subcomponent_lists(self, node, properties): - """ - Loads all the subcomponent lists below the given node and stores them - under the correct property name for that node. - - @param node The node from which we want to read the subComponent lists - @param properties The dictionnary that will contain the mapping of prop->subCompList - @returns Nothing. The values are returns inside the properties dict. - """ - listOf_subCompListNode = self._get_direct_descendants_by_tag_name(node, NODE_SUBCOMPONENTLIST) - for subCompListNode in listOf_subCompListNode: - property_name = subCompListNode.getAttribute("property") - subCompList = [] - for subCompNode in self._get_direct_descendants_by_tag_name(subCompListNode, NODE_COMPONENT): - subComp = self._load_xml_component(subCompNode) - subCompList.append(subComp) - properties[str(property_name)] = subCompList - - def _load_xml_component(self, node): - """ - Loads a single addon component instance from an Xml node. 
- - @param node The component XML Node to transform - object - @return The addon component object of the correct type according to the XML - description - """ - class_name = node.getAttribute("Class") - - properties = {} - - for prop in node.attributes.keys(): - if prop == "Class" : continue - # security : keep sandboxed - properties[str(prop)] = eval(node.getAttribute(prop)) - - # Read the complex attributes - self._load_xml_subcomponents(node, properties) - self._load_xml_subcomponent_lists(node, properties) - - new_action = addon.create(class_name, **properties) - - if not new_action: - return None - - return new_action - - def _load_xml_actions(self, actions_elem): - """ - Transforms an Actions element into a list of instanciated Action. - - @param actions_elem An XML Element representing a list of Actions - """ - reformed_actions_list = [] - actions_element_list = self._get_direct_descendants_by_tag_name(actions_elem, NODE_COMPONENT) - - for action in actions_element_list: - new_action = self._load_xml_component(action) - - reformed_actions_list.append(new_action) - - return reformed_actions_list - - def _load_xml_states(self, states_elem): - """ - Takes in a States element and fleshes out a complete list of State - objects. - - @param states_elem An XML Element that represents a list of States - """ - reformed_state_list = [] - # item(0) because there is always only one <States> tag in the xml file - # so states_elem should always contain only one element - states_element_list = states_elem.item(0).getElementsByTagName("State") - - for state in states_element_list: - stateName = state.getAttribute("Name") - # Using item 0 in the list because there is always only one - # Actions and EventFilterList element per State node. 
- actions_list = self._load_xml_actions(state.getElementsByTagName("Actions")[0]) - event_filters_list = self._load_xml_event_filters(state.getElementsByTagName("EventFiltersList")[0]) - reformed_state_list.append(State(stateName, actions_list, event_filters_list)) - - return reformed_state_list - - def _load_xml_fsm(self, fsm_elem): - """ - Takes in an XML element representing an FSM and returns the fully - crafted FSM. - - @param fsm_elem The XML element that describes a FSM - """ - # Load the FSM's name and start state's name - fsm_name = fsm_elem.getAttribute("Name") - - fsm_start_state_name = None - try: - fsm_start_state_name = fsm_elem.getAttribute("StartStateName") - except: - pass - - fsm = FiniteStateMachine(fsm_name, start_state_name=fsm_start_state_name) - - # Load the states - states = self._load_xml_states(fsm_elem.getElementsByTagName("States")) - for state in states: - fsm.add_state(state) - - # Load the actions on this FSM - actions = self._load_xml_actions(fsm_elem.getElementsByTagName("FSMActions")[0]) - for action in actions: - fsm.add_action(action) - - # Load the event filters - events = self._load_xml_event_filters(fsm_elem.getElementsByTagName("EventFiltersList")[0]) - for event in events: - fsm.add_event_filter(event) - - return fsm - - - def load_fsm(self, guid): - """ - Load fsm from xml file whose .ini file guid match argument guid. - """ - # Fetch the directory (if any) - tutorial_dir = self._find_tutorial_dir_with_guid(guid) - - # Open the XML file - tutorial_file = os.path.join(tutorial_dir, TUTORIAL_FILENAME) - - xml_dom = xml.dom.minidom.parse(tutorial_file) - - fsm_elem = xml_dom.getElementsByTagName("FSM")[0] - - return self._load_xml_fsm(fsm_elem) - - -class TutorialBundler(object): - """ - This class provide the various data handling methods useable by the tutorial - editor. - """ - - def __init__(self,generated_guid = None): - """ - Tutorial_bundler constructor. 
If a GUID is given in the parameter, the - Tutorial_bundler object will be associated with it. If no GUID is given, - a new GUID will be generated, - """ - - self.Guid = generated_guid or str(uuid.uuid1()) - - #Look for the file in the path if a uid is supplied - if generated_guid: - #General store - store_path = os.path.join(_get_store_root(), generated_guid, INI_FILENAME) - if os.path.isfile(store_path): - self.Path = os.path.dirname(store_path) - else: - #Bundle store - bundle_path = os.path.join(_get_bundle_root(), generated_guid, INI_FILENAME) - if os.path.isfile(bundle_path): - self.Path = os.path.dirname(bundle_path) - else: - raise IOError(2,"Unable to locate metadata file for guid '%s'" % generated_guid) - - else: - #Create the folder, any failure will go through to the caller for now - store_path = os.path.join(_get_store_root(), self.Guid) - os.makedirs(store_path) - self.Path = store_path - - def write_metadata_file(self, tutorial): - """ - Write metadata to the property file. - @param tutorial Tutorial for which to write metadata - """ - #Create the Config Object and populate it - cfg = SafeConfigParser() - cfg.add_section(INI_METADATA_SECTION) - cfg.set(INI_METADATA_SECTION, INI_GUID_PROPERTY, self.Guid) - cfg.set(INI_METADATA_SECTION, INI_NAME_PROPERTY, tutorial.name) - cfg.set(INI_METADATA_SECTION, INI_XML_FSM_PROPERTY, TUTORIAL_FILENAME) - cfg.add_section(INI_ACTIVITY_SECTION) - cfg.set(INI_ACTIVITY_SECTION, os.environ['SUGAR_BUNDLE_NAME'], - os.environ['SUGAR_BUNDLE_VERSION']) - - #Write the ini file - cfg.write( file( os.path.join(self.Path, INI_FILENAME), 'w' ) ) - - def get_tutorial_path(self): - """ - Return the path of the .ini file associated with the guiven guid set in - the Guid property of the Tutorial_Bundler. If the guid is present in - more than one path, the store_root is given priority. 
- """ - - store_root = _get_store_root() - bundle_root = _get_bundle_root() - - config = SafeConfigParser() - path = None - - logging.debug("************ Path of store_root folder of activity : " \ - + store_root) - - # iterate in each GUID subfolder - for dir in os.listdir(store_root): - - # iterate for each .ini file in the store_root folder - - for file_name in os.listdir(os.path.join(store_root, dir)): - if file_name.endswith(".ini"): - logging.debug("******************* Found .ini file : " \ - + file_name) - config.read(os.path.join(store_root, dir, file_name)) - if config.get(INI_METADATA_SECTION, INI_GUID_PROPERTY) == self.Guid: - xml_filename = config.get(INI_METADATA_SECTION, - INI_XML_FSM_PROPERTY) - - path = os.path.join(store_root, dir) - return path - - logging.debug("************ Path of bundle_root folder of activity : " \ - + bundle_root) - - - # iterate in each GUID subfolder - for dir in os.listdir(bundle_root): - - # iterate for each .ini file in the bundle_root folder - for file_name in os.listdir(os.path.join(bundle_root, dir)): - if file_name.endswith(".ini"): - logging.debug("******************* Found .ini file : " \ - + file_name) - config.read(os.path.join(bundle_root, dir, file_name)) - if config.get(INI_METADATA_SECTION, INI_GUID_PROPERTY) == self.Guid: - path = os.path.join(bundle_root, self.Guid) - return path - - if path is None: - logging.debug("**************** Error : GUID not found") - raise KeyError - - def write_fsm(self, fsm): - - """ - Save fsm to disk. If a GUID parameter is provided, the existing GUID is - located in the .ini files in the store root and bundle root and - the corresponding FSM is/are created or overwritten. If the GUID is not - found, an exception occur. 
- """ - - config = SafeConfigParser() - - serializer = XMLSerializer() - path = os.path.join(self.Path, "meta.ini") - config.read(path) - xml_filename = config.get(INI_METADATA_SECTION, INI_XML_FSM_PROPERTY) - serializer.save_fsm(fsm, xml_filename, self.Path) - - - def add_resources(self, typename, file): - """ - Add ressources to metadata. - """ - raise NotImplementedError("add_resources not implemented") diff --git a/tutorius/constraints.py b/tutorius/constraints.py index 2bc27aa..cd71167 100644 --- a/tutorius/constraints.py +++ b/tutorius/constraints.py @@ -24,6 +24,14 @@ for some properties. # For the File Constraint import os +# For the Resource Constraint +import re + +class ConstraintException(Exception): + """ + Parent class for all constraint exceptions + """ + pass class Constraint(): """ @@ -47,7 +55,7 @@ class ValueConstraint(Constraint): def __init__(self, limit): self.limit = limit -class UpperLimitConstraintError(Exception): +class UpperLimitConstraintError(ConstraintException): pass class UpperLimitConstraint(ValueConstraint): @@ -64,7 +72,7 @@ class UpperLimitConstraint(ValueConstraint): raise UpperLimitConstraintError() return -class LowerLimitConstraintError(Exception): +class LowerLimitConstraintError(ConstraintException): pass class LowerLimitConstraint(ValueConstraint): @@ -81,7 +89,7 @@ class LowerLimitConstraint(ValueConstraint): raise LowerLimitConstraintError() return -class MaxSizeConstraintError(Exception): +class MaxSizeConstraintError(ConstraintException): pass class MaxSizeConstraint(ValueConstraint): @@ -99,7 +107,7 @@ class MaxSizeConstraint(ValueConstraint): raise MaxSizeConstraintError("Setter : trying to set value of length %d while limit is %d"%(len(value), self.limit)) return -class MinSizeConstraintError(Exception): +class MinSizeConstraintError(ConstraintException): pass class MinSizeConstraint(ValueConstraint): @@ -117,7 +125,7 @@ class MinSizeConstraint(ValueConstraint): raise MinSizeConstraintError("Setter : trying to set 
value of length %d while limit is %d"%(len(value), self.limit)) return -class ColorConstraintError(Exception): +class ColorConstraintError(ConstraintException): pass class ColorArraySizeError(ColorConstraintError): @@ -153,7 +161,7 @@ class ColorConstraint(Constraint): return -class BooleanConstraintError(Exception): +class BooleanConstraintError(ConstraintException): pass class BooleanConstraint(Constraint): @@ -165,7 +173,7 @@ class BooleanConstraint(Constraint): return raise BooleanConstraintError("Value is not True or False") -class EnumConstraintError(Exception): +class EnumConstraintError(ConstraintException): pass class EnumConstraint(Constraint): @@ -190,7 +198,7 @@ class EnumConstraint(Constraint): raise EnumConstraintError("Value is not part of the enumeration") return -class FileConstraintError(Exception): +class FileConstraintError(ConstraintException): pass class FileConstraint(Constraint): @@ -200,10 +208,48 @@ class FileConstraint(Constraint): def validate(self, value): # TODO : Decide on the architecture for file retrieval on disk # Relative paths? From where? Support macros? - # + # FIXME This is a hack to make cases where a default file is not valid + # work. It allows None values to be validated, though if value is None: return if not os.path.isfile(value): raise FileConstraintError("Non-existing file : %s"%value) return +class ResourceConstraintError(ConstraintException): + pass + +class ResourceConstraint(Constraint): + """ + Ensures that the value is looking like a resource name, like + <filename>_<GUID>[.<extension>]. We are not validating that this is a + valid resource for the reason that the property does not have any notion + of tutorial guid. + + TODO : Find a way to properly validate resources by looking them up in the + Vault. 
+ """ + + # Regular expression to parse a resource-like name + resource_regexp_text = "(.+)_([a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})(\..*)?$" + resource_regexp = re.compile(resource_regexp_text) + + def validate(self, value): + # TODO : Validate that we will not use an empty resource or if we can + # have transitory resource names + if value is None: + raise ResourceConstraintError("Resource not allowed to have a null value!") + + # Special case : We allow the empty resource name for now + if value == "": + return value + + # Attempt to see if the value has a resource name inside it + match = self.resource_regexp.search(value) + + # If there was no match on the reg exp + if not match: + raise ResourceConstraintError("Resource name does not seem to be valid : %s" % value) + + # If the name matched, then the value is _PROBABLY_ good + return value diff --git a/tutorius/core.py b/tutorius/core.py index be2b0ba..80e1b4f 100644 --- a/tutorius/core.py +++ b/tutorius/core.py @@ -24,8 +24,9 @@ This module contains the core classes for tutorius import logging import os -from sugar.tutorius.TProbe import ProbeManager -from sugar.tutorius import addon +from .TProbe import ProbeManager +from .dbustools import save_args +from . import addon logger = logging.getLogger("tutorius") @@ -33,6 +34,9 @@ class Tutorial (object): """ Tutorial Class, used to run through the FSM. 
""" + #Properties + probeManager = property(lambda self: self._probeMgr) + activityId = property(lambda self: self._activity_id) def __init__(self, name, fsm, filename=None): """ @@ -52,9 +56,6 @@ class Tutorial (object): self._activity_id = None #Rest of initialisation happens when attached - probeManager = property(lambda self: self._probeMgr) - activityId = property(lambda self: self._activity_id) - def attach(self, activity_id): """ Attach to a running activity @@ -78,7 +79,6 @@ class Tutorial (object): # Uninstall the whole FSM self.state_machine.teardown() - #FIXME (Old) There should be some amount of resetting done here... if not self._activity_id is None: self._probeMgr.detach(self._activity_id) self._activity_id = None @@ -91,18 +91,6 @@ class Tutorial (object): self.state_machine.set_state(name) - - # Currently unused -- equivalent function is in each state - def _eventfilter_state_done(self, eventfilter): - """ - Callback handler for eventfilter to notify - when we must go to the next state. 
- """ - #XXX Tests should be run here normally - - #Swith to the next state pointed by the eventfilter - self.set_state(eventfilter.get_next_state()) - def _prepare_activity(self): """ Prepare the activity for the tutorial by loading the saved state and @@ -116,7 +104,9 @@ class Tutorial (object): self.activity_init_state_filename readfile = addon.create("ReadFile", filename=filename) if readfile: - self._probeMgr.install(self._activity_id, readfile) + self._probeMgr.install(readfile) + #Uninstall now while we have the reference handy + self._probeMgr.uninstall(readfile) class State(object): """ @@ -143,10 +133,9 @@ class State(object): self._actions = action_list or [] - # Unused for now - #self.tests = [] + self._transitions= dict(event_filter_list or []) - self._event_filters = event_filter_list or [] + self._installedEvents = set() self.tutorial = tutorial @@ -170,8 +159,8 @@ class State(object): Install the state itself, by first registering the event filters and then triggering the actions. """ - for eventfilter in self._event_filters: - self.tutorial.probeManager.subscribe(eventfilter, self._event_filter_state_done_cb ) + for (event, next_state) in self._transitions.items(): + self._installedEvents.add(self.tutorial.probeManager.subscribe(event, save_args(self._event_filter_state_done_cb, next_state ))) for action in self._actions: self.tutorial.probeManager.install(action) @@ -183,38 +172,37 @@ class State(object): removing dialogs that were displayed, removing highlights, etc... 
""" # Remove the handlers for the all of the state's event filters - for event_filter in self._event_filters: - self.tutorial.probeManager.unsubscribe(event_filter, self._event_filter_state_done_cb ) + while len(self._installedEvents) > 0: + self.tutorial.probeManager.unsubscribe(self._installedEvents.pop()) # Undo all the actions related to this state for action in self._actions: self.tutorial.probeManager.uninstall(action) - def _event_filter_state_done_cb(self, event_filter): + def _event_filter_state_done_cb(self, next_state, event): """ Callback for event filters. This function needs to inform the tutorial that the state is over and tell it what is the next state. - @param event_filter The event filter that was called + @param next_state The next state for the transition + @param event The event that occured """ # Run the tests here, if need be # Warn the higher level that we wish to change state - self.tutorial.set_state(event_filter.get_next_state()) + self.tutorial.set_state(next_state) # Model manipulation # These functions are used to simplify the creation of states def add_action(self, new_action): """ - Adds an action to the state (only if it wasn't added before) + Adds an action to the state @param new_action The new action to execute when in this state @return True if added, False otherwise """ - if new_action not in self._actions: - self._actions.append(new_action) - return True - return False + self._actions.append(new_action) + return True # remove_action - We did not define names for the action, hence they're # pretty hard to remove on a precise basis @@ -230,19 +218,21 @@ class State(object): Removes all the action associated with this state. A cleared state will not do anything when entered or exited. """ + #FIXME What if the action is currently installed? self._actions = [] - def add_event_filter(self, event_filter): + def add_event_filter(self, event, next_state): """ Adds an event filter that will cause a transition from this state. 
The same event filter may not be added twice. - @param event_filter The new event filter that will trigger a transition + @param event The event that will trigger a transition + @param next_state The state to which the transition will lead @return True if added, False otherwise """ - if event_filter not in self._event_filters: - self._event_filters.append(event_filter) + if event not in self._transitions.keys(): + self._transitions[event]=next_state return True return False @@ -250,7 +240,7 @@ class State(object): """ @return The list of event filters associated with this state. """ - return self._event_filters + return self._transitions.items() def clear_event_filters(self): """ @@ -258,12 +248,19 @@ class State(object): was just cleared will become a sink and will be the end of the tutorial. """ - self._event_filters = [] + self._transitions = {} - def is_identical(self, otherState): + def __eq__(self, otherState): """ - Compares two states and tells whether they contain the same states and + Compares two states and tells whether they contain the same states with the + same actions and event filters. + @param otherState The other State that we wish to match + @returns True if every action in this state has a matching action in the + other state with the same properties and values AND if every + event filter in this state has a matching filter in the + other state having the same properties and values AND if both + states have the same name. """ if not isinstance(otherState, State): return False @@ -273,27 +270,28 @@ class State(object): # Do they have the same actions? if len(self._actions) != len(otherState._actions): return False + + if len(self._transitions) != len(otherState._transitions): + return False + for act in self._actions: found = False + # For each action in the other state, try to match it with this one. 
for otherAct in otherState._actions: - if act.is_identical(otherAct): + if act == otherAct: found = True break if found == False: + # If we arrive here, then we could not find an action with the + # same values in the other state. We know they're not identical return False # Do they have the same event filters? - if len(self._actions) != len(otherState._actions): + if self._transitions != otherState._transitions: return False - for event in self._event_filters: - found = False - for otherEvent in otherState._event_filters: - if event.is_identical(otherEvent): - found = True - break - if found == False: - return False + # If nothing failed up to now, then every actions and every filters can + # be found in the other state return True class FiniteStateMachine(State): @@ -507,9 +505,9 @@ class FiniteStateMachine(State): #TODO : Move this code inside the State itself - we're breaking # encap :P - for event_filter in st._event_filters: - if event_filter.get_next_state() == state_name: - st._event_filters.remove(event_filter) + for event in st._transitions: + if st._transitions[event] == state_name: + del st._transitions[event] # Remove the state from the dictionary del self._states[state_name] @@ -527,8 +525,8 @@ class FiniteStateMachine(State): next_states = set() - for event_filter in state._event_filters: - next_states.add(event_filter.get_next_state()) + for event, state in state._transitions.items(): + next_states.add(state) return tuple(next_states) @@ -550,9 +548,9 @@ class FiniteStateMachine(State): states = [] # Walk through the list of states for st in self._states.itervalues(): - for event_filter in st._event_filters: - if event_filter.get_next_state() == state_name: - states.append(event_filter.get_next_state()) + for event, state in st._transitions.items(): + if state == state_name: + states.append(state) continue return tuple(states) @@ -564,42 +562,79 @@ class FiniteStateMachine(State): out_string += st.name + ", " return out_string - def 
is_identical(self, otherFSM): + def __eq__(self, otherFSM): """ Compares the elements of two FSM to ensure and returns true if they have the same set of states, containing the same actions and the same event filters. - @returns True if the two FSMs have the same content false otherwise + @returns True if the two FSMs have the same content, False otherwise """ if not isinstance(otherFSM, FiniteStateMachine): return False + # Make sure they share the same name if not (self.name == otherFSM.name) or \ not (self.start_state_name == otherFSM.start_state_name): return False - + + # Ensure they have the same number of FSM-level actions if len(self._actions) != len(otherFSM._actions): return False + # Test that we have all the same FSM level actions for act in self._actions: found = False + # For every action in the other FSM, try to match it with the + # current one. for otherAct in otherFSM._actions: - if act.is_identical(otherAct): + if act == otherAct: found = True break if found == False: return False + # Make sure we have the same number of states in both FSMs if len(self._states) != len(otherFSM._states): return False - for state in self._states.itervalues(): - found = False - for otherState in otherFSM._states.itervalues(): - if state.is_identical(otherState): - found = True - break - if found == False: + # For each state, try to find a corresponding state in the other FSM + for state_name in self._states.keys(): + state = self._states[state_name] + other_state = None + try: + # Attempt to use this key in the other FSM. 
If it's not present + # the dictionary will throw an exception and we'll know we have + # at least one different state in the other FSM + other_state = otherFSM._states[state_name] + except: + return False + # If two states with the same name exist, then we want to make sure + # they are also identical + if not state == other_state: + return False + + # If we made it here, then all the states in this FSM could be matched to an + # identical state in the other FSM. + return True + if len(self._states) != len(otherFSM._states): + return False + + # For each state, try to find a corresponding state in the other FSM + for state_name in self._states.keys(): + state = self._states[state_name] + other_state = None + try: + # Attempt to use this key in the other FSM. If it's not present + # the dictionary will throw an exception and we'll know we have + # at least one different state in the other FSM + other_state = otherFSM._states[state_name] + except: + return False + # If two states with the same name exist, then we want to make sure + # they are also identical + if not state == other_state: return False + # If we made it here, then all the states in this FSM could be matched to an + # identical state in the other FSM. return True diff --git a/tutorius/creator.py b/tutorius/creator.py index 513e312..c477056 100644 --- a/tutorius/creator.py +++ b/tutorius/creator.py @@ -22,16 +22,19 @@ the activity itself. # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import gtk.gdk +import gtk.glade import gobject from gettext import gettext as T -from sugar.graphics.toolbutton import ToolButton +import os +from sugar.graphics import icon +import copy -from sugar.tutorius import overlayer, gtkutils, actions, bundler, properties, addon -from sugar.tutorius import filters -from sugar.tutorius.services import ObjectStore -from sugar.tutorius.linear_creator import LinearCreator -from sugar.tutorius.core import Tutorial +from . 
import overlayer, gtkutils, actions, vault, properties, addon +from . import filters +from .services import ObjectStore +from .core import Tutorial, FiniteStateMachine, State +from . import viewer class Creator(object): """ @@ -47,81 +50,162 @@ class Creator(object): """ self._activity = activity if not tutorial: - self._tutorial = LinearCreator() + self._tutorial = FiniteStateMachine('Untitled') + self._state = State(name='INIT') + self._tutorial.add_state(self._state) + self._state_counter = 1 else: self._tutorial = tutorial + # TODO load existing tutorial; unused yet self._action_panel = None self._current_filter = None self._intro_mask = None self._intro_handle = None - self._state_bubble = overlayer.TextBubble(self._tutorial.state_name) allocation = self._activity.get_allocation() self._width = allocation.width self._height = allocation.height self._selected_widget = None self._eventmenu = None + self.tuto = None + self._guid = None self._hlmask = overlayer.Rectangle(None, (1.0, 0.0, 0.0, 0.5)) self._activity._overlayer.put(self._hlmask, 0, 0) - self._activity._overlayer.put(self._state_bubble, - self._width/2-self._state_bubble.allocation.width/2, 0) - self._state_bubble.show() - dlg_width = 300 dlg_height = 70 sw = gtk.gdk.screen_width() sh = gtk.gdk.screen_height() - self._tooldialog = gtk.Window() - self._tooldialog.set_title("Tutorius tools") - self._tooldialog.set_transient_for(self._activity) - self._tooldialog.set_decorated(True) - self._tooldialog.set_resizable(False) - self._tooldialog.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_UTILITY) - self._tooldialog.set_destroy_with_parent(True) - self._tooldialog.set_deletable(False) - self._tooldialog.set_size_request(dlg_width, dlg_height) - - toolbar = gtk.Toolbar() - for tool in addon.list_addons(): - meta = addon.get_addon_meta(tool) - toolitem = ToolButton(meta['icon']) - toolitem.set_tooltip(meta['display_name']) - toolitem.connect("clicked", self._add_action_cb, tool) - toolbar.insert(toolitem, -1) - 
toolitem = ToolButton("go-next") - toolitem.connect("clicked", self._add_step_cb) - toolitem.set_tooltip("Add Step") - toolbar.insert(toolitem, -1) - toolitem = ToolButton("stop") - toolitem.connect("clicked", self._cleanup_cb) - toolitem.set_tooltip("End Tutorial") - toolbar.insert(toolitem, -1) - self._tooldialog.add(toolbar) - self._tooldialog.show_all() - # simpoir: I suspect the realized widget is a tiny bit larger than - # it should be, thus the -10. - self._tooldialog.move(sw-10-dlg_width, sh-dlg_height) - - self._propedit = EditToolBox(self._activity) - - def _evfilt_cb(self, menuitem, event_name, *args): + + self._propedit = ToolBox(self._activity) + self._propedit.tree.signal_autoconnect({ + 'on_quit_clicked': self._cleanup_cb, + 'on_save_clicked': self.save, + 'on_action_activate': self._add_action_cb, + 'on_event_activate': self._add_event_cb, + }) + self._propedit.window.move( + gtk.gdk.screen_width()-self._propedit.window.get_allocation().width, + 100) + + + self._overview = viewer.Viewer(self._tutorial, self) + self._overview.win.set_transient_for(self._activity) + + self._overview.win.move(0, gtk.gdk.screen_height()- \ + self._overview.win.get_allocation().height) + + self._transitions = dict() + + def _update_next_state(self, state, event, next_state): + self._transitions[event] = next_state + + evts = state.get_event_filter_list() + state.clear_event_filters() + for evt, next_state in evts: + state.add_event_filter(evt, self._transitions[evt]) + + def delete_action(self, action): + """ + Removes the first instance of specified action from the tutorial. + + @param action: the action object to remove from the tutorial + @returns: True if successful, otherwise False. 
+ """ + state = self._tutorial.get_state_by_name("INIT") + + while True: + state_actions = state.get_action_list() + for fsm_action in state_actions: + if fsm_action is action: + state.clear_actions() + if state is self._state: + fsm_action.exit_editmode() + state_actions.remove(fsm_action) + self.set_insertion_point(state.name) + for keep_action in state_actions: + state.add_action(keep_action) + return True + + ev_list = state.get_event_filter_list() + if ev_list: + state = self._tutorial.get_state_by_name(ev_list[0][1]) + continue + + return False + + def delete_state(self): + """ + Remove current state. + Limitation: The last state cannot be removed, as it doesn't have + any transitions to remove anyway. + + @returns: True if successful, otherwise False. + """ + if not self._state.get_event_filter_list(): + # last state cannot be removed + return False + + state = self._tutorial.get_state_by_name("INIT") + ev_list = state.get_event_filter_list() + if state is self._state: + next_state = self._tutorial.get_state_by_name(ev_list[0][1]) + self.set_insertion_point(next_state.name) + self._tutorial.remove_state(state.name) + self._tutorial.remove_state(next_state.name) + next_state.name = "INIT" + self._tutorial.add_state(next_state) + return True + + # loop to repair links from deleted state + while ev_list: + next_state = self._tutorial.get_state_by_name(ev_list[0][1]) + if next_state is self._state: + # the tutorial will flush the event filters. We'll need to + # clear and re-add them. 
+ self._tutorial.remove_state(self._state.name) + state.clear_event_filters() + self._update_next_state(state, ev_list[0][0], next_state.get_event_filter_list()[0][1]) + for ev, next_state in ev_list: + state.add_event_filter(ev, next_state) + + self.set_insertion_point(ev_list[0][1]) + return True + + state = next_state + ev_list = state.get_event_filter_list() + return False + + def get_insertion_point(self): + return self._state.name + + def set_insertion_point(self, state_name): + for action in self._state.get_action_list(): + action.exit_editmode() + self._state = self._tutorial.get_state_by_name(state_name) + self._overview.win.queue_draw() + state_actions = self._state.get_action_list() + for action in state_actions: + action.enter_editmode() + action._drag._eventbox.connect_after( + "button-release-event", self._action_refresh_cb, action) + + if state_actions: + self._propedit.action = state_actions[0] + else: + self._propedit.action = None + + + def _evfilt_cb(self, menuitem, event): """ This will get called once the user has selected a menu item from the event filter popup menu. This should add the correct event filter to the FSM and increment states. """ - self.introspecting = False - eventfilter = addon.create('GtkWidgetEventFilter', - next_state=None, - object_id=self._selected_widget, - event_name=event_name) # undo actions so they don't persist through step editing - for action in self._tutorial.current_actions: + for action in self._state.get_action_list(): action.exit_editmode() - self._tutorial.event(eventfilter) - self._state_bubble.label = self._tutorial.state_name self._hlmask.covered = None self._propedit.action = None self._activity.queue_draw() @@ -160,63 +244,70 @@ class Creator(object): self._eventmenu.popup(None, None, None, evt.button, evt.time) self._activity.queue_draw() - def set_intropecting(self, value): - """ - Set whether creator is in UI introspection mode. Setting this will - connect necessary handlers. 
- @param value True to setup introspection handlers. - """ - if bool(value) ^ bool(self._intro_mask): - if value: - self._intro_mask = overlayer.Mask(catch_events=True) - self._intro_handle = self._intro_mask.connect_after( - "button-press-event", self._intro_cb) - self._activity._overlayer.put(self._intro_mask, 0, 0) - else: - self._intro_mask.catch_events = False - self._intro_mask.disconnect(self._intro_handle) - self._intro_handle = None - self._activity._overlayer.remove(self._intro_mask) - self._intro_mask = None - - def get_introspecting(self): - """ - Whether creator is in UI introspection (catch all event) mode. - @return True if introspection handlers are connected, or False if not. - """ - return bool(self._intro_mask) - - introspecting = property(fset=set_intropecting, fget=get_introspecting) - - def _add_action_cb(self, widget, actiontype): + def _add_action_cb(self, widget, path): """Callback for the action creation toolbar tool""" - action = addon.create(actiontype) - if isinstance(action, actions.Action): - action.enter_editmode() - self._tutorial.action(action) - # FIXME: replace following with event catching - action._drag._eventbox.connect_after( - "button-release-event", self._action_refresh_cb, action) + action_type = self._propedit.actions_list[path][ToolBox.ICON_NAME] + action = addon.create(action_type) + action.enter_editmode() + self._state.add_action(action) + # FIXME: replace following with event catching + action._drag._eventbox.connect_after( + "button-release-event", self._action_refresh_cb, action) + self._overview.win.queue_draw() + + def _add_event_cb(self, widget, path): + """Callback for the event creation toolbar tool""" + event_type = self._propedit.events_list[path][ToolBox.ICON_NAME] + event = addon.create(event_type) + addonname = type(event).__name__ + meta = addon.get_addon_meta(addonname) + for propname in meta['mandatory_props']: + prop = getattr(type(event), propname) + if isinstance(prop, properties.TUAMProperty): + 
selector = WidgetSelector(self._activity) + setattr(event, propname, selector.select()) + elif isinstance(prop, properties.TEventType): + try: + dlg = SignalInputDialog(self._activity, + text="Mandatory property", + field=propname, + addr=event.object_id) + setattr(event, propname, dlg.pop()) + except AttributeError: + pass + elif isinstance(prop, properties.TStringProperty): + dlg = TextInputDialog(self._activity, + text="Mandatory property", + field=propname) + setattr(event, propname, dlg.pop()) + else: + raise NotImplementedError() + + event_filters = self._state.get_event_filter_list() + if event_filters: + # linearize tutorial by inserting state + new_state = State(name=str(self._state_counter)) + self._state_counter += 1 + self._state.clear_event_filters() + for evt_filt, next_state in event_filters: + new_state.add_event_filter(evt_filt, next_state) + self._update_next_state(self._state, event, new_state.name) + next_state = new_state.name + # blocks are shifted, full redraw is necessary + self._overview.win.queue_draw() else: - addonname = type(action).__name__ - meta = addon.get_addon_meta(addonname) - had_introspect = False - for propname in meta['mandatory_props']: - prop = getattr(type(action), propname) - if isinstance(prop, properties.TUAMProperty): - had_introspect = True - self.introspecting = True - elif isinstance(prop, properties.TStringProperty): - dlg = TextInputDialog(title="Mandatory property", - field=propname) - setattr(action, propname, dlg.pop()) - else: - raise NotImplementedError() - - # FIXME: hack to reuse previous introspection code - if not had_introspect: - self._tutorial.event(action) + # append empty state only if edit inserting at end of linearized + # tutorial. 
+ self._update_next_state(self._state, event, str(self._state_counter)) + next_state = str(self._state_counter) + new_state = State(name=str(self._state_counter)) + self._state_counter += 1 + + self._state.add_event_filter(event, next_state) + self._tutorial.add_state(new_state) + self._overview.win.queue_draw() + self.set_insertion_point(new_state.name) def _action_refresh_cb(self, widget, evt, action): """ @@ -231,44 +322,54 @@ class Creator(object): "button-release-event", self._action_refresh_cb, action) self._propedit.action = action - def _add_step_cb(self, widget): - """Callback for the "add step" tool""" - self.introspecting = True + self._overview.win.queue_draw() def _cleanup_cb(self, *args): """ Quit editing and cleanup interface artifacts. """ - self.introspecting = False - eventfilter = filters.EventFilter(None) # undo actions so they don't persist through step editing - for action in self._tutorial.current_actions: + for action in self._state.get_action_list(): action.exit_editmode() - self._tutorial.event(eventfilter) - dlg = TextInputDialog(text=T("Enter a tutorial title."), - field=T("Title")) - tutorialName = "" - while not tutorialName: tutorialName = dlg.pop() - dlg.destroy() - - # prepare tutorial for serialization - tuto = Tutorial(tutorialName, self._tutorial.fsm) - bundle = bundler.TutorialBundler() - bundle.write_metadata_file(tuto) - bundle.write_fsm(self._tutorial.fsm) + dialog = gtk.MessageDialog( + parent=self._activity, + flags=gtk.DIALOG_MODAL, + type=gtk.MESSAGE_QUESTION, + buttons=gtk.BUTTONS_YES_NO, + message_format=T('Do you want to save before stopping edition?')) + do_save = dialog.run() + dialog.destroy() + if do_save == gtk.RESPONSE_YES: + self.save() # remove UI remains self._hlmask.covered = None self._activity._overlayer.remove(self._hlmask) - self._activity._overlayer.remove(self._state_bubble) self._hlmask.destroy() self._hlmask = None - self._tooldialog.destroy() self._propedit.destroy() + self._overview.destroy() 
self._activity.queue_draw() del self._activity._creator + def save(self, widget=None): + if not self.tuto: + dlg = TextInputDialog(self._activity, + text=T("Enter a tutorial title."), + field=T("Title")) + tutorialName = "" + while not tutorialName: tutorialName = dlg.pop() + dlg.destroy() + + # prepare tutorial for serialization + self.tuto = Tutorial(tutorialName, self._tutorial) + bundle = vault.TutorialBundler(self._guid) + self._guid = bundle.Guid + bundle.write_metadata_file(self.tuto) + bundle.write_fsm(self._tutorial) + + def launch(*args, **kwargs): """ Launch and attach a creator to the currently running activity. @@ -278,46 +379,59 @@ class Creator(object): activity._creator = Creator(activity) launch = staticmethod(launch) -class EditToolBox(gtk.Window): - """Helper toolbox class for managing action properties""" - def __init__(self, parent, action=None): - """ - Create the property edition toolbox and display it. +class ToolBox(object): + ICON_LABEL = 0 + ICON_IMAGE = 1 + ICON_NAME = 2 + ICON_TIP = 3 + def __init__(self, parent): + super(ToolBox, self).__init__() + self.__parent = parent + sugar_prefix = os.getenv("SUGAR_PREFIX",default="/usr") + glade_file = os.path.join(sugar_prefix, 'share', 'tutorius', + 'ui', 'creator.glade') + self.tree = gtk.glade.XML(glade_file) + self.window = self.tree.get_widget('mainwindow') + self._propbox = self.tree.get_widget('propbox') + + self.window.set_transient_for(parent) - @param parent the parent window of this toolbox, usually an activity - @param action the action to introspect/edit - """ - gtk.Window.__init__(self) self._action = None - self.__parent = parent # private avoid gtk clash - - self.set_title("Action Properties") - self.set_transient_for(parent) - self.set_decorated(True) - self.set_resizable(False) - self.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_UTILITY) - self.set_destroy_with_parent(True) - self.set_deletable(False) - self.set_size_request(200, 400) - - self._vbox = gtk.VBox() - 
self.add(self._vbox) - propwin = gtk.ScrolledWindow() - propwin.props.hscrollbar_policy = gtk.POLICY_AUTOMATIC - propwin.props.vscrollbar_policy = gtk.POLICY_AUTOMATIC - self._vbox.pack_start(propwin) - self._propbox = gtk.VBox(spacing=10) - propwin.add(self._propbox) - - self.action = action - - sw = gtk.gdk.screen_width() - sh = gtk.gdk.screen_height() - - self.show_all() - self.move(sw-10-200, (sh-400)/2) - - def refresh(self): + self.actions_list = gtk.ListStore(str, gtk.gdk.Pixbuf, str, str) + self.actions_list.set_sort_column_id(self.ICON_LABEL, gtk.SORT_ASCENDING) + self.events_list = gtk.ListStore(str, gtk.gdk.Pixbuf, str, str) + self.events_list.set_sort_column_id(self.ICON_LABEL, gtk.SORT_ASCENDING) + + for toolname in addon.list_addons(): + meta = addon.get_addon_meta(toolname) + iconfile = gtk.Image() + iconfile.set_from_file(icon.get_icon_file_name(meta['icon'])) + img = iconfile.get_pixbuf() + label = format_multiline(meta['display_name']) + + if meta['type'] == addon.TYPE_ACTION: + self.actions_list.append((label, img, toolname, meta['display_name'])) + else: + self.events_list.append((label, img, toolname, meta['display_name'])) + + iconview_action = self.tree.get_widget('iconview1') + iconview_action.set_model(self.actions_list) + iconview_action.set_text_column(self.ICON_LABEL) + iconview_action.set_pixbuf_column(self.ICON_IMAGE) + iconview_action.set_tooltip_column(self.ICON_TIP) + iconview_event = self.tree.get_widget('iconview2') + iconview_event.set_model(self.events_list) + iconview_event.set_text_column(self.ICON_LABEL) + iconview_event.set_pixbuf_column(self.ICON_IMAGE) + iconview_event.set_tooltip_column(self.ICON_TIP) + + self.window.show() + + def destroy(self): + """ clean and free the toolbox """ + self.window.destroy() + + def refresh_properties(self): """Refresh property values from the selected action.""" if self._action is None: return @@ -330,6 +444,9 @@ class EditToolBox(gtk.Window): if isinstance(prop, 
properties.TStringProperty): propwdg = row.get_children()[1] propwdg.get_buffer().set_text(propval) + elif isinstance(prop, properties.TUAMProperty): + propwdg = row.get_children()[1] + propwdg.set_label(propval) elif isinstance(prop, properties.TIntProperty): propwdg = row.get_children()[1] propwdg.set_value(propval) @@ -345,12 +462,10 @@ class EditToolBox(gtk.Window): def set_action(self, action): """Setter for the action property.""" if self._action is action: - self.refresh() + self.refresh_properties() return - parent = self._propbox.get_parent() - parent.remove(self._propbox) - self._propbox = gtk.VBox(spacing=10) - parent.add(self._propbox) + for old_prop in self._propbox.get_children(): + self._propbox.remove(old_prop) self._action = action if action is None: @@ -365,6 +480,10 @@ class EditToolBox(gtk.Window): propwdg.get_buffer().set_text(propval) propwdg.connect_after("focus-out-event", \ self._str_prop_changed, action, propname) + elif isinstance(prop, properties.TUAMProperty): + propwdg = gtk.Button(propval) + propwdg.connect_after("clicked", \ + self._uam_prop_changed, action, propname) elif isinstance(prop, properties.TIntProperty): adjustment = gtk.Adjustment(value=propval, lower=prop.lower_limit.limit, @@ -385,8 +504,8 @@ class EditToolBox(gtk.Window): propwdg.set_text(str(propval)) row.pack_end(propwdg) self._propbox.pack_start(row, expand=False) - self._vbox.show_all() - self.refresh() + self._propbox.show_all() + self.refresh_properties() def get_action(self): """Getter for the action property""" @@ -396,10 +515,18 @@ class EditToolBox(gtk.Window): def _list_prop_changed(self, widget, evt, action, propname, idx): try: - getattr(action, propname)[idx] = int(widget.get_text()) + #Save props as tuples so that they can be hashed + attr = list(getattr(action, propname)) + attr[idx] = int(widget.get_text()) + setattr(action, propname, tuple(attr)) except ValueError: widget.set_text(str(getattr(action, propname)[idx])) 
self.__parent._creator._action_refresh_cb(None, None, action) + def _uam_prop_changed(self, widget, action, propname): + selector = WidgetSelector(self.__parent) + selection = selector.select() + setattr(action, propname, selection) + self.__parent._creator._action_refresh_cb(None, None, action) def _str_prop_changed(self, widget, evt, action, propname): buf = widget.get_buffer() setattr(action, propname, buf.get_text(buf.get_start_iter(), buf.get_end_iter())) @@ -408,9 +535,143 @@ class EditToolBox(gtk.Window): setattr(action, propname, widget.get_value_as_int()) self.__parent._creator._action_refresh_cb(None, None, action) + +class WidgetSelector(object): + """ + Allow selecting a widget from within a window without interrupting the + flow of the current call. + + The selector will run on the specified window until either a widget + is selected or abort() gets called. + """ + def __init__(self, window): + super(WidgetSelector, self).__init__() + self.window = window + self._intro_mask = None + self._intro_handle = None + self._select_handle = None + self._prelight = None + + def select(self): + """ + Starts selecting a widget, by grabbing control of the mouse and + highlighting hovered widgets until one is clicked. 
+ @returns: a widget address or None + """ + if not self._intro_mask: + self._prelight = None + self._intro_mask = overlayer.Mask(catch_events=True) + self._select_handle = self._intro_mask.connect_after( + "button-press-event", self._end_introspect) + self._intro_handle = self._intro_mask.connect_after( + "motion-notify-event", self._intro_cb) + self.window._overlayer.put(self._intro_mask, 0, 0) + self.window._overlayer.queue_draw() + + while bool(self._intro_mask) and not gtk.main_iteration(): + pass + + return gtkutils.raddr_lookup(self._prelight) + + def _end_introspect(self, widget, evt): + if evt.type == gtk.gdk.BUTTON_PRESS and self._prelight: + self._intro_mask.catch_events = False + self._intro_mask.disconnect(self._intro_handle) + self._intro_handle = None + self._intro_mask.disconnect(self._select_handle) + self._select_handle = None + self.window._overlayer.remove(self._intro_mask) + self._intro_mask = None + # for some reason, gtk may not redraw after this unless told to. + self.window.queue_draw() + + def _intro_cb(self, widget, evt): + """ + Callback for capture of widget events, when in introspect mode. + """ + # widget has focus, let's hilight it + win = gtk.gdk.display_get_default().get_window_at_pointer() + if not win: + return + click_wdg = win[0].get_user_data() + if not click_wdg.is_ancestor(self.window._overlayer): + # as popups are not (yet) supported, it would break + # badly if we were to play with a widget not in the + # hierarchy. + return + for hole in self._intro_mask.pass_thru: + self._intro_mask.mask(hole) + self._intro_mask.unmask(click_wdg) + self._prelight = click_wdg + + self.window.queue_draw() + + def abort(self): + """ + Ends the selection. The control will return to the select() caller + with a return value of None, as selection was aborted. 
+ """ + self._intro_mask.catch_events = False + self._intro_mask.disconnect(self._intro_handle) + self._intro_handle = None + self._intro_mask.disconnect(self._select_handle) + self._select_handle = None + self.window._overlayer.remove(self._intro_mask) + self._intro_mask = None + self._prelight = None + +class SignalInputDialog(gtk.MessageDialog): + def __init__(self, parent, text, field, addr): + """ + Create a gtk signal selection dialog. + + @param parent: the parent window this dialog should stay over. + @param text: the title of the dialog. + @param field: the field description of the dialog. + @param addr: the widget address from which to fetch signal list. + """ + gtk.MessageDialog.__init__(self, parent, + gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, + gtk.MESSAGE_QUESTION, + gtk.BUTTONS_OK, + None) + self.set_markup(text) + self.model = gtk.ListStore(str) + widget = gtkutils.find_widget(parent, addr) + for signal_name in gobject.signal_list_names(widget): + self.model.append(row=(signal_name,)) + self.entry = gtk.ComboBox(self.model) + cell = gtk.CellRendererText() + self.entry.pack_start(cell) + self.entry.add_attribute(cell, 'text', 0) + hbox = gtk.HBox() + lbl = gtk.Label(field) + hbox.pack_start(lbl, False) + hbox.pack_end(self.entry) + self.vbox.pack_end(hbox, True, True) + self.show_all() + + def pop(self): + """ + Show the dialog. It will run in it's own loop and return control + to the caller when a signal has been selected. 
+ + @returns: a signal name or None if no signal was selected + """ + self.run() + self.hide() + iter = self.entry.get_active_iter() + if iter: + text = self.model.get_value(iter, 0) + return text + return None + + def _dialog_done_cb(self, entry, response): + self.response(response) + class TextInputDialog(gtk.MessageDialog): - def __init__(self, text, field): - gtk.MessageDialog.__init__(self, None, + def __init__(self, parent, text, field): + gtk.MessageDialog.__init__(self, parent, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK, @@ -434,4 +695,39 @@ class TextInputDialog(gtk.MessageDialog): def _dialog_done_cb(self, entry, response): self.response(response) +# The purpose of this function is to reformat text, as current IconView +# implentation does not insert carriage returns on long lines. +# To preserve layout, this call reformat text to fit in small space under an +# icon. +def format_multiline(text, length=10, lines=3, line_separator='\n'): + """ + Reformat a text to fit in a small space. 
+ + @param length: maximum char per line + @param lines: maximum number of lines + """ + words = text.split(' ') + line = list() + return_val = [] + linelen = 0 + + for word in words: + t_len = linelen+len(word) + if t_len < length: + line.append(word) + linelen = t_len+1 # count space + else: + if len(return_val)+1 < lines: + return_val.append(' '.join(line)) + line = list() + linelen = 0 + line.append(word) + else: + return_val.append(' '.join(line+['...'])) + return line_separator.join(return_val) + + return_val.append(' '.join(line)) + return line_separator.join(return_val) + + # vim:set ts=4 sts=4 sw=4 et: diff --git a/tutorius/dbustools.py b/tutorius/dbustools.py new file mode 100644 index 0000000..5d70d7b --- /dev/null +++ b/tutorius/dbustools.py @@ -0,0 +1,42 @@ +import logging +LOGGER = logging.getLogger("sugar.tutorius.dbustools") + +def save_args(callable, *xargs, **xkwargs): + def __call(*args, **kwargs): + kw = dict() + kw.update(kwargs) + kw.update(xkwargs) + return callable(*(xargs+args), **kw) + return __call + +def ignore(*args): + LOGGER.debug("Unhandled asynchronous dbus call response with arguments: %s", str(args)) + +def logError(error): + LOGGER.error("Unhandled asynchronous dbus call error: %s", error) + +def remote_call(callable, args, return_cb=None, error_cb=None, block=False): + reply_cb = return_cb or ignore + errhandler_cb = error_cb or logError + if block: + try: + ret_val = callable(*args) + LOGGER.debug("remote_call return arguments: %s", str(ret_val)) + except Exception, e: + #Use the specified error handler even for blocking calls + errhandler_cb(e) + return + + #Return value signature might be : + if ret_val is None: + #Nothing + return reply_cb() + elif type(ret_val) in (list, tuple): + #Several parameters + return reply_cb(*ret_val) + else: + #One parameter + return reply_cb(ret_val) + else: + callable(*args, reply_handler=reply_cb, error_handler=errhandler_cb) + diff --git a/tutorius/editor.py b/tutorius/editor.py index 
42cc718..9d2effe 100644 --- a/tutorius/editor.py +++ b/tutorius/editor.py @@ -24,7 +24,7 @@ import gobject from gettext import gettext as _ -from sugar.tutorius.gtkutils import register_signals_numbered, get_children +from .gtkutils import register_signals_numbered, get_children class WidgetIdentifier(gtk.Window): """ diff --git a/tutorius/engine.py b/tutorius/engine.py index 57c08e4..e77a018 100644 --- a/tutorius/engine.py +++ b/tutorius/engine.py @@ -1,6 +1,9 @@ +import logging import dbus.mainloop.glib -from sugar.tutorius.TProbe import ProbeProxy -import sugar.tutorius.addon as addon +from jarabe.model import shell +from sugar.bundle.activitybundle import ActivityBundle + +from .vault import Vault class Engine: """ @@ -10,30 +13,34 @@ class Engine: def __init__(self): # FIXME Probe management should be in the probe manager dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) - self._probe = ProbeProxy("org.laptop.Calculate") - self._bm = None + #FIXME shell.get_model() will only be useful in the shell process + self._shell = shell.get_model() + self._tutorial = None - def launch(self, tutorialID): """ Launch a tutorial @param tutorialID unique tutorial identifier used to retrieve it from the disk """ - if self._bm == None: - self._bm = addon.create("BubbleMessage") - self._bm.position = (300,300) - self._bm.message = "Tutorial Started" + if self._tutorial: + self._tutorial.detach() + self._tutorial = None + + #Get the active activity from the shell + activity = self._shell.get_active_activity() + self._tutorial = Vault.loadTutorial(tutorialID) - self._probe.install(self._bm) - + #TProbes automatically use the bundle id, available from the ActivityBundle + bundle = ActivityBundle(activity.get_bundle_path()) + self._tutorial.attach(bundle.get_bundle_id()) def stop(self): """ Stop the current tutorial """ - self._probe.uninstall(self._bm) + self._tutorial.detach() + self._tutorial = None def pause(self): """ Interrupt the current tutorial and save its state in 
the journal """ - self._bm.message = "Tutorial State would be saved" - self._probe.update(self._bm) + raise NotImplementedError("Unable to store tutorial state") diff --git a/tutorius/filters.py b/tutorius/filters.py index fc58562..38cf86b 100644 --- a/tutorius/filters.py +++ b/tutorius/filters.py @@ -15,14 +15,10 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -import gobject -import gtk import logging logger = logging.getLogger("filters") -from sugar.tutorius.gtkutils import find_widget -from sugar.tutorius.services import ObjectStore -from sugar.tutorius import properties +from . import properties class EventFilter(properties.TPropContainer): @@ -30,31 +26,13 @@ class EventFilter(properties.TPropContainer): Base class for an event filter """ - next_state = properties.TStringProperty("None") - - def __init__(self, next_state=None): + def __init__(self): """ Constructor. - @param next_state name of the next state """ super(EventFilter, self).__init__() - if next_state: - self.next_state = next_state self._callback = None - def get_next_state(self): - """ - Getter for the next state - """ - return self.next_state - - def set_next_state(self, new_next_name): - """ - Setter for the next state. Should only be used during construction of - the event_fitler, not while the tutorial is running. - """ - self.next_state = new_next_name - def install_handlers(self, callback, **kwargs): """ install_handlers is called for eventfilters to setup all @@ -94,111 +72,3 @@ class EventFilter(properties.TPropContainer): if self._callback: self._callback(self) -##class TimerEvent(EventFilter): -## """ -## TimerEvent is a special EventFilter that uses gobject -## timeouts to trigger a state change after a specified amount -## of time. It must be used inside a gobject main loop to work. -## """ -## def __init__(self,next_state,timeout_s): -## """Constructor. 
-## -## @param next_state default EventFilter param, passed on to EventFilter -## @param timeout_s timeout in seconds -## """ -## super(TimerEvent,self).__init__(next_state) -## self._timeout = timeout_s -## self._handler_id = None -## -## def install_handlers(self, callback, **kwargs): -## """install_handlers creates the timer and starts it""" -## super(TimerEvent,self).install_handlers(callback, **kwargs) -## #Create the timer -## self._handler_id = gobject.timeout_add_seconds(self._timeout, self._timeout_cb) -## -## def remove_handlers(self): -## """remove handler removes the timer""" -## super(TimerEvent,self).remove_handlers() -## if self._handler_id: -## try: -## #XXX What happens if this was already triggered? -## #remove the timer -## gobject.source_remove(self._handler_id) -## except: -## pass -## -## def _timeout_cb(self): -## """ -## _timeout_cb triggers the eventfilter callback. -## -## It is necessary because gobject timers only stop if the callback they -## trigger returns False -## """ -## self.do_callback() -## return False #Stops timeout -## -##class GtkWidgetTypeFilter(EventFilter): -## """ -## Event Filter that listens for keystrokes on a widget -## """ -## def __init__(self, next_state, object_id, text=None, strokes=None): -## """Constructor -## @param next_state default EventFilter param, passed on to EventFilter -## @param object_id object tree-ish identifier -## @param text resulting text expected -## @param strokes list of strokes expected -## -## At least one of text or strokes must be supplied -## """ -## super(GtkWidgetTypeFilter, self).__init__(next_state) -## self._object_id = object_id -## self._text = text -## self._captext = "" -## self._strokes = strokes -## self._capstrokes = [] -## self._widget = None -## self._handler_id = None -## -## def install_handlers(self, callback, **kwargs): -## """install handlers -## @param callback default EventFilter callback arg -## """ -## super(GtkWidgetTypeFilter, self).install_handlers(callback, 
**kwargs) -## logger.debug("~~~GtkWidgetTypeFilter install") -## activity = ObjectStore().activity -## if activity is None: -## logger.error("No activity") -## raise RuntimeWarning("no activity in the objectstore") -## -## self._widget = find_widget(activity, self._object_id) -## if self._widget: -## self._handler_id= self._widget.connect("key-press-event",self.__keypress_cb) -## logger.debug("~~~Connected handler %d on %s" % (self._handler_id,self._object_id) ) -## -## def remove_handlers(self): -## """remove handlers""" -## super(GtkWidgetTypeFilter, self).remove_handlers() -## #if an event was connected, disconnect it -## if self._handler_id: -## self._widget.handler_disconnect(self._handler_id) -## self._handler_id=None -## -## def __keypress_cb(self, widget, event, *args): -## """keypress callback""" -## logger.debug("~~~keypressed!") -## key = event.keyval -## keystr = event.string -## logger.debug("~~~Got key: " + str(key) + ":"+ keystr) -## self._capstrokes += [key] -## #TODO Treat other stuff, such as arrows -## if key == gtk.keysyms.BackSpace: -## self._captext = self._captext[:-1] -## else: -## self._captext = self._captext + keystr -## -## logger.debug("~~~Current state: " + str(self._capstrokes) + ":" + str(self._captext)) -## if not self._strokes is None and self._strokes in self._capstrokes: -## self.do_callback() -## if not self._text is None and self._text in self._captext: -## self.do_callback() - diff --git a/tutorius/linear_creator.py b/tutorius/linear_creator.py index 91b11f4..f664c49 100644 --- a/tutorius/linear_creator.py +++ b/tutorius/linear_creator.py @@ -15,12 +15,12 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -from sugar.tutorius.core import * -from sugar.tutorius.actions import * -from sugar.tutorius.filters import * - from copy import deepcopy +from .core import * +from .actions import * +from .filters import * + class LinearCreator(object): 
""" This class is used to create a FSM from a linear sequence of orders. The @@ -58,9 +58,8 @@ class LinearCreator(object): # Set the next state name - there is no way the caller should have # to deal with that. next_state_name = "State %d" % (self.nb_state+1) - event_filter.set_next_state(next_state_name) state = State(self.state_name, action_list=self.current_actions, - event_filter_list=[event_filter]) + event_filter_list=[(event_filter, next_state_name),]) self.state_name = next_state_name self.nb_state += 1 diff --git a/tutorius/overlayer.py b/tutorius/overlayer.py index 6b1b948..b967739 100644 --- a/tutorius/overlayer.py +++ b/tutorius/overlayer.py @@ -58,7 +58,7 @@ class Overlayer(gtk.Layout): @param overlayed widget to be overlayed. Will be resized to full size. """ def __init__(self, overlayed=None): - gtk.Layout.__init__(self) + super(Overlayer, self).__init__() self._overlayed = overlayed if overlayed: @@ -83,7 +83,7 @@ class Overlayer(gtk.Layout): if hasattr(child, "draw_with_context"): # if the widget has the CanvasDrawable protocol, use it. child.no_expose = True - gtk.Layout.put(self, child, x, y) + super(Overlayer, self).put(child, x, y) # be sure to redraw or the overlay may not show self.queue_draw() @@ -157,7 +157,7 @@ class TextBubble(gtk.Widget): A CanvasDrawableWidget drawing a round textbox and a tail pointing to a specified widget. """ - def __init__(self, text, speaker=None, tailpos=[0,0]): + def __init__(self, text, speaker=None, tailpos=(0,0)): """ Creates a new cairo rendered text bubble. @@ -199,7 +199,7 @@ class TextBubble(gtk.Widget): # TODO fetch speaker coordinates # draw bubble tail if present - if self.tailpos != [0,0]: + if self.tailpos != (0,0): context.move_to(xradius-width/4, yradius) context.line_to(self.tailpos[0], self.tailpos[1]) context.line_to(xradius+width/4, yradius) @@ -228,7 +228,7 @@ class TextBubble(gtk.Widget): context.fill() # bubble painting. 
Redrawing the inside after the tail will combine - if self.tailpos != [0,0]: + if self.tailpos != (0,0): context.move_to(xradius-width/4, yradius) context.line_to(self.tailpos[0], self.tailpos[1]) context.line_to(xradius+width/4, yradius) diff --git a/tutorius/properties.py b/tutorius/properties.py index 896ae67..ba3c211 100644 --- a/tutorius/properties.py +++ b/tutorius/properties.py @@ -19,12 +19,13 @@ TutoriusProperties have the same behaviour as python properties (assuming you also use the TPropContainer), with the added benefit of having builtin dialog prompts and constraint validation. """ +from copy import copy, deepcopy -from sugar.tutorius.constraints import Constraint, \ +from .constraints import Constraint, \ UpperLimitConstraint, LowerLimitConstraint, \ MaxSizeConstraint, MinSizeConstraint, \ - ColorConstraint, FileConstraint, BooleanConstraint, EnumConstraint -from copy import copy + ColorConstraint, FileConstraint, BooleanConstraint, EnumConstraint, \ + ResourceConstraint class TPropContainer(object): """ @@ -95,39 +96,31 @@ class TPropContainer(object): """ return object.__getattribute__(self, "_props").keys() - def is_identical(self, otherContainer): - for prop in self._props.keys(): - found = False - for otherProp in otherContainer._props.keys(): - if prop == otherProp: - this_type = getattr(type(self), prop).type - other_type = getattr(type(otherContainer), prop).type - if this_type != other_type: - return False - if this_type == "addonlist": - for inner_cont in self._props[prop]: - inner_found = False - for other_inner in otherContainer._props[prop]: - if inner_cont.is_identical(other_inner): - inner_found = True - break - if inner_found == False: - return False - found = True - break - elif this_type == "addon": - if not self._props[prop].is_identical(otherContainer._props[prop]): - return False - found = True - break - else: - if self._props[prop]== otherContainer._props[prop]: - found = True - break - if found == False: - return False - return 
True - + def get_properties_dict_copy(self): + """ + Return a deep copy of the dictionary of properties from that object. + """ + return deepcopy(self._props) + + # Providing the hash methods necessary to use TPropContainers + # in a dictionary, according to their properties + def __hash__(self): + #Return a hash of properties (key, value) sorted by key + #We need to transform the list of property key, value lists into + # a tuple of key, value tuples + return hash(tuple(map(tuple,sorted(self._props.items(), cmp=lambda x, y: cmp(x[0], y[0]))))) + + def __eq__(self, e2): + return isinstance(e2, type(self)) and self._props == e2._props + + # Adding methods for pickling and unpickling an object with + # properties + def __getstate__(self): + return self._props.copy() + + def __setstate__(self, dict): + self._props.update(dict) + class TutoriusProperty(object): """ The base class for all actions' properties. The interface is the following : @@ -178,19 +171,6 @@ class TAddonListProperty(TutoriusProperty): """ pass - - def get_constraints(self): - """ - Returns the list of constraints associated to this property. - """ - if self._constraints is None: - self._constraints = [] - for i in dir(self): - typ = getattr(self, i) - if isinstance(typ, Constraint): - self._constraints.append(i) - return self._constraints - class TIntProperty(TutoriusProperty): """ Represents an integer. 
Can have an upper value limit and/or a lower value @@ -240,8 +220,20 @@ class TArrayProperty(TutoriusProperty): self.type = "array" self.max_size_limit = MaxSizeConstraint(max_size_limit) self.min_size_limit = MinSizeConstraint(min_size_limit) - self.default = self.validate(value) + self.default = tuple(self.validate(value)) + #Make this thing hashable + def __setstate__(self, state): + self.max_size_limit = MaxSizeConstraint(state["max_size_limit"]) + self.min_size_limit = MinSizeConstraint(state["min_size_limit"]) + self.value = state["value"] + + def __getstate__(self): + return dict( + max_size_limit=self.max_size_limit.limit, + min_size_limit=self.min_size_limit.limit, + value=self.value, + ) class TColorProperty(TutoriusProperty): """ Represents a RGB color with 3 8-bit integer values. @@ -270,8 +262,6 @@ class TFileProperty(TutoriusProperty): For now, the path may be relative or absolute, as long as it exists on the local machine. - TODO : Make sure that we have a file scheme that supports distribution - on other computers (LP 355197) """ TutoriusProperty.__init__(self) @@ -320,13 +310,12 @@ class TUAMProperty(TutoriusProperty): """ Represents a widget of the interface by storing its UAM. """ - # TODO : Pending UAM check-in (LP 355199) def __init__(self, value=None): TutoriusProperty.__init__(self) self.type = "uam" - + self.default = self.validate(value) class TAddonProperty(TutoriusProperty): """ @@ -349,9 +338,19 @@ class TAddonProperty(TutoriusProperty): return super(TAddonProperty, self).validate(value) raise ValueError("Expected TPropContainer instance as TaddonProperty value") +class TEventType(TutoriusProperty): + """ + Represents an GUI signal for a widget. + """ + def __init__(self, value): + super(TEventType, self).__init__() + self.type = "gtk-signal" + + self.default = self.validate(value) + class TAddonListProperty(TutoriusProperty): """ - Reprensents an embedded tutorius Addon List Component. 
+ Represents an embedded tutorius Addon List Component. See TAddonProperty """ def __init__(self): @@ -369,18 +368,32 @@ class TAddonListProperty(TutoriusProperty): class TResourceProperty(TutoriusProperty): """ - Represents a resource associated to a tutorial. This resource is a file - that in distributed along with the tutorial. + Represents a resource in the tutorial. A resource is a file with a specific + name that exists under the tutorials folder. It is distributed alongside the + tutorial itself. - Its value should always be a file path relative to a tutorial's base - directory. + When the system encounters a resource, it knows that it refers to a file in + the resource folder and that it should translate this resource name to an + absolute file name before it is executed. - This is a data-model only property and it will always be replaced by a - TFileProperty when calling the action's do() method. + E.g. An image is added to a tutorial in an action. On doing so, the creator + adds a resource to the tutorial, then saves its name in the resource + property of that action. When this tutorial is executed, the Engine + replaces all the TResourceProperties inside the action by their equivalent + TFileProperties with absolute paths, so that they can be used on any + machine. """ - def __init__(self): + def __init__(self, resource_name=""): + """ + Creates a new resource pointing to an existing resource. 
+ + @param resource_name The file name of the resource (should be only the + file name itself, no directory information) + """ TutoriusProperty.__init__(self) self.type = "resource" - self.default = "" - + self.resource_cons = ResourceConstraint() + + self.default = self.validate("") + diff --git a/tutorius/service.py b/tutorius/service.py index 61c6526..eb246a1 100644 --- a/tutorius/service.py +++ b/tutorius/service.py @@ -1,6 +1,8 @@ -from engine import Engine import dbus +from .engine import Engine +from .dbustools import remote_call + _DBUS_SERVICE = "org.tutorius.Service" _DBUS_PATH = "/org/tutorius/Service" _DBUS_SERVICE_IFACE = "org.tutorius.Service" @@ -27,7 +29,7 @@ class Service(dbus.service.Object): @dbus.service.method(_DBUS_SERVICE_IFACE, in_signature="s", out_signature="") def launch(self, tutorialID): - """ Launch a tutorial + """ Launch a tutorial @param tutorialID unique tutorial identifier used to retrieve it from the disk """ if self._engine == None: @@ -57,13 +59,20 @@ class ServiceProxy: self._service = dbus.Interface(self._object, _DBUS_SERVICE_IFACE) def launch(self, tutorialID): - self._service.launch(tutorialID) + """ Launch a tutorial + @param tutorialID unique tutorial identifier used to retrieve it from the disk + """ + remote_call(self._service.launch, (tutorialID, ), block=False) def stop(self): - self._service.stop() + """ Stop the current tutorial + """ + remote_call(self._service.stop, (), block=False) def pause(self): - self._service.pause() + """ Interrupt the current tutorial and save its state in the journal + """ + remote_call(self._service.pause, (), block=False) if __name__ == "__main__": import dbus.mainloop.glib diff --git a/tutorius/store.py b/tutorius/store.py new file mode 100644 index 0000000..81925ed --- /dev/null +++ b/tutorius/store.py @@ -0,0 +1,473 @@ +# Copyright (C) 2009, Tutorius.org +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License 
as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +import urllib +import urllib2 +from xml.dom import minidom +from apilib.restful_lib import Connection +from array import array + +class StoreProxy(object): + """ + Implements a communication channel with the Tutorius Store, where tutorials + are shared from around the world. This proxy is meant to offer a one-stop + shop to implement all the requests that could be made to the Store. + """ + + def __init__(self, base_url): + + # Base Urls for the api + self.base_url = base_url + self.remora_api = "api/1.4" + self.tutorius_api = "TutoriusApi" + self.bandwagon_api = "api/1.4/sharing" + + self.api_auth_key = None + + # Prepares the connection with the api + self.conn = Connection(self.base_url) + + # Setup the helper + self.helper = StoreProxyHelper() + + def get_categories(self): + """ + Returns all the categories registered in the store. Categories are used to + classify tutorials according to a theme. (e.g. Mathematics, History, etc...) + + @return The list of category names stored on the server. 
+ """ + + request_url = "/%s/categories" % (self.tutorius_api) + + response = self.conn.request_get(request_url) + + if self.helper.iserror(response): + return None + + xml_response = minidom.parseString(response['body']) + + xml_categories = xml_response.getElementsByTagName('category') + + categories = list() + + # Loop through the categories and create the list to be returned + for xml_category in xml_categories: + category = {} + + category['id'] = xml_category.getElementsByTagName('id')[0].firstChild.nodeValue + category['name'] = xml_category.getElementsByTagName('name')[0].firstChild.nodeValue + + categories.append(category) + + return categories + + def search(self, keywords, category='all', page=1, numResults=10, sortBy='name'): + """ + Returns a list of tutorials that correspond to the given search criteria. + + @param keywords The keywords to search for + @param page The page in the result set from which to return results. This is + used to allow applications to fetch results one set at a time. + @param numResults The max number of results that can be returned in a page + @param sortBy The field on which to sort the results + @return A list of tutorial meta-data that corresponds to the query + """ + request_url = "/%s/search/%s/%s/%d/%d/%s" % (self.tutorius_api, keywords, category, page, numResults, sortBy) + + response = self.conn.request_get(request_url) + + if (self.helper.iserror(response)): + return None + + xml_response = minidom.parseString(response['body']) + + xml_tutorials = xml_response.getElementsByTagName('tutorial') + + tutorials = list() + + for xml_tutorial in xml_tutorials: + tutorial = self.helper.parse_tutorial(xml_tutorial) + tutorials.append(tutorial) + + return tutorials + + def get_tutorials(self, category='all', page=1, numResults=10, sortBy='name'): + """ + Returns the list of tutorials that correspond to the given search criteria. + + @param category The category in which to restrict the search. 
+ @param page The page in the result set from which to return results. This is + used to allow applications to fetch results one set at a time. + @param numResults The max number of results that can be returned in a page + @param sortBy The field on which to sort the results + @return A list of tutorial meta-data that corresponds to the query + """ + + request_url = "/%s/tutorials/%s/%d/%d/%s" % (self.tutorius_api, category, page, numResults, sortBy) + + response = self.conn.request_get(request_url) + + if (self.helper.iserror(response)): + return None + + xml_response = minidom.parseString(response['body']) + + xml_tutorials = xml_response.getElementsByTagName('tutorial') + + tutorials = list() + + for xml_tutorial in xml_tutorials: + tutorial = self.helper.parse_tutorial(xml_tutorial) + tutorials.append(tutorial) + + return tutorials + + def list(self, type='recommended', numResults=3): + """ + Returns a list of tutorials corresponding to the type specified. + Type examples: 'Most downloaded', 'recommended', etc. + + @param type The type of list (Most downloaded, recommended, etc.) + @return A list of tutorials + """ + request_url = "/%s/list/%s/tutorial/%s" % (self.remora_api, type, numResults) + + response = self.conn.request_get(request_url) + + if (self.helper.iserror(response)): + return None + + xml_response = minidom.parseString(response['body']) + + xml_tutorials = xml_response.getElementsByTagName('addon') + + tutorials = list() + + for xml_tutorial in xml_tutorials: + tutorial = self.helper.parse_tutorial(xml_tutorial) + tutorials.append(tutorial) + + return tutorials + + + def get_latest_version(self, tutorial_id_list): + """ + Returns the latest version number on the server, for each tutorial ID + in the list. + + @param tutorial_id_list The list of tutorial IDs from which we want to + known the latest version number. + @return A dictionary having the tutorial ID as the key and the version + as the value. 
+ """ + + versions = {} + + for tutorial_id in tutorial_id_list: + + request_url = "/%s/addon/%s/" % (self.remora_api, tutorial_id) + + response = self.conn.request_get(request_url) + + if (self.helper.iserror(response)): + return None + + xml = minidom.parseString(response['body']) + + versionnode = xml.getElementsByTagName("version")[0] + + version = versionnode.firstChild.nodeValue + + versions[tutorial_id] = version + + return versions + + def download_tutorial(self, tutorial_id, version=None): + """ + Fetches the tutorial file from the server and returns the + + @param tutorial_id The tutorial that we want to get + @param version The version number that we want to download. If None, + the latest version will be downloaded. + @return The downloaded file itself (an in-memory representation of the file, + not a path to it on the disk) + + TODO : We should decide if we're saving to disk or in mem. + """ + request_url = "/%s/addon/%s/" % (self.remora_api, tutorial_id) + + response = self.conn.request_get(request_url) + + if (self.helper.iserror(response)): + return None + + xml = minidom.parseString(response['body']) + + installnode = xml.getElementsByTagName("install")[0] + installurl = installnode.firstChild.nodeValue + + fp = urllib.urlopen(installurl) + + return fp + + def login(self, username, password): + """ + Logs in the user on the store and saves the login status in the proxy + state. After a successful logon, the operation requiring a login will + be successful. 
+ + @param username + @param password + @return True if the login was successful, False otherwise + """ + request_url = "/%s/auth/" % (self.tutorius_api) + + params = {'username': username, 'password': password} + + response = self.conn.request_post(request_url, params) + + if (self.helper.iserror(response)): + return False + + xml_response = minidom.parseString(response['body']) + + keynode = xml_response.getElementsByTagName("token")[0] + + key = keynode.getAttribute('value') + + self.api_auth_key = key + + return True + + def close_session(self): + """ + Ends the user's session on the server and changes the state of the proxy + to disallow the calls to the store that requires to be logged in. + + @return True if the user was disconnected, False otherwise + """ + request_url = "/%s/auth/%s" % (self.tutorius_api, self.api_auth_key) + + headers = { 'X-API-Auth' : self.api_auth_key } + + response = self.conn.request_delete(request_url, None, headers) + + if (self.helper.iserror(response)): + return False + + self.api_auth_key = None + + return True + + def get_session_id(self): + """ + Gives the current session ID cached in the Store Proxy, or returns + None is the user is not logged yet. + + @return The current session's ID, or None if the user is not logged + """ + return self.api_auth_key + + def rate(self, value, tutorial_store_id): + """ + Sends a rating for the given tutorial. + + This function requires the user to be logged in. + + @param value The value of the rating. It must be an integer with a value + from 1 to 5. + @param tutorial_store_id The ID of the tutorial that was rated + @return True if the rating was sent to the Store, False otherwise. 
+ """ + request_url = "/%s/review/%s" % (self.tutorius_api, tutorial_store_id) + + params = {'title': 'from api', 'body': 'from api', 'rating': value} + headers = { 'X-API-Auth' : self.api_auth_key } + + response = self.conn.request_post(request_url, params, None, None, headers) + + if self.helper.iserror(response): + return False + + return True + + def publish(self, tutorial, tutorial_info=None, tutorial_store_id = None): + """ + Sends a tutorial to the store. + + This function requires the user to be logged in. + + @param tutorial The tutorial file to be sent. Note that this is the + content itself and not the path to the file. + @param tutorial_info An array containing the tutorial information + @return True if the tutorial was sent correctly, False otherwise. + """ + + # This is in the case we have to re-publish a tutorial + if tutorial_store_id is not None: + request_url = "/%s/publish/%s" % (self.tutorius_api, tutorial_store_id) + headers = { 'X-API-Auth' : self.api_auth_key } + + response = self.conn.request_post(request_url, None, None, None, headers) + + if self.helper.iserror(response): + return False + + return True + + # Otherwise, we want to publish a new tutorial + if tutorial_info == None: + return False + + request_url = "/%s/publish/" % (self.tutorius_api) + + headers = { 'X-API-Auth' : self.api_auth_key } + + response = self.conn.request_post(request_url, tutorial_info, tutorial, tutorial_info['filename'], headers) + + if self.helper.iserror(response): + return False + + return True + + + def unpublish(self, tutorial_store_id): + """ + Removes a tutorial from the server. The user in the current session + needs to be the creator for it to be unpublished. This will remove + the file from the server and from all its collections and categories. + + This function requires the user to be logged in. 
+ + @param tutorial_store_id The ID of the tutorial to be removed + @return True if the tutorial was properly removed from the server + """ + request_url = "/%s/publish/%s" % (self.tutorius_api, tutorial_store_id) + + headers = { 'X-API-Auth' : self.api_auth_key } + response = self.conn.request_delete(request_url, None, headers) + + if self.helper.iserror(response): + return False + + return True + + def update_published_tutorial(self, tutorial_id, tutorial, tutorial_info): + """ + Sends the new content for the tutorial with the given ID. + + This function requires the user to be logged in. + + @param tutorial_id The ID of the tutorial to be updated + @param tutorial The bundled tutorial file content (not a path!) + @return True if the tutorial was sent and updated, False otherwise + """ + request_url = "/%s/update/%s" % (self.tutorius_api, tutorial_id) + + headers = { 'X-API-Auth' : self.api_auth_key } + + response = self.conn.request_post(request_url, tutorial_info, tutorial, tutorial_info['filename'], headers) + + if self.helper.iserror(response): + return False + + return True + + + def register_new_user(self, user_info): + """ + Creates a new user from the given user information. + + @param user_info A structure containing all the data required to do a login. 
+ @return True if the new account was created, False otherwise + """ + request_url = "/%s/registerNewUser" % (self.tutorius_api) + + params = {'nickname': user_info['nickname'], 'password': user_info['password'], 'email': user_info['email']} + + response = self.conn.request_post(request_url, params) + + if self.helper.iserror(response): + return False + + return True + + +class StoreProxyHelper(object): + """ + Implements helper methods for the Store, more specifically + methods to handle xml responses and errors + """ + def iserror(self, response): + """ + Check if the response received from the server is an error + + @param response The XML response from the server + @return True if the response is an error + """ + + # first look for HTTP errors + http_status = response['headers']['status'] + + if http_status in ['400', '401', '403', '500' ]: + return True + + # Now check if the response is valid XML + try: + minidom.parseString(response['body']) + except Exception, e: + return True + + # The response is valid XML, parse it and look for + # an error in xml format + xml_response = minidom.parseString(response['body']) + + errors = xml_response.getElementsByTagName('error') + + if (len(errors) > 0): + return True + + return False + + def parse_tutorial(self, xml_tutorial): + """ + Parse a tutorial's XML metadata and returns a dictionary + containing the metadata + + @param xml_tutorial The tutorial metadata in XML format + @return A dictionary containing the metadata + """ + tutorial = {} + + params = [ + 'name', + 'summary', + 'version', + 'description', + 'author', + 'rating' + ] + + for param in params: + xml_node = xml_tutorial.getElementsByTagName(param)[0].firstChild + + if xml_node != None: + tutorial[param] = xml_node.nodeValue + else: + tutorial[param] = '' + + return tutorial diff --git a/tutorius/tutorial.py b/tutorius/tutorial.py new file mode 100644 index 0000000..b45363f --- /dev/null +++ b/tutorius/tutorial.py @@ -0,0 +1,829 @@ +# Copyright (C) 
2009, Tutorius.org +# Copyright (C) 2009, Erick Lavoie <erick.lavoie@gmail.com> +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +#TODO: For notification of modifications on the Tutorial check for GObject and PyDispatcher for inspiration + +from .constraints import ConstraintException +from .properties import TPropContainer + +_NAME_SEPARATOR = "/" + +class Tutorial(object): + """ This class replaces the previous Tutorial class and + allows manipulation of the abstract representation + of a tutorial as a state machine + """ + + INIT = "INIT" + END = "END" + INITIAL_TRANSITION_NAME = INIT + "/transition0" + + + def __init__(self, name, state_dict=None): + """ + The constructor for the Tutorial. By default, the tutorial contains + only an initial state and an end state. + The initial state doesn't contain any action but it contains + a single automatic transition <Tutorial.INITIAL_TRANSITION_NAME> + between the initial state <Tutorial.INIT> and the end state + <Tutorial.END>. + + The end state doesn't contain any action nor transition. + + If state_dict is provided, a valid initial state and an end state + must be provided. 
+ + @param name The name of the tutorial + @param state_dict optional, a valid dictionary of states + @raise InvalidStateDictionary + """ + self.name = name + + + # We will use an adjacency list representation through the + # usage of state objects because our graph representation + # is really sparse and mostly linear, for a brief + # example of graph programming in python see: + # http://www.python.org/doc/essays/graphs + if not state_dict: + self._state_dict = \ + {Tutorial.INIT:State(name=Tutorial.INIT),\ + Tutorial.END:State(name=Tutorial.END)} + + self.add_transition(Tutorial.INIT, \ + (AutomaticTransitionEvent(), Tutorial.END)) + else: + self._state_dict = state_dict + + + + # Minimally check for the presence of an INIT and an END + # state + if not self._state_dict.has_key(Tutorial.INIT): + raise Exception("No INIT state found in state_dict") + + if not self._state_dict.has_key(Tutorial.END): + raise Exception("No END state found in state_dict") + + # TODO: Validate once validation is working + #self.validate() + + # Initialize variables for generating unique names + # TODO: We should take the max number from the + # existing state names + self._state_name_nb = 0 + + + def add_state(self, action_list=(), transition_list=()): + """ + Add a new state to the state machine. The state is + initialized with the action list and transition list + and a new unique name is returned for this state. + + The actions are added using add_action. + + The transitions are added using add_transition. 
+ + @param action_list The list of valid actions for this state + @param transition_list The list of valid transitions + @return unique name for this state + """ + name = self._generate_unique_state_name() + + for action in action_list: + self._validate_action(action) + + for transition in transition_list: + self._validate_transition(transition) + + state = State(name, action_list, transition_list) + + self._state_dict[name] = state + + return name + + + def add_action(self, state_name, action): + """ + Add an action to a specific state. A name unique throughout the + tutorial is generated to refer precisely to this action + and is returned. + + The action is validated. + + @param state_name The name of the state to add an action to + @param action The action to be added + @return unique name for this action + @raise LookupError if state_name doesn't exist + """ + if not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: state <" + state_name +\ + "> is not defined") + + self._validate_action(action) + + return self._state_dict[state_name].add_action(action) + + def add_transition(self, state_name, transition): + """ + Add a transition to a specific state. A name unique throughout the + tutorial is generated to refer precisely to this transition + and is returned. Inserting a duplicate transition will raise + an exception. + + The transition is validated. 
+ + @param state_name The name of the state to add a transition to + @param transition The transition to be added + @return unique name for this action + @raise LookupError if state_name doesn't exist + @raise TransitionAlreadyExists + """ + if not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: state <" + state_name +\ + "> is not defined") + + self._validate_transition(transition) + + # The unicity of the transition is validated by the state + return self._state_dict[state_name].add_transition(transition) + + def update_action(self, action_name, new_properties): + """ + Update the action with action_name with a property dictionary + new_properties. If one property update is invalid, the old + values are restored and an exception is raised. + + @param action_name The name of the action to update + @param new_properties The properties that will update the action + @return old properties from the action + @raise LookupError if action_name doesn't exist + @raise ConstraintException if a property constraint is violated + """ + state_name = self._validate_state_name(action_name) + + #TODO: We should validate that only properties defined on the action + # are passed in + + return self._state_dict[state_name].update_action(action_name, new_properties) + + def update_transition(self, transition_name, new_properties=None, new_state=None): + """ + Update the transition with transition_name with new properties and/or + a new state to transition to. A None value means that the corresponding + value won't be updated. If one property update is invalid, the old + values are restored and an exception is raised. 
+ + @param transition_name The name of the transition to replace + @param new_properties The properties that will update the transition + @param new_state The new state to transition to + @return a tuple (old_properties, old_state) with previous values + @raise LookupError if transition_name doesn't exist + @raise ConstraintException if a property constraint is violated + """ + state_name = self._validate_state_name(transition_name) + + if not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: transition <" + transition_name +\ + "> is not defined") + + if new_state and not self._state_dict.has_key(new_state): + raise LookupError("Tutorial: destination state <" + new_state +\ + "> is not defined") + + #TODO: We should validate that only properties defined on the action + # are passed in + + return self._state_dict[state_name].update_transition(transition_name, new_properties, new_state) + + def delete_action(self, action_name): + """ + Delete the action identified by action_name. + + @param action_name The name of the action to be deleted + @return the action that has been deleted + @raise LookupError if transition_name doesn't exist + """ + state_name = self._validate_state_name(action_name) + + return self._state_dict[state_name].delete_action(action_name) + + def delete_transition(self, transition_name): + """ + Delete the transition identified by transition_name. + + @param transition_name The name of the transition to be deleted + @return the transition that has been deleted + @raise LookupError if transition_name doesn't exist + """ + state_name = self._validate_state_name(transition_name) + + return self._state_dict[state_name].delete_transition(transition_name) + + def delete_state(self, state_name): + """ + Delete the state, delete all the actions and transitions + in this state, update the transitions from the state that + pointed to this one to point to the next state and remove all the + unreachable states recursively. 
+ + All but the INIT and END states can be deleted. + + @param state_name The name of the state to remove + @return The deleted state + @raise StateDeletionError when trying to delete the INIT or the END state + @raise LookupError if state_name doesn't exist + """ + self._validate_state_name(state_name) + + if state_name == Tutorial.INIT or state_name == Tutorial.END: + raise StateDeletionError("<" + state_name + "> cannot be deleted") + + next_states = set(self.get_following_states_dict(state_name).values()) + previous_states = set(self.get_previous_states_dict(state_name).values()) + + # For now tutorials should be completely linear, + # let's make sure they are + assert len(next_states) <= 1 and len(previous_states) <= 1 + + # Update transitions only if they existed + if len(next_states) == 1 and len(previous_states) == 1: + next_state = next_states.pop() + previous_state = previous_states.pop() + + transitions = previous_state.get_transition_dict() + for transition_name, (event, state_to_delete) in \ + transitions.iteritems(): + self.update_transition(transition_name, None, next_state.name) + + # Since we assume tutorials are linear for now, we do not need + # to search for unreachable states + + return self._state_dict.pop(state_name) + + + + def get_action_dict(self, state_name=None): + """ + Returns a reference to the dictionary of all actions for a specific + state. + If no state_name is provided, returns an action dictionary + containing actions for all states. 
+ + @param state_name The name of the state to list actions from + @return A dictionary of actions with action_name as key and action + as value for state_name + @raise LookupError if state_name doesn't exist + """ + if state_name and not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: state <" + state_name +\ + "> is not defined") + elif state_name: + return self._state_dict[state_name].get_action_dict() + else: + action_dict = {} + for state in self._state_dict.itervalues(): + action_dict.update(state.get_action_dict()) + return action_dict + + def get_transition_dict(self, state_name=None): + """ + Returns a dictionary of all actions for a specific state. + If no state_name is provided, returns an action dictionary + containing actions for all states. + + @param state_name The name of the state to list actions from + @return A dictionary of transitions with transition_name as key and transition as value for state_name + @raise LookupError if state_name doesn't exist + """ + if state_name and not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: state <" + state_name +\ + "> is not defined") + elif state_name: + return self._state_dict[state_name].get_transition_dict() + else: + transition_dict = {} + for state in self._state_dict.itervalues(): + transition_dict.update(state.get_transition_dict()) + return transition_dict + + + def get_state_dict(self): + """ + Returns a reference to the internal state dictionary used by + the Tutorial. + + @return A reference to the dictionary of all the states in the tutorial with state_name as key and state as value + """ + # Maybe we will need to change it for an immutable dictionary + # to make sure the internal representation is not modified + return self._state_dict + + def get_following_states_dict(self, state_name): + """ + Returns a dictionary of the states that are immediately reachable from + a specific state. 
+ + @param state_name The name of the state + @raise LookupError if state_name doesn't exist + """ + if not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: state <" + state_name +\ + "> is not defined") + + following_states_dict = {} + for (event, next_state) in \ + self._state_dict[state_name].get_transition_dict().itervalues(): + following_states_dict[next_state] = self._state_dict[next_state] + + return following_states_dict + + def get_previous_states_dict(self, state_name): + """ + Returns a dictionary of the states that can transition to a + specific state. + + @param state_name The name of the state + @raise LookupError if state_name doesn't exist + """ + if not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: state <" + state_name +\ + "> is not defined") + + + previous_states_dict = {} + for iter_state_name, state in \ + self._state_dict.iteritems(): + + for (event, next_state) in \ + self._state_dict[iter_state_name].get_transition_dict().itervalues(): + + if next_state != state_name: + continue + + previous_states_dict[iter_state_name] = state + # if we have found one, do not look for other transitions + # from this state + break + + return previous_states_dict + + # Convenience methods for common tutorial manipulations + def add_state_before(self, state_name, action_list=[], event_list=[]): + """ + Add a new state just before another state state_name. All transitions + going to state_name are updated to end on the new state and all + events will be converted to transitions ending on state_name. + + When event_list is empty, an automatic transition to state_name + will be added to maintain consistency. 
+ + @param state_name The name of the state that will be preceded by the + new state + @param action_list The list of valid actions for this state + @param event_list The list of events that will be converted to transitions to state_name + @return unique name for this state + @raise LookupError if state_name doesn't exist + """ + raise NotImplementedError + + # Callback mecanism to allow automatic change notification when + # the tutorial is modified + def register_action_added_cb(self, cb): + """ + Register a function cb that will be called when any action from + the tutorial is added. + + cb should be of the form: + + cb(action_name, new_action) where: + action_name is the unique name of the action that was added + new_action is the new action + + @param cb The callback function to be called + @raise InvalidCallbackFunction if the callback has less or more than + 2 arguments + """ + raise NotImplementedError + + def register_action_updated_cb(self, cb): + """ + Register a function cb that will be called when any action from + the tutorial is updated. + + cb should be of the form: + + cb(action_name, new_action) where: + action_name is the unique name of the action that has changed + new_action is the new action that replaces the old one + + @param cb The callback function to be called + @raise InvalidCallbackFunction if the callback has less or more than + 2 arguments + """ + raise NotImplementedError + + def register_action_deleted_cb(self, cb): + """ + Register a function cb that will be called when any action from + the tutorial is deleted. 
+ + cb should be of the form: + + cb(action_name, old_action) where: + action_name is the unique name of the action that was deleted + old_action is the new action that replaces the old one + + @param cb The callback function to be called + @raise InvalidCallbackFunction if the callback has less or more than + 2 arguments + """ + raise NotImplementedError + + def register_transition_updated_cb(self, cb): + """ + Register a function cb that will be called when any transition from + the tutorial is updated. + + cb should be of the form: + + cb(transition_name, new_transition) where: + transition_name is the unique name of the transition + that has changed + new_transition is the new transition that replaces the old one + + @param cb The callback function to be called + @raise InvalidCallbackFunction if the callback has less or more than + 2 arguments + """ + raise NotImplementedError + + # Validation to assert precondition + def _validate_action(self, action): + """ + Validate that an action conforms to what we expect, + throws an exception otherwise. + + @param action The action to validate + @except InvalidAction if the action fails to conform to what we expect + """ + pass + + def _validate_transition(self, transition): + """ + Validate that a transition conforms to what we expect, + throws an exception otherwise. 
+ + @param transition The transition to validate + @except InvalidTransition if the transition fails to conform to what we expect + """ + pass + + # Validation decorators to assert preconditions + def _validate_state_name(self,name): + """ + Assert that the state name found in the first part of the string + actually exists + + @param name The name that starts with a state name + @return the state_name from name + @raise LookupError if state_name doesn't exist + """ + state_name = name + + if name.find(_NAME_SEPARATOR) != -1: + state_name = name[:name.find(_NAME_SEPARATOR)] + + if not self._state_dict.has_key(state_name): + raise LookupError("Tutorial: state <" + str(state_name) +\ + "> is not defined") + + return state_name + + def validate(self): + """ + Validate the state machine for a serie of properties: + 1. No unreachable states + 2. No dead end state (except END) + 3. No branching in the main path + 4. No loop in the main path + 5. ... + + Throw an exception for the first condition that is not met. + """ + raise NotImplementedError + + def _generate_unique_state_name(self): + name = "State" + str(self._state_name_nb) + while name in self._state_dict: + self._state_name_nb += 1 + name = "State" + str(self._state_name_nb) + return name + + # Python Magic Methods + def __str__(self): + """ + Return a string representation of the tutorial + """ + return str(self._state_dict) + + def __eq__(self, other): + return isinstance(other, type(self)) and self.get_state_dict() == other.get_state_dict() + +class State(object): + """ + This is a step in a tutorial. The state represents a collection of actions + to undertake when entering the state, and a series of transitions to lead + to next states. + + This class is not meant to be used explicitly as no validation is done on + inputs, the validation should be done by the containing class. 
+ """ + + def __init__(self, name, actions={}, transitions={}): + """ + Initializes the content of the state, such as loading the actions + that are required and building the correct transitions. + + @param actions list or dict of actions to perform when entering the + state + @param transitions list or dict of tuples of the form + (event, next_state_name), that explains the outgoing links for + this state + + For actions and transitions, dictionaries allow specifying the name. + If lists are given, their contents will be added with add_action or + add_transition + """ + object.__init__(self) + + self.name = name + + # Initialize internal variables for name generation + self.action_name_nb = 0 + self.transition_name_nb = 0 + + if type(actions) is dict: + self._actions = dict(actions) + else: + self._actions = {} + for action in actions: + self.add_action(action) + + if type(transitions) is dict: + self._transitions = dict(transitions) + else: + self._transitions = {} + for transition in transitions: + self.add_transition(transition) + + + # Action manipulations + def add_action(self, new_action): + """ + Adds an action to the state + + @param new_action The action to add + @return a unique name for this action + """ + action_name = self._generate_unique_action_name(new_action) + self._actions[action_name] = new_action + return action_name + + def delete_action(self, action_name): + """ + Delete the action with the name action_name + + @param action_name The name of the action to delete + @return The action deleted + @raise LookupError if action_name doesn't exist + """ + if self._actions.has_key(action_name): + return self._actions.pop(action_name) + else: + raise LookupError("Tutorial.State: action <" + action_name + "> is not defined") + + def update_action(self, action_name, new_properties): + """ + Update the action with action_name with a property dictionary + new_properties. 
If one property update is invalid, the old + values are restored and an exception is raised. + + @param action_name The name of the action to update + @param new_properties The properties that will update the action + @return The old properties from the action + @raise LookupError if action_name doesn't exist + @raise ConstraintException if a property constraint is violated + """ + if not self._actions.has_key(action_name): + raise LookupError("Tutorial.State: action <" + action_name + "> is not defined") + + action = self._actions[action_name] + old_properties = action.get_properties_dict_copy() + try: + for property_name, property_value in new_properties.iteritems(): + action.__setattr__(property_name, property_value) + return old_properties + except ConstraintException, e: + action._props = old_properties + raise e + + def get_action_dict(self): + """ + Return the reference to the internal action dictionary. + + @return A dictionary of actions that the state will execute + """ + return self._actions + + def delete_actions(self): + """ + Removes all the action associated with this state. A cleared state will + not do anything when entered or exited. + """ + self._actions = {} + + # Transition manipulations + def add_transition(self, new_transition): + """ + Adds a transition from this state to another state. + + The same transition may not be added twice. + + @param transition The new transition. 
+ @return A unique name for the transition + @raise TransitionAlreadyExists if an equivalent transition exists + """ + for transition in self._transitions.itervalues(): + if transition == new_transition: + raise TransitionAlreadyExists(str(transition)) + + transition_name = self._generate_unique_transition_name(new_transition) + self._transitions[transition_name] = new_transition + return transition_name + + def update_transition(self, transition_name, new_properties=None, new_state=None): + """ + Update the transition with transition_name with new properties and/or + a new state to transition to. A None value means that the corresponding + value won't be updated. If one property update is invalid, the old + values are restored and an exception is raised. + + @param transition_name The name of the transition to replace + @param new_properties The properties that will update the event on the transition + @param new_state The new state to transition to + @return a tuple (old_properties, old_state) with previous values + @raise LookupError if transition_name doesn't exist + @raise ConstraintException if a property constraint is violated + """ + if not self._transitions.has_key(transition_name): + raise LookupError("Tutorial.State: transition <" + transition_name + "> is not defined") + + transition = self._transitions[transition_name] + + tmp_event = transition[0] + tmp_state = transition[1] + + prop = new_properties or {} + + old_properties = transition[0].get_properties_dict_copy() + old_state = transition[1] + + try: + for property_name, property_value in prop.iteritems(): + tmp_event.__setattr__(property_name, property_value) + except ConstraintException, e: + tmp_event._props = old_properties + raise e + + if new_state: + tmp_state = new_state + + self._transitions[transition_name] = (tmp_event, tmp_state) + + return (old_properties, old_state) + + def delete_transition(self, transition_name): + """ + Delete the transition with the name transition_name + + @param 
transition_name The name of the transition to delete + @return The transition deleted + @raise LookupError if transition_name doesn't exist + """ + if self._transitions.has_key(transition_name): + return self._transitions.pop(transition_name) + else: + raise LookupError("Tutorial.State: transition <" + transition_name + "> is not defined") + + def get_transition_dict(self): + """ + Return the reference to the internal transition dictionary. + + @return The dictionary of transitions associated with this state. + """ + return self._transitions + + def delete_transitions(self): + """ + Delete all the transitions associated with this state. + """ + self._transitions = {} + + def _generate_unique_action_name(self, action): + """ + Returns a unique name for the action in this state, + the actual content of the name should not be relied upon + for correct behavior + + @param action The action to generate a name for + @return A name garanteed to be unique within this state + """ + #TODO use the action class name to generate a name + # to make it easier to debug and know what we are + # manipulating + name = self.name + _NAME_SEPARATOR + "action" + str(self.action_name_nb) + while name in self._actions: + self.action_name_nb += 1 + name = self.name + _NAME_SEPARATOR + "action" + str(self.action_name_nb) + return name + + def _generate_unique_transition_name(self, transition): + """ + Returns a unique name for the transition in this state, + the actual content of the name should not be relied upon + for correct behavior + + @param transition The transition to generate a name for + @return A name garanteed to be unique within this state + """ + #TODO use the event class name from the transition to + # generate a name to make it easier to debug and know + # what we are manipulating + name = self.name + _NAME_SEPARATOR + "transition" + str(self.transition_name_nb) + while name in self._transitions: + self.transition_name_nb += 1 + name = self.name + _NAME_SEPARATOR + 
"transition" + str(self.transition_name_nb) + return name + + def __eq__(self, otherState): + """ + Compare current state to otherState. + + Two states are considered equal if and only if: + -every action in this state has a matching action in the + other state with the same properties and values + -every event filters in this state has a matching filter in the + other state having the same properties and values + -both states have the same name. + + + @param otherState The state that will be compared to this one + @return True if the states are the same, False otherwise +` """ + return isinstance(otherState, type(self)) and \ + self.get_action_dict() == otherState.get_action_dict() and \ + self.get_transition_dict() == otherState.get_transition_dict() + +#TODO: Define the automatic transition in the same way as +# other events +class AutomaticTransitionEvent(TPropContainer): + def __repr__(self): + return str(self.__class__.__name__) + + +################## Error Handling and Exceptions ############################## + +class TransitionAlreadyExists(Exception): + """ + Raised when a duplicate transition is added to a state + """ + pass + + +class InvalidStateDictionary(Exception): + """ + Raised when an initialization dictionary could not be used to initialize + a tutorial + """ + pass + +class StateDeletionError(Exception): + """ + Raised when trying to delete an INIT or an END state from a tutorial + """ + pass diff --git a/tutorius/vault.py b/tutorius/vault.py new file mode 100644 index 0000000..d6b4720 --- /dev/null +++ b/tutorius/vault.py @@ -0,0 +1,919 @@ +# Copyright (C) 2009, Tutorius.org +# Copyright (C) 2009, Jean-Christophe Savard <savard.jean.christophe@gmail.com> +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + + +""" +This module contains all the data handling class of Tutorius +""" + +import logging +import os +import shutil +import tempfile +import uuid +import xml.dom.minidom +from xml.dom import NotFoundErr +import zipfile +from ConfigParser import SafeConfigParser + +from . import addon +from .tutorial import Tutorial, State, AutomaticTransitionEvent + +logger = logging.getLogger("tutorius") + +# this is where user installed/generated tutorials will go +def _get_store_root(): + profile_name = os.getenv("SUGAR_PROFILE") or "default" + return os.path.join(os.getenv("HOME"), + ".sugar",profile_name,"tutorius","data") +# this is where activity bundled tutorials should be, under the activity bundle +def _get_bundle_root(): + """ + Return the path of the bundled activity, or None if not applicable. 
+ """ + if os.getenv("SUGAR_BUNDLE_PATH") != None: + return os.path.join(os.getenv("SUGAR_BUNDLE_PATH"),"data","tutorius","data") + else: + return None + +INI_ACTIVITY_SECTION = "RELATED_ACTIVITIES" +INI_METADATA_SECTION = "GENERAL_METADATA" +INI_GUID_PROPERTY = "guid" +INI_NAME_PROPERTY = "name" +INI_XML_FSM_PROPERTY = "fsm_filename" +INI_VERSION_PROPERTY = 'version' +INI_FILENAME = "meta.ini" +TUTORIAL_FILENAME = "tutorial.xml" + +###################################################################### +# XML Tag names and attributes +###################################################################### +ELEM_FSM = "FSM" +ELEM_STATES = "States" +ELEM_STATE = "State" +ELEM_ACTIONS = "Actions" +ELEM_TRANS = "Transitions" +ELEM_AUTOTRANS = "AutomaticTransition" +NODE_COMPONENT = "Component" +NODE_SUBCOMPONENT = "property" +NODE_SUBCOMPONENTLIST = "listproperty" +NAME_ATTR = "__name__" +NEXT_STATE_ATTR = "__next_state__" +START_STATE_ATTR = "__start_state__" + +class Vault(object): + + ## Vault internal functions : + @staticmethod + def list_available_tutorials(activity_name = None, activity_vers = 0): + """ + Generate the list of all tutorials present on disk for a + given activity. + + @param activity_name the name of the activity associated with this tutorial. None means ALL activities + @param activity_vers the version number of the activity to find tutorail for. 0 means find for ANY version. If activity_name is None, version number is not used + @returns a map of tutorial {names : GUID}. 
+ """ + # check both under the activity data and user installed folders + if _get_bundle_root() != None: + paths = [_get_store_root(), _get_bundle_root()] + else: + paths = [_get_store_root()] + + tutoGuidName = {} + + for repository in paths: + # (our) convention dictates that tutorial folders are named + # with their GUID (for unicity) + try: + for tuto in os.listdir(repository): + parser = SafeConfigParser() + file = parser.read(os.path.join(repository, tuto, INI_FILENAME)) + if file != []: + # If parser can read at least section + guid = parser.get(INI_METADATA_SECTION, INI_GUID_PROPERTY) + name = parser.get(INI_METADATA_SECTION, INI_NAME_PROPERTY) + activities = parser.options(INI_ACTIVITY_SECTION) + # enforce matching activity name AND version, as UI changes + # break tutorials. We may lower this requirement when the + # UAM gets less dependent on the widget order. + # Also note property names are always stored lowercase. + if (activity_name != None) and (activity_name.lower() in activities): + version = parser.get(INI_ACTIVITY_SECTION, activity_name) + if (activity_vers == version) or (activity_vers == 0): + tutoGuidName[guid] = name + elif (activity_name == None): + tutoGuidName[guid] = name + except OSError: + # the repository may not exist. Continue scanning + pass + + return tutoGuidName + + ## Vault interface functions : + @staticmethod + def installTutorials(path, zip_file_name, forceinstall=False): + """ + Extract the tutorial files in the ZIPPED tutorial archive at the + specified path and add them inside the vault. This should remove any previous + version of this tutorial, if there's any. On the opposite, if we are + trying to install an earlier version, the function will return 1 if + forceInstall is not set to true. + + @params path The path where the zipped tutorial archive is present + @params forceinstall A flag that indicate if we need to force overwrite + of a tutorial even if is version number is lower than the existing one. 
+ + @returns 0 if it worked, 1 if the user needs to confirm the installation + and 2 to mean an error happened + """ + # TODO : Check with architecture team for exception vs error returns + + # test if the file is a valid pkzip file + if zipfile.is_zipfile(os.path.join(path, zip_file_name)) != True: + assert False, "Error : The given file is not a valid PKZip file" + + # unpack the zip archive + zfile = zipfile.ZipFile(os.path.join(path, zip_file_name), "r" ) + + temp_path = tempfile.mkdtemp(dir=_get_store_root()) + zfile.extractall(temp_path) + + # get the tutorial file + ini_file_path = os.path.join(temp_path, INI_FILENAME) + ini_file = SafeConfigParser() + ini_file.read(ini_file_path) + + # get the tutorial guid + guid = ini_file.get(INI_METADATA_SECTION, INI_GUID_PROPERTY) + + # Check if tutorial already exist + tutorial_path = os.path.join(_get_store_root(), guid) + if os.path.isdir(tutorial_path) == False: + # Copy the tutorial in the Vault + shutil.copytree(temp_path, tutorial_path) + + else: + # Check the version of the existing tutorial + existing_version = ini_file.get(INI_METADATA_SECTION, INI_VERSION_PROPERTY) + # Check the version of the new tutorial + new_ini_file = SafeConfigParser() + new_ini_file.read(os.path.join(tutorial_path, INI_FILENAME)) + new_version = new_ini_file.get(INI_METADATA_SECTION, INI_VERSION_PROPERTY) + + if new_version < existing_version and forceinstall == False: + # Version of new tutorial is older and forceinstall is false, return exception + return 1 + else : + # New tutorial is newer or forceinstall flag is set, can overwrite the existing tutorial + shutil.rmtree(tutorial_path) + shutil.copytree(temp_path, tutorial_path) + + # Remove temp data + shutil.rmtree(temp_path) + + return 0 + + @staticmethod + def query(keyword=[], relatedActivityNames=[], category=[]): + """ + Returns the list of tutorials that corresponds to the specified parameters. 
+ + @returns a list of Tutorial meta-data (TutorialID, Description, + Rating, Category, PublishState, etc...) + TODO : Search for tuto caracterised by the entry : OR between [], and between each + + The returned dictionnary is of this format : key = property name, value = property value + The dictionnary also contain one dictionnary element whose key is the string 'activities' + and whose value is another dictionnary of this form : key = related activity name, + value = related activity version number + """ + + # Temp solution for returning all tutorials metadata + + tutorial_list = [] + tuto_guid_list = [] + ini_file = SafeConfigParser() + if keyword == [] and relatedActivityNames == [] and category == []: + # get all tutorials tuples (name:guid) for all activities and version + tuto_dict = Vault.list_available_tutorials() + for id in tuto_dict.keys(): + tuto_guid_list.append(id) + + # Find .ini metadata files with the guid list + + # Get the guid from the tuto tuples + for guid in tuto_guid_list: + # Create a dictionnary containing the metadata and also + # another dictionnary containing the tutorial Related Acttivities, + # and add it to a list + + # Create a TutorialBundler object from the guid + bundler = TutorialBundler(guid) + # Find the .ini file path for this guid + ini_file_path = bundler.get_tutorial_path(guid) + # Read the .ini file + ini_file.read(os.path.join(ini_file_path, 'meta.ini')) + + metadata_dictionnary = {} + related_act_dictionnary = {} + metadata_list = ini_file.options(INI_METADATA_SECTION) + for metadata_name in metadata_list: + # Create a dictionnary of tutorial metadata + metadata_dictionnary[metadata_name] = ini_file.get(INI_METADATA_SECTION, metadata_name) + # Get Related Activities data from.ini files + related_act_list = ini_file.options(INI_ACTIVITY_SECTION) + for related_act in related_act_list: + # For related activites, the format is : key = activity name, value = activity version + related_act_dictionnary[related_act] = 
ini_file.get(INI_ACTIVITY_SECTION, related_act) + + # Add Related Activities dictionnary to metadata dictionnary + metadata_dictionnary['activities'] = related_act_dictionnary + + # Add this dictionnary to tutorial list + tutorial_list.append(metadata_dictionnary) + + # Return tutorial list + return tutorial_list + + @staticmethod + def loadTutorial(Guid): + """ + Creates an executable version of a tutorial from its saved representation. + @param Guid Unique identifier of the tutorial + @returns Tutorial object + """ + + bundle = TutorialBundler(Guid) + bundle_path = bundle.get_tutorial_path(Guid) + config = SafeConfigParser() + config.read(os.path.join(bundle_path, INI_FILENAME)) + + serializer = XMLSerializer() + + name = config.get(INI_METADATA_SECTION, INI_NAME_PROPERTY) + + # Open the XML file + tutorial_file = os.path.join(bundle_path, TUTORIAL_FILENAME) + with open(tutorial_file, 'r') as tfile: + tutorial = serializer.load_tutorial(tfile) + + return tutorial + + @staticmethod + def saveTutorial(tutorial, metadata_dict): + """ + Creates a persistent version of a tutorial in the Vault. 
+ @param tutorial Tutorial + @param metadata_dict dictionary of metadata for the Tutorial + @returns true if the tutorial was saved correctly + """ + + # Get the tutorial guid from metadata dictionnary + guid = metadata_dict[INI_GUID_PROPERTY] + + # Check if tutorial already exist + tutorial_path = os.path.join(_get_store_root(), guid) + if os.path.isdir(tutorial_path) == False: + + # Serialize the tutorial and write it to disk + xml_ser = XMLSerializer() + os.makedirs(tutorial_path) + + with open(os.path.join(tutorial_path, TUTORIAL_FILENAME), 'w') as fsmfile: + xml_ser.save_tutorial(tutorial, fsmfile) + + # Create the metadata file + ini_file_path = os.path.join(tutorial_path, "meta.ini") + parser = SafeConfigParser() + parser.add_section(INI_METADATA_SECTION) + for key, value in metadata_dict.items(): + if key != 'activities': + parser.set(INI_METADATA_SECTION, key, value) + else: + related_activities_dict = value + parser.add_section(INI_ACTIVITY_SECTION) + for related_key, related_value in related_activities_dict.items(): + parser.set(INI_ACTIVITY_SECTION, related_key, related_value) + + # Write the file to disk + with open(ini_file_path, 'wb') as configfile: + parser.write(configfile) + + else: + # Error, tutorial already exist + return False + + # TODO : wait for Ben input on how to unpublish tuto before coding this function + # For now, no unpublishing will occur. + + + @staticmethod + def deleteTutorial(Tutorial): + """ + Removes the tutorial from the Vault. It will unpublish the tutorial if need be, + and it will also wipe it from the persistent storage. 
+ @returns true is the tutorial was deleted from the Vault + """ + bundle = TutorialBundler(Guid) + bundle_path = bundle.get_tutorial_path(Guid) + + # TODO : Need also to unpublish tutorial, need to interact with webservice module + + shutil.rmtree(bundle_path) + if os.path.isdir(bundle_path) == False: + return True + else: + return False + + +class Serializer(object): + """ + Interface that provide serializing and deserializing of the FSM + used in the tutorials to/from disk. Must be inherited. + """ + + def save_tutorial(self,fsm): + """ + Save fsm to disk. If a GUID parameter is provided, the existing GUID is + located in the .ini files in the store root and bundle root and + the corresponding FSM is/are overwritten. If the GUId is not found, an + exception occur. If no GUID is provided, FSM is written in a new file + in the store root. + """ + raise NotImplementedError() + + def load_tutorial(self): + """ + Load fsm from disk. + """ + raise NotImplementedError() + +class XMLSerializer(Serializer): + """ + Class that provide serializing and deserializing of the FSM + used in the tutorials to/from a .xml file. Inherit from Serializer + """ + + @classmethod + def _create_state_dict_node(cls, state_dict, doc): + """ + Create and return a xml Node from a State dictionnary. 
+ @param state_dict dictionary of State objects + @param doc The XML document root (used to create nodes only + @return xml Element containing the states + """ + statesList = doc.createElement(ELEM_STATES) + for state_name, state in state_dict.items(): + stateNode = doc.createElement(ELEM_STATE) + statesList.appendChild(stateNode) + stateNode.setAttribute("Name", state_name) + actionsList = stateNode.appendChild(cls._create_action_list_node(state.get_action_dict(), doc)) + transitionsList = stateNode.appendChild(cls._create_transitions_node(state.get_transition_dict(), doc)) + return statesList + + @classmethod + def _create_addon_component_node(cls, parent_attr_name, comp, doc): + """ + Takes a component that is embedded in another component (e.g. the content + of a OnceWrapper) and encapsulate it in a node with the property name. + + e.g. + <Component Class="OnceWrapper"> + <property name="addon"> + <Component Class="BubbleMessage" message="'Hi!'" position="[12,32]"/> + </property> + </Component> + + When reloading this node, we should look up the property name for the parent + in the attribute of the node, then examine the subnode to create the addon + object itself. + + @param parent_attr_name The name of the parent's attribute for this addon + e.g. the OnceWrapper has the action attribute, which corresponds to a + sub-action it must execute once. + @param comp The component node itself + @param doc The XML document root (only used to create the nodes) + @returns A NODE_SUBCOMPONENT node, with the property attribute and a sub node + that represents another component. + """ + subCompNode = doc.createElement(NODE_SUBCOMPONENT) + subCompNode.setAttribute("name", parent_attr_name) + + subNode = cls._create_component_node(comp, doc) + + subCompNode.appendChild(subNode) + + return subCompNode + + @classmethod + def _create_addonlist_component_node(cls, parent_attr_name, comp_list, doc): + """ + Takes a list of components that are embedded in another component (ex. 
the + content of a ChainAction) and encapsulate them in a node with the property + name. + + e.g. + <Component Class="ChainAction"> + <listproperty name="actions"> + <Component Class="BubbleMessage" message="'Hi!'" position="[15,35]"/> + <Component Class="DialogMessage" message="'Multi-action!'" position="[45,10]"/> + </listproperty> + </Component> + + When reloading this node, we should look up the property name for the parent + in the the attribute of the node, then rebuild the list by appending the + content of all the subnodes. + + @param parent_attr_name The name of the parent component's property + @param comp_list A list of components that comprise the property + @param doc The XML document root (only for creating new nodes) + @returns A NODE_SUBCOMPONENTLIST node with the property attribute + """ + subCompListNode = doc.createElement(NODE_SUBCOMPONENTLIST) + subCompListNode.setAttribute("name", parent_attr_name) + + for comp in comp_list: + compNode = cls._create_component_node(comp, doc) + subCompListNode.appendChild(compNode) + + return subCompListNode + + @classmethod + def _create_component_node(cls, comp, doc): + """ + Takes a single component (action or eventfilter) and transforms it + into a xml node. 
+ + @param comp A single component + @param doc The XML document root (used to create nodes only + @return A XML Node object with the component tag name + """ + compNode = doc.createElement(NODE_COMPONENT) + + # Write down just the name of the Action class as the Class + # property -- + compNode.setAttribute("Class",type(comp).__name__) + + # serialize all tutorius properties + for propname in comp.get_properties(): + propval = getattr(comp, propname) + if getattr(type(comp), propname).type == "addonlist": + compNode.appendChild(cls._create_addonlist_component_node(propname, propval, doc)) + elif getattr(type(comp), propname).type == "addon": + #import rpdb2; rpdb2.start_embedded_debugger('pass') + compNode.appendChild(cls._create_addon_component_node(propname, propval, doc)) + else: + # repr instead of str, as we want to be able to eval() it into a + # valid object. + compNode.setAttribute(propname, repr(propval)) + + return compNode + + @classmethod + def _create_action_list_node(cls, action_dict, doc): + """ + Create and return a xml Node from a Action list. + + @param action_dict Dictionary of actions with names as keys + @param doc The XML document root (used to create new nodes only) + @return A XML Node object with the Actions tag name and a serie of + Action children + """ + actionsList = doc.createElement(ELEM_ACTIONS) + for name, action in action_dict.items(): + # Create the action node + actionNode = cls._create_component_node(action, doc) + actionNode.setAttribute(NAME_ATTR, name) + # Append it to the list + actionsList.appendChild(actionNode) + + return actionsList + + @classmethod + def _create_transitions_node(cls, transition_dict, doc): + """ + Create and return a xml Node from a transition dictionary. + @param transition_dict dictionary of (event, next_state) transitions. 
+ @param doc The XML document root (used to create nodes only + @return xml Element containing the transitions + """ + eventFiltersList = doc.createElement(ELEM_TRANS) + for transition_name, (event, end_state) in transition_dict.items(): + #start_state = transition_name.split(Tutorial._NAME_SEPARATOR)[0] + #XXX The addon is not in the cache and cannot be loaded so we + # store it differently for now + if type(event) == AutomaticTransitionEvent: + eventFilterNode = doc.createElement(ELEM_AUTOTRANS) + else: + eventFilterNode = cls._create_component_node(event, doc) + #eventFilterNode.setAttribute(START_STATE_ATTR, unicode(start_state)) + eventFilterNode.setAttribute(NEXT_STATE_ATTR, unicode(end_state)) + eventFilterNode.setAttribute(NAME_ATTR, transition_name) + eventFiltersList.appendChild(eventFilterNode) + + return eventFiltersList + + @classmethod + def save_tutorial(cls, fsm, file_obj): + """ + Save fsm to file + + @param fsm Tutorial to save + @param file_obj file-like object in which the serialized fsm is saved + + Side effects: + A serialized version of the Tutorial is written to file_obj. + The file is not closed automatically. + """ + doc = xml.dom.minidom.Document() + fsm_element = doc.createElement(ELEM_FSM) + doc.appendChild(fsm_element) + + fsm_element.setAttribute("Name", fsm.name) + + states = cls._create_state_dict_node(fsm.get_state_dict(), doc) + fsm_element.appendChild(states) + + file_obj.write(doc.toprettyxml()) + + @classmethod + def _get_direct_descendants_by_tag_name(cls, node, name): + """ + Searches in the list of direct descendants of a node to find all the node + that have the given name. + + This is used because the Document.getElementsByTagName() function returns the + list of all the descendants (whatever their distance to the start node) that + have that name. In the case of complex components, we absolutely need to inspect + a single layer of the tree at the time. 
+ + @param node The node from which we want the direct descendants with a particular + name + @param name The name of the node + @returns A list, possibly empty, of direct descendants of node that have this name + """ + return_list = [] + for childNode in node.childNodes: + if childNode.nodeName == name: + return_list.append(childNode) + return return_list + + @classmethod + def _load_xml_transitions(cls, filters_elem): + """ + Loads up a list of Event Filters. + + @param filters_elem An XML Element representing a list of event filters + @return dict of (event, next_state) transitions, keyed by name + """ + transition_dict = {} + + #Retrieve normal transitions + transition_element_list = cls._get_direct_descendants_by_tag_name(filters_elem, NODE_COMPONENT) + new_transition = None + + for transition in transition_element_list: + #start_state = transition.getAttribute(START_STATE_ATTR) + next_state = transition.getAttribute(NEXT_STATE_ATTR) + transition_name = transition.getAttribute(NAME_ATTR) + try: + #The attributes must be removed so that they are not + # viewed as a property in load_xml_component + # transition.removeAttribute(START_STATE_ATTR) + transition.removeAttribute(NEXT_STATE_ATTR) + transition.removeAttribute(NAME_ATTR) + except NotFoundErr: + continue + + new_transition = cls._load_xml_component(transition) + + if new_transition is not None: + transition_dict[transition_name] = (new_transition, next_state) + + #Retrieve automatic transitions + # XXX This is done differently as the AutomaticTransitionEvent + # cannot be loaded dynamically (yet?) 
+ transition_element_list = cls._get_direct_descendants_by_tag_name(filters_elem, ELEM_AUTOTRANS) + new_transition = None + + for transition in transition_element_list: + #start_state = transition.getAttribute(START_STATE_ATTR) + next_state = transition.getAttribute(NEXT_STATE_ATTR) + transition_name = transition.getAttribute(NAME_ATTR) + try: + #The attributes must be removed so that they are not + # viewed as a property in load_xml_component + # transition.removeAttribute(START_STATE_ATTR) + transition.removeAttribute(NEXT_STATE_ATTR) + transition.removeAttribute(NAME_ATTR) + except NotFoundErr: + continue + + transition_dict[transition_name] = (AutomaticTransitionEvent(), next_state) + + return transition_dict + + @classmethod + def _load_xml_subcomponents(cls, node, properties): + """ + Loads all the subcomponent node below the given node and inserts them with + the right property name inside the properties dictionnary. + + @param node The parent node that contains one or many property nodes. + @param properties A dictionnary where the subcomponent property names + and the instantiated components will be stored + @returns Nothing. The properties dict will contain the property->comp mapping. + """ + subCompList = cls._get_direct_descendants_by_tag_name(node, NODE_SUBCOMPONENT) + + for subComp in subCompList: + property_name = subComp.getAttribute("name") + internal_comp_node = cls._get_direct_descendants_by_tag_name(subComp, NODE_COMPONENT)[0] + internal_comp = cls._load_xml_component(internal_comp_node) + properties[str(property_name)] = internal_comp + + @classmethod + def _load_xml_subcomponent_lists(cls, node, properties): + """ + Loads all the subcomponent lists below the given node and stores them + under the correct property name for that node. + + @param node The node from which we want to read the subComponent lists + @param properties The dictionnary that will contain the mapping of prop->subCompList + @returns Nothing. 
The values are returns inside the properties dict. + """ + listOf_subCompListNode = cls._get_direct_descendants_by_tag_name(node, NODE_SUBCOMPONENTLIST) + for subCompListNode in listOf_subCompListNode: + property_name = subCompListNode.getAttribute("name") + subCompList = [] + for subCompNode in cls._get_direct_descendants_by_tag_name(subCompListNode, NODE_COMPONENT): + subComp = cls._load_xml_component(subCompNode) + subCompList.append(subComp) + properties[str(property_name)] = subCompList + + @classmethod + def _load_xml_component(cls, node): + """ + Loads a single addon component instance from an Xml node. + + @param node The component XML Node to transform + object + @return The addon component object of the correct type according to the XML + description + """ + class_name = node.getAttribute("Class") + + properties = {} + + for prop in node.attributes.keys(): + if prop == "Class" : continue + # security : keep sandboxed + properties[str(prop)] = eval(node.getAttribute(prop)) + + # Read the complex attributes + cls._load_xml_subcomponents(node, properties) + cls._load_xml_subcomponent_lists(node, properties) + + new_action = addon.create(class_name, **properties) + + if not new_action: + return None + + return new_action + + @classmethod + def _load_xml_actions(cls, actions_elem): + """ + Transforms an Actions element into a dict of instanciated Action. 
+ + @param actions_elem An XML Element representing a list of Actions + @return dictionary of actions keyed by name + """ + action_dict = {} + actions_element_list = cls._get_direct_descendants_by_tag_name(actions_elem, NODE_COMPONENT) + + for action in actions_element_list: + action_name = action.getAttribute(NAME_ATTR) + try: + #The name attribute must be removed so that it is not + # viewed as a property in load_xml_component + action.removeAttribute(NAME_ATTR) + except NotFoundErr: + continue + new_action = cls._load_xml_component(action) + + action_dict[action_name] = new_action + + return action_dict + + @classmethod + def _load_xml_states(cls, states_elem): + """ + Takes in a States element and fleshes out a complete list of State + objects. + + @param states_elem An XML Element that represents a list of States + @return dictionary of States + """ + state_dict = {} + # item(0) because there is always only one <States> tag in the xml file + # so states_elem should always contain only one element + states_element_list = states_elem.item(0).getElementsByTagName(ELEM_STATE) + + for state in states_element_list: + stateName = state.getAttribute("Name") + # Using item 0 in the list because there is always only one + # Actions and EventFilterList element per State node. + actions_list = cls._load_xml_actions(state.getElementsByTagName(ELEM_ACTIONS)[0]) + transitions_list = cls._load_xml_transitions(state.getElementsByTagName(ELEM_TRANS)[0]) + + state_dict[stateName] = State(stateName, actions_list, transitions_list) + + return state_dict + + @classmethod + def load_xml_tutorial(cls, fsm_elem): + """ + Takes in an XML element representing an FSM and returns the fully + crafted FSM. 
+ + @param fsm_elem The XML element that describes a FSM + @return Tutorial loaded from xml element + """ + # Load the FSM's name and start state's name + fsm_name = fsm_elem.getAttribute("Name") + + # Load the states + states_dict = cls._load_xml_states(fsm_elem.getElementsByTagName(ELEM_STATES)) + fsm = Tutorial(fsm_name, states_dict) + + return fsm + + @classmethod + def load_tutorial(cls, tutorial_file): + """ + Load fsm from xml file + @param tutorial_file file-like object to read the fsm from + @return Tutorial object that was loaded from the file + """ + xml_dom = xml.dom.minidom.parse(tutorial_file) + + fsm_elem = xml_dom.getElementsByTagName(ELEM_FSM)[0] + + return cls.load_xml_tutorial(fsm_elem) + +class TutorialBundler(object): + """ + This class provide the various data handling methods useable by the tutorial + editor. + """ + + def __init__(self,generated_guid = None, bundle_path=None): + """ + Tutorial_bundler constructor. If a GUID is given in the parameter, the + Tutorial_bundler object will be associated with it. 
If no GUID is given, + a new GUID will be generated, + """ + + self.Guid = generated_guid or str(uuid.uuid1()) + + #FIXME: Look for the bundle in the activity first (more specific) + #Look for the file in the path if a uid is supplied + if generated_guid: + #General store + store_path = os.path.join(_get_store_root(), str(generated_guid), INI_FILENAME) + if os.path.isfile(store_path): + self.Path = os.path.dirname(store_path) + elif _get_bundle_root() != None: + #Bundle store + bundle_path = os.path.join(_get_bundle_root(), str(generated_guid), INI_FILENAME) + if os.path.isfile(bundle_path): + self.Path = os.path.dirname(bundle_path) + else: + raise IOError(2,"Unable to locate metadata file for guid '%s'" % generated_guid) + else: + raise IOError(2,"Unable to locate metadata file for guid '%s'" % generated_guid) + + else: + #Create the folder, any failure will go through to the caller for now + store_path = os.path.join(_get_store_root(), self.Guid) + os.makedirs(store_path) + self.Path = store_path + + def write_metadata_file(self, tutorial): + """ + Write metadata to the property file. + @param tutorial Tutorial for which to write metadata + """ + #Create the Config Object and populate it + cfg = SafeConfigParser() + cfg.add_section(INI_METADATA_SECTION) + cfg.set(INI_METADATA_SECTION, INI_GUID_PROPERTY, self.Guid) + cfg.set(INI_METADATA_SECTION, INI_NAME_PROPERTY, tutorial.name) + cfg.set(INI_METADATA_SECTION, INI_XML_FSM_PROPERTY, TUTORIAL_FILENAME) + cfg.add_section(INI_ACTIVITY_SECTION) + if os.environ['SUGAR_BUNDLE_NAME'] != None and os.environ['SUGAR_BUNDLE_VERSION'] != None: + cfg.set(INI_ACTIVITY_SECTION, os.environ['SUGAR_BUNDLE_NAME'], + os.environ['SUGAR_BUNDLE_VERSION']) + else: + cfg.set(INI_ACTIVITY_SECTION, 'not_an_activity', '0') + + #Write the ini file + cfg.write( file( os.path.join(self.Path, INI_FILENAME), 'w' ) ) + + + @staticmethod + def get_tutorial_path(guid): + """ + Finds the tutorial with the associated GUID. 
If it is found, return + the path to the tutorial's directory. If it doesn't exist, raise an + IOError. + + A note : if there are two tutorials with this GUID in the folders, + they will both be inspected and the one with the highest version + number will be returned. If they have the same version number, the one + from the global store will be returned. + + @param guid The GUID of the tutorial that is to be loaded. + """ + # Attempt to find the tutorial's directory in the global directory + global_dir = os.path.join(_get_store_root(),str(guid)) + # Then in the activty's bundle path + if _get_bundle_root() != None: + activity_dir = os.path.join(_get_bundle_root(), str(guid)) + else: + activity_dir = '' + + # If they both exist + if os.path.isdir(global_dir) and os.path.isdir(activity_dir): + # Inspect both metadata files + global_meta = os.path.join(global_dir, "meta.ini") + activity_meta = os.path.join(activity_dir, "meta.ini") + + # Open both config files + global_parser = SafeConfigParser() + global_parser.read(global_meta) + + activity_parser = SafeConfigParser() + activity_parser.read(activity_meta) + + # Get the version number for each tutorial + global_version = global_parser.get(INI_METADATA_SECTION, "version") + activity_version = activity_parser.get(INI_METADATA_SECTION, "version") + + # If the global version is higher or equal, we'll take it + if global_version >= activity_version: + return global_dir + else: + return activity_dir + + # Do we just have the global directory? + if os.path.isdir(global_dir): + return global_dir + + # Or just the activity's bundle directory? + if os.path.isdir(activity_dir): + return activity_dir + + # Error : none of these directories contain the tutorial + raise IOError(2, "Neither the global nor the bundle directory contained the tutorial with GUID %s"%guid) + + + def write_fsm(self, fsm): + + """ + Save fsm to disk. 
If a GUID parameter is provided, the existing GUID is + located in the .ini files in the store root and bundle root and + the corresponding FSM is/are created or overwritten. If the GUID is not + found, an exception occur. + """ + + config = SafeConfigParser() + + serializer = XMLSerializer() + path = os.path.join(self.Path, "meta.ini") + config.read(path) + xml_filename = config.get(INI_METADATA_SECTION, INI_XML_FSM_PROPERTY) + serializer.save_tutorial(fsm, xml_filename, self.Path) + + @staticmethod + def add_resources(typename, file): + """ + Add ressources to metadata. + """ + raise NotImplementedError("add_resources not implemented") diff --git a/tutorius/viewer.py b/tutorius/viewer.py new file mode 100644 index 0000000..272558e --- /dev/null +++ b/tutorius/viewer.py @@ -0,0 +1,423 @@ +# Copyright (C) 2009, Tutorius.org +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +""" +This module renders a widget containing a graphical representation +of a tutorial and acts as a creator proxy as it has some editing +functionality. 
+""" +import sys + +import gtk, gtk.gdk +import cairo +from math import pi as PI +PI2 = PI/2 + +import rsvg + +from sugar.bundle import activitybundle +from sugar.tutorius import addon +from sugar.graphics import icon +from sugar.tutorius.filters import EventFilter +from sugar.tutorius.actions import Action +import os + +# FIXME ideally, apps scale correctly and we should use proportional positions +X_WIDTH = 800 +X_HEIGHT = 600 +ACTION_WIDTH = 100 +ACTION_HEIGHT = 70 + +# block look +BLOCK_PADDING = 5 +BLOCK_WIDTH = 100 +BLOCK_CORNERS = 10 +BLOCK_INNER_PAD = 10 + +SNAP_WIDTH = BLOCK_WIDTH - BLOCK_PADDING - BLOCK_INNER_PAD*2 +SNAP_HEIGHT = SNAP_WIDTH*X_HEIGHT/X_WIDTH +SNAP_SCALE = float(SNAP_WIDTH)/X_WIDTH + +class Viewer(object): + """ + Renders a tutorial as a sequence of blocks, each block representing either + an action or an event (transition). + + Current Viewer implementation lacks viewport management; + having many objects in a tutorial will not render properly. + """ + def __init__(self, tutorial, creator): + super(Viewer, self).__init__() + + self._tutorial = tutorial + self._creator = creator + self.alloc = None + self.click_pos = None + self.drag_pos = None + self.selection = [] + + self.win = gtk.Window(gtk.WINDOW_TOPLEVEL) + self.win.set_size_request(400, 200) + self.win.set_gravity(gtk.gdk.GRAVITY_SOUTH_WEST) + self.win.show() + self.win.set_deletable(False) + self.win.move(0, 0) + + vbox = gtk.ScrolledWindow() + self.win.add(vbox) + + canvas = gtk.DrawingArea() + vbox.add_with_viewport(canvas) + canvas.set_app_paintable(True) + canvas.connect_after("expose-event", self.on_viewer_expose, tutorial._states) + canvas.add_events(gtk.gdk.BUTTON_PRESS_MASK \ + |gtk.gdk.BUTTON_MOTION_MASK \ + |gtk.gdk.BUTTON_RELEASE_MASK \ + |gtk.gdk.KEY_PRESS_MASK) + canvas.connect('button-press-event', self._on_click) + # drag-select disabled, for now + #canvas.connect('motion-notify-event', self._on_drag) + canvas.connect('button-release-event', self._on_drag_end) + 
canvas.connect('key-press-event', self._on_key_press) + + canvas.set_flags(gtk.HAS_FOCUS|gtk.CAN_FOCUS) + canvas.grab_focus() + + self.win.show_all() + canvas.set_size_request(2048, 180) # FIXME + + def destroy(self): + self.win.destroy() + + + def _paint_state(self, ctx, states): + """ + Paints a tutorius fsm state in a cairo context. + Final context state will be shifted by the size of the graphics. + """ + block_width = BLOCK_WIDTH - BLOCK_PADDING + block_max_height = self.alloc.height + + new_insert_point = None + cur_state = 'INIT' + + # FIXME: get app when we have a model that supports it + cur_app = 'Calculate' + app_start = ctx.get_matrix() + try: + state = states[cur_state] + except KeyError: + state = None + + while state: + new_app = 'Calculate' + if new_app != cur_app: + ctx.save() + ctx.set_matrix(app_start) + self._render_app_hints(ctx, cur_app) + ctx.restore() + app_start = ctx.get_matrix() + ctx.translate(BLOCK_PADDING, 0) + cur_app = new_app + + action_list = state.get_action_list() + if action_list: + local_height = (block_max_height - BLOCK_PADDING)/len(action_list) - BLOCK_PADDING + ctx.save() + for action in action_list: + origin = tuple(ctx.get_matrix())[-2:] + if self.click_pos and \ + self.click_pos[0]-BLOCK_WIDTH<origin[0] and \ + self.drag_pos[0]>origin[0]: + self.selection.append(action) + self.render_action(ctx, block_width, local_height, action) + ctx.translate(0, local_height+BLOCK_PADDING) + + ctx.restore() + ctx.translate(BLOCK_WIDTH, 0) + + # insertion cursor painting made from two opposed triangles + # joined by a line. 
+ if state.name == self._creator.get_insertion_point(): + ctx.save() + bp2 = BLOCK_PADDING/2 + ctx.move_to(-bp2, 0) + ctx.line_to(-BLOCK_PADDING-bp2, -BLOCK_PADDING) + ctx.line_to(bp2, -BLOCK_PADDING) + ctx.line_to(-bp2, 0) + + ctx.line_to(-bp2, block_max_height-2*BLOCK_PADDING) + ctx.line_to(bp2, block_max_height-BLOCK_PADDING) + ctx.line_to(-BLOCK_PADDING-bp2, block_max_height-BLOCK_PADDING) + ctx.line_to(-bp2, block_max_height-2*BLOCK_PADDING) + + ctx.line_to(-bp2, BLOCK_PADDING) + ctx.set_source_rgb(1.0, 1.0, 0.0) + ctx.stroke_preserve() + ctx.fill() + ctx.restore() + + + event_list = state.get_event_filter_list() + if event_list: + local_height = (block_max_height - BLOCK_PADDING)/len(event_list) - BLOCK_PADDING + ctx.save() + for event, next_state in event_list: + origin = tuple(ctx.get_matrix())[-2:] + if self.click_pos and \ + self.click_pos[0]-BLOCK_WIDTH<origin[0] and \ + self.drag_pos[0]>origin[0]: + self.selection.append(event) + self.render_event(ctx, block_width, local_height, event) + ctx.translate(0, local_height+BLOCK_PADDING) + + ctx.restore() + ctx.translate(BLOCK_WIDTH, 0) + + # FIXME point to next state in state, as it would highlight + # the "happy path". + cur_state = event_list[0][1] + + if (not new_insert_point) and self.click_pos: + origin = tuple(ctx.get_matrix())[-2:] + if self.click_pos[0]<origin[0]: + new_insert_point = state + + if event_list: + try: + state = states[cur_state] + except KeyError: + break + yield True + else: + break + + ctx.set_matrix(app_start) + self._render_app_hints(ctx, cur_app) + + if self.click_pos: + if not new_insert_point: + new_insert_point = state + + self._creator.set_insertion_point(new_insert_point.name) + + yield False + + def _render_snapshot(self, ctx, elem): + """ + Render the "simplified screenshot-like" representation of elements positions. 
+ """ + ctx.set_source_rgba(1.0, 1.0, 1.0, 0.5) + ctx.rectangle(0, 0, SNAP_WIDTH, SNAP_HEIGHT) + ctx.fill_preserve() + ctx.stroke() + + if hasattr(elem, 'position'): + pos = elem.position + # FIXME this size approximation is fine, but I believe we could + # do better. + ctx.scale(SNAP_SCALE, SNAP_SCALE) + ctx.rectangle(pos[0], pos[1], ACTION_WIDTH, ACTION_HEIGHT) + ctx.fill_preserve() + ctx.stroke() + + def _render_app_hints(self, ctx, appname): + """ + Fetches the icon of the app related to current states and renders it on a + separator, between states. + """ + ctx.set_source_rgb(0.0, 0.0, 0.0) + ctx.set_dash((1,1,0,0), 1) + ctx.move_to(0, 0) + ctx.line_to(0, self.alloc.height) + ctx.stroke() + ctx.set_dash(tuple(), 1) + + bundle_path = os.getenv("SUGAR_BUNDLE_PATH") + if bundle_path: + icon_path = activitybundle.ActivityBundle(bundle_path).get_icon() + icon = rsvg.Handle(icon_path) + ctx.save() + ctx.translate(-15, 0) + ctx.scale(0.5, 0.5) + icon_surf = icon.render_cairo(ctx) + ctx.restore() + + + def render_action(self, ctx, width, height, action): + """ + Renders the action block, along with the icon of the action tool. 
+ """ + ctx.save() + inner_width = width-(BLOCK_CORNERS<<1) + inner_height = height-(BLOCK_CORNERS<<1) + + paint_border = ctx.rel_line_to + filling = cairo.LinearGradient(0, 0, 0, inner_height) + if action not in self.selection: + filling.add_color_stop_rgb(0.0, 0.7, 0.7, 1.0) + filling.add_color_stop_rgb(1.0, 0.1, 0.1, 0.8) + else: + filling.add_color_stop_rgb(0.0, 0.4, 0.4, 0.8) + filling.add_color_stop_rgb(1.0, 0.0, 0.0, 0.5) + tracing = cairo.LinearGradient(0, 0, 0, inner_height) + tracing.add_color_stop_rgb(0.0, 1.0, 1.0, 1.0) + tracing.add_color_stop_rgb(1.0, 0.2, 0.2, 0.2) + + ctx.move_to(BLOCK_CORNERS, 0) + paint_border(inner_width, 0) + ctx.arc(inner_width+BLOCK_CORNERS, BLOCK_CORNERS, BLOCK_CORNERS, -PI2, 0.0) + ctx.arc(inner_width+BLOCK_CORNERS, inner_height+BLOCK_CORNERS, BLOCK_CORNERS, 0.0, PI2) + ctx.arc(BLOCK_CORNERS, inner_height+BLOCK_CORNERS, BLOCK_CORNERS, PI2, PI) + ctx.arc(BLOCK_CORNERS, BLOCK_CORNERS, BLOCK_CORNERS, -PI, -PI2) + + ctx.set_source(tracing) + ctx.stroke_preserve() + ctx.set_source(filling) + ctx.fill() + + addon_name = addon.get_name_from_type(type(action)) + # TODO use icon pool + icon_name = addon.get_addon_meta(addon_name)['icon'] + rsvg_icon = rsvg.Handle(icon.get_icon_file_name(icon_name)) + ctx.save() + ctx.translate(BLOCK_INNER_PAD, BLOCK_INNER_PAD) + ctx.scale(0.5, 0.5) + icon_surf = rsvg_icon.render_cairo(ctx) + + ctx.restore() + + ctx.translate(BLOCK_INNER_PAD, (height-SNAP_HEIGHT)/2) + self._render_snapshot(ctx, action) + + ctx.restore() + + def render_event(self, ctx, width, height, event): + """ + Renders the action block, along with the icon of the action tool. 
+ """ + ctx.save() + inner_width = width-(BLOCK_CORNERS<<1) + inner_height = height-(BLOCK_CORNERS<<1) + + filling = cairo.LinearGradient(0, 0, 0, inner_height) + if event not in self.selection: + filling.add_color_stop_rgb(0.0, 1.0, 0.8, 0.6) + filling.add_color_stop_rgb(1.0, 1.0, 0.6, 0.2) + else: + filling.add_color_stop_rgb(0.0, 0.8, 0.6, 0.4) + filling.add_color_stop_rgb(1.0, 0.6, 0.4, 0.1) + tracing = cairo.LinearGradient(0, 0, 0, inner_height) + tracing.add_color_stop_rgb(0.0, 1.0, 1.0, 1.0) + tracing.add_color_stop_rgb(1.0, 0.3, 0.3, 0.3) + + ctx.move_to(BLOCK_CORNERS, 0) + ctx.rel_line_to(inner_width, 0) + ctx.rel_line_to(BLOCK_CORNERS, BLOCK_CORNERS) + ctx.rel_line_to(0, inner_height) + ctx.rel_line_to(-BLOCK_CORNERS, BLOCK_CORNERS) + ctx.rel_line_to(-inner_width, 0) + ctx.rel_line_to(-BLOCK_CORNERS, -BLOCK_CORNERS) + ctx.rel_line_to(0, -inner_height) + ctx.close_path() + + ctx.set_source(tracing) + ctx.stroke_preserve() + ctx.set_source(filling) + ctx.fill() + + addon_name = addon.get_name_from_type(type(event)) + # TODO use icon pool + icon_name = addon.get_addon_meta(addon_name)['icon'] + rsvg_icon = rsvg.Handle(icon.get_icon_file_name(icon_name)) + ctx.save() + ctx.translate(BLOCK_INNER_PAD, BLOCK_INNER_PAD) + ctx.scale(0.5, 0.5) + icon_surf = rsvg_icon.render_cairo(ctx) + + ctx.restore() + + ctx.translate(BLOCK_INNER_PAD, (height-SNAP_HEIGHT)/2) + self._render_snapshot(ctx, event) + + ctx.restore() + + def on_viewer_expose(self, widget, evt, states): + """ + Expose signal handler for the viewer's DrawingArea. + This loops through states and renders every action and transition of + the "happy path". 
+ + @param widget: the gtk.DrawingArea on which to draw + @param evt: the gtk.gdk.Event containing an "expose" event + @param states: a tutorius FiniteStateMachine object to paint + """ + ctx = widget.window.cairo_create() + self.alloc = widget.get_allocation() + ctx.set_source_pixmap(widget.window, + widget.allocation.x, + widget.allocation.y) + + # draw no more than our expose event intersects our child + region = gtk.gdk.region_rectangle(widget.allocation) + r = gtk.gdk.region_rectangle(evt.area) + region.intersect(r) + ctx.region (region) + ctx.clip() + ctx.paint() + + ctx.translate(BLOCK_PADDING, BLOCK_PADDING) + + painter = self._paint_state(ctx, states) + while painter.next(): pass + + if self.click_pos and self.drag_pos: + ctx.set_matrix(cairo.Matrix()) + ctx.rectangle(self.click_pos[0], self.click_pos[1], + self.drag_pos[0]-self.click_pos[0], + self.drag_pos[1]-self.click_pos[1]) + ctx.set_source_rgba(0, 0, 1, 0.5) + ctx.fill_preserve() + ctx.stroke() + + return False + + def _on_click(self, widget, evt): + # the rendering pipeline will work out the click validation process + self.drag_pos = None + self.drag_pos = self.click_pos = evt.get_coords() + widget.queue_draw() + + self.selection = [] + + def _on_drag(self, widget, evt): + self.drag_pos = evt.get_coords() + widget.queue_draw() + + def _on_drag_end(self, widget, evt): + self.click_pos = self.drag_pos = None + widget.queue_draw() + + def _on_key_press(self, widget, evt): + if evt.keyval == gtk.keysyms.BackSpace: + # remove selection + for selected in self.selection: + if isinstance(selected, EventFilter): + self._creator.delete_state() + else: + self._creator.delete_action(selected) + widget.queue_draw() + + |