From 516b7fae68f9e9c28d1b5a5091bb91e59737fe3a Mon Sep 17 00:00:00 2001
From: Daniel Drake
Date: Sun, 23 Nov 2008 22:41:30 +0000
Subject: Pipeline rework

---
diff --git a/glive.py b/glive.py
index bf27919..ae7480c 100644
--- a/glive.py
+++ b/glive.py
@@ -43,21 +43,17 @@ class Glive:
     def __init__(self, pca):
         self.window = None
         self.ca = pca
-        self.pipes = []
+        self._eos_cb = None
 
         self.playing = False
+        self.picExposureOpen = False
 
         self.AUDIO_TRANSCODE_ID = 0
         self.TRANSCODE_ID = 0
         self.VIDEO_TRANSCODE_ID = 0
 
-        self.PIPETYPE_SUGAR_JHBUILD = 0
-        self.PIPETYPE_XV_VIDEO_DISPLAY_RECORD = 1
-        self.PIPETYPE_X_VIDEO_DISPLAY = 2
-        self.PIPETYPE_AUDIO_RECORD = 3
-        self._PIPETYPE = self.PIPETYPE_XV_VIDEO_DISPLAY_RECORD
-        self._LAST_PIPETYPE = self._PIPETYPE
-        self._NEXT_PIPETYPE = -1
+        self.PHOTO_MODE_PHOTO = 0
+        self.PHOTO_MODE_AUDIO = 1
 
         self.TRANSCODE_UPDATE_INTERVAL = 200
 
@@ -70,27 +66,99 @@ class Glive:
         self.VIDEO_HEIGHT_LARGE = 150
         self.VIDEO_FRAMERATE_SMALL = 10
 
+        self.pipeline = gst.Pipeline("my-pipeline")
+        self.createPhotoBin()
+        self.createAudioBin()
+        self.createVideoBin()
+        self.createPipeline()
+
         self.thumbPipes = []
         self.muxPipes = []
 
-        self._nextPipe()
+        bus = self.pipeline.get_bus()
+        bus.enable_sync_message_emission()
+        bus.add_signal_watch()
+        self.SYNC_ID = bus.connect('sync-message::element', self._onSyncMessageCb)
+        self.MESSAGE_ID = bus.connect('message', self._onMessageCb)
 
-    def setPipeType( self, type ):
-        self._NEXT_PIPETYPE = type
+    def createPhotoBin ( self ):
+        queue = gst.element_factory_make("queue", "pbqueue")
+        queue.set_property("leaky", True)
+        queue.set_property("max-size-buffers", 1)
 
+        colorspace = gst.element_factory_make("ffmpegcolorspace", "pbcolorspace")
+        jpeg = gst.element_factory_make("jpegenc", "pbjpeg")
 
-    def getPipeType( self ):
-        return self._PIPETYPE
+        sink = gst.element_factory_make("fakesink", "pbsink")
+        self.HANDOFF_ID = sink.connect("handoff", self.copyPic)
+        sink.set_property("signal-handoffs", True)
 
+        self.photobin = gst.Bin("photobin")
+        self.photobin.add(queue, colorspace, jpeg, sink)
 
-    def pipe(self):
-        return self.pipes[ len(self.pipes)-1 ]
+        gst.element_link_many(queue, colorspace, jpeg, sink)
 
+        pad = queue.get_static_pad("sink")
+        self.photobin.add_pad(gst.GhostPad("sink", pad))
 
-    def el(self, name):
-        return self.pipe().get_by_name(name)
+    def createAudioBin ( self ):
+        src = gst.element_factory_make("alsasrc", "absrc")
+        srccaps = gst.Caps("audio/x-raw-int,rate=16000,channels=1,depth=16")
+
+        enc = gst.element_factory_make("wavenc", "abenc")
+
+        sink = gst.element_factory_make("filesink", "absink")
+        sink.set_property("location", os.path.join(Instance.instancePath, "output.wav"))
+
+        self.audiobin = gst.Bin("audiobin")
+        self.audiobin.add(src, enc, sink)
+
+        src.link(enc, srccaps)
+        enc.link(sink)
+
+    def createVideoBin ( self ):
+        queue = gst.element_factory_make("queue", "vbqueue")
+
+        rate = gst.element_factory_make("videorate", "vbrate")
+        ratecaps = gst.Caps('video/x-raw-yuv,framerate='+str(self.VIDEO_FRAMERATE_SMALL)+'/1')
+
+        scale = gst.element_factory_make("videoscale", "vbscale")
+        scalecaps = gst.Caps('video/x-raw-yuv,width='+str(self.VIDEO_WIDTH_SMALL)+',height='+str(self.VIDEO_HEIGHT_SMALL))
+
+        colorspace = gst.element_factory_make("ffmpegcolorspace", "vbcolorspace")
+
+        enc = gst.element_factory_make("theoraenc", "vbenc")
+        enc.set_property("quality", 16)
+
+        mux = gst.element_factory_make("oggmux", "vbmux")
+
+        sink = gst.element_factory_make("filesink", "vbfile")
+        sink.set_property("location", os.path.join(Instance.instancePath, "output.ogg"))
+
+        self.videobin = gst.Bin("videobin")
+        self.videobin.add(queue, rate, scale, colorspace, enc, mux, sink)
+
+        queue.link(rate)
+        rate.link(scale, ratecaps)
+        scale.link(colorspace, scalecaps)
+        gst.element_link_many(colorspace, enc, mux, sink)
+
+        pad = queue.get_static_pad("sink")
+        self.videobin.add_pad(gst.GhostPad("sink", pad))
+
+    def createPipeline ( self ):
+        src = gst.element_factory_make("v4l2src", "camsrc")
+        try:
+            # old gst-plugins-good does not have this property
+            src.set_property("queue-size", 2)
+        except:
+            pass
+
+        tee = gst.element_factory_make("tee", "tee")
+        queue = gst.element_factory_make("queue", "dispqueue")
+        xvsink = gst.element_factory_make("xvimagesink", "xvsink")
+        self.pipeline.add(src, tee, queue, xvsink)
+        gst.element_link_many(src, tee, queue, xvsink)
 
     def thumbPipe(self):
         return self.thumbPipes[ len(self.thumbPipes)-1 ]
 
@@ -109,134 +177,28 @@ class Glive:
     def play(self):
-        self.pipe().set_state(gst.STATE_PLAYING)
+        self.pipeline.set_state(gst.STATE_PLAYING)
         self.playing = True
 
-
     def pause(self):
-        self.pipe().set_state(gst.STATE_PAUSED)
+        self.pipeline.set_state(gst.STATE_PAUSED)
         self.playing = False
 
     def stop(self):
-        self.pipe().set_state(gst.STATE_NULL)
+        self.pipeline.set_state(gst.STATE_NULL)
         self.playing = False
 
-        self._LAST_PIPETYPE = self._PIPETYPE
-        if (self._NEXT_PIPETYPE != -1):
-            self._PIPETYPE = self._NEXT_PIPETYPE
-            self._nextPipe()
-            self._NEXT_PIPETYPE = -1
-
-
     def is_playing(self):
         return self.playing
 
-
     def idlePlayElement(self, element):
         element.set_state(gst.STATE_PLAYING)
         return False
 
-    def _nextPipe(self):
-        if ( len(self.pipes) > 0 ):
-
-            pipe = self.pipe()
-            bus = pipe.get_bus()
-            n = len(self.pipes)-1
-            n = str(n)
-
-            #only disconnect what was connected based on the last pipetype
-            if ((self._LAST_PIPETYPE == self.PIPETYPE_XV_VIDEO_DISPLAY_RECORD)
-                or (self._LAST_PIPETYPE == self.PIPETYPE_X_VIDEO_DISPLAY)
-                or (self._LAST_PIPETYPE == self.PIPETYPE_AUDIO_RECORD) ):
-                bus.disconnect(self.SYNC_ID)
-                bus.remove_signal_watch()
-                bus.disable_sync_message_emission()
-            if (self._LAST_PIPETYPE == self.PIPETYPE_XV_VIDEO_DISPLAY_RECORD):
-                pipe.get_by_name("picFakesink").disconnect(self.HANDOFF_ID)
-            if (self._LAST_PIPETYPE == self.PIPETYPE_AUDIO_RECORD):
-                pipe.get_by_name("picFakesink").disconnect(self.HANDOFF_ID)
-
-        v4l2 = False
-        if (self._PIPETYPE == self.PIPETYPE_XV_VIDEO_DISPLAY_RECORD):
-            pipeline = gst.parse_launch("v4l2src name=v4l2src ! tee name=videoTee ! queue name=movieQueue ! videorate name=movieVideorate ! video/x-raw-yuv,framerate="+str(self.VIDEO_FRAMERATE_SMALL)+"/1 ! videoscale name=movieVideoscale ! video/x-raw-yuv,width="+str(self.VIDEO_WIDTH_SMALL)+",height="+str(self.VIDEO_HEIGHT_SMALL)+" ! ffmpegcolorspace name=movieFfmpegcolorspace ! theoraenc quality=16 name=movieTheoraenc ! oggmux name=movieOggmux ! filesink name=movieFilesink videoTee. ! xvimagesink name=xvimagesink videoTee. ! queue name=picQueue ! ffmpegcolorspace name=picFfmpegcolorspace ! jpegenc name=picJPegenc ! fakesink name=picFakesink alsasrc name=audioAlsasrc ! audio/x-raw-int,rate=16000,channels=1,depth=16 ! tee name=audioTee ! wavenc name=audioWavenc ! filesink name=audioFilesink audioTee. ! fakesink name=audioFakesink" )
-            v4l2 = True
-
-            videoTee = pipeline.get_by_name('videoTee')
-
-            picQueue = pipeline.get_by_name('picQueue')
-            picQueue.set_property("leaky", True)
-            picQueue.set_property("max-size-buffers", 1)
-            picFakesink = pipeline.get_by_name("picFakesink")
-            self.HANDOFF_ID = picFakesink.connect("handoff", self.copyPic)
-            picFakesink.set_property("signal-handoffs", True)
-            self.picExposureOpen = False
-
-            movieQueue = pipeline.get_by_name("movieQueue")
-            movieFilesink = pipeline.get_by_name("movieFilesink")
-            movieFilepath = os.path.join(Instance.instancePath, "output.ogg" ) #ogv
-            movieFilesink.set_property("location", movieFilepath )
-
-            audioFilesink = pipeline.get_by_name('audioFilesink')
-            audioFilepath = os.path.join(Instance.instancePath, "output.wav")
-            audioFilesink.set_property("location", audioFilepath )
-            audioTee = pipeline.get_by_name('audioTee')
-            audioWavenc = pipeline.get_by_name('audioWavenc')
-
-            audioTee.unlink(audioWavenc)
-            videoTee.unlink(movieQueue)
-            videoTee.unlink(picQueue)
-
-        elif (self._PIPETYPE == self.PIPETYPE_X_VIDEO_DISPLAY ):
-            pipeline = gst.parse_launch("v4l2src name=v4l2src ! queue name=xQueue ! videorate ! video/x-raw-yuv,framerate=2/1 ! videoscale ! video/x-raw-yuv,width="+str(ui.UI.dim_PIPW)+",height="+str(ui.UI.dim_PIPH)+" ! ffmpegcolorspace ! ximagesink name=ximagesink")
-            v4l2 = True
-
-        elif (self._PIPETYPE == self.PIPETYPE_AUDIO_RECORD):
-            pipeline = gst.parse_launch("v4l2src name=v4l2src ! tee name=videoTee ! xvimagesink name=xvimagesink videoTee. ! queue name=picQueue ! ffmpegcolorspace name=picFfmpegcolorspace ! jpegenc name=picJPegenc ! fakesink name=picFakesink alsasrc name=audioAlsasrc ! audio/x-raw-int,rate=16000,channels=1,depth=16 ! queue name=audioQueue ! audioconvert name=audioAudioconvert ! wavenc name=audioWavenc ! filesink name=audioFilesink" )
-            v4l2 = True
-
-            audioQueue = pipeline.get_by_name('audioQueue')
-            audioAudioconvert = pipeline.get_by_name('audioAudioconvert')
-            audioQueue.unlink(audioAudioconvert)
-
-            videoTee = pipeline.get_by_name('videoTee')
-            picQueue = pipeline.get_by_name('picQueue')
-            picQueue.set_property("leaky", True)
-            picQueue.set_property("max-size-buffers", 1)
-            picFakesink = pipeline.get_by_name('picFakesink')
-            self.HANDOFF_ID = picFakesink.connect("handoff", self.copyPic)
-            picFakesink.set_property("signal-handoffs", True)
-            self.picExposureOpen = False
-            videoTee.unlink(picQueue)
-
-            audioFilesink = pipeline.get_by_name('audioFilesink')
-            audioFilepath = os.path.join(Instance.instancePath, "output.wav")
-            audioFilesink.set_property("location", audioFilepath )
-
-        elif (self._PIPETYPE == self.PIPETYPE_SUGAR_JHBUILD):
-            pipeline = gst.parse_launch("fakesrc ! queue name=xQueue ! videorate ! video/x-raw-yuv,framerate=2/1 ! videoscale ! video/x-raw-yuv,width=160,height=120 ! ffmpegcolorspace ! ximagesink name=ximagesink")
-
-        if (v4l2):
-            v4l2src = pipeline.get_by_name('v4l2src')
-            try:
-                v4l2src.set_property("queue-size", 2)
-            except:
-                pass
-
-        if ((self._PIPETYPE == self.PIPETYPE_XV_VIDEO_DISPLAY_RECORD)
-            or (self._PIPETYPE == self.PIPETYPE_X_VIDEO_DISPLAY)
-            or (self._PIPETYPE == self.PIPETYPE_AUDIO_RECORD)):
-            bus = pipeline.get_bus()
-            bus.enable_sync_message_emission()
-            bus.add_signal_watch()
-            self.SYNC_ID = bus.connect('sync-message::element', self._onSyncMessageCb)
-            self.MESSAGE_ID = bus.connect('message', self._onMessageCb)
-
-        self.pipes.append(pipeline)
-
-
     def stopRecordingAudio( self ):
-        self.stop()
+        self.audiobin.set_state(gst.STATE_NULL)
+        self.pipeline.remove(self.audiobin)
         gobject.idle_add( self.stoppedRecordingAudio )
 
@@ -325,63 +287,96 @@ class Glive:
         tl[gst.TAG_TITLE] = Constants.istrBy % {"1":stringType, "2":str(Instance.nickName)}
         return tl
 
+    def blockedCb(self, x, y, z):
+        pass
 
-    def takePhoto(self):
-        if not(self.picExposureOpen):
-            self.picExposureOpen = True
-            self.el("videoTee").link(self.el("picQueue"))
 
+    def _takePhoto(self):
+        if self.picExposureOpen:
+            return
+
+        self.picExposureOpen = True
+        pad = self.photobin.get_static_pad("sink")
+        pad.set_blocked_async(True, self.blockedCb, None)
+        self.pipeline.add(self.photobin)
+        self.photobin.set_state(gst.STATE_PLAYING)
+        self.pipeline.get_by_name("tee").link(self.photobin)
+        pad.set_blocked_async(False, self.blockedCb, None)
+
+    def takePhoto(self):
+        self.photoMode = self.PHOTO_MODE_PHOTO
+        self._takePhoto()
 
     def copyPic(self, fsink, buffer, pad, user_data=None):
-        if (self.picExposureOpen):
+        if not self.picExposureOpen:
+            return
 
-            self.picExposureOpen = False
-            pic = gtk.gdk.pixbuf_loader_new_with_mime_type("image/jpeg")
-            pic.write( buffer )
-            pic.close()
-            pixBuf = pic.get_pixbuf()
-            del pic
+        pad = self.photobin.get_static_pad("sink")
+        pad.set_blocked_async(True, self.blockedCb, None)
+        self.pipeline.get_by_name("tee").unlink(self.photobin)
+        self.pipeline.remove(self.photobin)
+        pad.set_blocked_async(False, self.blockedCb, None)
+
+        self.picExposureOpen = False
+        pic = gtk.gdk.pixbuf_loader_new_with_mime_type("image/jpeg")
+        pic.write( buffer )
+        pic.close()
+        pixBuf = pic.get_pixbuf()
+        del pic
 
-            self.el("videoTee").unlink(self.el("picQueue"))
-            self.savePhoto( pixBuf )
+        self.savePhoto( pixBuf )
 
     def savePhoto(self, pixbuf):
-        if (self._PIPETYPE == self.PIPETYPE_AUDIO_RECORD):
+        if self.photoMode == self.PHOTO_MODE_AUDIO:
            self.audioPixbuf = pixbuf
        else:
            self.ca.m.savePhoto(pixbuf)
 
     def startRecordingVideo(self):
-        self.pipe().set_state(gst.STATE_READY)
-
         self.record = True
         self.audio = True
-        if (self.record):
-            self.el("videoTee").link(self.el("movieQueue"))
-
-        if (self.audio):
-            self.el("audioTee").link(self.el("audioWavenc"))
-
-        self.pipe().set_state(gst.STATE_PLAYING)
+        # It would be nicer to connect the video/audio-recording elements
+        # without stopping the pipeline. However, that seems to cause a
+        # very long delay at the start of the video recording where the first
+        # frame is 'frozen' for several seconds. MikeS from #gstreamer
+        # suggested that the videorate element might not be receiving a
+        # "new segment" signal soon enough.
+        #
+        # Stopping the pipeline while we reshuffle neatly works around this
+        # with minimal user experience impact.
+        self.pipeline.set_state(gst.STATE_NULL)
+        self.pipeline.add(self.videobin)
+        self.pipeline.get_by_name("tee").link(self.videobin)
+        self.pipeline.add(self.audiobin)
+        self.pipeline.set_state(gst.STATE_PLAYING)
 
     def startRecordingAudio(self):
         self.audioPixbuf = None
 
-        self.pipe().set_state(gst.STATE_READY)
-        self.takePhoto()
+        self.photoMode = self.PHOTO_MODE_AUDIO
+        self._takePhoto()
 
         self.record = True
-        if (self.record):
-            self.el("audioQueue").link(self.el("audioAudioconvert"))
-
-        self.pipe().set_state(gst.STATE_PLAYING)
-
+        self.pipeline.add(self.audiobin)
+        self.audiobin.set_state(gst.STATE_PLAYING)
 
     def stopRecordingVideo(self):
-        self.stop()
+        # As when we start recording, we also stop the pipeline while we
+        # adjust it to stop recording. If we do it on the fly, the live
+        # video feed to the screen afterwards lags by several seconds. Weird!
+        self._eos_cb = self.stopRecordingVideoEOS
+        self.pipeline.get_by_name('camsrc').send_event(gst.event_new_eos())
+        self.audiobin.get_by_name('absrc').send_event(gst.event_new_eos())
+
+    def stopRecordingVideoEOS(self):
+        self.pipeline.set_state(gst.STATE_NULL)
+        self.pipeline.get_by_name("tee").unlink(self.videobin)
+        self.pipeline.remove(self.videobin)
+        self.pipeline.remove(self.audiobin)
+        self.pipeline.set_state(gst.STATE_PLAYING)
         gobject.idle_add( self.stoppedRecordingVideo )
 
@@ -503,19 +498,16 @@ class Glive:
     def _onMessageCb(self, bus, message):
         t = message.type
         if t == gst.MESSAGE_EOS:
-            #print("MESSAGE_EOS")
-            pass
+            if self._eos_cb:
+                cb = self._eos_cb
+                self._eos_cb = None
+                cb()
         elif t == gst.MESSAGE_ERROR:
             #todo: if we come out of suspend/resume with errors, then get us back up and running...
             #todo: handle "No space left on the resource.gstfilesink.c"
             #err, debug = message.parse_error()
             pass
-
-    def isXv(self):
-        return self._PIPETYPE == self.PIPETYPE_XV_VIDEO_DISPLAY_RECORD
-
-
     def abandonMedia(self):
         self.stop()
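The glive.py changes above replace the old parse_launch pipelines with one always-running core (v4l2src ! tee ! queue ! xvimagesink) plus three pre-built bins (photobin, audiobin, videobin) that are attached and detached on demand. The following standalone sketch, which is not part of the patch, condenses the photo-capture hot-plug that _takePhoto() and copyPic() perform with gst-python 0.10; the names attach_snapshot_bin and detach_snapshot_bin are illustrative only.

    import gst

    def _blocked_cb(pad, blocked, user_data):
        # set_blocked_async() requires a callback; there is nothing to do here.
        pass

    def attach_snapshot_bin(pipeline, photobin):
        # Block the bin's ghost sink pad so no buffers flow while relinking.
        pad = photobin.get_static_pad("sink")
        pad.set_blocked_async(True, _blocked_cb, None)
        pipeline.add(photobin)
        photobin.set_state(gst.STATE_PLAYING)
        pipeline.get_by_name("tee").link(photobin)
        pad.set_blocked_async(False, _blocked_cb, None)

    def detach_snapshot_bin(pipeline, photobin):
        # Called from the fakesink handoff once one JPEG frame has been captured.
        pad = photobin.get_static_pad("sink")
        pad.set_blocked_async(True, _blocked_cb, None)
        pipeline.get_by_name("tee").unlink(photobin)
        pipeline.remove(photobin)
        pad.set_blocked_async(False, _blocked_cb, None)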
diff --git a/glivex.py b/glivex.py
new file mode 100644
index 0000000..5b8267f
--- /dev/null
+++ b/glivex.py
@@ -0,0 +1,147 @@
+#Copyright (c) 2008, Media Modifications Ltd.
+
+#Permission is hereby granted, free of charge, to any person obtaining a copy
+#of this software and associated documentation files (the "Software"), to deal
+#in the Software without restriction, including without limitation the rights
+#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+#copies of the Software, and to permit persons to whom the Software is
+#furnished to do so, subject to the following conditions:
+
+#The above copyright notice and this permission notice shall be included in
+#all copies or substantial portions of the Software.
+
+#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+#THE SOFTWARE.
+
+# This class is a cut-down version of glive which uses an ximagesink
+# rather than an xvimagesink. This is used in video playback mode, where
+# our only Xv port is used for Theora playback.
+#
+# I tried to modify the glive pipeline to allow swapping an xvimagesink for
+# an ximagesink and vice-versa, but that didn't work out (all kinds of strange
+# behaviour, perhaps a gstreamer bug). So we resort to using a separate
+# pipeline - ugly, but it works...
+
+import os
+import gtk
+import pygtk
+pygtk.require('2.0')
+import sys
+import gst
+import gst.interfaces
+import pygst
+pygst.require('0.10')
+import time
+import threading
+import gobject
+gobject.threads_init()
+
+from instance import Instance
+from constants import Constants
+import record
+import utils
+import ui
+
+class GliveX:
+    def __init__(self, pca):
+        self.window = None
+        self.ca = pca
+
+        self.playing = False
+
+        self.pipeline = gst.Pipeline("slow-pipeline")
+        self.createPipeline()
+
+        bus = self.pipeline.get_bus()
+        bus.enable_sync_message_emission()
+        bus.add_signal_watch()
+        self.SYNC_ID = bus.connect('sync-message::element', self._onSyncMessageCb)
+        self.MESSAGE_ID = bus.connect('message', self._onMessageCb)
+
+    def createPipeline ( self ):
+        src = gst.element_factory_make("v4l2src", "camsrc")
+        try:
+            # old gst-plugins-good does not have this property
+            src.set_property("queue-size", 2)
+        except:
+            pass
+
+        queue = gst.element_factory_make("queue", "dispqueue")
+        queue.set_property("leaky", True)
+        queue.set_property('max-size-buffers', 1)
+
+        scale = gst.element_factory_make("videoscale", "scale")
+        scalecaps = gst.Caps('video/x-raw-yuv,width='+str(ui.UI.dim_PIPW)+',height='+str(ui.UI.dim_PIPH))
+        colorspace = gst.element_factory_make("ffmpegcolorspace", "colorspace")
+        xsink = gst.element_factory_make("ximagesink", "xsink")
+
+        self.pipeline.add(src, queue, scale, colorspace, xsink)
+        gst.element_link_many(src, queue, scale)
+        scale.link(colorspace, scalecaps)
+        colorspace.link(xsink)
+
+    def play(self):
+        self.pipeline.set_state(gst.STATE_PLAYING)
+        self.playing = True
+
+    def pause(self):
+        self.pipeline.set_state(gst.STATE_PAUSED)
+        self.playing = False
+
+    def stop(self):
+        self.pipeline.set_state(gst.STATE_NULL)
+        self.playing = False
+
+    def is_playing(self):
+        return self.playing
+
+    def idlePlayElement(self, element):
+        element.set_state(gst.STATE_PLAYING)
+        return False
+
+    def _onSyncMessageCb(self, bus, message):
+        if message.structure is None:
+            return
+        if message.structure.get_name() == 'prepare-xwindow-id':
+            self.window.set_sink(message.src)
+            message.src.set_property('force-aspect-ratio', True)
+
+    def _onMessageCb(self, bus, message):
+        t = message.type
+        if t == gst.MESSAGE_EOS:
+            #print("MESSAGE_EOS")
+            pass
+        elif t == gst.MESSAGE_ERROR:
+            #todo: if we come out of suspend/resume with errors, then get us back up and running...
+            #todo: handle "No space left on the resource.gstfilesink.c"
+            #err, debug = message.parse_error()
+            pass
+
+class SlowLiveVideoWindow(gtk.Window):
+    def __init__(self, bgd ):
+        gtk.Window.__init__(self)
+
+        self.imagesink = None
+        self.glivex = None
+
+        self.modify_bg( gtk.STATE_NORMAL, bgd )
+        self.modify_bg( gtk.STATE_INSENSITIVE, bgd )
+        self.unset_flags(gtk.DOUBLE_BUFFERED)
+        self.set_flags(gtk.APP_PAINTABLE)
+
+    def set_glivex(self, pglivex):
+        self.glivex = pglivex
+        self.glivex.window = self
+
+    def set_sink(self, sink):
+        if (self.imagesink != None):
+            assert self.window.xid
+            self.imagesink = None
+            del self.imagesink
+
+        self.imagesink = sink
+        self.imagesink.set_xwindow_id(self.window.xid)
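GliveX exists because the machine has a single Xv port: while gplay uses Xv for Theora playback, the live preview falls back to a plain ximagesink. A condensed view of how the ui.py changes further down swap the two pipelines; the helper names enter_playback and leave_playback are hypothetical, the attributes match the patch.

    def enter_playback(ui):
        # Free the Xv port for gplay; keep a slower X11 preview in the picture-in-picture.
        ui.ca.glive.stop()
        ui.ca.glivex.play()

    def leave_playback(ui):
        # Mirrors _returnButtonReleaseCb(): give the Xv port back to the live feed.
        ui.ca.gplay.stop()
        ui.ca.glivex.stop()
        ui.ca.glive.play()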
diff --git a/instance.py b/instance.py
index 0aac3ad..1ab5407 100644
--- a/instance.py
+++ b/instance.py
@@ -10,7 +10,12 @@ import record
 
 class Instance:
     key = profile.get_pubkey()
-    keyHash = util._sha_data(key)
+    if hasattr(util, '_sha_data'):
+        # sugar-0.82 and previous
+        keyHash = util._sha_data(key)
+    else:
+        keyHash = util.sha_data(key)
+
     keyHashPrintable = util.printable_hash(keyHash)
     nickName = profile.get_nick_name()
 
@@ -40,4 +45,5 @@ def recreateTmp():
 
 def recreateData():
     if (not os.path.exists(Instance.dataPath)):
-        os.makedirs(Instance.dataPath)
\ No newline at end of file
+        os.makedirs(Instance.dataPath)
+
diff --git a/model.py b/model.py
index c9f2a90..15c4553 100644
--- a/model.py
+++ b/model.py
@@ -297,7 +297,6 @@ class Model:
         #resume live video from the camera (if the activity is active)
         if (self.ca.ui.ACTIVE):
             self.ca.ui.updateVideoComponents()
-            self.ca.glive.play()
 
         self.ca.ui.progressWindow.updateProgress( 0, "" )
         self.setRecording( False )
@@ -457,4 +456,4 @@ class Model:
         self.MODE = Constants.MODE_AUDIO
         self.setUpdating(True)
-        gobject.idle_add( self.setupMode, self.MODE, True )
\ No newline at end of file
+        gobject.idle_add( self.setupMode, self.MODE, True )
diff --git a/record.py b/record.py
index 273b612..74df6e2 100755
--- a/record.py
+++ b/record.py
@@ -37,6 +37,7 @@ from model import Model
 from ui import UI
 from recordtube import RecordTube
 from glive import Glive
+from glivex import GliveX
 from gplay import Gplay
 from greplay import Greplay
 from recorded import Recorded
@@ -74,6 +75,7 @@ class Record(activity.Activity):
         #the main classes
         self.m = Model(self)
         self.glive = Glive(self)
+        self.glivex = GliveX(self)
         self.gplay = Gplay()
         self.ui = UI(self)
 
@@ -131,6 +133,7 @@ class Record(activity.Activity):
             self.m.doShutter()
         else:
             self.glive.stop()
+            self.glivex.stop()
 
     def restartPipes(self):
@@ -150,8 +153,9 @@ class Record(activity.Activity):
         if (self.gplay != None):
             self.gplay.stop( )
         if (self.glive != None):
-            self.glive.setPipeType( self.glive.PIPETYPE_SUGAR_JHBUILD )
             self.glive.stop( )
+        if (self.glivex != None):
+            self.glivex.stop( )
 
         #this calls write_file
         activity.Activity.close( self )
diff --git a/ui.py b/ui.py
index 64786b2..d1a6726 100644
--- a/ui.py
+++ b/ui.py
@@ -50,6 +50,7 @@ from p5_button import P5Button
 from p5_button import Polygon
 from p5_button import Button
 from glive import LiveVideoWindow
+from glivex import SlowLiveVideoWindow
 from gplay import PlayVideoWindow
 from recorded import Recorded
 from button import RecdButton
@@ -92,7 +93,13 @@ class UI:
         self.piph = self.__class__.dim_PIPH
 
         #ui modes
+
+        # True when we're in full-screen mode, False otherwise
         self.FULLSCREEN = False
+
+        # True when we're showing the live video feed in the primary screen
+        # area, False otherwise (even when we are still showing live video
+        # in a p-i-p)
         self.LIVEMODE = True
         self.LAST_MODE = -1
@@ -103,7 +110,11 @@ class UI:
         self.LAST_TRANSCODING = False
         self.TRANSCODING = False
         self.MESHING = False
+
+        # RECD_INFO_ON is True when the 'info' for a recording is being
+        # displayed on-screen (who recorded it, tags, etc.), and False otherwise.
         self.RECD_INFO_ON = False
+
         self.UPDATE_DURATION_ID = 0
         self.UPDATE_TIMER_ID = 0
         self.COUNTINGDOWN = False
@@ -367,6 +378,16 @@ class UI:
         self.livePhotoWindow.add_events(gtk.gdk.VISIBILITY_NOTIFY_MASK)
         self.livePhotoWindow.connect("visibility-notify-event", self._visibleNotifyCb)
 
+        #video playback windows
+        self.playOggWindow = PlayVideoWindow(Constants.colorBlack.gColor)
+        self.addToWindowStack( self.playOggWindow, self.windowStack[len(self.windowStack)-1] )
+        #self.playOggWindow.set_gplay(self.ca.gplay)
+        self.ca.gplay.window = self.playOggWindow
+        self.playOggWindow.set_events(gtk.gdk.BUTTON_RELEASE_MASK)
+        self.playOggWindow.connect("button_release_event", self._mediaClickedForPlayback)
+        self.playOggWindow.add_events(gtk.gdk.VISIBILITY_NOTIFY_MASK)
+        self.playOggWindow.connect("visibility-notify-event", self._visibleNotifyCb)
+
         #border behind
         self.pipBgdWindow = gtk.Window()
         self.pipBgdWindow.modify_bg( gtk.STATE_NORMAL, Constants.colorWhite.gColor )
         self.pipBgdWindow.modify_bg( gtk.STATE_INSENSITIVE, Constants.colorWhite.gColor )
         self.addToWindowStack( self.pipBgdWindow, self.windowStack[len(self.windowStack)-1] )
 
@@ -381,28 +402,13 @@ class UI:
         self.liveVideoWindow.add_events(gtk.gdk.VISIBILITY_NOTIFY_MASK)
         self.liveVideoWindow.connect("visibility-notify-event", self._visibleNotifyCb)
 
-        #video playback windows
-        self.playOggWindow = PlayVideoWindow(Constants.colorBlack.gColor)
-        self.addToWindowStack( self.playOggWindow, self.windowStack[len(self.windowStack)-1] )
-        #self.playOggWindow.set_gplay(self.ca.gplay)
-        self.ca.gplay.window = self.playOggWindow
-        self.playOggWindow.set_events(gtk.gdk.BUTTON_RELEASE_MASK)
-        self.playOggWindow.connect("button_release_event", self._mediaClickedForPlayback)
-        self.playOggWindow.add_events(gtk.gdk.VISIBILITY_NOTIFY_MASK)
-        self.playOggWindow.connect("visibility-notify-event", self._visibleNotifyCb)
-
-        #border behind
-        self.pipBgdWindow2 = gtk.Window()
-        self.pipBgdWindow2.modify_bg( gtk.STATE_NORMAL, Constants.colorWhite.gColor )
-        self.pipBgdWindow2.modify_bg( gtk.STATE_INSENSITIVE, Constants.colorWhite.gColor )
-        self.addToWindowStack( self.pipBgdWindow2, self.windowStack[len(self.windowStack)-1] )
-
-        self.playLiveWindow = LiveVideoWindow(Constants.colorBlack.gColor)
-        self.addToWindowStack( self.playLiveWindow, self.windowStack[len(self.windowStack)-1] )
-        self.playLiveWindow.set_events(gtk.gdk.BUTTON_RELEASE_MASK)
-        self.playLiveWindow.connect("button_release_event", self._playLiveButtonReleaseCb)
-        self.playLiveWindow.add_events(gtk.gdk.VISIBILITY_NOTIFY_MASK)
-        self.playLiveWindow.connect("visibility-notify-event", self._visibleNotifyCb)
+        self.slowLiveVideoWindow = SlowLiveVideoWindow(Constants.colorBlack.gColor)
+        self.addToWindowStack( self.slowLiveVideoWindow, self.windowStack[len(self.windowStack)-1] )
+        self.slowLiveVideoWindow.set_glivex(self.ca.glivex)
+        self.slowLiveVideoWindow.set_events(gtk.gdk.BUTTON_RELEASE_MASK)
+        self.slowLiveVideoWindow.connect("button_release_event", self._returnButtonReleaseCb)
+        self.slowLiveVideoWindow.add_events(gtk.gdk.VISIBILITY_NOTIFY_MASK)
+        self.slowLiveVideoWindow.connect("visibility-notify-event", self._visibleNotifyCb)
 
         self.recordWindow = RecordWindow(self)
         self.addToWindowStack( self.recordWindow, self.windowStack[len(self.windowStack)-1] )
 
@@ -458,7 +464,7 @@ class UI:
         if (self.ca.m.MODE == Constants.MODE_VIDEO):
             if (not self.LIVEMODE and widget == self.playOggWindow):
                 temp_ACTIVE = False
-            if ( self.LIVEMODE and widget == self.playLiveWindow):
+            if ( self.LIVEMODE and widget == self.liveVideoWindow):
                 temp_ACTIVE = False
 
@@ -482,8 +488,6 @@ class UI:
         self.pipBgdWindow.resize( pgdDim[0], pgdDim[1] )
         self.liveVideoWindow.resize( imgDim[0], imgDim[1] )
         self.playOggWindow.resize( imgDim[0], imgDim[1] )
-        self.playLiveWindow.resize( imgDim[0], imgDim[1] )
-        self.pipBgdWindow2.resize( pgdDim[0], pgdDim[1] )
         self.recordWindow.resize( eyeDim[0], eyeDim[1] )
         self.maxWindow.resize( maxDim[0], maxDim[1] )
         self.progressWindow.resize( prgDim[0], prgDim[1] )
@@ -541,8 +545,8 @@ class UI:
     def hideWidgets( self ):
         self.moveWinOffscreen( self.maxWindow )
         self.moveWinOffscreen( self.pipBgdWindow )
-        self.moveWinOffscreen( self.pipBgdWindow2 )
         self.moveWinOffscreen( self.infWindow )
+        self.moveWinOffscreen( self.slowLiveVideoWindow )
 
         if (self.FULLSCREEN):
             self.moveWinOffscreen( self.recordWindow )
@@ -554,7 +558,7 @@
                 self.moveWinOffscreen( self.liveVideoWindow )
         elif (self.ca.m.MODE == Constants.MODE_VIDEO):
             if (not self.LIVEMODE):
-                self.moveWinOffscreen( self.playLiveWindow )
+                self.moveWinOffscreen( self.liveVideoWindow )
         elif (self.ca.m.MODE == Constants.MODE_AUDIO):
             if (not self.LIVEMODE):
                 self.moveWinOffscreen( self.liveVideoWindow )
@@ -830,6 +834,13 @@ class UI:
 
         self.resumeLiveVideo()
 
+    def _returnButtonReleaseCb(self, widget, event):
+        self.ca.gplay.stop()
+        self.ca.glivex.stop()
+        self.ca.glive.play()
+        self.resumeLiveVideo()
+
+
     def resumeLiveVideo( self ):
         self.livePhotoCanvas.setImage( None )
 
@@ -862,7 +873,7 @@ class UI:
         self.showLiveVideoTags()
 
         self.LIVEMODE = True
-        self.startLiveVideo( self.playLiveWindow, self.ca.glive.PIPETYPE_XV_VIDEO_DISPLAY_RECORD, False )
+        self.startLiveVideo( False )
         self.updateVideoComponents()
 
@@ -926,12 +937,7 @@ class UI:
         #set up the x & xv x-ition (if need be)
         self.ca.gplay.stop()
-        if (self.ca.m.MODE == Constants.MODE_PHOTO):
-            self.startLiveVideo( self.liveVideoWindow, self.ca.glive.PIPETYPE_XV_VIDEO_DISPLAY_RECORD, True )
-        elif (self.ca.m.MODE == Constants.MODE_VIDEO):
-            self.startLiveVideo( self.playLiveWindow, self.ca.glive.PIPETYPE_XV_VIDEO_DISPLAY_RECORD, True )
-        elif (self.ca.m.MODE == Constants.MODE_AUDIO):
-            self.startLiveVideo( self.liveVideoWindow, self.ca.glive.PIPETYPE_AUDIO_RECORD, True )
+        self.startLiveVideo( True )
 
         bottomKid = self.bottomCenter.get_child()
         if (bottomKid != None):
@@ -944,19 +950,16 @@ class UI:
         self.resetWidgetFadeTimer()
 
-    def startLiveVideo(self, window, pipetype, force):
+    def startLiveVideo(self, force):
         #We need to know which window and which pipe here
         #if returning from another activity, active won't be false and needs to be to get started
-        if (self.ca.glive.getPipeType() == pipetype
-            and self.ca.glive.window == window
+        if (self.ca.glive.window == self.liveVideoWindow
             and self.ca.props.active
             and not force):
             return
 
-        self.ca.glive.setPipeType( pipetype )
-        window.set_glive(self.ca.glive)
-        self.ca.glive.stop()
+        self.liveVideoWindow.set_glive(self.ca.glive)
         self.ca.glive.play()
 
@@ -1326,8 +1329,8 @@ class UI:
             pos.append({"position":"inb", "window":self.livePhotoWindow} )
             pos.append({"position":"inf", "window":self.infWindow} )
         elif (self.ca.m.MODE == Constants.MODE_VIDEO):
-            pos.append({"position":"pgd", "window":self.pipBgdWindow2} )
-            pos.append({"position":"pip", "window":self.playLiveWindow} )
+            pos.append({"position":"pgd", "window":self.pipBgdWindow} )
+            pos.append({"position":"pip", "window":self.slowLiveVideoWindow} )
             pos.append({"position":"inb", "window":self.playOggWindow} )
pos.append({"position":"inf", "window":self.infWindow} ) elif (self.ca.m.MODE == Constants.MODE_AUDIO): @@ -1352,14 +1355,14 @@ class UI: pos.append({"position":"tmr", "window":self.progressWindow} ) elif (self.ca.m.MODE == Constants.MODE_VIDEO): if (self.LIVEMODE): - pos.append({"position":"img", "window":self.playLiveWindow} ) + pos.append({"position":"img", "window":self.liveVideoWindow} ) pos.append({"position":"max", "window":self.maxWindow} ) pos.append({"position":"eye", "window":self.recordWindow} ) pos.append({"position":"prg", "window":self.progressWindow} ) else: pos.append({"position":"img", "window":self.playOggWindow} ) - pos.append({"position":"pgd", "window":self.pipBgdWindow2} ) - pos.append({"position":"pip", "window":self.playLiveWindow} ) + pos.append({"position":"pgd", "window":self.pipBgdWindow} ) + pos.append({"position":"pip", "window":self.slowLiveVideoWindow} ) if (not self.MESHING): pos.append({"position":"max", "window":self.maxWindow} ) pos.append({"position":"scr", "window":self.scrubWindow} ) @@ -1419,8 +1422,8 @@ class UI: pos.append({"position":"eye", "window":self.recordWindow} ) elif (self.ca.m.MODE == Constants.MODE_VIDEO): if (not self.LIVEMODE): - pos.append({"position":"pgd", "window":self.pipBgdWindow2} ) - pos.append({"position":"pip", "window":self.playLiveWindow} ) + pos.append({"position":"pgd", "window":self.pipBgdWindow} ) + pos.append({"position":"pip", "window":self.slowLiveVideoWindow} ) if (not self.MESHING): pos.append({"position":"max", "window":self.maxWindow} ) pos.append({"position":"scr", "window":self.scrubWindow} ) @@ -1617,11 +1620,6 @@ class UI: def showVideo( self, recd ): - if (self.LIVEMODE): - if (self.ca.glive.isXv()): - self.ca.glive.setPipeType( self.ca.glive.PIPETYPE_X_VIDEO_DISPLAY ) - self.ca.glive.stop() - self.ca.glive.play() downloading = self.ca.requestMeshDownload(recd) if (not downloading): @@ -1641,12 +1639,17 @@ class UI: if (not downloading): mediaFilepath = recd.getMediaFilepath() if (mediaFilepath != None): + self.ca.glive.stop() + self.ca.glivex.play() videoUrl = "file://" + str( mediaFilepath ) self.ca.gplay.setLocation(videoUrl) self.scrubWindow.doPlay() ableToShowVideo = True if (not ableToShowVideo): + # FIXME is this correct? + self.ca.glive.stop() + self.ca.glivex.play() thumbFilepath = recd.getThumbFilepath( ) thumbUrl = "file://" + str( thumbFilepath ) self.ca.gplay.setLocation(thumbUrl) @@ -1664,7 +1667,7 @@ class UI: self.livePhotoCanvas.setImage( None ) elif (recd.type == Constants.TYPE_VIDEO): self.ca.gplay.stop() - self.startLiveVideo( self.playLiveWindow, self.ca.glive.PIPETYPE_XV_VIDEO_DISPLAY_RECORD, False ) + self.startLiveVideo( False ) elif (recd.type == Constants.TYPE_AUDIO): self.livePhotoCanvas.setImage( None ) self.startLiveAudio() @@ -1679,7 +1682,6 @@ class UI: self.ca.m.setUpdating(True) self.ca.gplay.stop() - self.ca.glive.setPipeType( self.ca.glive.PIPETYPE_AUDIO_RECORD ) self.liveVideoWindow.set_glive(self.ca.glive) self.showLiveVideoTags() @@ -2358,4 +2360,4 @@ class AudioToolbar(gtk.Toolbar): def getDuration(self): - return 60 * Constants.DURATIONS[self.durCb.combo.get_active()] \ No newline at end of file + return 60 * Constants.DURATIONS[self.durCb.combo.get_active()] -- cgit v0.9.1