git.parisson.com Git - gst-tests.git/commitdiff
init master
author: Guillaume Pellerin <yomguy@parisson.com>
Fri, 17 May 2013 11:22:02 +0000 (13:22 +0200)
committer: Guillaume Pellerin <yomguy@parisson.com>
Fri, 17 May 2013 11:22:02 +0000 (13:22 +0200)
23 files changed:
audio_player_qt.py [new file with mode: 0644]
audio_video.py [new file with mode: 0644]
audio_video.pyc [new file with mode: 0644]
audio_video_crossfade.py [new file with mode: 0644]
control_mixer.py [new file with mode: 0644]
control_mixer_osc.py [new file with mode: 0644]
control_mixer_osc_touch.py [new file with mode: 0644]
control_mixer_osc_touch_1cam.py [new file with mode: 0644]
control_mixer_osc_touch_2cam.py [new file with mode: 0644]
control_mixer_osc_touch_3cams.py [new file with mode: 0644]
control_mixer_parallel.py [new file with mode: 0644]
control_mixer_parallel_no_effects.py [new file with mode: 0644]
control_mixer_pipes.py [new file with mode: 0644]
cross-fade.py [new file with mode: 0644]
cross-fade_2.py [new file with mode: 0644]
demo.py [new file with mode: 0644]
demo.pyc [new file with mode: 0644]
gtk_sink_pad.py [new file with mode: 0644]
osc_test.py [new file with mode: 0644]
rtpx264.sh [new file with mode: 0755]
rtpx264_pl.sh [new file with mode: 0755]
simple-effect-gtk.py [new file with mode: 0644]
video_player_qt.py [new file with mode: 0644]

diff --git a/audio_player_qt.py b/audio_player_qt.py
new file mode 100644 (file)
index 0000000..ed7da6e
--- /dev/null
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
+import sys, os
+from PyQt4 import QtCore, QtGui, uic
+from PyQt4.phonon import Phonon
+
class AudioPlayer(QtGui.QWidget):
    """Phonon-based audio player widget.

    Lays out, left to right: a play/pause button, a seek slider, an
    elapsed-time label and a (stub) download button for ``url``.
    """

    def __init__(self, url, parent = None):
        self.url = url

        QtGui.QWidget.__init__(self, parent)
        self.setSizePolicy(QtGui.QSizePolicy.Expanding,
            QtGui.QSizePolicy.Preferred)

        # Media object: tick every 100 ms so the time label stays fresh.
        self.player = Phonon.createPlayer(Phonon.MusicCategory,
            Phonon.MediaSource(url))
        self.player.setTickInterval(100)
        self.player.tick.connect(self.tock)
        self.player.stateChanged.connect(self.stateChanged)

        self.play_pause = QtGui.QPushButton(self)
        self.play_pause.setIcon(QtGui.QIcon(':/icons/player_play.svg'))
        self.play_pause.clicked.connect(self.playClicked)

        # Seek slider is bound directly to the media object.
        self.slider = Phonon.SeekSlider(self.player, self)

        self.status = QtGui.QLabel(self)
        self.status.setAlignment(QtCore.Qt.AlignRight |
            QtCore.Qt.AlignVCenter)

        self.download = QtGui.QPushButton("Download", self)
        self.download.clicked.connect(self.fetch)

        box = QtGui.QHBoxLayout(self)
        for widget in (self.play_pause, self.slider,
                       self.status, self.download):
            box.addWidget(widget)

    def playClicked(self):
        """Toggle between playing and paused."""
        if self.player.state() == Phonon.PlayingState:
            self.player.pause()
        else:
            self.player.play()

    def stateChanged(self, new, old):
        """Swap the button icon to match the new playback state."""
        if new == Phonon.PlayingState:
            icon = ':/icons/player_pause.svg'
        else:
            icon = ':/icons/player_play.svg'
        self.play_pause.setIcon(QtGui.QIcon(icon))

    def tock(self, time):
        """Render the tick position (milliseconds) as HH:MM:SS."""
        seconds = time // 1000
        h, remainder = divmod(seconds, 3600)
        m, s = divmod(remainder, 60)
        self.status.setText('%02d:%02d:%02d' % (h, m, s))

    def fetch(self):
        # Stub: real downloading is not implemented.
        print('Should download %s' % self.url)
+
def main():
    """Build the Qt application and show an AudioPlayer for argv[1].

    Exits with a usage message when no media URL/path was supplied,
    instead of crashing with an IndexError.
    """
    if len(sys.argv) < 2:
        sys.exit('usage: python audio_player_qt.py <url-or-file>')
    app = QtGui.QApplication(sys.argv)
    window = AudioPlayer(sys.argv[1])
    window.show()
    # It's exec_ because exec is a reserved word in Python
    sys.exit(app.exec_())

if __name__ == "__main__":
    main()
diff --git a/audio_video.py b/audio_video.py
new file mode 100644 (file)
index 0000000..bf17145
--- /dev/null
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+
+"""A short Audio-Video example"""
+import gobject
+gobject.threads_init()
+import gst
+import pygtk
+pygtk.require("2.0")
+import gtk
+gtk.gdk.threads_init()
+import sys
+import os
+from demo import Demo
+
def create_decodebin():
    """Return a decodebin element, preferring the newer "decodebin2".

    Falls back to the legacy "decodebin" only when the "decodebin2"
    factory is not registered with GStreamer.
    """
    try:
        return gst.element_factory_make("decodebin2")
    except gst.ElementNotFoundError:
        # Only a missing factory should trigger the fallback; the original
        # bare except also hid unrelated errors (typos, plugin bugs).
        return gst.element_factory_make("decodebin")
+
class DemoException(Exception):
    """Base exception class for errors which occur during demos."""

    def __init__(self, reason):
        # Forward the reason to Exception so str(e) and e.args carry it
        # (without this, Python 2 renders the exception as an empty string).
        Exception.__init__(self, reason)
        self.reason = reason
+
class AVDemo(Demo):
    """Extends base demo with both audio and video sinks
    * a window containing a drawing area and basic media controls
    * a basic gstreamer pipeline using an ximagesink and an autoaudiosink
    * connects the ximagesink to the window's drawing area

    Derived classes need only override magic(), __name__,
    and __usage__ to create new demos."""

    __name__ = "AV Demo"
    __usage__ = "python audio_video.py <filename>"
    __def_win_size__ = (320, 240)

    # this comment allows us to include only a portion of the file
    # in the tutorial for this demo

    # NOTE: the tuple-unpacking parameter below is Python-2-only syntax.
    def magic(self, pipeline, (videosink, audiosink), args):
        """This is where the magic happens"""

        # decodebin pads appear dynamically; route each new pad to the
        # first queue (video, then audio) that offers a compatible pad.
        def onPadAdded(source, pad):
            # first we see if we can link to the videosink
            tpad = videoqueue.get_compatible_pad(pad)
            if tpad:
                pad.link(tpad)
                return
            # if not, we try the audio sink
            tpad = audioqueue.get_compatible_pad(pad)
            if tpad:
                pad.link(tpad)
                return

        # args[0] is the media file path from the command line.
        src = gst.element_factory_make("filesrc", "src")
        src.props.location = args[0]
        dcd = create_decodebin()
        audioqueue = gst.element_factory_make("queue")
        videoqueue = gst.element_factory_make("queue")
        pipeline.add(src, dcd, audioqueue, videoqueue)

        src.link(dcd)
        videoqueue.link(videosink)
        audioqueue.link(audiosink)
        dcd.connect("pad-added", onPadAdded)

    def createPipeline(self, w):
        """Given a window, creates a pipeline and connects it to the window"""

        # code will make the ximagesink output in the specified window
        def set_xid(window):
            # Must hold the GTK lock: this runs from a streaming thread.
            gtk.gdk.threads_enter()
            videosink.set_xwindow_id(window.window.xid)
            videosink.expose()
            gtk.gdk.threads_leave()

        # this code receives the messages from the pipeline. if we
        # need to set X11 id, then we call set_xid
        def bus_handler(unused_bus, message):
            if message.type == gst.MESSAGE_ELEMENT:
                if message.structure.get_name() == 'prepare-xwindow-id':
                    set_xid(w)
            return gst.BUS_PASS

        # create our pipeline, and connect our bus_handler
        self.pipeline = gst.Pipeline()
        bus = self.pipeline.get_bus()
        # sync handler: called on the posting thread, before async delivery
        bus.set_sync_handler(bus_handler)

        videosink = gst.element_factory_make("ximagesink", "sink")
        videosink.set_property("force-aspect-ratio", True)
        videosink.set_property("handle-expose", True)
        scale = gst.element_factory_make("videoscale", "scale")
        cspace = gst.element_factory_make("ffmpegcolorspace", "cspace")

        audiosink = gst.element_factory_make("autoaudiosink")
        audioconvert = gst.element_factory_make("audioconvert")

        # pipeline looks like: ... ! cspace ! scale ! sink
        #                      ... ! audioconvert ! autoaudiosink
        self.pipeline.add(cspace, scale, videosink, audiosink,
            audioconvert)
        scale.link(videosink)
        cspace.link(scale)
        audioconvert.link(audiosink)
        # (cspace, audioconvert) are the entry points handed to magic().
        return (self.pipeline, (cspace, audioconvert))
+
# Entry point: run the demo when this file is executed directly.
if __name__ == '__main__':
    demo = AVDemo()
    demo.run()
\ No newline at end of file
diff --git a/audio_video.pyc b/audio_video.pyc
new file mode 100644 (file)
index 0000000..433e326
Binary files /dev/null and b/audio_video.pyc differ
diff --git a/audio_video_crossfade.py b/audio_video_crossfade.py
new file mode 100644 (file)
index 0000000..d046ca1
--- /dev/null
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+
+"""A short Audio-Video example"""
+import gobject
+gobject.threads_init()
+import gst
+import pygtk
+pygtk.require("2.0")
+import gtk
+import sys
+import os
+from audio_video import AVDemo, create_decodebin
+
class AVCrossfade(AVDemo):
    """Crossfade demo: plays two A/V files at once and mixes them.

    Video streams are blended through a videomixer (per-source alpha),
    audio streams are summed through an adder (per-source volume); two
    GTK sliders (crossfade, balance) drive the mix in real time.

    Derived classes need only override magic(), __name__,
    and __usage__ to create new demos."""

    __name__ = "AV Demo"
    __usage__ = "python audio_video.py <filename>"
    __def_win_size__ = (320, 240)

    # this comment allows us to include only a portion of the file
    # in the tutorial for this demo

    def onPad(self, decoder, pad, target):
        # Generic dynamic-pad handler: link the new decoder pad to
        # `target` if it offers a compatible pad, otherwise ignore it.
        tpad = target.get_compatible_pad(pad)
        if tpad:
            pad.link(tpad)

    def addVideoChain(self, pipeline, name, decoder, mixer):
        # decoder -> videorate -> videoscale -> colorspace -> queue
        #         -> alpha -> mixer
        alpha = gst.element_factory_make("alpha")
        alpha.props.alpha = 1.0
        videoscale = gst.element_factory_make("videoscale")
        videorate = gst.element_factory_make("videorate")
        colorspace = gst.element_factory_make("ffmpegcolorspace")
        queue = gst.element_factory_make("queue")

        pipeline.add(alpha, videoscale, videorate, colorspace, queue)
        decoder.connect("pad-added", self.onPad, videorate)
        videorate.link(videoscale)
        videoscale.link(colorspace)
        colorspace.link(queue)
        queue.link(alpha)
        alpha.link(mixer)

        # Expose the element as self.alphaA / self.alphaB for the sliders.
        setattr(self, "alpha%s" % name, alpha)

    def addAudioChain(self, pipeline, name, decoder, adder):
        # decoder -> audioconvert -> audioresample -> queue -> volume -> adder
        volume = gst.element_factory_make("volume")
        volume.props.volume = 0.5
        audioconvert = gst.element_factory_make("audioconvert")
        audiorate = gst.element_factory_make("audioresample")
        queue = gst.element_factory_make("queue")

        pipeline.add(volume, audioconvert, audiorate, queue)
        decoder.connect("pad-added", self.onPad, audioconvert)
        audioconvert.link(audiorate)
        audiorate.link(queue)
        queue.link(volume)
        volume.link(adder)

        # Expose the element as self.volA / self.volB for the sliders.
        setattr(self, "vol%s" % name, volume)

    def addSourceChain(self, pipeline, name, filename, mixer, adder):
        # One file source feeding both a video and an audio sub-chain.
        src = gst.element_factory_make("filesrc")
        src.props.location = filename
        dcd = create_decodebin()

        pipeline.add(src, dcd)
        src.link(dcd)
        self.addVideoChain(pipeline, name, dcd, mixer)
        self.addAudioChain(pipeline, name, dcd, adder)

    # NOTE: the tuple-unpacking parameter below is Python-2-only syntax.
    def magic(self, pipeline, (videosink, audiosink), args):
        """This is where the magic happens"""
        mixer = gst.element_factory_make("videomixer")
        adder = gst.element_factory_make("adder")
        pipeline.add(mixer, adder)

        mixer.link(videosink)
        adder.link(audiosink)
        # args[0]/args[1] are the two input files from the command line.
        self.addSourceChain(pipeline, "A", args[0], mixer, adder)
        self.addSourceChain(pipeline, "B", args[1], mixer, adder)
        self.alphaB.props.alpha = 0.5

    def onValueChanged(self, adjustment):
        # Recompute both volumes and B's opacity from the two sliders.
        balance = self.balance.get_value()
        crossfade = self.crossfade.get_value()
        self.volA.props.volume = (2 - balance) * (1 - crossfade)
        self.volB.props.volume = balance * crossfade
        self.alphaB.props.alpha = crossfade

    def customWidgets(self):
        # Build the crossfade/balance slider table shown under the video.
        self.crossfade = gtk.Adjustment(0.5, 0, 1.0)
        self.balance = gtk.Adjustment(1.0, 0.0, 2.0)
        crossfadeslider = gtk.HScale(self.crossfade)
        balanceslider = gtk.HScale(self.balance)
        self.crossfade.connect("value-changed", self.onValueChanged)
        self.balance.connect("value-changed", self.onValueChanged)

        ret = gtk.Table()
        ret.attach(gtk.Label("Crossfade"), 0, 1, 0, 1)
        ret.attach(crossfadeslider, 1, 2, 0, 1)
        ret.attach(gtk.Label("Balance"), 0, 1, 1, 2)
        ret.attach(balanceslider, 1, 2, 1, 2)
        return ret
+
# Entry point: run the crossfade demo when executed directly.
if __name__ == '__main__':
    demo = AVCrossfade()
    demo.run()
\ No newline at end of file
diff --git a/control_mixer.py b/control_mixer.py
new file mode 100644 (file)
index 0000000..d0fcb9e
--- /dev/null
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+
# Three nested videomixers: mix1 and mix2 each blend snow test patterns,
# and their outputs land on mix0 over a full-size test pattern.
p = gst.parse_launch("""videomixer name=mix0 ! ffmpegcolorspace ! xvimagesink
      videotestsrc ! video/x-raw-yuv, framerate=24/1, width=640, height=360 ! mix0.sink_0
      videomixer name=mix1 ! mix0.sink_1
      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix1.sink_0
      videomixer name=mix2 ! mix1.sink_1
      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix2.sink_0
      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix2.sink_1
""")

# Each mixer pad gets a fixed xpos plus an animated ypos/alpha ramp:
# over the first 5 seconds the overlay slides down to y=200 while
# fading in from transparent to opaque.
controllers = []
for mixer_name, pad_name, xpos in (
        ("mix1", "sink_0", 100),
        ("mix1", "sink_1", 250),
        ("mix2", "sink_0", 200),
        ("mix2", "sink_1", 250)):
    pad = p.get_by_name(mixer_name).get_pad(pad_name)
    pad.set_property("xpos", xpos)

    ctrl = gst.Controller(pad, "ypos", "alpha")
    ctrl.set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
    ctrl.set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
    ctrl.set("ypos", 0, 0)
    ctrl.set("ypos", 5 * gst.SECOND, 200)
    ctrl.set("alpha", 0, 0)
    ctrl.set("alpha", 5 * gst.SECOND, 1.0)
    # Keep a reference so the controllers are not garbage-collected.
    controllers.append(ctrl)

p.set_state(gst.STATE_PLAYING)

gobject.MainLoop().run()
diff --git a/control_mixer_osc.py b/control_mixer_osc.py
new file mode 100644 (file)
index 0000000..379985e
--- /dev/null
@@ -0,0 +1,108 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+from threading import Thread
+
+
class OSCController(Thread):
    """Background thread polling a liblo OSC server for messages."""

    def __init__(self, port):
        Thread.__init__(self)
        # Imported lazily so the module loads without pyliblo installed.
        import liblo
        self.port = port
        try:
            self.server = liblo.Server(self.port)
        except liblo.ServerError as err:
            # NOTE(review): on failure self.server stays unset, so later
            # add_method()/run() raise AttributeError -- confirm intended.
            print(str(err))

    def add_method(self, path, type, method):
        """Register `method` as the handler for OSC `path` (typespec `type`)."""
        self.server.add_method(path, type, method)

    def run(self):
        # Poll with a 100 ms timeout, forever; the thread never exits.
        while True:
            self.server.recv(100)
+
+
class GSTSrcVideo(object):
    """One video source branch: a gst-launch fragment capped to the given
    framerate/size, plus the mixer placement (xpos/ypos) used later."""

    def __init__(self, pipe=None, framerate='24/1', width=160, height=90, xpos=0, ypos=0):
        self.framerate = framerate
        self.width = width
        self.height = height
        self.xpos = xpos
        self.ypos = ypos
        # Default to a snow test pattern when no source fragment is given.
        source = pipe if pipe else 'videotestsrc pattern="snow"'
        caps = 'video/x-raw-yuv, framerate=%s, width=%s, height=%s' % (
            self.framerate, str(self.width), str(self.height))
        self.pipe = source + ' ! ' + caps
+
class GSTMixer(object):
    """Videomixer of several GSTSrcVideo branches, remote-controlled
    over OSC: /<sink>/xpos, /<sink>/ypos (ints) and /<sink>/alpha (float)
    each ramp the matching mixer-pad property over 5 seconds."""

    def __init__(self, osc_port=13000):
        self.name = 'mixer'
        # gst-launch fragments; setup() joins them into one pipeline string.
        self.pipe = ['videomixer name=mixer ! ffmpegcolorspace ! xvimagesink']
        self.srcs = []
        # Counter used to number the mixer sink pads (sink_0, sink_1, ...).
        self.i= 0

        self.osc_port = osc_port
        self.osc = OSCController(self.osc_port)

    def osc_callback(self, path, value):
        # path is '/<sink>/<param>'; value[0] is the new target value.
        paths = path.split('/')
        sink = paths[1]
        param = paths[2]
        for src in self.srcs:
            if src['sink'] == sink:
                break
        # NOTE(review): relies on the loop variable leaking after break;
        # an unknown sink silently targets the LAST src -- confirm intended.
        src['control'].set(param, 5 * gst.SECOND, value[0])

    def add_src(self, src):
        """Queue a GSTSrcVideo; its mixer pad will be sink_<n> in add order."""
        self.srcs.append({'id': self.i, 'src': src, 'sink': 'sink_' + str(self.i)})
        self.i += 1

    def setup(self):
        """Build and parse the pipeline, then attach OSC-driven controllers."""
        # Reversed so the first-added source ends up last in the launch
        # string (bottom layer of the mixer stack).
        self.srcs.reverse()

        for src in self.srcs:
            self.pipe.append(' '.join([src['src'].pipe, '! ' + self.name + '.' + src['sink']]))

        print ' '.join(self.pipe)
        self.process = gst.parse_launch(' '.join(self.pipe))
        mixer = self.process.get_by_name("mixer")

        # One gst.Controller per pad; initial xpos/ypos/alpha ramp in over 5 s.
        for src in self.srcs:
            src['pad'] = mixer.get_pad(src['sink'])
            src['control'] = gst.Controller(src['pad'], "xpos", "ypos", "alpha")

            src['control'].set_interpolation_mode("xpos", gst.INTERPOLATE_LINEAR)
            src['control'].set("xpos", 5 * gst.SECOND, src['src'].xpos)
            self.osc.add_method('/'+src['sink']+'/xpos', 'i', self.osc_callback)

            src['control'].set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
            src['control'].set("ypos", 5 * gst.SECOND, src['src'].ypos)
            self.osc.add_method('/'+src['sink']+'/ypos', 'i', self.osc_callback)

            src['control'].set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
            src['control'].set("alpha", 5 * gst.SECOND, 1.0)
            self.osc.add_method('/'+src['sink']+'/alpha', 'f', self.osc_callback)


    def run(self):
        """Start the OSC listener thread, play the pipeline, block on GLib."""
        self.osc.start()
        self.process.set_state(gst.STATE_PLAYING)
        gobject.MainLoop().run()
+
+
if __name__ == '__main__':
    # Bottom layer first: a full-size test pattern, then three small
    # snow overlays.  add_src order determines the sink_N numbering.
    sources = [
        GSTSrcVideo(width=640, height=360, framerate='24/1', pipe='videotestsrc'),
        GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=100, ypos=50),
        GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=200, ypos=150),
        GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=300, ypos=250),
    ]
    mixer = GSTMixer()
    for source in sources:
        mixer.add_src(source)
    mixer.setup()
    mixer.run()
diff --git a/control_mixer_osc_touch.py b/control_mixer_osc_touch.py
new file mode 100644 (file)
index 0000000..8c709d1
--- /dev/null
@@ -0,0 +1,125 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+from threading import Thread
+
+
class OSCController(Thread):
    """Background thread polling a liblo OSC server for messages."""

    def __init__(self, port):
        Thread.__init__(self)
        # Imported lazily so the module loads without pyliblo installed.
        import liblo
        self.port = port
        try:
            self.server = liblo.Server(self.port)
        except liblo.ServerError as err:
            # NOTE(review): on failure self.server stays unset, so later
            # add_method()/run() raise AttributeError -- confirm intended.
            print(str(err))

    def add_method(self, path, type, method):
        """Register `method` as the handler for OSC `path` (typespec `type`)."""
        self.server.add_method(path, type, method)

    def run(self):
        # Poll with a 100 ms timeout, forever; the thread never exits.
        while True:
            self.server.recv(100)
+
+
class GSTSrcVideo(object):
    """One video source branch: a gst-launch fragment capped to the given
    framerate/size, plus the mixer placement (xpos/ypos) used later."""

    def __init__(self, pipe=None, framerate='24/1', width=160, height=90, xpos=0, ypos=0):
        self.framerate = framerate
        self.width = width
        self.height = height
        self.xpos = xpos
        self.ypos = ypos
        # Default to a snow test pattern when no source fragment is given.
        source = pipe if pipe else 'videotestsrc pattern="snow"'
        caps = 'video/x-raw-yuv, framerate=%s, width=%s, height=%s' % (
            self.framerate, str(self.width), str(self.height))
        self.pipe = source + ' ! ' + caps
+
class GSTMixer(object):
    """Videomixer of several GSTSrcVideo branches driven by a TouchOSC
    layout: /1/faderN controls source N's alpha, /3/xy moves source id 2."""

    def __init__(self, osc_port=8338):
        self.name = 'mixer'
        # gst-launch fragments; setup() joins them into one pipeline string.
        self.pipe = ['videomixer name=mixer ! ffmpegcolorspace ! xvimagesink']
        self.srcs = []
        # Counter used to number the mixer sink pads (sink_0, sink_1, ...).
        self.i= 0

        self.osc_port = osc_port
        self.osc = OSCController(self.osc_port)

    def osc_callback(self, path, value):
        # path is '/<sink>/<param>'; value[0] is the new target value.
        paths = path.split('/')
        sink = paths[1]
        param = paths[2]
        for src in self.srcs:
            if src['sink'] == sink:
                break
        # NOTE(review): relies on the loop variable leaking after break;
        # an unknown sink silently targets the LAST src -- confirm intended.
        src['control'].set(param, 5 * gst.SECOND, value[0])

    def osc_alpha_callback(self, path, value):
        # path is '/1/faderN'; the trailing digit selects src id N-1.
        paths = path.split('/')
        layer = paths[1]
        param = paths[2]
        id = int(param[-1])-1
        for src in self.srcs:
            if src['id'] == id:
                break
        src['control'].set('alpha', 5 * gst.SECOND, value[0])

    def osc_xy_callback(self, path, value):
        # XY pad: moves the source with id 2, scaling the normalized
        # (0..1) pad coordinates to a 480x270 area.
        for src in self.srcs:
            if src['id'] == 2:
                break
        src['control'].set("xpos", 5 * gst.SECOND, int(value[0]*480))
        src['control'].set("ypos", 5 * gst.SECOND, int(value[1]*270))

    def add_src(self, src):
        """Queue a GSTSrcVideo; its mixer pad will be sink_<n> in add order."""
        self.srcs.append({'id': self.i, 'src': src, 'sink': 'sink_' + str(self.i)})
        self.i += 1

    def setup(self):
        """Build and parse the pipeline, then attach OSC-driven controllers."""
        # Reversed so the first-added source ends up last in the launch
        # string (bottom layer of the mixer stack).
        self.srcs.reverse()

        for src in self.srcs:
            self.pipe.append(' '.join([src['src'].pipe, '! ' + self.name + '.' + src['sink']]))

        print ' '.join(self.pipe)
        self.process = gst.parse_launch(' '.join(self.pipe))
        mixer = self.process.get_by_name("mixer")

        # One gst.Controller per pad; initial xpos/ypos/alpha ramp in over 5 s.
        for src in self.srcs:
            src['pad'] = mixer.get_pad(src['sink'])
            src['control'] = gst.Controller(src['pad'], "xpos", "ypos", "alpha")

            src['control'].set_interpolation_mode("xpos", gst.INTERPOLATE_LINEAR)
            src['control'].set("xpos", 5 * gst.SECOND, src['src'].xpos)

            src['control'].set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
            src['control'].set("ypos", 5 * gst.SECOND, src['src'].ypos)

            src['control'].set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
            src['control'].set("alpha", 5 * gst.SECOND, 1.0)

            self.osc.add_method('/1/fader'+str(src['id']+1), 'f', self.osc_alpha_callback)

        self.osc.add_method('/3/xy', 'ff', self.osc_xy_callback)

    def run(self):
        """Start the OSC listener thread, play the pipeline, block on GLib."""
        self.osc.start()
        self.process.set_state(gst.STATE_PLAYING)
        gobject.MainLoop().run()
+
+
if __name__ == '__main__':
    # Black background, moving test pattern, then two snow overlays.
    # add_src order determines the sink_N numbering and fader mapping.
    sources = [
        GSTSrcVideo(width=640, height=360, framerate='24/1', pipe='videotestsrc pattern="black" '),
        GSTSrcVideo(width=640, height=360, framerate='24/1', pipe='videotestsrc '),
        GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=200, ypos=150),
        GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=300, ypos=250),
    ]
    mixer = GSTMixer()
    for source in sources:
        mixer.add_src(source)
    mixer.setup()
    mixer.run()
diff --git a/control_mixer_osc_touch_1cam.py b/control_mixer_osc_touch_1cam.py
new file mode 100644 (file)
index 0000000..0bfcaa6
--- /dev/null
@@ -0,0 +1,133 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+from threading import Thread
+
+
class OSCController(Thread):
    """Background thread polling a liblo OSC server for messages."""

    def __init__(self, port):
        Thread.__init__(self)
        # Imported lazily so the module loads without pyliblo installed.
        import liblo
        self.port = port
        try:
            self.server = liblo.Server(self.port)
        except liblo.ServerError as err:
            # NOTE(review): on failure self.server stays unset, so later
            # add_method()/run() raise AttributeError -- confirm intended.
            print(str(err))

    def add_method(self, path, type, method):
        """Register `method` as the handler for OSC `path` (typespec `type`)."""
        self.server.add_method(path, type, method)

    def run(self):
        # Poll with a 100 ms timeout, forever; the thread never exits.
        while True:
            self.server.recv(100)
+
+
class GSTSrcVideo(object):
    """One video source branch: a gst-launch fragment capped to the given
    framerate/size, plus the mixer placement (xpos/ypos) used later."""

    def __init__(self, pipe=None, framerate='{30/1}', width=160, height=90, xpos=0, ypos=0):
        self.framerate = framerate
        self.width = width
        self.height = height
        self.xpos = xpos
        self.ypos = ypos
        # Default to a snow test pattern when no source fragment is given.
        source = pipe if pipe else 'videotestsrc pattern="snow"'
        caps = 'video/x-raw-yuv, framerate=%s, width=%s, height=%s' % (
            self.framerate, str(self.width), str(self.height))
        self.pipe = source + ' ! ' + caps
+
+
class GSTWebmHttpStreamer(object):
    """Placeholder for an HTTP WebM streamer; currently only records its
    transport settings and is never used by the pipeline."""

    def __init__(self, protocol='tcp', port=9000):
        self.protocol = protocol
        self.port = port
+
class GSTMixer(object):
    """Videomixer of several GSTSrcVideo branches driven by a TouchOSC
    layout: /1/faderN controls source N's alpha, /3/xy moves source id 2.
    This variant inserts queues and an unsynchronized xvimagesink to
    favour live (camera) input."""

    def __init__(self, osc_port=8338):
        self.name = 'mixer'
        # gst-launch fragments; setup() joins them into one pipeline string.
        self.pipe = ['videomixer name=mixer ! queue ! ffmpegcolorspace ! xvimagesink sync=false']
        self.srcs = []
        # Counter used to number the mixer sink pads (sink_0, sink_1, ...).
        self.i= 0

        self.osc_port = osc_port
        self.osc = OSCController(self.osc_port)

    def osc_callback(self, path, value):
        # path is '/<sink>/<param>'; value[0] is the new target value.
        paths = path.split('/')
        sink = paths[1]
        param = paths[2]
        for src in self.srcs:
            if src['sink'] == sink:
                break
        # NOTE(review): relies on the loop variable leaking after break;
        # an unknown sink silently targets the LAST src -- confirm intended.
        src['control'].set(param, 5 * gst.SECOND, value[0])

    def osc_alpha_callback(self, path, value):
        # path is '/1/faderN'; the trailing digit selects src id N-1.
        paths = path.split('/')
        layer = paths[1]
        param = paths[2]
        id = int(param[-1])-1
        for src in self.srcs:
            if src['id'] == id:
                break
        src['control'].set('alpha', 5 * gst.SECOND, value[0])

    def osc_xy_callback(self, path, value):
        # XY pad: moves the source with id 2, scaling the normalized
        # (0..1) pad coordinates to a 480x270 area.
        for src in self.srcs:
            if src['id'] == 2:
                break
        src['control'].set("xpos", 5 * gst.SECOND, int(value[0]*480))
        src['control'].set("ypos", 5 * gst.SECOND, int(value[1]*270))

    def add_src(self, src):
        """Queue a GSTSrcVideo; its mixer pad will be sink_<n> in add order."""
        self.srcs.append({'id': self.i, 'src': src, 'sink': 'sink_' + str(self.i)})
        self.i += 1

    def setup(self):
        """Build and parse the pipeline, then attach OSC-driven controllers."""
        # Reversed so the first-added source ends up last in the launch
        # string (bottom layer of the mixer stack).
        self.srcs.reverse()

        for src in self.srcs:
            self.pipe.append(' '.join([src['src'].pipe, '! queue ! ' + self.name + '.' + src['sink']]))

        print ' '.join(self.pipe)
        self.process = gst.parse_launch(' '.join(self.pipe))
        mixer = self.process.get_by_name("mixer")

        # One gst.Controller per pad; initial xpos/ypos/alpha ramp in over 5 s.
        for src in self.srcs:
            src['pad'] = mixer.get_pad(src['sink'])
            src['control'] = gst.Controller(src['pad'], "xpos", "ypos", "alpha")

            src['control'].set_interpolation_mode("xpos", gst.INTERPOLATE_LINEAR)
            src['control'].set("xpos", 5 * gst.SECOND, src['src'].xpos)

            src['control'].set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
            src['control'].set("ypos", 5 * gst.SECOND, src['src'].ypos)

            src['control'].set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
            src['control'].set("alpha", 5 * gst.SECOND, 1.0)

            self.osc.add_method('/1/fader'+str(src['id']+1), 'f', self.osc_alpha_callback)

        self.osc.add_method('/3/xy', 'ff', self.osc_xy_callback)

    def run(self):
        """Start the OSC listener thread, play the pipeline, block on GLib."""
        self.osc.start()
        self.process.set_state(gst.STATE_PLAYING)
        gobject.MainLoop().run()
+
+
if __name__ == '__main__':
    # Black background, test pattern, one V4L2 webcam and a snow overlay.
    # add_src order determines the sink_N numbering and fader mapping.
    sources = [
        GSTSrcVideo(width=800, height=600, pipe='videotestsrc pattern="black"'),
        GSTSrcVideo(width=800, height=600, pipe='videotestsrc '),
        GSTSrcVideo(width=640, height=480, xpos=200, ypos=150, pipe='v4l2src device=/dev/video0'),
        GSTSrcVideo(width=160, height=90, xpos=300, ypos=250),
    ]
    mixer = GSTMixer()
    for source in sources:
        mixer.add_src(source)
    mixer.setup()
    mixer.run()
diff --git a/control_mixer_osc_touch_2cam.py b/control_mixer_osc_touch_2cam.py
new file mode 100644 (file)
index 0000000..344f7d9
--- /dev/null
@@ -0,0 +1,138 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+from threading import Thread
+
+
class OSCController(Thread):
    """Background thread polling a liblo OSC server for messages."""

    def __init__(self, port):
        Thread.__init__(self)
        # Imported lazily so the module loads without pyliblo installed.
        import liblo
        self.port = port
        try:
            self.server = liblo.Server(self.port)
        except liblo.ServerError as err:
            # NOTE(review): on failure self.server stays unset, so later
            # add_method()/run() raise AttributeError -- confirm intended.
            print(str(err))

    def add_method(self, path, type, method):
        """Register `method` as the handler for OSC `path` (typespec `type`)."""
        self.server.add_method(path, type, method)

    def run(self):
        # Poll with a 100 ms timeout, forever; the thread never exits.
        while True:
            self.server.recv(100)
+
+
class GSTSrcVideo(object):
    """One video source branch: a gst-launch fragment capped to the given
    framerate/size, mixer placement (xpos/ypos), and an optional extra
    option string appended to the queue feeding the mixer."""

    def __init__(self, pipe=None, framerate='{30/1}', width=160, height=90, xpos=0, ypos=0, queue_option=''):
        self.framerate = framerate
        self.width = width
        self.height = height
        self.xpos = xpos
        self.ypos = ypos
        self.queue_option = queue_option

        # Default to a snow test pattern when no source fragment is given.
        source = pipe if pipe else 'videotestsrc pattern="snow"'
        caps = 'video/x-raw-yuv, framerate=%s, width=%s, height=%s' % (
            self.framerate, str(self.width), str(self.height))
        self.pipe = source + ' ! ' + caps
+
class GSTWebmHttpStreamer(object):
    """Placeholder for an HTTP WebM streamer; currently only records its
    transport settings and is never used by the pipeline."""

    def __init__(self, protocol='tcp', port=9000):
        self.protocol = protocol
        self.port = port
+
+
class GSTMixer(object):
    """Videomixer of several GSTSrcVideo branches driven by a TouchOSC
    layout: /1/faderN controls source N's alpha, /3/xy moves source id 2.
    This variant honours each source's queue_option (e.g. leaky queues
    for live cameras) when building the launch string."""

    def __init__(self, osc_port=8338):
        self.name = 'mixer'
        # gst-launch fragments; setup() joins them into one pipeline string.
        self.pipe = ['videomixer name=mixer ! queue ! ffmpegcolorspace ! xvimagesink sync=false']
        self.srcs = []
        # Counter used to number the mixer sink pads (sink_0, sink_1, ...).
        self.i= 0

        self.osc_port = osc_port
        self.osc = OSCController(self.osc_port)

    def osc_callback(self, path, value):
        # path is '/<sink>/<param>'; value[0] is the new target value.
        paths = path.split('/')
        sink = paths[1]
        param = paths[2]
        for src in self.srcs:
            if src['sink'] == sink:
                break
        # NOTE(review): relies on the loop variable leaking after break;
        # an unknown sink silently targets the LAST src -- confirm intended.
        src['control'].set(param, 5 * gst.SECOND, value[0])

    def osc_alpha_callback(self, path, value):
        # path is '/1/faderN'; the trailing digit selects src id N-1.
        paths = path.split('/')
        layer = paths[1]
        param = paths[2]
        id = int(param[-1])-1
        for src in self.srcs:
            if src['id'] == id:
                break
        src['control'].set('alpha', 5 * gst.SECOND, value[0])

    def osc_xy_callback(self, path, value):
        # XY pad: moves the source with id 2, scaling the normalized
        # (0..1) pad coordinates to a 480x270 area.
        for src in self.srcs:
            if src['id'] == 2:
                break
        src['control'].set("xpos", 5 * gst.SECOND, int(value[0]*480))
        src['control'].set("ypos", 5 * gst.SECOND, int(value[1]*270))

    def add_src(self, src):
        """Queue a GSTSrcVideo; its mixer pad will be sink_<n> in add order."""
        self.srcs.append({'id': self.i, 'src': src, 'sink': 'sink_' + str(self.i)})
        self.i += 1

    def setup(self):
        """Build and parse the pipeline, then attach OSC-driven controllers."""
        # Reversed so the first-added source ends up last in the launch
        # string (bottom layer of the mixer stack).
        self.srcs.reverse()

        for src in self.srcs:
            # Append per-source queue options (e.g. leaky=upstream) when set.
            queue = 'queue'
            if src['src'].queue_option:
                # queue = 'timeoverlay ! queue'
                queue += ' ' + src['src'].queue_option
            self.pipe.append(' '.join([src['src'].pipe, '! ' + queue +  ' ! ' + self.name + '.' + src['sink']]))

        print ' '.join(self.pipe)
        self.process = gst.parse_launch(' '.join(self.pipe))
        mixer = self.process.get_by_name("mixer")

        # One gst.Controller per pad; initial xpos/ypos/alpha ramp in over 5 s.
        for src in self.srcs:
            src['pad'] = mixer.get_pad(src['sink'])
            src['control'] = gst.Controller(src['pad'], "xpos", "ypos", "alpha")

            src['control'].set_interpolation_mode("xpos", gst.INTERPOLATE_LINEAR)
            src['control'].set("xpos", 5 * gst.SECOND, src['src'].xpos)

            src['control'].set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
            src['control'].set("ypos", 5 * gst.SECOND, src['src'].ypos)

            src['control'].set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
            src['control'].set("alpha", 5 * gst.SECOND, 1.0)

            self.osc.add_method('/1/fader'+str(src['id']+1), 'f', self.osc_alpha_callback)

        self.osc.add_method('/3/xy', 'ff', self.osc_xy_callback)

    def run(self):
        """Start the OSC listener thread, play the pipeline, block on GLib."""
        self.osc.start()
        self.process.set_state(gst.STATE_PLAYING)
        gobject.MainLoop().run()
+
+
if __name__ == '__main__':
    # Black background, IP camera (MJPEG over HTTP), leaky-queued USB
    # camera, then a test pattern.  add_src order fixes sink_N numbering.
    background = GSTSrcVideo(width=640, height=480, pipe='videotestsrc pattern="black"')
    ip_cam = GSTSrcVideo(width=640, height=480, xpos=0, ypos=0, pipe='souphttpsrc location=http://192.168.0.15:8080/videofeed do-timestamp=true ! jpegdec ! queue ! ffmpegcolorspace ! videorate')
    usb_cam = GSTSrcVideo(width=640, height=480, xpos=0, ypos=0, pipe='v4l2src device=/dev/video0 do-timestamp=true', queue_option='leaky=upstream min-threshold-time=10000000000')
    pattern = GSTSrcVideo(width=640, height=480, pipe='videotestsrc ')
    mixer = GSTMixer()
    for source in (background, ip_cam, usb_cam, pattern):
        mixer.add_src(source)
    mixer.setup()
    mixer.run()
diff --git a/control_mixer_osc_touch_3cams.py b/control_mixer_osc_touch_3cams.py
new file mode 100644 (file)
index 0000000..ab60d18
--- /dev/null
@@ -0,0 +1,133 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+from threading import Thread
+
+
+class OSCController(Thread):
+
+    def __init__(self, port):
+        Thread.__init__(self)
+        import liblo
+        self.port = port
+        try:
+            self.server = liblo.Server(self.port)
+        except liblo.ServerError, err:
+            print str(err)
+
+    def add_method(self, path, type, method):
+        self.server.add_method(path, type, method)
+
+    def run(self):
+        while True:
+            self.server.recv(100)
+
+
+class GSTSrcVideo(object):
+
+    def __init__(self, pipe=None, framerate='24/1', width=160, height=90, xpos=0, ypos=0):
+        self.framerate = framerate
+        self.width = width
+        self.height = height
+        self.xpos = xpos
+        self.ypos = ypos
+        if not pipe:
+            pipe = 'videotestsrc pattern="snow"'
+        self.pipe = pipe + ' ! video/x-raw-yuv, framerate=%s, width=%s, height=%s' \
+                        % (self.framerate, str(self.width), str(self.height))
+
+
+class GSTWebmHttpStreamer(object):
+
+    def __init__(self, protocol='tcp', port=9000):
+        self.protocol = protocol
+        self.port = port
+
+
+class GSTMixer(object):
+
+    def __init__(self, osc_port=8338):
+        self.name = 'mixer'
+        self.pipe = ['videomixer name=mixer ! ffmpegcolorspace ! xvimagesink']
+        self.srcs = []
+        self.i= 0
+
+        self.osc_port = osc_port
+        self.osc = OSCController(self.osc_port)
+
+    def osc_callback(self, path, value):
+        paths = path.split('/')
+        sink = paths[1]
+        param = paths[2]
+        for src in self.srcs:
+            if src['sink'] == sink:
+                break
+        src['control'].set(param, 5 * gst.SECOND, value[0])
+
+    def osc_alpha_callback(self, path, value):
+        paths = path.split('/')
+        layer = paths[1]
+        param = paths[2]
+        id = int(param[-1])-1
+        for src in self.srcs:
+            if src['id'] == id:
+                break
+        src['control'].set('alpha', 5 * gst.SECOND, value[0])
+
+    def osc_xy_callback(self, path, value):
+        for src in self.srcs:
+            if src['id'] == 2:
+                break
+        src['control'].set("xpos", 5 * gst.SECOND, int(value[0]*480))
+        src['control'].set("ypos", 5 * gst.SECOND, int(value[1]*270))
+
+    def add_src(self, src):
+        self.srcs.append({'id': self.i, 'src': src, 'sink': 'sink_' + str(self.i)})
+        self.i += 1
+
+    def setup(self):
+        self.srcs.reverse()
+
+        for src in self.srcs:
+            self.pipe.append(' '.join([src['src'].pipe, '! ' + self.name + '.' + src['sink']]))
+
+        print ' '.join(self.pipe)
+        self.process = gst.parse_launch(' '.join(self.pipe))
+        mixer = self.process.get_by_name("mixer")
+
+        for src in self.srcs:
+            src['pad'] = mixer.get_pad(src['sink'])
+            src['control'] = gst.Controller(src['pad'], "xpos", "ypos", "alpha")
+
+            src['control'].set_interpolation_mode("xpos", gst.INTERPOLATE_LINEAR)
+            src['control'].set("xpos", 5 * gst.SECOND, src['src'].xpos)
+
+            src['control'].set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
+            src['control'].set("ypos", 5 * gst.SECOND, src['src'].ypos)
+
+            src['control'].set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
+            src['control'].set("alpha", 5 * gst.SECOND, 1.0)
+
+            self.osc.add_method('/1/fader'+str(src['id']+1), 'f', self.osc_alpha_callback)
+
+        self.osc.add_method('/3/xy', 'ff', self.osc_xy_callback)
+
+    def run(self):
+        self.osc.start()
+        self.process.set_state(gst.STATE_PLAYING)
+        gobject.MainLoop().run()
+
+
+if __name__ == '__main__':
+    src1 = GSTSrcVideo(width=640, height=360, framerate='24/1', pipe='videotestsrc pattern="black" ')
+    src2 = GSTSrcVideo(width=640, height=360, framerate='24/1', pipe='videotestsrc ')
+    src3 = GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=200, ypos=150)
+    src4 = GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=300, ypos=250)
+    mixer = GSTMixer()
+    mixer.add_src(src1)
+    mixer.add_src(src2)
+    mixer.add_src(src3)
+    mixer.add_src(src4)
+    mixer.setup()
+    mixer.run()
diff --git a/control_mixer_parallel.py b/control_mixer_parallel.py
new file mode 100644 (file)
index 0000000..379985e
--- /dev/null
@@ -0,0 +1,108 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+from threading import Thread
+
+
+class OSCController(Thread):
+
+    def __init__(self, port):
+        Thread.__init__(self)
+        import liblo
+        self.port = port
+        try:
+            self.server = liblo.Server(self.port)
+        except liblo.ServerError, err:
+            print str(err)
+
+    def add_method(self, path, type, method):
+        self.server.add_method(path, type, method)
+
+    def run(self):
+        while True:
+            self.server.recv(100)
+
+
+class GSTSrcVideo(object):
+
+    def __init__(self, pipe=None, framerate='24/1', width=160, height=90, xpos=0, ypos=0):
+        self.framerate = framerate
+        self.width = width
+        self.height = height
+        self.xpos = xpos
+        self.ypos = ypos
+        if not pipe:
+            pipe = 'videotestsrc pattern="snow"'
+        self.pipe = pipe + ' ! video/x-raw-yuv, framerate=%s, width=%s, height=%s' \
+                        % (self.framerate, str(self.width), str(self.height))
+
+class GSTMixer(object):
+
+    def __init__(self, osc_port=13000):
+        self.name = 'mixer'
+        self.pipe = ['videomixer name=mixer ! ffmpegcolorspace ! xvimagesink']
+        self.srcs = []
+        self.i= 0
+
+        self.osc_port = osc_port
+        self.osc = OSCController(self.osc_port)
+
+    def osc_callback(self, path, value):
+        paths = path.split('/')
+        sink = paths[1]
+        param = paths[2]
+        for src in self.srcs:
+            if src['sink'] == sink:
+                break
+        src['control'].set(param, 5 * gst.SECOND, value[0])
+
+    def add_src(self, src):
+        self.srcs.append({'id': self.i, 'src': src, 'sink': 'sink_' + str(self.i)})
+        self.i += 1
+
+    def setup(self):
+        self.srcs.reverse()
+
+        for src in self.srcs:
+            self.pipe.append(' '.join([src['src'].pipe, '! ' + self.name + '.' + src['sink']]))
+
+        print ' '.join(self.pipe)
+        self.process = gst.parse_launch(' '.join(self.pipe))
+        mixer = self.process.get_by_name("mixer")
+
+        for src in self.srcs:
+            src['pad'] = mixer.get_pad(src['sink'])
+            src['control'] = gst.Controller(src['pad'], "xpos", "ypos", "alpha")
+
+            src['control'].set_interpolation_mode("xpos", gst.INTERPOLATE_LINEAR)
+            src['control'].set("xpos", 5 * gst.SECOND, src['src'].xpos)
+            self.osc.add_method('/'+src['sink']+'/xpos', 'i', self.osc_callback)
+
+            src['control'].set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
+            src['control'].set("ypos", 5 * gst.SECOND, src['src'].ypos)
+            self.osc.add_method('/'+src['sink']+'/ypos', 'i', self.osc_callback)
+
+            src['control'].set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
+            src['control'].set("alpha", 5 * gst.SECOND, 1.0)
+            self.osc.add_method('/'+src['sink']+'/alpha', 'f', self.osc_callback)
+
+
+    def run(self):
+        self.osc.start()
+        self.process.set_state(gst.STATE_PLAYING)
+        gobject.MainLoop().run()
+
+
+if __name__ == '__main__':
+    src1 = GSTSrcVideo(width=640, height=360, framerate='24/1', pipe='videotestsrc')
+    src2 = GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=100, ypos=50)
+    src3 = GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=200, ypos=150)
+    src4 = GSTSrcVideo(width=160, height=90, framerate='24/1', xpos=300, ypos=250)
+    mixer = GSTMixer()
+    mixer.add_src(src1)
+    mixer.add_src(src2)
+    mixer.add_src(src3)
+    mixer.add_src(src4)
+    mixer.setup()
+    mixer.run()
diff --git a/control_mixer_parallel_no_effects.py b/control_mixer_parallel_no_effects.py
new file mode 100644 (file)
index 0000000..771b229
--- /dev/null
@@ -0,0 +1,35 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+
+p = gst.parse_launch ("""videomixer name=mix0 ! ffmpegcolorspace ! xvimagesink
+      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix0.sink_3
+      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix0.sink_2
+      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix0.sink_1
+      videotestsrc ! video/x-raw-yuv, framerate=24/1, width=640, height=360 ! mix0.sink_0
+""")
+
+m1 = p.get_by_name("mix0")
+
+s1_1 = m1.get_pad("sink_1")
+c1_1 = gst.Controller(s1_1, "xpos", "ypos", "alpha")
+c1_1.set("xpos", 0, 0)
+c1_1.set("ypos", 0, 0)
+c1_1.set("alpha", 0, 1.0)
+
+s1_2 = m1.get_pad("sink_2")
+c1_2 = gst.Controller(s1_2, "xpos", "ypos", "alpha")
+c1_2.set("xpos", 0, 200)
+c1_2.set("ypos", 0, 200)
+c1_2.set("alpha", 0, 1.0)
+
+s1_3 = m1.get_pad("sink_3")
+c1_3 = gst.Controller(s1_3, "xpos", "ypos", "alpha")
+c1_3.set("xpos", 0, 400)
+c1_3.set("ypos", 0, 0)
+c1_3.set("alpha", 0, 1.0)
+
+p.set_state(gst.STATE_PLAYING)
+
+gobject.MainLoop().run()
diff --git a/control_mixer_pipes.py b/control_mixer_pipes.py
new file mode 100644 (file)
index 0000000..d0fcb9e
--- /dev/null
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+
+p = gst.parse_launch ("""videomixer name=mix0 ! ffmpegcolorspace ! xvimagesink
+      videotestsrc ! video/x-raw-yuv, framerate=24/1, width=640, height=360 ! mix0.sink_0
+      videomixer name=mix1 ! mix0.sink_1
+      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix1.sink_0
+      videomixer name=mix2 ! mix1.sink_1
+      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix2.sink_0
+      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=24/1, width=200, height=150 ! mix2.sink_1
+""")
+
+m1 = p.get_by_name ("mix1")
+s1_0 = m1.get_pad ("sink_0")
+s1_0.set_property ("xpos", 100)
+s1_1 = m1.get_pad ("sink_1")
+s1_1.set_property ("xpos", 250)
+
+m2 = p.get_by_name ("mix2")
+s2_0 = m2.get_pad ("sink_0")
+s2_0.set_property ("xpos", 200)
+s2_1 = m2.get_pad ("sink_1")
+s2_1.set_property ("xpos", 250)
+
+c1_0 = gst.Controller(s1_0, "ypos", "alpha")
+c1_0.set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
+c1_0.set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
+c1_0.set("ypos", 0, 0)
+c1_0.set("ypos", 5 * gst.SECOND, 200)
+c1_0.set("alpha", 0, 0)
+c1_0.set("alpha", 5 * gst.SECOND, 1.0)
+
+c1_1 = gst.Controller(s1_1, "ypos", "alpha")
+c1_1.set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
+c1_1.set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
+c1_1.set("ypos", 0, 0)
+c1_1.set("ypos", 5 * gst.SECOND, 200)
+c1_1.set("alpha", 0, 0)
+c1_1.set("alpha", 5 * gst.SECOND, 1.0)
+
+c2_0 = gst.Controller(s2_0, "ypos", "alpha")
+c2_0.set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
+c2_0.set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
+c2_0.set("ypos", 0, 0)
+c2_0.set("ypos", 5 * gst.SECOND, 200)
+c2_0.set("alpha", 0, 0)
+c2_0.set("alpha", 5 * gst.SECOND, 1.0)
+
+c2_1 = gst.Controller(s2_1, "ypos", "alpha")
+c2_1.set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
+c2_1.set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
+c2_1.set("ypos", 0, 0)
+c2_1.set("ypos", 5 * gst.SECOND, 200)
+c2_1.set("alpha", 0, 0)
+c2_1.set("alpha", 5 * gst.SECOND, 1.0)
+
+p.set_state (gst.STATE_PLAYING)
+
+gobject.MainLoop().run()
diff --git a/cross-fade.py b/cross-fade.py
new file mode 100644 (file)
index 0000000..fb6a92a
--- /dev/null
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+"""Extends basic demo with a gnl composition"""
+import gobject
+gobject.threads_init()
+from demo import Demo, DemoException
+import gtk
+import gst
+import sys
+import os
+
+def create_decodebin():
+    try:
+        return gst.element_factory_make("decodebin2")
+    except:
+        return gst.element_factory_make("decodebin")
+
+class SimpleCrossfadeDemo(Demo):
+    __name__ = "Demo of crosfade  without using gnonlin"
+    __usage__ = '''python %s sourceA sourceB
+    live crossfading between two sources''' % sys.argv[0]
+    __def_size__ = (320, 420)
+
+    def magic(self, pipeline, sink, args):
+
+        def onPad(obj, pad, target):
+            sinkpad = target.get_compatible_pad(pad, pad.get_caps())
+            if sinkpad:
+                pad.link(sinkpad)
+            return True
+
+        assert len(sys.argv) == 3
+        assert os.path.exists(sys.argv[-1])
+        assert os.path.exists(sys.argv[-2])
+
+        # <excerpt 1>
+        src = gst.element_factory_make("filesrc")
+        src.set_property("location", sys.argv[-1])
+
+        srcAdecode = create_decodebin()
+        srcAconvert = gst.element_factory_make("ffmpegcolorspace")
+        srcAalpha = gst.element_factory_make("alpha")
+        srcAalpha.set_property("alpha", 1.0)
+
+        srcB = gst.element_factory_make("filesrc")
+        srcB.set_property("location", sys.argv[-2])
+        srcBdecode = create_decodebin()
+        srcBconvert = gst.element_factory_make("ffmpegcolorspace")
+        srcBalpha = gst.element_factory_make("alpha")
+        srcBalpha.set_property("alpha", 0.5)
+
+        mixer = gst.element_factory_make("videomixer")
+        mixer.set_property("background", "black")
+        # </excerpt>
+
+        # <excerpt 2>
+        pipeline.add(mixer)
+
+        pipeline.add(src, srcAdecode, srcAconvert, srcAalpha)
+        src.link(srcAdecode)
+        srcAdecode.connect("pad-added", onPad, srcAconvert)
+        srcAconvert.link(srcAalpha)
+        srcAalpha.link(mixer)
+
+        pipeline.add(srcB, srcBdecode, srcBconvert, srcBalpha)
+        srcB.link(srcBdecode)
+        srcBdecode.connect("pad-added", onPad, srcBconvert)
+        srcBconvert.link(srcBalpha)
+        srcBalpha.link(mixer)
+
+        mixer.link(sink)
+
+        # remember the alpha elements
+        self.srcBalpha = srcBalpha
+        # </excerpt>
+
+
+    # overriding from parent
+    def customWidgets(self):
+        """Create a control for each property in the videobalance
+        widget"""
+
+        # <excerpt 3>
+        # called when a property value needs to change
+        def onValueChanged(widget):
+            if self.srcBalpha:
+                self.srcBalpha.set_property("alpha", widget.get_value())
+        # </excerpt>
+
+        lower = 0
+        upper = 1
+        default = 0.5
+
+        # create a place to hold our controls
+        controls = gtk.VBox()
+        labels = gtk.VBox()
+
+        widget = gtk.HScale(); label = gtk.Label("Crossfade")
+
+        # set appropriate attributes
+        widget.set_update_policy(gtk.UPDATE_CONTINUOUS)
+        widget.set_draw_value(True)
+        widget.set_range(lower, upper)
+        widget.set_value(default)
+
+        # connect to our signal handler, specifying the property
+        # to adjust
+        widget.connect("value-changed", onValueChanged)
+
+        # pack widget into box
+        controls.pack_start(widget, True, True)
+        labels.pack_start(label, True, False)
+
+        layout = gtk.HBox()
+        layout.pack_start(labels, False, False)
+        layout.pack_end(controls, True, True)
+        return layout
+
+if __name__ == '__main__':
+    SimpleCrossfadeDemo().run()
\ No newline at end of file
diff --git a/cross-fade_2.py b/cross-fade_2.py
new file mode 100644 (file)
index 0000000..c1fd340
--- /dev/null
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+import gobject; gobject.threads_init()
+import pygst; pygst.require("0.10")
+import gst
+
+p = gst.parse_launch ("""videomixer name=mix ! ffmpegcolorspace ! xvimagesink
+      videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=10/1, width=200, height=150 ! mix.sink_0
+      videotestsrc ! video/x-raw-yuv, framerate=10/1, width=640, height=360 ! mix.sink_1
+""")
+
+m = p.get_by_name ("mix")
+s0 = m.get_pad ("sink_0")
+s0.set_property ("xpos", 100)
+
+control = gst.Controller(s0, "ypos", "alpha")
+control.set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR)
+control.set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR)
+control.set("ypos", 0, 0); control.set("ypos", 5 * gst.SECOND, 200)
+control.set("alpha", 0, 0); control.set("alpha", 5 * gst.SECOND, 1.0)
+
+p.set_state (gst.STATE_PLAYING)
+
+gobject.MainLoop().run()
\ No newline at end of file
diff --git a/demo.py b/demo.py
new file mode 100644 (file)
index 0000000..51e95f3
--- /dev/null
+++ b/demo.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+
+"""Basic Framework for writing GStreamer Demos in Python"""
+#<excerpt 2>
+import gobject
+gobject.threads_init()
+import gst
+#</excerpt>
+import pygtk
+pygtk.require("2.0")
+import gtk
+gtk.gdk.threads_init()
+import sys
+import os
+
+
+class DemoException(Exception):
+    """Base exception class for errors which occur during demos"""
+
+    def __init__(self, reason):
+        self.reason = reason
+
+class Demo:
+    """Base class implementing boring, boiler-plate code.
+    Sets up a basic gstreamer environment which includes:
+
+    * a window containing a drawing area and basic media controls
+    * a basic gstreamer pipeline using an ximagesink
+    * connects the ximagesink to the window's drawing area
+
+    Derived classes need only override magic(), __name__,
+    and __usage__ to create new demos."""
+
+    __name__ = "Basic Demo"
+    __usage__ = "python demo.py -- runs a simple test demo"
+    __def_win_size__ = (320, 240)
+
+    # this comment allows us to include only a portion of the file
+    # in the tutorial for this demo
+    # <excerpt 1>     ...
+
+    def magic(self, pipeline, sink, args):
+        """This is where the magic happens"""
+        src = gst.element_factory_make("videotestsrc", "src")
+        pipeline.add(src)
+        src.link(sink)
+
+
+    def createPipeline(self, w):
+        """Given a window, creates a pipeline and connects it to the window"""
+
+        # code will make the ximagesink output in the specified window
+        def set_xid(window):
+            gtk.gdk.threads_enter()
+            sink.set_xwindow_id(window.window.xid)
+            sink.expose()
+            gtk.gdk.threads_leave()
+
+        # this code receives the messages from the pipeline. if we
+        # need to set X11 id, then we call set_xid
+        def bus_handler(unused_bus, message):
+            if message.type == gst.MESSAGE_ELEMENT:
+                if message.structure.get_name() == 'prepare-xwindow-id':
+                    set_xid(w)
+            return gst.BUS_PASS
+
+        # create our pipeline, and connect our bus_handler
+        self.pipeline = gst.Pipeline()
+        bus = self.pipeline.get_bus()
+        bus.set_sync_handler(bus_handler)
+
+        sink = gst.element_factory_make("ximagesink", "sink")
+        sink.set_property("force-aspect-ratio", True)
+        sink.set_property("handle-expose", True)
+        scale = gst.element_factory_make("videoscale", "scale")
+        cspace = gst.element_factory_make("ffmpegcolorspace", "cspace")
+
+        # our pipeline looks like this: ... ! cspace ! scale ! sink
+        self.pipeline.add(cspace, scale, sink)
+        scale.link(sink)
+        cspace.link(scale)
+        return (self.pipeline, cspace)
+
+    # ... end of excerpt </excerpt>
+
+    # subclasses can override this method to provide custom controls
+    def customWidgets(self):
+        return gtk.HBox()
+
+    def createWindow(self):
+        """Creates a top-level window, sets various boring attributes,
+        creates a place to put the video sink, adds some controls and finally
+        connects some basic signal handlers. Really, really boring.
+        """
+
+        # create window, set basic attributes
+        w = gtk.Window()
+        w.set_size_request(*self.__def_win_size__)
+        w.set_title("Gstreamer " + self.__name__)
+        w.connect("destroy", gtk.main_quit)
+
+        # declare buttons and their associated handlers
+        controls = (
+            ("play_button", gtk.ToolButton(gtk.STOCK_MEDIA_PLAY), self.onPlay),
+            ("stop_button", gtk.ToolButton(gtk.STOCK_MEDIA_STOP), self.onStop),
+            ("quit_button", gtk.ToolButton(gtk.STOCK_QUIT), gtk.main_quit)
+        )
+
+        # as well as the container in which to put them
+        box = gtk.HButtonBox()
+
+        # for every widget, connect to its clicked signal and add it
+        # to the enclosing box
+        for name, widget, handler in controls:
+            widget.connect("clicked", handler)
+            box.pack_start(widget, True)
+            setattr(self, name, widget)
+
+        viewer = gtk.DrawingArea()
+        viewer.modify_bg(gtk.STATE_NORMAL, viewer.style.black)
+
+        # we will need this later
+        self.xid = None
+
+        # now finally do the top-level layout for the window
+        layout = gtk.VBox(False)
+        layout.pack_start(viewer)
+
+        # subclasses can override childWidgets() to supply
+        # custom controls
+        layout.pack_start(self.customWidgets(), False, False)
+        layout.pack_end(box, False, False)
+        w.add(layout)
+        w.show_all()
+
+        # we want to return only the portion of the window which will
+        # be used to display the video, not the whole top-level
+        # window. a DrawingArea widget is, in fact, an X11 window.
+        return viewer
+
+    def onPlay(self, unused_button):
+        self.pipeline.set_state(gst.STATE_PLAYING)
+
+    def onStop(self, unused_button):
+        self.pipeline.set_state(gst.STATE_READY)
+
+    def run(self):
+        w = self.createWindow()
+        p, s = self.createPipeline(w)
+        try:
+            self.magic(p, s, sys.argv[1:])
+            gtk.main()
+        except DemoException, e:
+            print e.reason
+            print self.__usage__
+            sys.exit(-1)
+
+# if this file is being run directly, create the demo and run it
+if __name__ == '__main__':
+    Demo().run()
\ No newline at end of file
diff --git a/demo.pyc b/demo.pyc
new file mode 100644 (file)
index 0000000..acdd70c
Binary files /dev/null and b/demo.pyc differ
diff --git a/gtk_sink_pad.py b/gtk_sink_pad.py
new file mode 100644 (file)
index 0000000..0b0c53f
--- /dev/null
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+import sys, os
+import pygtk, gtk, gobject
+import pygst
+pygst.require("0.10")
+import gst
+
+class GTK_Main:
+
+       def __init__(self):
+               window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+               window.set_title("Vorbis-Player")
+               window.set_default_size(500, 200)
+               window.connect("destroy", gtk.main_quit, "WM destroy")
+               vbox = gtk.VBox()
+               window.add(vbox)
+               self.entry = gtk.Entry()
+               vbox.pack_start(self.entry, False)
+               self.button = gtk.Button("Start")
+               vbox.add(self.button)
+               self.button.connect("clicked", self.start_stop)
+               window.show_all()
+
+               self.player = gst.Pipeline("player")
+               source = gst.element_factory_make("filesrc", "file-source")
+               demuxer = gst.element_factory_make("oggdemux", "demuxer")
+               demuxer.connect("pad-added", self.demuxer_callback)
+               self.audio_decoder = gst.element_factory_make("vorbisdec", "vorbis-decoder")
+               audioconv = gst.element_factory_make("audioconvert", "converter")
+               audiosink = gst.element_factory_make("autoaudiosink", "audio-output")
+
+               self.player.add(source, demuxer, self.audio_decoder, audioconv, audiosink)
+               gst.element_link_many(source, demuxer)
+               gst.element_link_many(self.audio_decoder, audioconv, audiosink)
+
+               bus = self.player.get_bus()
+               bus.add_signal_watch()
+               bus.connect("message", self.on_message)
+
+       def start_stop(self, w):
+               if self.button.get_label() == "Start":
+                       filepath = self.entry.get_text()
+                       if os.path.isfile(filepath):
+                               self.button.set_label("Stop")
+                               self.player.get_by_name("file-source").set_property("location", filepath)
+                               self.player.set_state(gst.STATE_PLAYING)
+               else:
+                       self.player.set_state(gst.STATE_NULL)
+                       self.button.set_label("Start")
+
+       def on_message(self, bus, message):
+               t = message.type
+               if t == gst.MESSAGE_EOS:
+                       self.player.set_state(gst.STATE_NULL)
+                       self.button.set_label("Start")
+               elif t == gst.MESSAGE_ERROR:
+                       err, debug = message.parse_error()
+                       print "Error: %s" % err, debug
+                       self.player.set_state(gst.STATE_NULL)
+                       self.button.set_label("Start")
+
+       def demuxer_callback(self, demuxer, pad):
+               adec_pad = self.audio_decoder.get_pad("sink")
+               pad.link(adec_pad)
+
+GTK_Main()
+gtk.gdk.threads_init()
+gtk.main()
\ No newline at end of file
diff --git a/osc_test.py b/osc_test.py
new file mode 100644 (file)
index 0000000..0f4337e
--- /dev/null
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import liblo, sys
+
+# send all messages to port 13000 on the local machine
+try:
+    target = liblo.Address(13000)
+except liblo.AddressError, err:
+    print str(err)
+    sys.exit()
+
+# send message "/foo/message1" with int, float and string arguments
+liblo.send(target, "/sink_2/xpos", 200)
diff --git a/rtpx264.sh b/rtpx264.sh
new file mode 100755 (executable)
index 0000000..86ae994
--- /dev/null
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+gst-launch -v  gstrtpbin name=rtpbin \
+ v4l2src \
+ ! queue ! videoscale method=1 ! video/x-raw-yuv,width=640,height=360 \
+ ! queue ! x264enc byte-stream=true bitrate=1000 bframes=4 ref=4 me=hex subme=4 weightb=true threads=4 ! rtph264pay \
+ ! rtpbin.send_rtp_sink_0 \
+ rtpbin.send_rtp_src_0 ! udpsink port=5000 host=127.0.0.1 \
+ rtpbin.send_rtcp_src_0 ! udpsink port=5001 host=127.0.0.1 sync=false async=false  \
+ udpsrc port=5002 ! rtpbin.recv_rtcp_sink_0 > /dev/null &
\ No newline at end of file
diff --git a/rtpx264_pl.sh b/rtpx264_pl.sh
new file mode 100755 (executable)
index 0000000..308b176
--- /dev/null
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+gst-launch -v gstrtpbin name=rtpbin latency=200 \
+ udpsrc caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" port=5000 \
+ ! rtpbin.recv_rtp_sink_0 \
+ rtpbin. ! rtph264depay ! tee name=t ! ffdec_h264 ! xvimagesink \
+ udpsrc port=5001 ! rtpbin.recv_rtcp_sink_0 \
+ rtpbin.send_rtcp_src_0 ! udpsink port=5002 host=127.0.0.1 sync=false async=false \
+ t. ! filesink location=/tmp/video.mp4 
diff --git a/simple-effect-gtk.py b/simple-effect-gtk.py
new file mode 100644 (file)
index 0000000..19824f7
--- /dev/null
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+"""Extends basic demo with a gnl composition"""
+
+from demo import Demo, DemoException
+import gtk
+import gst
+import sys
+import os
+
+def create_decodebin():
+    try:
+        return gst.element_factory_make("decodebin2")
+    except:
+        return gst.element_factory_make("decodebin")
+
+class SimpleEffectDemo(Demo):
+    __name__ = "Basic GStreamer Effect Demo"
+    __usage__ = '''python %s file
+    display file with a color_balance effect''' % sys.argv[0]
+    __def_win_size__ = (320, 500)
+    # <excerpt 1>
+    def magic(self, pipeline, sink, args):
+
+        def onPad(obj, pad, target):
+            sinkpad = target.get_compatible_pad(pad, pad.get_caps())
+            pad.link(sinkpad)
+            return True
+
+        assert os.path.exists(sys.argv[-1])
+
+        # create the following pipeline
+        # filesrc location = sys.argv[1] ! decodebin ! videobalance ! ...
+        src = gst.element_factory_make("filesrc")
+        src.set_property("location", sys.argv[-1])
+        decode = create_decodebin()
+
+        self.balance = gst.element_factory_make("videobalance")
+
+        pipeline.add(src, decode, self.balance)
+        src.link(decode)
+        decode.connect("pad-added", onPad, self.balance)
+        self.balance.link(sink)
+
+        return
+    # </excerpt>
+
+    # <excerpt 2>
+    # overriding from parent
+    def customWidgets(self):
+        """Create a control for each property in the videobalance
+        widget"""
+
+        # called when a property value needs to change
+        def onValueChanged(widget, prop):
+            # set the corresponding property of the videobalance element
+            self.balance.set_property(prop, widget.get_value())
+
+        # videobalance has several properties, with the following range
+        # and defaults
+        properties = [("contrast", 0, 2, 1),
+                      ("brightness", -1, 1, 0),
+                      ("hue", -1, 1, 0),
+                      ("saturation", 0, 2, 1)]
+
+        # create a place to hold our controls
+        controls = gtk.VBox()
+        labels = gtk.VBox()
+        # for every property, create a control and set its attributes
+        for prop, lower, upper, default in properties:
+            widget = gtk.HScale(); label = gtk.Label(prop)
+
+            # set appropriate attributes
+            widget.set_update_policy(gtk.UPDATE_CONTINUOUS)
+            widget.set_value(default)
+            widget.set_draw_value(True)
+            widget.set_range(lower, upper)
+
+            # connect to our signal handler, specifying the property
+            # to adjust
+            widget.connect("value-changed", onValueChanged, prop)
+
+            # pack widget into box
+            controls.pack_start(widget, True, True)
+            labels.pack_start(label, True, False)
+
+        layout = gtk.HBox()
+        layout.pack_start(labels, False, False)
+        layout.pack_end(controls, True, True)
+        return layout
+
+    # </excerpt>
+
+if __name__ == '__main__':
+    SimpleEffectDemo().run()
\ No newline at end of file
diff --git a/video_player_qt.py b/video_player_qt.py
new file mode 100644 (file)
index 0000000..91e946a
--- /dev/null
@@ -0,0 +1,73 @@
+import sys, os
+from PyQt4 import QtCore, QtGui, uic
+from PyQt4.phonon import Phonon
+
class VideoPlayer(QtGui.QWidget):
    """Phonon-based video player widget.

    Shows the video surface above a control row with a play/pause
    button, a seek slider, an elapsed-time label and a (stub)
    download button.
    """

    def __init__(self, url, parent = None):
        """Build the player UI and start loading *url*.

        url    -- anything Phonon.MediaSource accepts (file path or URL)
        parent -- optional parent widget
        """
        self.url = url

        QtGui.QWidget.__init__(self, parent)
        self.setSizePolicy(QtGui.QSizePolicy.Expanding,
            QtGui.QSizePolicy.Preferred)

        # embedded video surface; a tick every 100 ms drives the clock label
        self.player = Phonon.VideoPlayer(Phonon.VideoCategory, self)
        self.player.load(Phonon.MediaSource(self.url))
        self.player.mediaObject().setTickInterval(100)
        self.player.mediaObject().tick.connect(self.tock)

        self.play_pause = QtGui.QPushButton(self)
        self.play_pause.setIcon(QtGui.QIcon(':/icons/player_play.svg'))
        self.play_pause.clicked.connect(self.playClicked)
        # keep the button icon in sync when playback state changes
        self.player.mediaObject().stateChanged.connect(self.stateChanged)

        self.slider = Phonon.SeekSlider(self.player.mediaObject(), self)

        self.status = QtGui.QLabel(self)
        self.status.setAlignment(QtCore.Qt.AlignRight |
            QtCore.Qt.AlignVCenter)

        self.download = QtGui.QPushButton("Download", self)
        self.download.clicked.connect(self.fetch)

        topLayout = QtGui.QVBoxLayout(self)
        topLayout.addWidget(self.player)
        # BUGFIX: the control row must be created without a parent; the
        # original passed `self`, which already owns topLayout, so Qt warned
        # "Attempting to add QLayout to QWidget which already has a layout".
        # addLayout() below reparents the row correctly.
        layout = QtGui.QHBoxLayout()
        layout.addWidget(self.play_pause)
        layout.addWidget(self.slider)
        layout.addWidget(self.status)
        layout.addWidget(self.download)
        topLayout.addLayout(layout)
        self.setLayout(topLayout)

    def playClicked(self):
        """Toggle between playing and paused."""
        if self.player.mediaObject().state() == Phonon.PlayingState:
            self.player.pause()
        else:
            self.player.play()

    def stateChanged(self, new, old):
        """Update the play/pause button icon to match the new media state."""
        if new == Phonon.PlayingState:
            self.play_pause.setIcon(QtGui.QIcon(':/icons/player_pause.svg'))
        else:
            self.play_pause.setIcon(QtGui.QIcon(':/icons/player_play.svg'))

    def tock(self, time):
        """Render the elapsed playback time (milliseconds) as HH:MM:SS."""
        # floor division: identical to the original int `/` on Python 2,
        # and still correct on Python 3
        time = time // 1000
        h = time // 3600
        m = (time - 3600 * h) // 60
        s = time - 3600 * h - m * 60
        self.status.setText('%02d:%02d:%02d' % (h, m, s))

    def fetch(self):
        """Placeholder for a real download implementation."""
        # parenthesized single-argument print works on Python 2 and 3 alike
        print('Should download %s' % self.url)
+
def main():
    """Entry point: play the media file named on the command line."""
    # fail with a usage message instead of an IndexError traceback
    if len(sys.argv) < 2:
        sys.stderr.write('usage: %s <media-url>\n' % sys.argv[0])
        sys.exit(1)
    app = QtGui.QApplication(sys.argv)
    window = VideoPlayer(sys.argv[1])
    window.show()
    # It's exec_ because exec is a reserved word in Python
    sys.exit(app.exec_())
+
if __name__ == "__main__":
    # start the GUI only when run as a script, not on import
    main()