From f570dcf6cb819955a767bf06e180a12f1c0d4db3 Mon Sep 17 00:00:00 2001 From: Guillaume Pellerin Date: Fri, 17 May 2013 13:22:02 +0200 Subject: [PATCH 1/1] init --- audio_player_qt.py | 72 ++++++++++++ audio_video.py | 113 +++++++++++++++++++ audio_video.pyc | Bin 0 -> 4233 bytes audio_video_crossfade.py | 117 ++++++++++++++++++++ control_mixer.py | 61 ++++++++++ control_mixer_osc.py | 108 ++++++++++++++++++ control_mixer_osc_touch.py | 125 +++++++++++++++++++++ control_mixer_osc_touch_1cam.py | 133 ++++++++++++++++++++++ control_mixer_osc_touch_2cam.py | 138 +++++++++++++++++++++++ control_mixer_osc_touch_3cams.py | 133 ++++++++++++++++++++++ control_mixer_parallel.py | 108 ++++++++++++++++++ control_mixer_parallel_no_effects.py | 35 ++++++ control_mixer_pipes.py | 61 ++++++++++ cross-fade.py | 119 ++++++++++++++++++++ cross-fade_2.py | 23 ++++ demo.py | 160 +++++++++++++++++++++++++++ demo.pyc | Bin 0 -> 5533 bytes gtk_sink_pad.py | 69 ++++++++++++ osc_test.py | 14 +++ rtpx264.sh | 11 ++ rtpx264_pl.sh | 9 ++ simple-effect-gtk.py | 94 ++++++++++++++++ video_player_qt.py | 73 ++++++++++++ 23 files changed, 1776 insertions(+) create mode 100644 audio_player_qt.py create mode 100644 audio_video.py create mode 100644 audio_video.pyc create mode 100644 audio_video_crossfade.py create mode 100644 control_mixer.py create mode 100644 control_mixer_osc.py create mode 100644 control_mixer_osc_touch.py create mode 100644 control_mixer_osc_touch_1cam.py create mode 100644 control_mixer_osc_touch_2cam.py create mode 100644 control_mixer_osc_touch_3cams.py create mode 100644 control_mixer_parallel.py create mode 100644 control_mixer_parallel_no_effects.py create mode 100644 control_mixer_pipes.py create mode 100644 cross-fade.py create mode 100644 cross-fade_2.py create mode 100644 demo.py create mode 100644 demo.pyc create mode 100644 gtk_sink_pad.py create mode 100644 osc_test.py create mode 100755 rtpx264.sh create mode 100755 rtpx264_pl.sh create mode 100644 simple-effect-gtk.py create mode 100644 video_player_qt.py diff --git a/audio_player_qt.py b/audio_player_qt.py new file mode 100644 index 0000000..ed7da6e --- /dev/null +++ b/audio_player_qt.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- + +import sys, os +from PyQt4 import QtCore, QtGui, uic +from PyQt4.phonon import Phonon + +class AudioPlayer(QtGui.QWidget): + def __init__(self, url, parent = None): + + self.url = url + + QtGui.QWidget.__init__(self, parent) + self.setSizePolicy(QtGui.QSizePolicy.Expanding, + QtGui.QSizePolicy.Preferred) + + + self.player = Phonon.createPlayer(Phonon.MusicCategory, + Phonon.MediaSource(url)) + self.player.setTickInterval(100) + self.player.tick.connect(self.tock) + + self.play_pause = QtGui.QPushButton(self) + self.play_pause.setIcon(QtGui.QIcon(':/icons/player_play.svg')) + self.play_pause.clicked.connect(self.playClicked) + self.player.stateChanged.connect(self.stateChanged) + + self.slider = Phonon.SeekSlider(self.player , self) + + self.status = QtGui.QLabel(self) + self.status.setAlignment(QtCore.Qt.AlignRight | + QtCore.Qt.AlignVCenter) + + self.download = QtGui.QPushButton("Download", self) + self.download.clicked.connect(self.fetch) + + layout = QtGui.QHBoxLayout(self) + layout.addWidget(self.play_pause) + layout.addWidget(self.slider) + layout.addWidget(self.status) + layout.addWidget(self.download) + + def playClicked(self): + if self.player.state() == Phonon.PlayingState: + self.player.pause() + else: + self.player.play() + + def stateChanged(self, new, old): + if new == Phonon.PlayingState: + 
self.play_pause.setIcon(QtGui.QIcon(':/icons/player_pause.svg')) + else: + self.play_pause.setIcon(QtGui.QIcon(':/icons/player_play.svg')) + + def tock(self, time): + time = time/1000 + h = time/3600 + m = (time-3600*h) / 60 + s = (time-3600*h-m*60) + self.status.setText('%02d:%02d:%02d'%(h,m,s)) + + def fetch(self): + print 'Should download %s'%self.url + +def main(): + app = QtGui.QApplication(sys.argv) + window=AudioPlayer(sys.argv[1]) + window.show() + # It's exec_ because exec is a reserved word in Python + sys.exit(app.exec_()) + +if __name__ == "__main__": + main() diff --git a/audio_video.py b/audio_video.py new file mode 100644 index 0000000..bf17145 --- /dev/null +++ b/audio_video.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python + +"""A short Audio-Video example""" +import gobject +gobject.threads_init() +import gst +import pygtk +pygtk.require("2.0") +import gtk +gtk.gdk.threads_init() +import sys +import os +from demo import Demo + +def create_decodebin(): + try: + return gst.element_factory_make("decodebin2") + except: + return gst.element_factory_make("decodebin") + +class DemoException(Exception): + """Base exception class for errors which occur during demos""" + + def __init__(self, reason): + self.reason = reason + +class AVDemo(Demo): + """Extends base demo with both audio and video sinks + * a window containing a drawing area and basic media controls + * a basic gstreamer pipeline using an ximagesink and an autoaudiosink + * connects the ximagesink to the window's drawing area + + Derived classes need only override magic(), __name__, + and __usage__ to create new demos.""" + + __name__ = "AV Demo" + __usage__ = "python audio_video.py " + __def_win_size__ = (320, 240) + + # this commment allows us to include only a portion of the file + # in the tutorial for this demo + + def magic(self, pipeline, (videosink, audiosink), args): + """This is where the magic happens""" + + def onPadAdded(source, pad): + # first we see if we can link to the videosink + tpad = videoqueue.get_compatible_pad(pad) + if tpad: + pad.link(tpad) + return + # if not, we try the audio sink + tpad = audioqueue.get_compatible_pad(pad) + if tpad: + pad.link(tpad) + return + + src = gst.element_factory_make("filesrc", "src") + src.props.location = args[0] + dcd = create_decodebin() + audioqueue = gst.element_factory_make("queue") + videoqueue = gst.element_factory_make("queue") + pipeline.add(src, dcd, audioqueue, videoqueue) + + src.link(dcd) + videoqueue.link(videosink) + audioqueue.link(audiosink) + dcd.connect("pad-added", onPadAdded) + + def createPipeline(self, w): + """Given a window, creates a pipeline and connects it to the window""" + + # code will make the ximagesink output in the specified window + def set_xid(window): + gtk.gdk.threads_enter() + videosink.set_xwindow_id(window.window.xid) + videosink.expose() + gtk.gdk.threads_leave() + + # this code receives the messages from the pipeline. 
if we + # need to set X11 id, then we call set_xid + def bus_handler(unused_bus, message): + if message.type == gst.MESSAGE_ELEMENT: + if message.structure.get_name() == 'prepare-xwindow-id': + set_xid(w) + return gst.BUS_PASS + + # create our pipeline, and connect our bus_handler + self.pipeline = gst.Pipeline() + bus = self.pipeline.get_bus() + bus.set_sync_handler(bus_handler) + + videosink = gst.element_factory_make("ximagesink", "sink") + videosink.set_property("force-aspect-ratio", True) + videosink.set_property("handle-expose", True) + scale = gst.element_factory_make("videoscale", "scale") + cspace = gst.element_factory_make("ffmpegcolorspace", "cspace") + + audiosink = gst.element_factory_make("autoaudiosink") + audioconvert = gst.element_factory_make("audioconvert") + + # pipeline looks like: ... ! cspace ! scale ! sink + # ... ! audioconvert ! autoaudiosink + self.pipeline.add(cspace, scale, videosink, audiosink, + audioconvert) + scale.link(videosink) + cspace.link(scale) + audioconvert.link(audiosink) + return (self.pipeline, (cspace, audioconvert)) + +# if this file is being run directly, create the demo and run it +if __name__ == '__main__': + AVDemo().run() \ No newline at end of file diff --git a/audio_video.pyc b/audio_video.pyc new file mode 100644 index 0000000000000000000000000000000000000000..433e326117448949f5f6b8d56ad2661f2867f105 GIT binary patch literal 4233 zcmbtX?QR@N6|L$SkH2HbVGU>n6oS>xLTsZ5B+xFaIK-@kSS$2o1B{Sb?e4Ot*E3z6 zt{U5eoL@mCc@G|f$AF)_1>yzZoZCI)WPg$D*j?RqyXw}hd+#}Q`=9^WT>INU?C&zw z{?>56kIVlHO|H}styX!Y&b1n9Wwk0I^;{|4N;_)UnUCnIVOQC%Dwb5pt{y4m)@)c#* zMY_6>ZYjI1@-_8kq}*+#?G^_Yx5em^HbrXCU*{IzM{{WW$ko9dPV>U;Jt=bQjJ--H z)z}vQ#qZd6FzVj^Px>17&_(mmPF#k&{Fc!E3YY%~&Fr=cSPMpKZh!Fm5(a49%HTK1 z{s5;A`c!%x=0%6SF^B<2^ro8n=CBZg3Pa=QM#jV z;|iO2=C+TnGFyehm5cq;D-Uq_Z=$gX6*iAHq6-iwBu6}ut@xjFWa=&4+bS{HIQ8C~ zy4u*fcC|N`qaqs_mu1u1b z@gk$3a~7M+qLMOlO-`4*MZCZ+paAT%#axH?HBv`I2?`E<-K7fzDgWl0h< z%r{2J(_6<+*fE_o6__;gdI#lh!@ppnGV(5*z|M67Vh)wk^oWnY&AqxiX~i8v>e| zyiOs;6JjJ2p|;4(#O6gRBWpLl(a^kHWU;A=%8rZDnkiO-lG0oill07TUC{|2ai;gSDGvh9hxPX#@0Y%pT z=&nhUCIRlsOnN0rrXJ=ciNQ(@)S%(AN$h>k%s6~v7&mY53`~w3cknt)%_pbD*i!SS z#eKx1_%}vo3_X*Hk?-L0e@Am2sXs;PSyw$&@v25)d?oEvswL zcUOJ;TBteckm82L}f_ zqD^A}jsj%bb&pnPaC)0;-@^_bj|y+_e>t+XmDq@P%_yxZTQ*$4?J~RX;PQ{ra51q8 zi#?0z1-AoxuqsGGm{;c@Ua{l<8OOmbrEs0FgXYYJBy*E04aEsiU8T942S~H-Tm>A%hOt5t?rKV$ne@suu%rqHS)vofwbYqvn53YJaEG@Y;;!h9;e0 zB7jZs-bzqIt%(Fv8?EaEV2gb$vLRgnx3Oo1pae*<0MG~kKVJfFP{gfLMCjr@IxUWg zaCCrMz7Z9izf6|F|Dex<+22R=88{O>)}8=&Tk7+W%(Fn)dO^!YAVHFx8!FCNTJT!n z{9|;ur$r6=5*>IFtrVR!i6R(>ne9ky2%Xp9%pgi8orUw}o}WvB6Gk|Xa-X2avURwH zVIF|LYOG0$T;SdGWZqRPvR9SENN#+tVrS! 
zGV_jJYqFC*TKMS!I%GcBBd%g4+;7NGk`4|*bw=qeaTvInu6z2i(kzfUX$g^}zGF|~ zt7==}u-FFsrPPL?HG*KN<`UKRZf}etY!!(eYzJGV0H17N)fo3LuH{ zAd}96F9yk@!$Gr_Fxj-6dYdOFQ!g|!v7U#g*b=k2g>gz zS?0zlb*mJI1f7^Qk5u(W)e<&Df+_$AL75Ty)ZKWR;eXVYLGgFc5ns`>^c$CAviMn< zEw(BQPL+H^7Kg@}Z18bCwJ|@1guENl=K}Fh+58NRWIHs+zo3LDjK9z3muMssa{5Vl znn?yP-ky+&xTZdu@9o#r^FuVMyS1{jOitd6$j{pd)(#G~t?qkr$dRQbM{cU4J~0!E z&lvWYdzW-v)MMf(6y^3b0jd*U{24`B`~wbth^E;Y9^MhOF+)nfdM@2t?nNlmb#H0) zeMx + src = gst.element_factory_make("filesrc") + src.set_property("location", sys.argv[-1]) + + srcAdecode = create_decodebin() + srcAconvert = gst.element_factory_make("ffmpegcolorspace") + srcAalpha = gst.element_factory_make("alpha") + srcAalpha.set_property("alpha", 1.0) + + srcB = gst.element_factory_make("filesrc") + srcB.set_property("location", sys.argv[-2]) + srcBdecode = create_decodebin() + srcBconvert = gst.element_factory_make("ffmpegcolorspace") + srcBalpha = gst.element_factory_make("alpha") + srcBalpha.set_property("alpha", 0.5) + + mixer = gst.element_factory_make("videomixer") + mixer.set_property("background", "black") + # + + # + pipeline.add(mixer) + + pipeline.add(src, srcAdecode, srcAconvert, srcAalpha) + src.link(srcAdecode) + srcAdecode.connect("pad-added", onPad, srcAconvert) + srcAconvert.link(srcAalpha) + srcAalpha.link(mixer) + + pipeline.add(srcB, srcBdecode, srcBconvert, srcBalpha) + srcB.link(srcBdecode) + srcBdecode.connect("pad-added", onPad, srcBconvert) + srcBconvert.link(srcBalpha) + srcBalpha.link(mixer) + + mixer.link(sink) + + # remember the alpha elements + self.srcBalpha = srcBalpha + # + + + # overriding from parent + def customWidgets(self): + """Create a control for each property in the videobalance + widget""" + + # + # to be called a property value needs to change + def onValueChanged(widget): + if self.srcBalpha: + self.srcBalpha.set_property("alpha", widget.get_value()) + # + + lower = 0 + upper = 1 + default = 0.5 + + # create a place to hold our controls + controls = gtk.VBox() + labels = gtk.VBox() + + widget = gtk.HScale(); label = gtk.Label("Crossfade") + + # set appropriate atributes + widget.set_update_policy(gtk.UPDATE_CONTINUOUS) + widget.set_draw_value(True) + widget.set_range(lower, upper) + widget.set_value(default) + + # connect to our signal handler, specifying the property + # to adjust + widget.connect("value-changed", onValueChanged) + + # pack widget into box + controls.pack_start(widget, True, True) + labels.pack_start(label, True, False) + + layout = gtk.HBox() + layout.pack_start(labels, False, False) + layout.pack_end(controls, True, True) + return layout + +if __name__ == '__main__': + SimpleCrossfadeDemo().run() \ No newline at end of file diff --git a/cross-fade_2.py b/cross-fade_2.py new file mode 100644 index 0000000..c1fd340 --- /dev/null +++ b/cross-fade_2.py @@ -0,0 +1,23 @@ +#!/usr/bin/python +import gobject; gobject.threads_init() +import pygst; pygst.require("0.10") +import gst + +p = gst.parse_launch ("""videomixer name=mix ! ffmpegcolorspace ! xvimagesink + videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=10/1, width=200, height=150 ! mix.sink_0 + videotestsrc ! video/x-raw-yuv, framerate=10/1, width=640, height=360 ! 
mix.sink_1 +""") + +m = p.get_by_name ("mix") +s0 = m.get_pad ("sink_0") +s0.set_property ("xpos", 100) + +control = gst.Controller(s0, "ypos", "alpha") +control.set_interpolation_mode("ypos", gst.INTERPOLATE_LINEAR) +control.set_interpolation_mode("alpha", gst.INTERPOLATE_LINEAR) +control.set("ypos", 0, 0); control.set("ypos", 5 * gst.SECOND, 200) +control.set("alpha", 0, 0); control.set("alpha", 5 * gst.SECOND, 1.0) + +p.set_state (gst.STATE_PLAYING) + +gobject.MainLoop().run() \ No newline at end of file diff --git a/demo.py b/demo.py new file mode 100644 index 0000000..51e95f3 --- /dev/null +++ b/demo.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python + +"""Basic Framework for writing GStreamer Demos in Python""" +# +import gobject +gobject.threads_init() +import gst +# +import pygtk +pygtk.require("2.0") +import gtk +gtk.gdk.threads_init() +import sys +import os + + +class DemoException(Exception): + """Base exception class for errors which occur during demos""" + + def __init__(self, reason): + self.reason = reason + +class Demo: + """Base class implementing boring, boiler-plate code. + Sets up a basic gstreamer environment which includes: + + * a window containing a drawing area and basic media controls + * a basic gstreamer pipeline using an ximagesink + * connects the ximagesink to the window's drawing area + + Derived classes need only override magic(), __name__, + and __usage__ to create new demos.""" + + __name__ = "Basic Demo" + __usage__ = "python demo.py -- runs a simple test demo" + __def_win_size__ = (320, 240) + + # this commment allows us to include only a portion of the file + # in the tutorial for this demo + # ... + + def magic(self, pipeline, sink, args): + """This is where the magic happens""" + src = gst.element_factory_make("videotestsrc", "src") + pipeline.add(src) + src.link(sink) + + + def createPipeline(self, w): + """Given a window, creates a pipeline and connects it to the window""" + + # code will make the ximagesink output in the specified window + def set_xid(window): + gtk.gdk.threads_enter() + sink.set_xwindow_id(window.window.xid) + sink.expose() + gtk.gdk.threads_leave() + + # this code receives the messages from the pipeline. if we + # need to set X11 id, then we call set_xid + def bus_handler(unused_bus, message): + if message.type == gst.MESSAGE_ELEMENT: + if message.structure.get_name() == 'prepare-xwindow-id': + set_xid(w) + return gst.BUS_PASS + + # create our pipeline, and connect our bus_handler + self.pipeline = gst.Pipeline() + bus = self.pipeline.get_bus() + bus.set_sync_handler(bus_handler) + + sink = gst.element_factory_make("ximagesink", "sink") + sink.set_property("force-aspect-ratio", True) + sink.set_property("handle-expose", True) + scale = gst.element_factory_make("videoscale", "scale") + cspace = gst.element_factory_make("ffmpegcolorspace", "cspace") + + # our pipeline looks like this: ... ! cspace ! scale ! sink + self.pipeline.add(cspace, scale, sink) + scale.link(sink) + cspace.link(scale) + return (self.pipeline, cspace) + + # ... end of excerpt + + # subclasses can override this method to provide custom controls + def customWidgets(self): + return gtk.HBox() + + def createWindow(self): + """Creates a top-level window, sets various boring attributes, + creates a place to put the video sink, adds some and finally + connects some basic signal handlers. Really, really boring. 
+ """ + + # create window, set basic attributes + w = gtk.Window() + w.set_size_request(*self.__def_win_size__) + w.set_title("Gstreamer " + self.__name__) + w.connect("destroy", gtk.main_quit) + + # declare buttons and their associated handlers + controls = ( + ("play_button", gtk.ToolButton(gtk.STOCK_MEDIA_PLAY), self.onPlay), + ("stop_button", gtk.ToolButton(gtk.STOCK_MEDIA_STOP), self.onStop), + ("quit_button", gtk.ToolButton(gtk.STOCK_QUIT), gtk.main_quit) + ) + + # as well as the container in which to put them + box = gtk.HButtonBox() + + # for every widget, connect to its clicked signal and add it + # to the enclosing box + for name, widget, handler in controls: + widget.connect("clicked", handler) + box.pack_start(widget, True) + setattr(self, name, widget) + + viewer = gtk.DrawingArea() + viewer.modify_bg(gtk.STATE_NORMAL, viewer.style.black) + + # we will need this later + self.xid = None + + # now finally do the top-level layout for the window + layout = gtk.VBox(False) + layout.pack_start(viewer) + + # subclasses can override childWidgets() to supply + # custom controls + layout.pack_start(self.customWidgets(), False, False) + layout.pack_end(box, False, False) + w.add(layout) + w.show_all() + + # we want to return only the portion of the window which will + # be used to display the video, not the whole top-level + # window. a DrawingArea widget is, in fact, an X11 window. + return viewer + + def onPlay(self, unused_button): + self.pipeline.set_state(gst.STATE_PLAYING) + + def onStop(self, unused_button): + self.pipeline.set_state(gst.STATE_READY) + + def run(self): + w = self.createWindow() + p, s = self.createPipeline(w) + try: + self.magic(p, s, sys.argv[1:]) + gtk.main() + except DemoException, e: + print e.reason + print self.__usage__ + sys.exit(-1) + +# if this file is being run directly, create the demo and run it +if __name__ == '__main__': + Demo().run() \ No newline at end of file diff --git a/demo.pyc b/demo.pyc new file mode 100644 index 0000000000000000000000000000000000000000..acdd70c027801b57e53b78a5aef847cb258b9e1c GIT binary patch literal 5533 zcmbtY?QR@b89uYSUV9xoZtOHANudLXu&w-2ijaU(8asBKh{OrAZqrynqw&nKed67j z&78Bg7ui3+2vP|oq+SP?z$Ng5E8rFoHvrG`o>|+?uO!$zXU_L|KcDwK^UwcGPnu7@ zc$li{uZ8z*JofJ>GNraq1}dwm{XlI6N(ai+)TvTI)mv9v_0f!m+G;4>P-aYJb#)q? zO=_yGrqWGi#$Q@Jp|&PQt6OTTr7~n&{LdgE(>8f&#kFWdy;JY1jR~XO&WeyCOv&vl>6aMQHF=5aVFml*E_D%7%0P4 z-79Qp^6=5n?G?HCFa9=c9qT_@{LoF~%^7P)sqQ;dI3Q_f7R4)ti0FzqJR3PHLoi_(UNdnVlri!>dSVKyj9Vn)JJ?l5}pa5qwP z9H?WZV$oEmO-eSTbWK#jmo0KR+BLXBcXuMrY2)#Q{Q+uAdqqz#^@?7xl<9+|U0X?H z$!Y7*)hibJLpOn$acpwq;`l1+mc*!_&7G^k9CD0fN4dvwugC^nHpiuz6=@ttl;-*A zQhRk0A}m%{@K~PN44O4zi&{BT28mkX6v*UdWzdvsVy;s(RBvHhu+^sM2{tAMwlz-6 zqUKT;pqO`HzX zBGZeLA^tkr*>KPglWDM>q`kl73(3)=go~0T?exi!D>fbMfBrUd0w{!qMHdx2kw2NnKG6 z<6g3_WqOij;s(&$ew(yM^a1h6kvhHiV_YgpxoacB{JA5(x(_;@AWVFKiY3gb+H5c# z2tKjNN7(omDt`(D%LDaCrM^NFznadEGd4PLLLH;t;FhO#6;P)w zX+WnKL1UzCp%7i*qSL+v9_?)u>oDy+9QovYWudgTkrKok&%DRDZ+c8hT;@aN&Ec^h zqae9w0rv$O5D7|&u(URIH6`d(Ga(0tW(Cr?*+zSUyVOzax_$r7_9d3NJ@`;&V<3z! 
z-DS+M+QmoSe`AwL1o16dcT{!hqke&zguc-hU7Z|gIWM|~ZeN>W4Uc*a+hU{;Don<( zZArX>ZOsLf-o7ZDGXbrjK}Q53A|vJkZDwT(?jZ$i`@q;61Z zalKs#lMp72mCrlzqvei2OTyzIA6T8m+XE}%v8OG;O=yX3@DytA@O9-!Ys8lE9sm>Z z<`YuoWjGgH3uXg3`W(?CvLc)#AxiZ^V*5b$g)%{UBiV)DXQ5I#xfigqY0}jK1itnb z26lFOeZ89&UF0zRB-LU*sZ=Eo^Rgo`kE$3G(dLAE%8HWUK2*OQ=IMxsh&%~Sa-?4t zeOT*IRI*tPbVLM=XpEvNfxhT_?E5~~(eSZzbc;Qdrbx8SZV|IcK<2-Om6C2BfJUtR z2tD>)6sj>ZF*`mJ%mmW_#kcT38_d?P!Gr;jBM;~M4p)ddiHDz_J1osHqBRPScUOud z?@#Ra)T8n`PDSCQkZ#d?Y%+-2KIVFEr#wwGUfQ|P#B9c631~V&b}6n!(5b6GV+Gqt zoh0pq`@BPsVjp1GgSkgyghB2yA!Unx$1RZ9-AP0 zz}>ixd}BJ897~CjqesAzA|j6A2WawnD!l$exR<7am-}`-pnJUzdaEODytXo6hJ7kJgq~pPZ8&QM7MF=`t4n z70-xL21VN31RkOE_o#STpZY)~LPw|pTs}qSj$d63+cF3D2K)f7lJ_Avy+_Q24OhwW zq}{+)NdkSAqw~1 zuR)-scq*6*-VW-)jo>mYegglZ#`C*xTT&@8V7W!o38R?B&sdOC&ocqRi-6`9QIip5 zb-@mV;8)M6B7!-6%kuirY&C26wi7g)t?|~aR=xFOJaxeod{iP1$6g@JUL#T|FtS^0 zKSTB^pf|dh@MD#?U~<$S@(#qa7!6FRMNbhgThUW)LoX?;XzA=LpVuB2NnrT@7!@Xd g;XjQ1l9pwe>o /dev/null & + \ No newline at end of file diff --git a/rtpx264_pl.sh b/rtpx264_pl.sh new file mode 100755 index 0000000..308b176 --- /dev/null +++ b/rtpx264_pl.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +gst-launch -v gstrtpbin name=rtpbin latency=200 \ + udpsrc caps="application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" port=5000 \ + ! rtpbin.recv_rtp_sink_0 \ + rtpbin. ! rtph264depay ! tee name=t ! ffdec_h264 ! xvimagesink \ + udpsrc port=5001 ! rtpbin.recv_rtcp_sink_0 \ + rtpbin.send_rtcp_src_0 ! udpsink port=5002 host=127.0.0.1 sync=false async=false \ + t. ! filesink location=/tmp/video.mp4 diff --git a/simple-effect-gtk.py b/simple-effect-gtk.py new file mode 100644 index 0000000..19824f7 --- /dev/null +++ b/simple-effect-gtk.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +"""Extends basic demo with a gnl composition""" + +from demo import Demo, DemoException +import gtk +import gst +import sys +import os + +def create_decodebin(): + try: + return gst.element_factory_make("decodebin2") + except: + return gst.element_factory_make("decodebin") + +class SimpleEffectDemo(Demo): + __name__ = "Basic GStreamer Effect Demo" + __usage__ = '''python %s file + display file with a color_balance effect''' % sys.argv[0] + __def_win_size__ = (320, 500) + # + def magic(self, pipeline, sink, args): + + def onPad(obj, pad, target): + sinkpad = target.get_compatible_pad(pad, pad.get_caps()) + pad.link(sinkpad) + return True + + assert os.path.exists(sys.argv[-1]) + + # create the following pipeline + # filesrc location = sys.argv[1] ! decodebin ! videobalance ! ... 
+ src = gst.element_factory_make("filesrc") + src.set_property("location", sys.argv[-1]) + decode = create_decodebin() + + self.balance = gst.element_factory_make("videobalance") + + pipeline.add(src, decode, self.balance) + src.link(decode) + decode.connect("pad-added", onPad, self.balance) + self.balance.link(sink) + + return + # + + # + # overriding from parent + def customWidgets(self): + """Create a control for each property in the videobalance + widget""" + + # to be called a property value needs to change + def onValueChanged(widget, prop): + # set the corresponding property of the videobalance element + self.balance.set_property(prop, widget.get_value()) + + # videobalance has several properties, with the following range + # and defaults + properties = [("contrast", 0, 2, 1), + ("brightness", -1, 1, 0), + ("hue", -1, 1, 0), + ("saturation", 0, 2, 1)] + + # create a place to hold our controls + controls = gtk.VBox() + labels = gtk.VBox() + # for every propety, create a control and set its attributes + for prop, lower, upper, default in properties: + widget = gtk.HScale(); label = gtk.Label(prop) + + # set appropriate atributes + widget.set_update_policy(gtk.UPDATE_CONTINUOUS) + widget.set_value(default) + widget.set_draw_value(True) + widget.set_range(lower, upper) + + # connect to our signal handler, specifying the property + # to adjust + widget.connect("value-changed", onValueChanged, prop) + + # pack widget into box + controls.pack_start(widget, True, True) + labels.pack_start(label, True, False) + + layout = gtk.HBox() + layout.pack_start(labels, False, False) + layout.pack_end(controls, True, True) + return layout + + # + +if __name__ == '__main__': + SimpleEffectDemo().run() \ No newline at end of file diff --git a/video_player_qt.py b/video_player_qt.py new file mode 100644 index 0000000..91e946a --- /dev/null +++ b/video_player_qt.py @@ -0,0 +1,73 @@ +import sys, os +from PyQt4 import QtCore, QtGui, uic +from PyQt4.phonon import Phonon + +class VideoPlayer(QtGui.QWidget): + def __init__(self, url, parent = None): + + self.url = url + + QtGui.QWidget.__init__(self, parent) + self.setSizePolicy(QtGui.QSizePolicy.Expanding, + QtGui.QSizePolicy.Preferred) + + + self.player = Phonon.VideoPlayer(Phonon.VideoCategory,self) + self.player.load(Phonon.MediaSource(self.url)) + self.player.mediaObject().setTickInterval(100) + self.player.mediaObject().tick.connect(self.tock) + + self.play_pause = QtGui.QPushButton(self) + self.play_pause.setIcon(QtGui.QIcon(':/icons/player_play.svg')) + self.play_pause.clicked.connect(self.playClicked) + self.player.mediaObject().stateChanged.connect(self.stateChanged) + + self.slider = Phonon.SeekSlider(self.player.mediaObject() , self) + + self.status = QtGui.QLabel(self) + self.status.setAlignment(QtCore.Qt.AlignRight | + QtCore.Qt.AlignVCenter) + + self.download = QtGui.QPushButton("Download", self) + self.download.clicked.connect(self.fetch) + topLayout = QtGui.QVBoxLayout(self) + topLayout.addWidget(self.player) + layout = QtGui.QHBoxLayout(self) + layout.addWidget(self.play_pause) + layout.addWidget(self.slider) + layout.addWidget(self.status) + layout.addWidget(self.download) + topLayout.addLayout(layout) + self.setLayout(topLayout) + + def playClicked(self): + if self.player.mediaObject().state() == Phonon.PlayingState: + self.player.pause() + else: + self.player.play() + + def stateChanged(self, new, old): + if new == Phonon.PlayingState: + self.play_pause.setIcon(QtGui.QIcon(':/icons/player_pause.svg')) + else: + 
self.play_pause.setIcon(QtGui.QIcon(':/icons/player_play.svg')) + + def tock(self, time): + time = time/1000 + h = time/3600 + m = (time-3600*h) / 60 + s = (time-3600*h-m*60) + self.status.setText('%02d:%02d:%02d'%(h,m,s)) + + def fetch(self): + print 'Should download %s'%self.url + +def main(): + app = QtGui.QApplication(sys.argv) + window=VideoPlayer(sys.argv[1]) + window.show() + # It's exec_ because exec is a reserved word in Python + sys.exit(app.exec_()) + +if __name__ == "__main__": + main() -- 2.39.5
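
Note on trying the patch out (an illustrative sketch, not part of the commit itself): the files above target the legacy Python 2 stack, GStreamer 0.10 via gst-python, PyGTK 2.0, and PyQt4's Phonon bindings for the two Qt players, as their imports show. Assuming the mail is saved as 0001-init.patch (the name "git format-patch" would normally give this single "init" commit) and those bindings are installed, the demos can be exercised with the usage strings they declare; the media filename below is only a placeholder.

    # Apply the patch on top of the target repository
    # (the patch filename is an assumption, see above).
    git am 0001-init.patch

    # Base demo, per its __usage__ string:
    # "python demo.py -- runs a simple test demo"
    python demo.py

    # videobalance demo, per its __usage__ string it takes one media file;
    # "some_video.ogv" is a hypothetical placeholder.
    python simple-effect-gtk.py some_video.ogv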