--- /dev/null
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2007-2013 Parisson SARL
+
+# This file is part of TimeSide.
+
+# TimeSide is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+
+# TimeSide is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
+
+# Authors:
+# Paul Brossier <piem@piem.org>
+
+
+"""This script runs a timeside pipeline on a collection of media files. The
+pipeline can be configured using command line options or a configuration file.
+"""
+
+import sys, os.path
+
# Help text passed to optparse; interpolates the program name into both lines.
usage = "\n".join([
    "usage: %s [options] -c file.conf file1.wav [file2.wav ...]" % sys.argv[0],
    " help: %s -h" % sys.argv[0],
])
+
def parse_config(path):
    """Parse the YAML configuration file at *path* and return its content.

    Returns whatever the YAML document holds — typically a dict mapping
    option names to values, which parse_args merges into the command line
    options.
    """
    import yaml
    # safe_load refuses to instantiate arbitrary python objects from the
    # config file, and the with-statement guarantees the handle is closed;
    # the previous yaml.load(open(path)) leaked the file object and is
    # unsafe on untrusted input (and rejected by modern PyYAML without an
    # explicit Loader).
    with open(path) as config_file:
        return yaml.safe_load(config_file)
+
def parse_args():
    """Parse the command line, returning an (options, args) tuple.

    *args* is the list of input media files.  Values from the configuration
    file (-C/--conf) only fill in options that were not already given on the
    command line.  When no output directory is given, a temporary one is
    created.
    """
    from optparse import OptionParser
    parser = OptionParser(usage = usage)
    parser.add_option("-v", "--verbose",
            action="store_true", dest="verbose", default=False,
            help="be verbose")
    parser.add_option("-C", "--conf", action = "store",
            dest = "config_file",
            help="configuration file",
            metavar = "<config_file>")

    parser.add_option("-s", "--samplerate", action = "store",
            dest = "samplerate", type = int,
            help="samplerate at which to run the pipeline",
            default = None,
            metavar = "<samplerate>")
    parser.add_option("-c", "--channels", action = "store",
            dest = "channels", type = int,
            help="number of channels to run the pipeline with",
            default = None,
            metavar = "<channels>")
    parser.add_option("-b", "--blocksize", action = "store",
            dest = "blocksize", type = int,
            help="blocksize at which to run the pipeline",
            default = None,
            metavar = "<blocksize>")

    parser.add_option("-a", "--analyzers", action = "store",
            dest = "analyzers", type = str,
            help="analyzers in the pipeline",
            default = [],
            metavar = "<analyzers>")
    parser.add_option("-g", "--graphers", action = "store",
            dest = "graphers", type = str,
            help="graphers in the pipeline",
            default = [],
            metavar = "<graphers>")

    # --ouput-directory (sic) is kept as an alias so existing scripts that
    # used the historical misspelling keep working
    parser.add_option("-o", "--output-directory", "--ouput-directory",
            action = "store",
            dest = "outputdir", type = str,
            help="output directory",
            default = None,
            metavar = "<outputdir>")

    (options, args) = parser.parse_args()

    # -a and -g take comma separated lists of processor names
    if options.analyzers:
        options.analyzers = options.analyzers.split(',')
    if options.graphers:
        options.graphers = options.graphers.split(',')

    if options.config_file:
        if not os.path.isfile(options.config_file):
            print("ERROR: configuration file not found: %s" % options.config_file)
            sys.exit(1)
        config = parse_config(options.config_file)
        # command line options take precedence: only unset/empty options are
        # filled in from the configuration file
        for key in config:
            if not hasattr(options, key) or not getattr(options, key):
                setattr(options, key, config[key])

    if options.outputdir is None:
        import tempfile
        options.outputdir = tempfile.mkdtemp('-timeside')
    if not os.path.isdir(options.outputdir):
        os.makedirs(options.outputdir)

    return options, args
+
if __name__ == '__main__':
    options, args = parse_args()
    # load timeside after parse_args, to avoid gstreamer hijacking
    # the command line arguments
    import timeside

    # BUG FIX: this was left as `if 1:` (debug leftover), dumping all
    # options unconditionally; honor -v/--verbose instead.
    if options.verbose:
        for a in dir(options):
            if not callable(getattr(options, a)) and not a.startswith('_'):
                print("%s: %s" % (a, getattr(options, a)))

    # every processor registered against each timeside interface
    all_decoders = timeside.core.processors(timeside.api.IDecoder)
    all_analyzers = timeside.core.processors(timeside.api.IAnalyzer)
    all_graphers = timeside.core.processors(timeside.api.IGrapher)
    all_encoders = timeside.core.processors(timeside.api.IEncoder)

    channels = options.channels
    samplerate = options.samplerate
    blocksize = options.blocksize
    analyzers = options.analyzers
    graphers = options.graphers
+
+ def match_id_or_class(id_or_class, processors):
+ class_matches = filter(lambda x: x.__name__ == id_or_class, processors)
+ id_matches = filter(lambda x: x.id() == id_or_class , processors)
+ if not len(id_matches) and not len(class_matches):
+ print 'ERROR: could not find %s, skipping' % id_or_class
+ print 'possible processors:', [a.__name__ for a in processors]
+ sys.exit(1)
+ elif len(id_matches) + len(class_matches) > 1:
+ print 'ERROR: too many matches for %s analyzer' % id_or_class
+ print 'matched values:', id_matches, class_matches
+ sys.exit(1)
+ else:
+ return (id_matches + class_matches)[0]()
+
+ def match_analyzer(id_or_class):
+ return match_id_or_class(id_or_class, all_analyzers)
+ def match_grapher(id_or_class):
+ return match_id_or_class(id_or_class, all_graphers)
+
+ # create instances of analyzers and graphers
+ _analyzers = map(match_analyzer, analyzers)
+ _graphers = map(match_grapher, graphers)
+
+ def process_file(path):
+ decoder = timeside.decoder.FileDecoder(path, start = 1)
+ #pipe.setup(channels = channels, samplerate = samplerate, blocksize = blocksize)
+ pipe = decoder
+
+ for a in _analyzers:
+ pipe = pipe | a
+ for g in _graphers:
+ pipe = pipe | g
+ pipe.run()
+
+ if len(_analyzers):
+ container = timeside.analyzer.core.AnalyzerResultContainer()
+ for a in _analyzers:
+ container.add(a.results.values())
+ result_path = os.path.join(options.outputdir, decoder.uuid() + '.hf5')
+ container.to_hdf5(result_path)
+ if options.verbose : print 'saved', result_path
+ if len(_graphers):
+ for g in _graphers:
+ graph_path = os.path.join(options.outputdir, decoder.uuid(), g.id() + '.png')
+ if not os.path.isdir(os.path.dirname(graph_path)):
+ os.makedirs(os.path.dirname(graph_path))
+ g.render(graph_path)
+ if options.verbose : print 'saved', graph_path
+
+ for path in args:
+ process_file (path)