git.parisson.com Git - timeside.git/commitdiff
Simplify Analyzer Result container naming
author Thomas Fillon <thomas@parisson.com>
Mon, 7 Oct 2013 16:40:00 +0000 (18:40 +0200)
committer Thomas Fillon <thomas@parisson.com>
Mon, 7 Oct 2013 16:40:27 +0000 (18:40 +0200)
doc/slides/timeside_slides.html
timeside/analyzer/aubio_melenergy.py
timeside/analyzer/aubio_mfcc.py
timeside/analyzer/aubio_pitch.py
timeside/analyzer/aubio_specdesc.py
timeside/analyzer/aubio_temporal.py
timeside/analyzer/core.py
timeside/analyzer/dc.py
timeside/analyzer/level.py
timeside/analyzer/yaafe.py
timeside/core.py
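
In short, this commit renames AnalyzerResultContainer.add_result() to add() and has the pipe expose the shared container on each processor as self._results rather than self.resultContainer. A minimal Python sketch of the renamed calls (illustrative only; class and attribute names are taken from the diff below, the surrounding setup is assumed):

    from timeside.analyzer.core import AnalyzerResultContainer, AnalyzerResult

    container = AnalyzerResultContainer()

    result = AnalyzerResult()     # in practice built via Analyzer.new_result(...)
    container.add(result)         # was: container.add_result(result)
    container.add([result])       # a list is accepted; each item is added in turn

    # Inside an Analyzer, the container wired in by ProcessPipe is now reached as:
    #     self._results.add(result)    # was: self.resultContainer.add_result(result)

The shorter method name and the leading underscore mark the container as an internal attribute managed by the pipe rather than part of each analyzer's public surface.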

doc/slides/timeside_slides.html
index 8dc0e9b6cd4e9f4562a8f8f844b0ba4e8a11f85a..923a205e1829afcff7dae597098a7dc46420fffb 100644 (file)
@@ -358,10 +358,10 @@ class AnalyzerResultContainer(object):
             if a != b: return False
         return True
 
-    def add_result(self, analyzer_result):
+    def add(self, analyzer_result):
         if type(analyzer_result) == list:
             for a in analyzer_result:
-                self.add_result(a)
+                self.add(a)
             return
         if type(analyzer_result) != AnalyzerResult:
             raise TypeError('only AnalyzerResult can be added')
@@ -483,7 +483,7 @@ class NewAnalyzer(Processor):
 
         result = AnalyzerResult(id = self.id(), name = self.name(), unit = "something")
         result.value = self.result_data
-        container.add_result(result)
+        container.add(result)
 
         # add other results in the container if needed...
 
timeside/analyzer/aubio_melenergy.py
index ae56071de1d02d71a4aa314c1959eb28fb6cad38..68945e73b946bb08ccfad418afc6226182911571 100644 (file)
@@ -78,5 +78,5 @@ class AubioMelEnergy(Analyzer):
         # Set Data
         melenergy.data.value = self.melenergy_results
 
-        self.resultContainer.add_result(melenergy)
+        self._results.add(melenergy)
 
timeside/analyzer/aubio_mfcc.py
index 2c302c0d5970deadc5c1bf27ab2529dccebf4273..987d8b5d942d0ac18842f33ff92f665f6012bfab 100644 (file)
@@ -77,4 +77,4 @@ class AubioMfcc(Analyzer):
         mfcc.parameters = parameters
 
         mfcc.data.value = self.mfcc_results
-        self.resultContainer.add_result(mfcc)
+        self._results.add(mfcc)
timeside/analyzer/aubio_pitch.py
index b778a9a567f80c380bbb096327ab31768e6b215b..face938ba03eb500df7e69693291433e8d3ea572 100644 (file)
@@ -78,5 +78,5 @@ class AubioPitch(Analyzer):
         # Set Data
         pitch.data.value = numpy.array(self.pitches)
 
-        self.resultContainer.add_result(pitch)
+        self._results.add(pitch)
 
timeside/analyzer/aubio_specdesc.py
index 72491cd347467e88fdaa7aad82648a0d5c9f9ad3..c7594eb8ddab20149896def73e3c4d62da06ce08 100644 (file)
@@ -83,5 +83,5 @@ class AubioSpecdesc(Analyzer):
 
             res_specdesc.data.value = self.specdesc_results[method]
 
-            self.resultContainer.add_result(res_specdesc)
+            self._results.add(res_specdesc)
 
timeside/analyzer/aubio_temporal.py
index 79bf78ff6b9a7c946c359781abe7b98b34088b48..24ca77ed72bf22d7018d7148a2e4efb6aebf9458 100644 (file)
@@ -89,7 +89,7 @@ class AubioTemporal(Analyzer):
 
         onsets.labelMetadata.label = {1: 'Onset'}
 
-        self.resultContainer.add_result(onsets)
+        self._results.add(onsets)
 
         #---------------------------------
         #  Onset Rate
@@ -109,7 +109,7 @@ class AubioTemporal(Analyzer):
         else:
             onsetrate.data.value = []
 
-        self.resultContainer.add_result(onsetrate)
+        self._results.add(onsetrate)
 
         #---------------------------------
         #  Beats
@@ -133,7 +133,7 @@ class AubioTemporal(Analyzer):
 
         beats.labelMetadata.label = {1: 'Beat'}
 
-        self.resultContainer.add_result(beats)
+        self._results.add(beats)
 
         #---------------------------------
         #  BPM
@@ -156,4 +156,4 @@ class AubioTemporal(Analyzer):
         else:
             bpm.data.value = []
 
-        self.resultContainer.add_result(bpm)
+        self._results.add(bpm)
timeside/analyzer/core.py
index aaef5f20e145dfb661bbba49e4ee427c5f8aa75e..2fc94975f3fc1f83dc06340fc064955a98b6284d 100644 (file)
@@ -597,7 +597,7 @@ class AnalyzerResultContainer(dict):
     def __init__(self, analyzer_results=None):
         super(AnalyzerResultContainer,self).__init__()
         if analyzer_results is not None:
-            self.add_result(analyzer_results)
+            self.add(analyzer_results)
 
 #    def __getitem__(self, i):
 #        return self.results[i]
@@ -616,10 +616,10 @@ class AnalyzerResultContainer(dict):
     #def __ne__(self, other):
     #    return not self.__eq__(other)
 
-    def add_result(self, analyzer_result):
+    def add(self, analyzer_result):
         if isinstance(analyzer_result, list):
             for res in analyzer_result:
-                self.add_result(res)
+                self.add(res)
             return
         # Check result
         if not isinstance(analyzer_result, AnalyzerResult):
@@ -651,7 +651,7 @@ class AnalyzerResultContainer(dict):
         root = ET.fromstring(xml_string)
         for child in root.iter('result'):
             result = AnalyzerResult()
-            results.add_result(result.from_xml(ET.tostring(child)))
+            results.add(result.from_xml(ET.tostring(child)))
 
         return results
 
@@ -691,7 +691,7 @@ class AnalyzerResultContainer(dict):
                 if key not in ['dataMode', 'timeMode']:
                     res[key] = res_json[key]
 
-            results.add_result(res)
+            results.add(res)
         return results
 
     def to_yaml(self):
@@ -724,7 +724,7 @@ class AnalyzerResultContainer(dict):
             res = AnalyzerResult()
             for key in res_yaml.keys():
                 res[key] = res_yaml[key]
-            results.add_result(res)
+            results.add(res)
         return results
 
     def to_numpy(self, output_file):
@@ -806,7 +806,7 @@ class AnalyzerResultContainer(dict):
                             else:
                                 result[subgroup_name][dsetName] = []
 
-                data_list.add_result(result)
+                data_list.add(result)
         except TypeError:
             print('TypeError for HDF5 serialization')
         finally:
@@ -837,8 +837,8 @@ class Analyzer(Processor):
         self.result_stepsize = self.input_stepsize
 
     def results(self):
-        #container = AnalyzerResultContainer()
-        return self.resultContainer
+        #TODO :return self._results[id=analyzerID]
+        return self._results
 
     @staticmethod
     @interfacedoc
timeside/analyzer/dc.py
index 5374257546c1287403b81db982c8a51f14b9f70c..da8fe5eedb5621e134a78c051af743b6bcc49dad 100644 (file)
@@ -55,4 +55,4 @@ class MeanDCShift(Analyzer):
         dc_result.idMetadata.unit = "%"
         # Set Data
         dc_result.data.value = numpy.round(numpy.mean(100*self.values),3)
-        self.resultContainer.add_result(dc_result)
\ No newline at end of file
+        self._results.add(dc_result)
\ No newline at end of file
timeside/analyzer/level.py
index bcfc1d11de6e66b135e62d772c52062fcaba40d4..bf406ab89754f2fa9a576dcb99dea9a1760d9d0d 100644 (file)
@@ -69,7 +69,7 @@ class Level(Analyzer):
         max_level.idMetadata.unit = "dBFS"
 
         max_level.data.value = numpy.round(20*numpy.log10(self.max_value), 3)
-        self.resultContainer.add_result(max_level)
+        self._results.add(max_level)
 
         # RMS level
         rms_level = self.new_result(dataMode='value', timeMode='global')
@@ -79,5 +79,5 @@ class Level(Analyzer):
 
         rms_level.data.value = numpy.round(20*numpy.log10(
                                 numpy.sqrt(numpy.mean(self.mean_values))), 3)
-        self.resultContainer.add_result(rms_level)
+        self._results.add(rms_level)
 
timeside/analyzer/yaafe.py
index 0ee83fe755e97a4877755b584cef39fe11c6e9f2..e2fbaf8d00e2c490274b9b7b546d099742c51f5f 100644 (file)
@@ -99,6 +99,6 @@ class Yaafe(Analyzer):
             result.data.value = self.yaafe_engine.readOutput(featName)
             # Store results in Container
             if len(result.data.value):
-                self.resultContainer.add_result(result)
+                self._results.add(result)
 
 
timeside/core.py
index cde3bad96f8395da46dd6ebcc4135968b4b2259a..b2360530ee739fda67b6221c8c74d2f4f21c9601 100644 (file)
@@ -240,7 +240,7 @@ class ProcessPipe(object):
         last = source
 
         from timeside.analyzer.core import AnalyzerResultContainer
-        self.resultContainer = AnalyzerResultContainer()
+        self._results = AnalyzerResultContainer()
 
         # setup/reset processors and configure properties throughout the pipe
         for item in items:
@@ -249,7 +249,7 @@ class ProcessPipe(object):
                        blocksize = last.blocksize(),
                        totalframes = last.totalframes())
             item.source_mediainfo = source.mediainfo()
-            item.resultContainer = self.resultContainer
+            item._results = self._results
             last = item
 
         # now stream audio data along the pipe