git.parisson.com Git - timeside.git/commitdiff
Change 'Attributes' to 'Metadata' in AnalyzerResult
author Thomas Fillon <thomas@parisson.com>
Mon, 15 Jul 2013 14:33:59 +0000 (16:33 +0200)
committer Thomas Fillon <thomas@parisson.com>
Mon, 15 Jul 2013 14:33:59 +0000 (16:33 +0200)
12 files changed:
tests/test_AnalyzerResult.py
tests/test_analyzer_dc.py
tests/test_analyzer_level.py
timeside/analyzer/aubio_melenergy.py
timeside/analyzer/aubio_mfcc.py
timeside/analyzer/aubio_pitch.py
timeside/analyzer/aubio_specdesc.py
timeside/analyzer/aubio_temporal.py
timeside/analyzer/core.py
timeside/analyzer/dc.py
timeside/analyzer/level.py
timeside/analyzer/yaafe.py

index a2ef6445f877e404939521ffdd99cc7369d00070..d2aa9e04cf46594803311346f4cd2b25612b644a 100755 (executable)
@@ -13,7 +13,7 @@ class TestAnalyzerResult(TestCase):
 
     def setUp(self):
         self.result = AnalyzerResult()
-        self.result.attributes=dict(id="foo_bar", name="Foo bar", unit="foo")
+        self.result.metadata=dict(id="foo_bar", name="Foo bar", unit="foo")
 
     def testOnFloat(self):
         "float result"
index e4cf7bfed32d887f33f5abd2aa4f4bf144cbc452..5f23808eadbacce4276b852c51995c6734a48dfa 100755 (executable)
@@ -3,7 +3,7 @@
 from unit_timeside import *
 from timeside.decoder import *
 from timeside.analyzer.dc import MeanDCShift
-from timeside.analyzer.core import AnalyzerResult, AnalyzerAttributes
+from timeside.analyzer.core import AnalyzerResult, AnalyzerMetadata
 from numpy import round
 
 class TestAnalyzerDC(TestCase):
@@ -14,25 +14,25 @@ class TestAnalyzerDC(TestCase):
     def testOnSweep(self):
         "runs on sweep"
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "sweep.wav")
-        attributes=AnalyzerAttributes(name="Mean DC shift",
+        metadata=AnalyzerMetadata(name="Mean DC shift",
                                       unit="%",
                                       id="mean_dc_shift",
                                       samplerate=44100,
                                       blocksize=None,
                                       stepsize=None)
 
-        self.expected = AnalyzerResult(data=-0.000, attributes=attributes)
+        self.expected = AnalyzerResult(data=-0.000, metadata=metadata)
 
     def testOnGuitar(self):
         "runs on guitar"
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "guitar.wav")
-        attributes=AnalyzerAttributes(name="Mean DC shift",
+        metadata=AnalyzerMetadata(name="Mean DC shift",
                                       unit="%",
                                       id="mean_dc_shift",
                                       samplerate=44100,
                                       blocksize=None,
                                       stepsize=None)
-        self.expected = AnalyzerResult(data=0.054, attributes=attributes)
+        self.expected = AnalyzerResult(data=0.054, metadata=metadata)
 
     def tearDown(self):
         decoder = FileDecoder(self.source)
index 7d9d8c114bfa9823152c5b7f21b110fb84aebf29..b8314301ead64b97f0cd89b014c7225aacee785f 100755 (executable)
@@ -4,7 +4,7 @@ from unit_timeside import *
 from timeside.decoder import *
 from timeside.analyzer.level import Level
 from timeside.analyzer import AnalyzerResult, AnalyzerResultContainer
-from timeside.analyzer import AnalyzerAttributes
+from timeside.analyzer import AnalyzerMetadata
 
 class TestAnalyzerLevel(TestCase):
 
@@ -16,18 +16,18 @@ class TestAnalyzerLevel(TestCase):
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "sweep.wav")
 
         # Max level
-        attr = AnalyzerAttributes(id="max_level",
+        metadata = AnalyzerMetadata(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
                                   samplerate=44100)
-        max_level = AnalyzerResult(-6.021, attr)
+        max_level = AnalyzerResult(-6.021, metadata)
 
         # RMS level
-        attr = AnalyzerAttributes(id="rms_level",
+        metadata = AnalyzerMetadata(id="rms_level",
                                   name="RMS level",
                                   unit="dBFS",
                                   samplerate=44100)
-        rms_level = AnalyzerResult(-9.856, attr)
+        rms_level = AnalyzerResult(-9.856, metadata)
         self.expected = AnalyzerResultContainer([max_level,rms_level])
 
     def testOnGuitar(self):
@@ -35,18 +35,18 @@ class TestAnalyzerLevel(TestCase):
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "guitar.wav")
 
         # Max level
-        attr = AnalyzerAttributes(id="max_level",
+        metadata = AnalyzerMetadata(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
                                   samplerate=44100)
-        max_level = AnalyzerResult(-4.258, attr)
+        max_level = AnalyzerResult(-4.258, metadata)
 
         # RMS level
-        attr = AnalyzerAttributes(id="rms_level",
+        metadata = AnalyzerMetadata(id="rms_level",
                                   name="RMS level",
                                   unit="dBFS",
                                   samplerate=44100)
-        rms_level = AnalyzerResult(-21.945, attr)
+        rms_level = AnalyzerResult(-21.945, metadata)
         self.expected = AnalyzerResultContainer([max_level,rms_level])
 
     def tearDown(self):
index b99aa255ebf02562c03779b98e9353f285bbe97f..b387df6faeed761b492d0c46ee652e15d9c00164 100644 (file)
@@ -64,14 +64,14 @@ class AubioMelEnergy(Processor):
         container = AnalyzerResultContainer()
         melenergy = AnalyzerResult()
 
-        # Get attributes
+        # Get metadata
         samplerate = self.samplerate()
         blocksize = self.win_s
         stepsize = self.hop_s
         parameters = dict(n_filters= self.n_filters,
                           n_coeffs=  self.n_coeffs)
-        # Set attributes
-        melenergy.attributes = AnalyzerAttributes(id="aubio_melenergy",
+        # Set metadata
+        melenergy.metadata = AnalyzerMetadata(id="aubio_melenergy",
                                                   name="melenergy (aubio)",
                                                   unit='',
                                                   samplerate = samplerate,
index 510bf53280bf925560f9d63523447d6cd1a5f36a..aa0165d84ce1bed552217d53636224061bcdba0c 100644 (file)
@@ -70,7 +70,7 @@ class AubioMfcc(Processor):
         stepsize = self.hop_s
         parameters = dict(n_filters= self.n_filters,
                           n_coeffs=  self.n_coeffs)
-        mfcc.attributes = AnalyzerAttributes(id = "aubio_mfcc",
+        mfcc.metadata = AnalyzerMetadata(id = "aubio_mfcc",
                                              name = "mfcc (aubio)",
                                              unit = "",
                                              samplerate = samplerate,
index 2f5697f3566e764b0568c9fb67b6af6e3cbff0af..ed151fed27344b9ac1b075b97f45e57f11c2efa9 100644 (file)
@@ -67,14 +67,14 @@ class AubioPitch(Processor):
         container = AnalyzerResultContainer()
         pitch = AnalyzerResult()
 
-        # Get attributes
+        # Get metadata
         samplerate = self.samplerate()
         blocksize = self.win_s
         stepsize = self.hop_s
         # parameters : None # TODO check with Piem "default" and "freq" in setup
 
-        # Set attributes
-        pitch.attributes = AnalyzerAttributes(id="aubio_pitch",
+        # Set metadata
+        pitch.metadata = AnalyzerMetadata(id="aubio_pitch",
                                               name="f0 (aubio)",
                                               unit='Hz',
                                               samplerate = samplerate,
index fadf75e4a41a908edb026f4ee87855512eb6454e..ae1e6cd1811e09e99c54453cb7c48e8f3db5c922 100644 (file)
@@ -65,7 +65,7 @@ class AubioSpecdesc(Processor):
     def results(self):
 
         container = AnalyzerResultContainer()
-       # Get common attributes
+       # Get common metadata
         samplerate = self.samplerate()
         blocksize = self.win_s
         stepsize = self.hop_s
@@ -73,12 +73,12 @@ class AubioSpecdesc(Processor):
         # For each method store results in container
         for method in self.methods:
             specdesc = AnalyzerResult()
-            # Set attributes
+            # Set metadata
             id = '_'.join(["aubio_specdesc", method])
             name = ' '.join(["spectral descriptor", method, "(aubio)"])
 
 
-            specdesc.attributes = AnalyzerAttributes(id = id,
+            specdesc.metadata = AnalyzerMetadata(id = id,
                                                   name = name,
                                                   unit = unit,
                                                   samplerate = samplerate,
index deff4ce11fa61b33a3454cfb2a1b87b0b0f09122..bb8d224fc416ba76d034fd29ded9e700ee43f6d3 100644 (file)
@@ -67,7 +67,7 @@ class AubioTemporal(Processor):
         return frames, eod
 
     def results(self):
-        # Get common attributes
+        # Get common metadata
         commonAttr = dict(samplerate=self.samplerate(),
                           blocksize=self.win_s,
                           stepsize=self.hop_s)
@@ -79,11 +79,11 @@ class AubioTemporal(Processor):
         #  Onsets
         #---------------------------------
         onsets = AnalyzerResult()
-        # Set attributes
+        # Set metadata
         onsetsAttr = dict(id="aubio_onset",
                           name="onsets (aubio)",
                           unit="s")
-        onsets.attributes = dict(onsetsAttr.items() + commonAttr.items())
+        onsets.metadata = dict(onsetsAttr.items() + commonAttr.items())
         # Set Data
         onsets.data = self.onsets
 
@@ -91,11 +91,11 @@ class AubioTemporal(Processor):
         #  Onset Rate
         #---------------------------------
         onsetrate = AnalyzerResult()
-        # Set attributes
+        # Set metadata
         onsetrateAttr = dict(id="aubio_onset_rate",
                              name="onset rate (aubio)",
                              unit="bpm")
-        onsetrate.attributes = dict(onsetrateAttr.items() + commonAttr.items())
+        onsetrate.metadata = dict(onsetrateAttr.items() + commonAttr.items())
         # Set Data
         if len(self.onsets) > 1:
             #periods = [60./(b - a) for a,b in zip(self.onsets[:-1],self.onsets[1:])]
@@ -108,11 +108,11 @@ class AubioTemporal(Processor):
         #  Beats
         #---------------------------------
         beats = AnalyzerResult()
-        # Set attributes
+        # Set metadata
         beatsAttr = dict(id="aubio_beat",
                         name="beats (aubio)",
                         unit="s")
-        beats.attributes = dict(beatsAttr.items() + commonAttr.items())
+        beats.metadata = dict(beatsAttr.items() + commonAttr.items())
         #  Set Data
         beats.data = self.beats
 
@@ -120,11 +120,11 @@ class AubioTemporal(Processor):
         #  BPM
         #---------------------------------
         bpm = AnalyzerResult()
-        # Set attributes
+        # Set metadata
         bpmAttr = dict(id="aubio_bpm",
                        name="bpm (aubio)",
                        unit="bpm")
-        bpm.attributes = dict(bpmAttr.items() + commonAttr.items())
+        bpm.metadata = dict(bpmAttr.items() + commonAttr.items())
         #  Set Data
         if len(self.beats) > 1:
             #periods = [60./(b - a) for a,b in zip(self.beats[:-1],self.beats[1:])]
index da55ecb5189ae0abf560f7ef38fedd2e6e65ac09..0d09ff1393dffbc07e095803058d0642da5c3ec7 100644 (file)
@@ -46,13 +46,13 @@ numpy_data_types = map(lambda x: getattr(numpy, x), numpy_data_types)
 numpy_data_types += [numpy.ndarray]
 
 
-class AnalyzerAttributes(object):
+class AnalyzerMetadata(object):
     """
-    Object that contains the attributes and parameters of an analyzer process
+    Object that contains the metadata and parameters of an analyzer process
     stucture inspired by [1]
     [1] : http://www.saltycrane.com/blog/2012/08/python-data-object-motivated-desire-mutable-namedtuple-default-values/
 
-    Attributes
+    Metadata
     ----------
     id : string
     name : string
@@ -65,7 +65,7 @@ class AnalyzerAttributes(object):
     Methods
     -------
     as_dict()
-        Return a dictionnary representation of the AnalyzerAttributes
+        Return a dictionnary representation of the AnalyzerMetadata
     """
     from collections import OrderedDict
     # Define default values as an OrderDict
@@ -86,9 +86,9 @@ class AnalyzerAttributes(object):
 
     def __init__(self, **kwargs):
         '''
-        Construct an AnalyzerAttributes object
+        Construct an AnalyzerMetadata object
 
-        AnalyzerAttributes()
+        AnalyzerMetadata()
 
         Parameters
         ----------
@@ -102,13 +102,13 @@ class AnalyzerAttributes(object):
 
         Returns
         -------
-        AnalyzerAttributes
+        AnalyzerMetadata
         '''
         # Set Default values
         for key, value in self._default_value.items():
             setattr(self, key, value)
 
-        # Set attributes passed in as arguments
+        # Set metadata passed in as arguments
         #for k, v in zip(self._default_value.keys(), args):
         #    setattr(self, k, v)
         #    print 'args'
@@ -119,7 +119,7 @@ class AnalyzerAttributes(object):
         if name not in self._default_value.keys():
             raise AttributeError("%s is not a valid attribute in %s" %
             (name, self.__class__.__name__))
-        super(AnalyzerAttributes, self).__setattr__(name, value)
+        super(AnalyzerMetadata, self).__setattr__(name, value)
 
     def as_dict(self):
         return dict((att, getattr(self, att))
@@ -140,16 +140,16 @@ class AnalyzerAttributes(object):
 class AnalyzerResult(object):
     """
     Object that contains results return by an analyzer process
-    Attributes :
+    metadata :
         - data :
-        - attributes : an AnalyzerAttributes object containing the attributes
+        - metadata : an AnalyzerMetadata object containing the metadata
     """
-    def __init__(self, data=None, attributes=None):
-        # Define Attributes
-        if attributes is None:
-            self.attributes = AnalyzerAttributes()
+    def __init__(self, data=None, metadata=None):
+        # Define Metadata
+        if metadata is None:
+            self.metadata = AnalyzerMetadata()
         else:
-            self.attributes = attributes
+            self.metadata = metadata
 
         # Define Data
         if data is None:
@@ -171,9 +171,9 @@ class AnalyzerResult(object):
             if type(value) not in [list, str, int, long, float, complex, type(None)] + numpy_data_types:
                 raise TypeError('AnalyzerResult can not accept type %s' %
                 type(value))
-        elif name == 'attributes':
-            if not isinstance(value, AnalyzerAttributes):
-                value = AnalyzerAttributes(**value)
+        elif name == 'metadata':
+            if not isinstance(value, AnalyzerMetadata):
+                value = AnalyzerMetadata(**value)
         else:
             raise AttributeError("%s is not a valid attribute in %s" %
             (name, self.__class__.__name__))
@@ -191,12 +191,12 @@ class AnalyzerResult(object):
                      # ajouter size
         return(prop)
 #    def __getattr__(self, name):
-#        if name in ['id', 'name', 'unit', 'value', 'attributes']:
+#        if name in ['id', 'name', 'unit', 'value', 'metadata']:
 #            return self[name]
 #        return super(AnalyzerResult, self).__getattr__(name)
 
     def as_dict(self):
-        return(dict(data=self.data, attributes=self.attributes.as_dict()))
+        return(dict(data=self.data, metadata=self.metadata.as_dict()))
 
     def to_json(self):
         import simplejson as json
@@ -257,19 +257,19 @@ class AnalyzerResultContainer(object):
 
         for result in data_list:
             res_node = ET.SubElement(root, 'result')
-            res_node.attrib = {'name': result.attributes.name,
-                               'id': result.attributes.id}
+            res_node.metadata = {'name': result.metadata.name,
+                               'id': result.metadata.id}
             # Serialize Data
             data_node = ET.SubElement(res_node, 'data')
             if type(result.data) in [str, unicode]:
                 data_node.text = result.data
             else:
                 data_node.text = repr(result.data)
-            # Serialize Attributes
-            attr_node = ET.SubElement(res_node, 'attributes')
-            for (name, val) in result.attributes.as_dict().items():
+            # Serialize Metadata
+            metadata_node = ET.SubElement(res_node, 'metadata')
+            for (name, val) in result.metadata.as_dict().items():
                 # TODO reorder keys
-                child = ET.SubElement(attr_node, name)
+                child = ET.SubElement(metadata_node, name)
                 if name == 'parameters':
                     for (par_key, par_val) in val.items():
                         par_child = ET.SubElement(child, par_key)
@@ -279,26 +279,7 @@ class AnalyzerResultContainer(object):
 
         #tree = ET.ElementTree(root)
         return ET.tostring(root, encoding="utf-8", method="xml")
-#        import xml.dom.minidom
-#        doc = xml.dom.minidom.Document()
-#
-#        root = doc.createElement('telemeta')
-#        doc.appendChild(root)
-#        for result in data_list:
-#            node = doc.createElement('dataset')
-#            # Serialize Data
-#            if type(result.data) in [str, unicode]:
-#                node.setAttribute('data', result.data )
-#            else:
-#                node.setAttribute('data', repr(result.data))
-#            # Serialize Attributes
-#
-#            node_attr = doc.createElement('attributes')
-#            for name in result.attributes._default_value.keys():
-#                node_attr.setAttribute(name, str(result.attributes.name) )
-#            node.appendChild(node_attr)
-#            root.appendChild(node)
-#        return xml.dom.minidom.Document.toprettyxml(doc)
+
 
     def from_xml(self, xml_string):
         import xml.etree.ElementTree as ET
@@ -317,8 +298,8 @@ class AnalyzerResultContainer(object):
             except:
                 result.data = result_child.find('data').text
 
-            # Get attributes
-            for attr_child in result_child.find('attributes'):
+            # Get metadata
+            for attr_child in result_child.find('metadata'):
                 name = attr_child.tag
                 if name == 'parameters':
                     parameters = dict()
@@ -329,31 +310,11 @@ class AnalyzerResultContainer(object):
                     value = parameters
                 else:
                     value = ast.literal_eval(attr_child.text)
-                result.attributes.__setattr__(name, value)
+                result.metadata.__setattr__(name, value)
             results.add_result(result)
 
         return results
-#
-#
-#        import xml.dom.minidom
-#        import ast
-#        doc = xml.dom.minidom.parseString(xml_string)
-#        root = doc.getElementsByTagName('telemeta')[0]
-#        results = []
-#        for child in root.childNodes:
-#            if child.nodeType != child.ELEMENT_NODE: continue
-#            result = AnalyzerResult()
-#            for a in ['name', 'id', 'unit']:
-#                child_dict[a] = str(child.getAttribute(a))
-#            # Get Data
-#            try:
-#                result.data = ast.literal_eval(child.getAttribute('data'))
-#            except:
-#                results.data = child.getAttribute('data')
-#            # Get Attributes
-#            node_attr = root.childNodes
-#            results.append(results)
-#        return results
+
 
     def to_json(self):
         #if data_list == None: data_list = self.results
@@ -366,7 +327,7 @@ class AnalyzerResultContainer(object):
         results = AnalyzerResultContainer()
         for res_json in results_json:
             res = AnalyzerResult(data=res_json['data'],
-                                 attributes=res_json['attributes'])
+                                 metadata=res_json['metadata'])
             results.add_result(res)
         return results
 
@@ -382,7 +343,7 @@ class AnalyzerResultContainer(object):
         results = AnalyzerResultContainer()
         for res_yaml in results_yaml:
             res = AnalyzerResult(data=res_yaml['data'],
-                                 attributes=res_yaml['attributes'])
+                                 metadata=res_yaml['metadata'])
             results.add_result(res)
         return results
 
@@ -410,7 +371,7 @@ class AnalyzerResultContainer(object):
             for data in data_list:
                 # Save results in HDF5 Dataset
                 dset = h5_file.create_dataset(data['id'], data=data['value'])
-                # Save associated attributes
+                # Save associated metadata
                 dset.attrs["unit"] = data['unit']
                 dset.attrs["name"] = data['name']
         except TypeError:
@@ -428,7 +389,7 @@ class AnalyzerResultContainer(object):
             for name in h5_file.keys():
                 dset = h5_file.get(name)  # Read Dataset
                 id = name
-                # Read Attributes
+                # Read metadata
                 unit = dset.attrs['unit']
                 name = dset.attrs['name']
                 # Create new AnalyzerResult
index dff87de160e1893e5c392ed60c1077dac703fba1..2deced30794fbf07758344463f3e39fec20f01bf 100644 (file)
@@ -49,9 +49,9 @@ class MeanDCShift(Processor):
 
     def results(self):
         result = AnalyzerResult()
-        #  Set attributes
+        #  Set metadata
         #  FIXME : blocksize and stepsize are not appropriate here
-        result.attributes = AnalyzerAttributes(id="mean_dc_shift",
+        result.metadata = AnalyzerMetadata(id="mean_dc_shift",
                                                name = "Mean DC shift",
                                                unit = "%",
                                                samplerate=self.samplerate(),
index eef9085e9cad17da067a100ac225f1f28fd75fae..b5d297b86002c0c6505d179a14eff98400376bb2 100644 (file)
@@ -60,20 +60,20 @@ class Level(Processor):
     def results(self):
         # Max level
         #  FIXME : blocksize and stepsize are not appropriate here
-        attr = AnalyzerAttributes(id="max_level",
+        metadata = AnalyzerMetadata(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
                                   samplerate=self.samplerate())
         data = numpy.round(20*numpy.log10(self.max_value), 3)
-        max_level = AnalyzerResult(data, attr)
+        max_level = AnalyzerResult(data, metadata)
 
         # RMS level
         #  FIXME : blocksize and stepsize are not appropriate here
-        attr = AnalyzerAttributes(id="rms_level",
+        metadata = AnalyzerMetadata(id="rms_level",
                                   name="RMS level",
                                   unit="dBFS",
                                   samplerate=self.samplerate())
         data = numpy.round(20*numpy.log10(numpy.sqrt(numpy.mean(self.mean_values))), 3)
-        rms_level = AnalyzerResult(data, attr)
+        rms_level = AnalyzerResult(data, metadata)
 
         return AnalyzerResultContainer([max_level, rms_level])
index 4914cd48d52d48fcd7eac99db1a0f34d395bda80..4b156cfc8ea95856bbd01a547af20b563dff1dd2 100644 (file)
@@ -93,7 +93,7 @@ class Yaafe(Processor):
 
             # Get results from Yaafe engine
             result = AnalyzerResult()
-            result.attributes = AnalyzerAttributes(id = id,
+            result.metadata = AnalyzerMetadata(id = id,
                                       name = name,
                                       unit = unit,
                                       samplerate = self.samplerate,