From 05ea1db905e6203a56607e6d9e6d7a6b781ce4e3 Mon Sep 17 00:00:00 2001
From: Thomas Fillon
Date: Mon, 15 Jul 2013 16:33:59 +0200
Subject: [PATCH] Change 'Attributes' to 'Metadata' in AnalyzerResult

---
 tests/test_AnalyzerResult.py         |   2 +-
 tests/test_analyzer_dc.py            |  10 +--
 tests/test_analyzer_level.py         |  18 ++--
 timeside/analyzer/aubio_melenergy.py |   6 +-
 timeside/analyzer/aubio_mfcc.py      |   2 +-
 timeside/analyzer/aubio_pitch.py     |   6 +-
 timeside/analyzer/aubio_specdesc.py  |   6 +-
 timeside/analyzer/aubio_temporal.py  |  18 ++--
 timeside/analyzer/core.py            | 111 +++++++++------------------
 timeside/analyzer/dc.py              |   4 +-
 timeside/analyzer/level.py           |   8 +-
 timeside/analyzer/yaafe.py           |   2 +-
 12 files changed, 77 insertions(+), 116 deletions(-)

diff --git a/tests/test_AnalyzerResult.py b/tests/test_AnalyzerResult.py
index a2ef644..d2aa9e0 100755
--- a/tests/test_AnalyzerResult.py
+++ b/tests/test_AnalyzerResult.py
@@ -13,7 +13,7 @@ class TestAnalyzerResult(TestCase):
 
     def setUp(self):
         self.result = AnalyzerResult()
-        self.result.attributes=dict(id="foo_bar", name="Foo bar", unit="foo")
+        self.result.metadata=dict(id="foo_bar", name="Foo bar", unit="foo")
 
     def testOnFloat(self):
         "float result"
diff --git a/tests/test_analyzer_dc.py b/tests/test_analyzer_dc.py
index e4cf7bf..5f23808 100755
--- a/tests/test_analyzer_dc.py
+++ b/tests/test_analyzer_dc.py
@@ -3,7 +3,7 @@
 from unit_timeside import *
 from timeside.decoder import *
 from timeside.analyzer.dc import MeanDCShift
-from timeside.analyzer.core import AnalyzerResult, AnalyzerAttributes
+from timeside.analyzer.core import AnalyzerResult, AnalyzerMetadata
 from numpy import round
 
 class TestAnalyzerDC(TestCase):
@@ -14,25 +14,25 @@ class TestAnalyzerDC(TestCase):
     def testOnSweep(self):
         "runs on sweep"
         self.source = os.path.join (os.path.dirname(__file__), "samples", "sweep.wav")
-        attributes=AnalyzerAttributes(name="Mean DC shift",
+        metadata=AnalyzerMetadata(name="Mean DC shift",
                                       unit="%",
                                       id="mean_dc_shift",
                                       samplerate=44100,
                                       blocksize=None,
                                       stepsize=None)
-        self.expected = AnalyzerResult(data=-0.000, attributes=attributes)
+        self.expected = AnalyzerResult(data=-0.000, metadata=metadata)
 
     def testOnGuitar(self):
         "runs on guitar"
         self.source = os.path.join (os.path.dirname(__file__), "samples", "guitar.wav")
-        attributes=AnalyzerAttributes(name="Mean DC shift",
+        metadata=AnalyzerMetadata(name="Mean DC shift",
                                       unit="%",
                                       id="mean_dc_shift",
                                       samplerate=44100,
                                       blocksize=None,
                                       stepsize=None)
-        self.expected = AnalyzerResult(data=0.054, attributes=attributes)
+        self.expected = AnalyzerResult(data=0.054, metadata=metadata)
 
     def tearDown(self):
         decoder = FileDecoder(self.source)
diff --git a/tests/test_analyzer_level.py b/tests/test_analyzer_level.py
index 7d9d8c1..b831430 100755
--- a/tests/test_analyzer_level.py
+++ b/tests/test_analyzer_level.py
@@ -4,7 +4,7 @@
 from unit_timeside import *
 from timeside.decoder import *
 from timeside.analyzer.level import Level
 from timeside.analyzer import AnalyzerResult, AnalyzerResultContainer
-from timeside.analyzer import AnalyzerAttributes
+from timeside.analyzer import AnalyzerMetadata
 
 class TestAnalyzerLevel(TestCase):
@@ -16,18 +16,18 @@ class TestAnalyzerLevel(TestCase):
         self.source = os.path.join (os.path.dirname(__file__), "samples", "sweep.wav")
 
         # Max level
-        attr = AnalyzerAttributes(id="max_level",
+        metadata = AnalyzerMetadata(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
                                   samplerate=44100)
-        max_level = AnalyzerResult(-6.021, attr)
+        max_level = AnalyzerResult(-6.021, metadata)
 
         # RMS level
-        attr = AnalyzerAttributes(id="rms_level",
AnalyzerAttributes(id="rms_level", + metadata = AnalyzerMetadata(id="rms_level", name="RMS level", unit="dBFS", samplerate=44100) - rms_level = AnalyzerResult(-9.856, attr) + rms_level = AnalyzerResult(-9.856, metadata) self.expected = AnalyzerResultContainer([max_level,rms_level]) def testOnGuitar(self): @@ -35,18 +35,18 @@ class TestAnalyzerLevel(TestCase): self.source = os.path.join (os.path.dirname(__file__), "samples", "guitar.wav") # Max level - attr = AnalyzerAttributes(id="max_level", + metadata = AnalyzerMetadata(id="max_level", name="Max level", unit = "dBFS", samplerate=44100) - max_level = AnalyzerResult(-4.258, attr) + max_level = AnalyzerResult(-4.258, metadata) # RMS level - attr = AnalyzerAttributes(id="rms_level", + metadata = AnalyzerMetadata(id="rms_level", name="RMS level", unit="dBFS", samplerate=44100) - rms_level = AnalyzerResult(-21.945, attr) + rms_level = AnalyzerResult(-21.945, metadata) self.expected = AnalyzerResultContainer([max_level,rms_level]) def tearDown(self): diff --git a/timeside/analyzer/aubio_melenergy.py b/timeside/analyzer/aubio_melenergy.py index b99aa25..b387df6 100644 --- a/timeside/analyzer/aubio_melenergy.py +++ b/timeside/analyzer/aubio_melenergy.py @@ -64,14 +64,14 @@ class AubioMelEnergy(Processor): container = AnalyzerResultContainer() melenergy = AnalyzerResult() - # Get attributes + # Get metadata samplerate = self.samplerate() blocksize = self.win_s stepsize = self.hop_s parameters = dict(n_filters= self.n_filters, n_coeffs= self.n_coeffs) - # Set attributes - melenergy.attributes = AnalyzerAttributes(id="aubio_melenergy", + # Set metadata + melenergy.metadata = AnalyzerMetadata(id="aubio_melenergy", name="melenergy (aubio)", unit='', samplerate = samplerate, diff --git a/timeside/analyzer/aubio_mfcc.py b/timeside/analyzer/aubio_mfcc.py index 510bf53..aa0165d 100644 --- a/timeside/analyzer/aubio_mfcc.py +++ b/timeside/analyzer/aubio_mfcc.py @@ -70,7 +70,7 @@ class AubioMfcc(Processor): stepsize = self.hop_s parameters = dict(n_filters= self.n_filters, n_coeffs= self.n_coeffs) - mfcc.attributes = AnalyzerAttributes(id = "aubio_mfcc", + mfcc.metadata = AnalyzerMetadata(id = "aubio_mfcc", name = "mfcc (aubio)", unit = "", samplerate = samplerate, diff --git a/timeside/analyzer/aubio_pitch.py b/timeside/analyzer/aubio_pitch.py index 2f5697f..ed151fe 100644 --- a/timeside/analyzer/aubio_pitch.py +++ b/timeside/analyzer/aubio_pitch.py @@ -67,14 +67,14 @@ class AubioPitch(Processor): container = AnalyzerResultContainer() pitch = AnalyzerResult() - # Get attributes + # Get metadata samplerate = self.samplerate() blocksize = self.win_s stepsize = self.hop_s # parameters : None # TODO check with Piem "default" and "freq" in setup - # Set attributes - pitch.attributes = AnalyzerAttributes(id="aubio_pitch", + # Set metadata + pitch.metadata = AnalyzerMetadata(id="aubio_pitch", name="f0 (aubio)", unit='Hz', samplerate = samplerate, diff --git a/timeside/analyzer/aubio_specdesc.py b/timeside/analyzer/aubio_specdesc.py index fadf75e..ae1e6cd 100644 --- a/timeside/analyzer/aubio_specdesc.py +++ b/timeside/analyzer/aubio_specdesc.py @@ -65,7 +65,7 @@ class AubioSpecdesc(Processor): def results(self): container = AnalyzerResultContainer() - # Get common attributes + # Get common metadata samplerate = self.samplerate() blocksize = self.win_s stepsize = self.hop_s @@ -73,12 +73,12 @@ class AubioSpecdesc(Processor): # For each method store results in container for method in self.methods: specdesc = AnalyzerResult() - # Set attributes + # Set metadata id = 
'_'.join(["aubio_specdesc", method]) name = ' '.join(["spectral descriptor", method, "(aubio)"]) - specdesc.attributes = AnalyzerAttributes(id = id, + specdesc.metadata = AnalyzerMetadata(id = id, name = name, unit = unit, samplerate = samplerate, diff --git a/timeside/analyzer/aubio_temporal.py b/timeside/analyzer/aubio_temporal.py index deff4ce..bb8d224 100644 --- a/timeside/analyzer/aubio_temporal.py +++ b/timeside/analyzer/aubio_temporal.py @@ -67,7 +67,7 @@ class AubioTemporal(Processor): return frames, eod def results(self): - # Get common attributes + # Get common metadata commonAttr = dict(samplerate=self.samplerate(), blocksize=self.win_s, stepsize=self.hop_s) @@ -79,11 +79,11 @@ class AubioTemporal(Processor): # Onsets #--------------------------------- onsets = AnalyzerResult() - # Set attributes + # Set metadata onsetsAttr = dict(id="aubio_onset", name="onsets (aubio)", unit="s") - onsets.attributes = dict(onsetsAttr.items() + commonAttr.items()) + onsets.metadata = dict(onsetsAttr.items() + commonAttr.items()) # Set Data onsets.data = self.onsets @@ -91,11 +91,11 @@ class AubioTemporal(Processor): # Onset Rate #--------------------------------- onsetrate = AnalyzerResult() - # Set attributes + # Set metadata onsetrateAttr = dict(id="aubio_onset_rate", name="onset rate (aubio)", unit="bpm") - onsetrate.attributes = dict(onsetrateAttr.items() + commonAttr.items()) + onsetrate.metadata = dict(onsetrateAttr.items() + commonAttr.items()) # Set Data if len(self.onsets) > 1: #periods = [60./(b - a) for a,b in zip(self.onsets[:-1],self.onsets[1:])] @@ -108,11 +108,11 @@ class AubioTemporal(Processor): # Beats #--------------------------------- beats = AnalyzerResult() - # Set attributes + # Set metadata beatsAttr = dict(id="aubio_beat", name="beats (aubio)", unit="s") - beats.attributes = dict(beatsAttr.items() + commonAttr.items()) + beats.metadata = dict(beatsAttr.items() + commonAttr.items()) # Set Data beats.data = self.beats @@ -120,11 +120,11 @@ class AubioTemporal(Processor): # BPM #--------------------------------- bpm = AnalyzerResult() - # Set attributes + # Set metadata bpmAttr = dict(id="aubio_bpm", name="bpm (aubio)", unit="bpm") - bpm.attributes = dict(bpmAttr.items() + commonAttr.items()) + bpm.metadata = dict(bpmAttr.items() + commonAttr.items()) # Set Data if len(self.beats) > 1: #periods = [60./(b - a) for a,b in zip(self.beats[:-1],self.beats[1:])] diff --git a/timeside/analyzer/core.py b/timeside/analyzer/core.py index da55ecb..0d09ff1 100644 --- a/timeside/analyzer/core.py +++ b/timeside/analyzer/core.py @@ -46,13 +46,13 @@ numpy_data_types = map(lambda x: getattr(numpy, x), numpy_data_types) numpy_data_types += [numpy.ndarray] -class AnalyzerAttributes(object): +class AnalyzerMetadata(object): """ - Object that contains the attributes and parameters of an analyzer process + Object that contains the metadata and parameters of an analyzer process stucture inspired by [1] [1] : http://www.saltycrane.com/blog/2012/08/python-data-object-motivated-desire-mutable-namedtuple-default-values/ - Attributes + Metadata ---------- id : string name : string @@ -65,7 +65,7 @@ class AnalyzerAttributes(object): Methods ------- as_dict() - Return a dictionnary representation of the AnalyzerAttributes + Return a dictionnary representation of the AnalyzerMetadata """ from collections import OrderedDict # Define default values as an OrderDict @@ -86,9 +86,9 @@ class AnalyzerAttributes(object): def __init__(self, **kwargs): ''' - Construct an AnalyzerAttributes object + Construct an 
+        Construct an AnalyzerMetadata object
 
-        AnalyzerAttributes()
+        AnalyzerMetadata()
 
         Parameters
         ----------
         id : string
         name : string
         unit : string
         samplerate : int or float
         blocksize : int
         stepsize : int
         parameters : dict
 
         Returns
         -------
-        AnalyzerAttributes
+        AnalyzerMetadata
         '''
         # Set Default values
         for key, value in self._default_value.items():
             setattr(self, key, value)
 
-        # Set attributes passed in as arguments
+        # Set metadata passed in as arguments
         #for k, v in zip(self._default_value.keys(), args):
         #    setattr(self, k, v)
         #    print 'args'
@@ -119,7 +119,7 @@ class AnalyzerAttributes(object):
         if name not in self._default_value.keys():
             raise AttributeError("%s is not a valid attribute in %s" %
                                  (name, self.__class__.__name__))
-        super(AnalyzerAttributes, self).__setattr__(name, value)
+        super(AnalyzerMetadata, self).__setattr__(name, value)
 
     def as_dict(self):
         return dict((att, getattr(self, att))
@@ -140,16 +140,16 @@ class AnalyzerAttributes(object):
 class AnalyzerResult(object):
     """
     Object that contains results return by an analyzer process
-    Attributes :
+    metadata :
     - data :
-    - attributes : an AnalyzerAttributes object containing the attributes
+    - metadata : an AnalyzerMetadata object containing the metadata
     """
-    def __init__(self, data=None, attributes=None):
-        # Define Attributes
-        if attributes is None:
-            self.attributes = AnalyzerAttributes()
+    def __init__(self, data=None, metadata=None):
+        # Define Metadata
+        if metadata is None:
+            self.metadata = AnalyzerMetadata()
         else:
-            self.attributes = attributes
+            self.metadata = metadata
 
         # Define Data
         if data is None:
@@ -171,9 +171,9 @@ class AnalyzerResult(object):
         if type(value) not in [list, str, int, long, float, complex, type(None)] + numpy_data_types:
             raise TypeError('AnalyzerResult can not accept type %s' % type(value))
-        elif name == 'attributes':
-            if not isinstance(value, AnalyzerAttributes):
-                value = AnalyzerAttributes(**value)
+        elif name == 'metadata':
+            if not isinstance(value, AnalyzerMetadata):
+                value = AnalyzerMetadata(**value)
         else:
             raise AttributeError("%s is not a valid attribute in %s" %
                                  (name, self.__class__.__name__))
@@ -191,12 +191,12 @@ class AnalyzerResult(object):
         # ajouter size
         return(prop)
 
 #    def __getattr__(self, name):
-#        if name in ['id', 'name', 'unit', 'value', 'attributes']:
+#        if name in ['id', 'name', 'unit', 'value', 'metadata']:
 #            return self[name]
 #        return super(AnalyzerResult, self).__getattr__(name)
 
     def as_dict(self):
-        return(dict(data=self.data, attributes=self.attributes.as_dict()))
+        return(dict(data=self.data, metadata=self.metadata.as_dict()))
 
     def to_json(self):
         import simplejson as json
@@ -257,19 +257,19 @@ class AnalyzerResultContainer(object):
         for result in data_list:
             res_node = ET.SubElement(root, 'result')
-            res_node.attrib = {'name': result.attributes.name,
-                               'id': result.attributes.id}
+            res_node.attrib = {'name': result.metadata.name,
+                               'id': result.metadata.id}
             # Serialize Data
             data_node = ET.SubElement(res_node, 'data')
             if type(result.data) in [str, unicode]:
                 data_node.text = result.data
             else:
                 data_node.text = repr(result.data)
-            # Serialize Attributes
-            attr_node = ET.SubElement(res_node, 'attributes')
-            for (name, val) in result.attributes.as_dict().items():
+            # Serialize Metadata
+            metadata_node = ET.SubElement(res_node, 'metadata')
+            for (name, val) in result.metadata.as_dict().items():
                 # TODO reorder keys
-                child = ET.SubElement(attr_node, name)
+                child = ET.SubElement(metadata_node, name)
                 if name == 'parameters':
                     for (par_key, par_val) in val.items():
                         par_child = ET.SubElement(child, par_key)
@@ -279,26 +279,7 @@ class AnalyzerResultContainer(object):
                         par_child.text = repr(par_val)
                 else:
                     child.text = repr(val)
 
         #tree = ET.ElementTree(root)
         return ET.tostring(root, encoding="utf-8", method="xml")
-#        import xml.dom.minidom
-#        doc = xml.dom.minidom.Document()
-#
-#        root = doc.createElement('telemeta')
-#        doc.appendChild(root)
-#        for result in data_list:
-#            node = doc.createElement('dataset')
-#            # Serialize Data
-#            if type(result.data) in [str, unicode]:
-#                node.setAttribute('data', result.data )
-#            else:
-#                node.setAttribute('data', repr(result.data))
-#            # Serialize Attributes
-#
-#            node_attr = doc.createElement('attributes')
-#            for name in result.attributes._default_value.keys():
-#                node_attr.setAttribute(name, str(result.attributes.name) )
-#            node.appendChild(node_attr)
-#            root.appendChild(node)
-#        return xml.dom.minidom.Document.toprettyxml(doc)
+
 
     def from_xml(self, xml_string):
         import xml.etree.ElementTree as ET
         import ast
         results = AnalyzerResultContainer()
@@ -317,8 +298,8 @@ class AnalyzerResultContainer(object):
         for result_child in root.iter('result'):
             result = AnalyzerResult()
             # Get data
             try:
                 result.data = ast.literal_eval(result_child.find('data').text)
             except:
                 result.data = result_child.find('data').text
 
-            # Get attributes
-            for attr_child in result_child.find('attributes'):
+            # Get metadata
+            for attr_child in result_child.find('metadata'):
                 name = attr_child.tag
                 if name == 'parameters':
                     parameters = dict()
                     for param_child in attr_child:
                         parameters[param_child.tag] = ast.literal_eval(param_child.text)
                     value = parameters
                 else:
                     value = ast.literal_eval(attr_child.text)
-                result.attributes.__setattr__(name, value)
+                result.metadata.__setattr__(name, value)
             results.add_result(result)
         return results
-#
-#
-#        import xml.dom.minidom
-#        import ast
-#        doc = xml.dom.minidom.parseString(xml_string)
-#        root = doc.getElementsByTagName('telemeta')[0]
-#        results = []
-#        for child in root.childNodes:
-#            if child.nodeType != child.ELEMENT_NODE: continue
-#            result = AnalyzerResult()
-#            for a in ['name', 'id', 'unit']:
-#                child_dict[a] = str(child.getAttribute(a))
-#            # Get Data
-#            try:
-#                result.data = ast.literal_eval(child.getAttribute('data'))
-#            except:
-#                results.data = child.getAttribute('data')
-#            # Get Attributes
-#            node_attr = root.childNodes
-#            results.append(results)
-#        return results
+
 
     def to_json(self):
         #if data_list == None: data_list = self.results
         import simplejson as json
@@ -366,7 +327,7 @@ class AnalyzerResultContainer(object):
         results = AnalyzerResultContainer()
         for res_json in results_json:
             res = AnalyzerResult(data=res_json['data'],
-                                 attributes=res_json['attributes'])
+                                 metadata=res_json['metadata'])
             results.add_result(res)
         return results
 
@@ -382,7 +343,7 @@ class AnalyzerResultContainer(object):
         results = AnalyzerResultContainer()
         for res_yaml in results_yaml:
             res = AnalyzerResult(data=res_yaml['data'],
-                                 attributes=res_yaml['attributes'])
+                                 metadata=res_yaml['metadata'])
             results.add_result(res)
         return results
 
@@ -410,7 +371,7 @@ class AnalyzerResultContainer(object):
             for data in data_list:
                 # Save results in HDF5 Dataset
                 dset = h5_file.create_dataset(data['id'], data=data['value'])
-                # Save associated attributes
+                # Save associated metadata
                 dset.attrs["unit"] = data['unit']
                 dset.attrs["name"] = data['name']
         except TypeError:
@@ -428,7 +389,7 @@ class AnalyzerResultContainer(object):
         for name in h5_file.keys():
             dset = h5_file.get(name)  # Read Dataset
             id = name
-            # Read Attributes
+            # Read metadata
             unit = dset.attrs['unit']
             name = dset.attrs['name']
             # Create new AnalyzerResult
diff --git a/timeside/analyzer/dc.py b/timeside/analyzer/dc.py
index dff87de..2deced3 100644
--- a/timeside/analyzer/dc.py
+++ b/timeside/analyzer/dc.py
@@ -49,9 +49,9 @@ class MeanDCShift(Processor):
     def results(self):
         result = AnalyzerResult()
-        # Set attributes
+        # Set metadata
         # FIXME : blocksize and stepsize are not appropriate here
-        result.attributes = AnalyzerAttributes(id="mean_dc_shift",
+        result.metadata = AnalyzerMetadata(id="mean_dc_shift",
                                            name = "Mean DC shift",
                                            unit = "%",
                                            samplerate=self.samplerate(),
diff --git a/timeside/analyzer/level.py b/timeside/analyzer/level.py
index eef9085..b5d297b 100644
--- a/timeside/analyzer/level.py
+++ b/timeside/analyzer/level.py
@@ -60,20 +60,20 @@ class Level(Processor):
     def results(self):
         # Max level
         # FIXME : blocksize and stepsize are not appropriate here
-        attr = AnalyzerAttributes(id="max_level",
+        metadata = AnalyzerMetadata(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
                                   samplerate=self.samplerate())
         data = numpy.round(20*numpy.log10(self.max_value), 3)
-        max_level = AnalyzerResult(data, attr)
+        max_level = AnalyzerResult(data, metadata)
 
         # RMS level
         # FIXME : blocksize and stepsize are not appropriate here
-        attr = AnalyzerAttributes(id="rms_level",
+        metadata = AnalyzerMetadata(id="rms_level",
                                   name="RMS level",
                                   unit="dBFS",
                                   samplerate=self.samplerate())
         data = numpy.round(20*numpy.log10(numpy.sqrt(numpy.mean(self.mean_values))), 3)
-        rms_level = AnalyzerResult(data, attr)
+        rms_level = AnalyzerResult(data, metadata)
 
         return AnalyzerResultContainer([max_level, rms_level])
diff --git a/timeside/analyzer/yaafe.py b/timeside/analyzer/yaafe.py
index 4914cd4..4b156cf 100644
--- a/timeside/analyzer/yaafe.py
+++ b/timeside/analyzer/yaafe.py
@@ -93,7 +93,7 @@ class Yaafe(Processor):
             # Get results from Yaafe engine
             result = AnalyzerResult()
-            result.attributes = AnalyzerAttributes(id = id,
+            result.metadata = AnalyzerMetadata(id = id,
                                                name = name,
                                                unit = unit,
                                                samplerate = self.samplerate,
-- 
2.39.5
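
A minimal usage sketch of the renamed API follows. It is not part of the commit: it assumes the patched timeside.analyzer.core module is importable, that AnalyzerResultContainer exposes add_result() and to_json() as the hunks above suggest, and that simplejson is installed; the id, name, unit and numeric values are purely illustrative.

    # Hypothetical example -- assumes the patched timeside.analyzer.core is
    # importable; identifiers and values below are illustrative only.
    from timeside.analyzer.core import (AnalyzerMetadata, AnalyzerResult,
                                        AnalyzerResultContainer)

    # AnalyzerMetadata (formerly AnalyzerAttributes) describes the analysis.
    metadata = AnalyzerMetadata(id="max_level",
                                name="Max level",
                                unit="dBFS",
                                samplerate=44100)

    # The AnalyzerResult keyword is now `metadata`, no longer `attributes`.
    result = AnalyzerResult(data=-6.021, metadata=metadata)

    # Collect results in a container; serializers now emit a 'metadata'
    # key/element instead of 'attributes'.
    container = AnalyzerResultContainer()
    container.add_result(result)
    print(container.to_json())

The keyword rename is backward-incompatible for callers that passed attributes= explicitly, which is why the tests are updated in the same commit.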