git.parisson.com Git - timeside.git/commitdiff
* fix some variable names for overall coherence
author Guillaume Pellerin <yomguy@parisson.com>
Sun, 7 Jul 2013 14:35:45 +0000 (16:35 +0200)
committer Guillaume Pellerin <yomguy@parisson.com>
Sun, 7 Jul 2013 14:35:45 +0000 (16:35 +0200)
 * fix yaafe.results container

12 files changed:
tests/test_analyzer_dc.py
tests/test_analyzer_level.py
tests/test_yaafe.py
timeside/analyzer/aubio_melenergy.py
timeside/analyzer/aubio_mfcc.py
timeside/analyzer/aubio_pitch.py
timeside/analyzer/aubio_specdesc.py
timeside/analyzer/aubio_temporal.py
timeside/analyzer/core.py
timeside/analyzer/dc.py
timeside/analyzer/level.py
timeside/analyzer/yaafe.py

index 99bd4500f92ed36b66137e012ff1dbadb2174882..e4cf7bfed32d887f33f5abd2aa4f4bf144cbc452 100755 (executable)
@@ -17,10 +17,10 @@ class TestAnalyzerDC(TestCase):
         attributes=AnalyzerAttributes(name="Mean DC shift",
                                       unit="%",
                                       id="mean_dc_shift",
-                                      sampleRate=44100,
-                                      blockSize=None,
-                                      stepSize=None)
-  
+                                      samplerate=44100,
+                                      blocksize=None,
+                                      stepsize=None)
+
         self.expected = AnalyzerResult(data=-0.000, attributes=attributes)
 
     def testOnGuitar(self):
@@ -29,10 +29,10 @@ class TestAnalyzerDC(TestCase):
         attributes=AnalyzerAttributes(name="Mean DC shift",
                                       unit="%",
                                       id="mean_dc_shift",
-                                      sampleRate=44100,
-                                      blockSize=None,
-                                      stepSize=None)
-        self.expected = AnalyzerResult(data=0.054, attributes=attributes) 
+                                      samplerate=44100,
+                                      blocksize=None,
+                                      stepsize=None)
+        self.expected = AnalyzerResult(data=0.054, attributes=attributes)
 
     def tearDown(self):
         decoder = FileDecoder(self.source)
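
The rename is purely mechanical at the call sites: the keyword arguments passed to AnalyzerAttributes switch from camelCase to lowercase. A minimal sketch of the updated fixture construction, assuming the import path implied by the file layout above:

    from timeside.analyzer.core import AnalyzerAttributes, AnalyzerResult

    # keyword arguments are now lowercase: samplerate, blocksize, stepsize
    attributes = AnalyzerAttributes(name="Mean DC shift",
                                    unit="%",
                                    id="mean_dc_shift",
                                    samplerate=44100,
                                    blocksize=None,
                                    stepsize=None)
    expected = AnalyzerResult(data=-0.000, attributes=attributes)
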
index 10248e178827a12b231eb2fb5318f9fb5ac9d714..7d9d8c114bfa9823152c5b7f21b110fb84aebf29 100755 (executable)
@@ -14,39 +14,39 @@ class TestAnalyzerLevel(TestCase):
     def testOnSweep(self):
         "runs on sweep"
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "sweep.wav")
-               
+
         # Max level
         attr = AnalyzerAttributes(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
-                                  sampleRate=44100) 
+                                  samplerate=44100)
         max_level = AnalyzerResult(-6.021, attr)
-        
+
         # RMS level
         attr = AnalyzerAttributes(id="rms_level",
                                   name="RMS level",
                                   unit="dBFS",
-                                  sampleRate=44100)
-        rms_level = AnalyzerResult(-9.856, attr)                          
+                                  samplerate=44100)
+        rms_level = AnalyzerResult(-9.856, attr)
         self.expected = AnalyzerResultContainer([max_level,rms_level])
-        
+
     def testOnGuitar(self):
         "runs on guitar"
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "guitar.wav")
-               
+
         # Max level
         attr = AnalyzerAttributes(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
-                                  sampleRate=44100) 
+                                  samplerate=44100)
         max_level = AnalyzerResult(-4.258, attr)
-        
+
         # RMS level
         attr = AnalyzerAttributes(id="rms_level",
                                   name="RMS level",
                                   unit="dBFS",
-                                  sampleRate=44100)
-        rms_level = AnalyzerResult(-21.945, attr)                          
+                                  samplerate=44100)
+        rms_level = AnalyzerResult(-21.945, attr)
         self.expected = AnalyzerResultContainer([max_level,rms_level])
 
     def tearDown(self):
index 0f6ccf3bd444ce1fe40b5130e85270182b791440..daaa0a3e87c884562fb6eec4d181c7a65bd3d1b5 100755 (executable)
@@ -9,9 +9,9 @@ class TestYaafe(TestCase):
 
     def setUp(self):
         self.sample_rate = 16000
-    
+
     def testOnSweepWithFeaturePlan(self):
-        "runs on sweep and define feature plan manualy"
+        "runs on sweep and define feature plan manually"
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "sweep.wav")
 
         # Setup Yaafe Analyzer
@@ -21,8 +21,8 @@ class TestYaafe(TestCase):
         fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256')
         fp.addFeature('mfcc_d1: MFCC blockSize=512 stepSize=256 > Derivate DOrder=1')
         fp.addFeature('mfcc_d2: MFCC blockSize=512 stepSize=256 > Derivate DOrder=2')
-        
-        # Setup a new Yaafe TimeSide analyzer 
+
+        # Setup a new Yaafe TimeSide analyzer
         # from FeaturePlan
         self.analyzer = Yaafe(fp)
 
@@ -33,22 +33,22 @@ class TestYaafe(TestCase):
         # Load Yaafe Feature Plan
         fp = FeaturePlan(sample_rate=self.sample_rate)
         fp_file = os.path.join (os.path.dirname(__file__),  "yaafe_config", "yaafeFeaturePlan")
-        
+
         fp.loadFeaturePlan(fp_file)
-        # Setup a new Yaafe TimeSide analyzer 
+        # Setup a new Yaafe TimeSide analyzer
         # from FeaturePlan
         self.analyzer = Yaafe(fp)
-    
+
     def testOnGuitarWithDataFlow(self):
         "runs on guitar and load Yaafe dataflow from file"
         self.source = os.path.join (os.path.dirname(__file__),  "samples", "guitar.wav")
         # Setup Yaafe Analyzer
         # Load DataFlow from file
-        df = DataFlow() 
+        df = DataFlow()
         df_file = os.path.join (os.path.dirname(__file__),  "yaafe_config", "yaafeDataFlow")
         df.load(df_file)
-        
-        # Setup a new Yaafe TimeSide analyzer 
+
+        # Setup a new Yaafe TimeSide analyzer
         # from DataFlow
         self.analyzer = Yaafe(df)
 
index bec64dfd045b17f778db57c94c62b4b39b1eb97d..b99aa255ebf02562c03779b98e9353f285bbe97f 100644 (file)
@@ -63,21 +63,21 @@ class AubioMelEnergy(Processor):
 
         container = AnalyzerResultContainer()
         melenergy = AnalyzerResult()
-              
+
         # Get attributes
-        sampleRate = self.samplerate()
-        blockSize = self.win_s
-        stepSize = self.hop_s
+        samplerate = self.samplerate()
+        blocksize = self.win_s
+        stepsize = self.hop_s
         parameters = dict(n_filters= self.n_filters,
                           n_coeffs=  self.n_coeffs)
         # Set attributes
         melenergy.attributes = AnalyzerAttributes(id="aubio_melenergy",
                                                   name="melenergy (aubio)",
                                                   unit='',
-                                                  sampleRate = sampleRate,
-                                                  blockSize = blockSize,
-                                                  stepSize = stepSize,
-                                                  parameters = parameters)                         
+                                                  samplerate = samplerate,
+                                                  blocksize = blocksize,
+                                                  stepsize = stepsize,
+                                                  parameters = parameters)
         # Set Data
         melenergy.data = self.melenergy_results
         container.add_result(melenergy)
index 06fbbbef6845e2cd56cf0015774b195d552fd71b..510bf53280bf925560f9d63523447d6cd1a5f36a 100644 (file)
@@ -65,18 +65,18 @@ class AubioMfcc(Processor):
     def results(self):
         # MFCC
         mfcc = AnalyzerResult()
-        sampleRate = self.samplerate()
-        blockSize = self.win_s
-        stepSize = self.hop_s
+        samplerate = self.samplerate()
+        blocksize = self.win_s
+        stepsize = self.hop_s
         parameters = dict(n_filters= self.n_filters,
                           n_coeffs=  self.n_coeffs)
-        mfcc.attributes = AnalyzerAttributes(id = "aubio_mfcc", 
+        mfcc.attributes = AnalyzerAttributes(id = "aubio_mfcc",
                                              name = "mfcc (aubio)",
                                              unit = "",
-                                             sampleRate = sampleRate,
-                                             blockSize = blockSize,
-                                             stepSize = stepSize,
+                                             samplerate = samplerate,
+                                             blocksize = blocksize,
+                                             stepsize = stepsize,
                                              parameters = parameters)
         mfcc.data = [list(line) for line in self.mfcc_results] # TODO : type ? list list ?
-        
+
         return AnalyzerResultContainer(mfcc)
index 8a290ae67df5754dde60a06e12f7ac6c07aa8bf3..2f5697f3566e764b0568c9fb67b6af6e3cbff0af 100644 (file)
@@ -66,23 +66,23 @@ class AubioPitch(Processor):
 
         container = AnalyzerResultContainer()
         pitch = AnalyzerResult()
-              
+
         # Get attributes
-        sampleRate = self.samplerate()
-        blockSize = self.win_s
-        stepSize = self.hop_s
+        samplerate = self.samplerate()
+        blocksize = self.win_s
+        stepsize = self.hop_s
         # parameters : None # TODO check with Piem "default" and "freq" in setup
-        
+
         # Set attributes
         pitch.attributes = AnalyzerAttributes(id="aubio_pitch",
                                               name="f0 (aubio)",
                                               unit='Hz',
-                                              sampleRate = sampleRate,
-                                              blockSize = blockSize,
-                                              stepSize = stepSize)                         
+                                              samplerate = samplerate,
+                                              blocksize = blocksize,
+                                              stepsize = stepsize)
         # Set Data
         self.pitches = numpy.array(self.pitches)
         pitch.data = self.pitches
         container.add_result(pitch)
-        
+
         return container
index e23f4af829e0cbe466d9e09dc6deadf3454b8d2f..fadf75e4a41a908edb026f4ee87855512eb6454e 100644 (file)
@@ -66,9 +66,9 @@ class AubioSpecdesc(Processor):
 
         container = AnalyzerResultContainer()
        # Get common attributes
-        sampleRate = self.samplerate()
-        blockSize = self.win_s
-        stepSize = self.hop_s
+        samplerate = self.samplerate()
+        blocksize = self.win_s
+        stepsize = self.hop_s
         unit = ""
         # For each method store results in container
         for method in self.methods:
@@ -76,16 +76,16 @@ class AubioSpecdesc(Processor):
             # Set attributes
             id = '_'.join(["aubio_specdesc", method])
             name = ' '.join(["spectral descriptor", method, "(aubio)"])
-            
+
 
             specdesc.attributes = AnalyzerAttributes(id = id,
                                                   name = name,
                                                   unit = unit,
-                                                  sampleRate = sampleRate,
-                                                  blockSize = blockSize,
-                                                  stepSize = stepSize) 
-                                                  
-            # Set Data                                         
+                                                  samplerate = samplerate,
+                                                  blocksize = blocksize,
+                                                  stepsize = stepsize)
+
+            # Set Data
             specdesc.data = numpy.array(self.specdesc_results[method])
 
             container.add_result(specdesc)
index eb5b25149b3554bbc7415dfdd9d69abb3c8c434e..deff4ce11fa61b33a3454cfb2a1b87b0b0f09122 100644 (file)
@@ -68,11 +68,11 @@ class AubioTemporal(Processor):
 
     def results(self):
         # Get common attributes
-        commonAttr = dict(sampleRate=self.samplerate(),
-                          blockSize=self.win_s,
-                          stepSize=self.hop_s)
+        commonAttr = dict(samplerate=self.samplerate(),
+                          blocksize=self.win_s,
+                          stepsize=self.hop_s)
        # FIXME : Onsets, beat and onset rate are not frame based Results
-        # sampleRate, blockSize, etc. are not appropriate here
+        # samplerate, blocksize, etc. are not appropriate here
         # Those might be some kind of "AnalyzerSegmentResults"
 
         #---------------------------------
@@ -90,19 +90,19 @@ class AubioTemporal(Processor):
         #---------------------------------
         #  Onset Rate
         #---------------------------------
-        onsetRate = AnalyzerResult()
+        onsetrate = AnalyzerResult()
         # Set attributes
-        onsetRateAttr = dict(id="aubio_onset_rate",
+        onsetrateAttr = dict(id="aubio_onset_rate",
                              name="onset rate (aubio)",
                              unit="bpm")
-        onsetRate.attributes = dict(onsetRateAttr.items() + commonAttr.items())
+        onsetrate.attributes = dict(onsetrateAttr.items() + commonAttr.items())
         # Set Data
         if len(self.onsets) > 1:
             #periods = [60./(b - a) for a,b in zip(self.onsets[:-1],self.onsets[1:])]
             periods = 60. / numpy.diff(self.onsets)
-            onsetRate.data = periods
+            onsetrate.data = periods
         else:
-            onsetRate.data = []
+            onsetrate.data = []
 
         #---------------------------------
         #  Beats
@@ -133,4 +133,4 @@ class AubioTemporal(Processor):
         else:
             bpm.data = []
 
-        return AnalyzerResultContainer([onsets, onsetRate, beats, bpm])
\ No newline at end of file
+        return AnalyzerResultContainer([onsets, onsetrate, beats, bpm])
\ No newline at end of file
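
The renamed onsetrate result derives a per-onset tempo from the spacing of detected onsets; the numpy.diff arithmetic from the hunk above, shown standalone on hypothetical onset times:

    import numpy

    onsets = numpy.array([0.50, 1.00, 1.52, 2.01])  # hypothetical onset times in seconds
    periods = 60. / numpy.diff(onsets)              # inter-onset intervals converted to BPM
    # -> array([120.        , 115.38461538, 122.44897959])
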
index ae28251d0aaf630e21dd045e9c35d79f1f92e5cb..0569c639bb2e25f2f3118be545eb6cede0d9830e 100644 (file)
@@ -57,14 +57,14 @@ class AnalyzerAttributes(object):
     id : string
     name : string
     unit : string
-    sampleRate : int or float
-    blockSize : int
-    stepSize : int
+    samplerate : int or float
+    blocksize : int
+    stepsize : int
     parameters : dict
 
     Methods
     -------
-    asdict()
+    as_dict()
         Return a dictionary representation of the AnalyzerAttributes
     """
     from collections import OrderedDict
@@ -73,9 +73,9 @@ class AnalyzerAttributes(object):
     _default_value = OrderedDict([('id', ''),
                                   ('name', ''),
                                   ('unit', ''),
-                                  ('sampleRate', None),
-                                  ('blockSize', None),
-                                  ('stepSize', None),
+                                  ('samplerate', None),
+                                  ('blocksize', None),
+                                  ('stepsize', None),
                                   ('parameters', {})
                                   ])
     # TODO : add
@@ -95,9 +95,9 @@ class AnalyzerAttributes(object):
         id : string
         name : string
         unit : string
-        sampleRate : int or float
-        blockSize : int
-        stepSize : int
+        samplerate : int or float
+        blocksize : int
+        stepsize : int
         parameters : dict
 
         Returns
@@ -121,7 +121,7 @@ class AnalyzerAttributes(object):
             (name, self.__class__.__name__))
         super(AnalyzerAttributes, self).__setattr__(name, value)
 
-    def asdict(self):
+    def as_dict(self):
         return dict((att, getattr(self, att))
         for att in self._default_value.keys())
 
@@ -131,10 +131,10 @@ class AnalyzerAttributes(object):
             ', '.join('{}={}'.format(
             att, repr(getattr(self, att)))
             for att in self._default_value.keys()))
-            
+
     def __eq__(self,other):
         return (isinstance(other, self.__class__)
-            and self.asdict() == other.asdict())
+            and self.as_dict() == other.as_dict())
 
 
 class AnalyzerResult(object):
@@ -195,19 +195,19 @@ class AnalyzerResult(object):
 #            return self[name]
 #        return super(AnalyzerResult, self).__getattr__(name)
 
-    def asdict(self):
-        return(dict(data=self.data, attributes=self.attributes.asdict()))
+    def as_dict(self):
+        return(dict(data=self.data, attributes=self.attributes.as_dict()))
 
     def to_json(self):
         import simplejson as json
-        return json.dumps(self.asdict())
+        return json.dumps(self.as_dict())
 
     def __repr__(self):
         return self.to_json()
-    
+
     def __eq__(self,other):
         return (isinstance(other, self.__class__)
-            and self.asdict() == other.asdict())
+            and self.as_dict() == other.as_dict())
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -226,7 +226,7 @@ class AnalyzerResultContainer(object):
         return len(self.results)
 
     def __repr__(self):
-        return [res.asdict() for res in self.results]
+        return [res.as_dict() for res in self.results]
 
     def __eq__(self, other):
         if hasattr(other, 'results'):
@@ -235,9 +235,9 @@ class AnalyzerResultContainer(object):
             if a != b:
                 return False
         return True
-   
+
     def __ne__(self, other):
-        return not self.__eq__(other)     
+        return not self.__eq__(other)
 
     def add_result(self, analyzer_result):
         if type(analyzer_result) == list:
@@ -267,7 +267,7 @@ class AnalyzerResultContainer(object):
                 data_node.text = repr(result.data)
             # Serialize Attributes
             attr_node = ET.SubElement(res_node, 'attributes')
-            for (name, val) in result.attributes.asdict().items():
+            for (name, val) in result.attributes.as_dict().items():
                 # TODO reorder keys
                 child = ET.SubElement(attr_node, name)
                 if name == 'parameters':
@@ -316,7 +316,7 @@ class AnalyzerResultContainer(object):
                 result.data = ast.literal_eval(result_child.find('data').text)
             except:
                 result.data = result_child.find('data').text
-            
+
             # Get attributes
             for attr_child in result_child.find('attributes'):
                 name = attr_child.tag
@@ -358,7 +358,7 @@ class AnalyzerResultContainer(object):
     def to_json(self):
         #if data_list == None: data_list = self.results
         import simplejson as json
-        return json.dumps([res.asdict() for res in self])
+        return json.dumps([res.as_dict() for res in self])
 
     def from_json(self, json_str):
         import simplejson as json
@@ -373,7 +373,7 @@ class AnalyzerResultContainer(object):
     def to_yaml(self):
         #if data_list == None: data_list = self.results
         import yaml
-        return yaml.dump([res.asdict() for res in self])
+        return yaml.dump([res.as_dict() for res in self])
 
     def from_yaml(self, yaml_str):
         import yaml
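
Besides the attribute renames, core.py renames asdict() to as_dict(), which backs both equality checks and the XML/JSON/YAML serializers. A short usage sketch under the same assumed import path (simplejson is imported lazily by to_json, as in the diff):

    from timeside.analyzer.core import AnalyzerAttributes, AnalyzerResult

    attr = AnalyzerAttributes(id="max_level", name="Max level",
                              unit="dBFS", samplerate=44100)
    result = AnalyzerResult(data=-6.021, attributes=attr)

    # as_dict() (formerly asdict()) feeds __eq__ as well as to_json()/to_yaml()
    d = result.as_dict()          # {'data': -6.021, 'attributes': {...}}
    json_str = result.to_json()   # serialized via simplejson
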
index f22c3955c1b02345a0b53f4dc8e9af3d729113c6..dff87de160e1893e5c392ed60c1077dac703fba1 100644 (file)
@@ -50,14 +50,14 @@ class MeanDCShift(Processor):
     def results(self):
         result = AnalyzerResult()
         #  Set attributes
-        #  FIXME : blockSize and stepSize are not appropriate here
+        #  FIXME : blocksize and stepsize are not appropriate here
         result.attributes = AnalyzerAttributes(id="mean_dc_shift",
                                                name = "Mean DC shift",
                                                unit = "%",
-                                               sampleRate=self.samplerate(),
-                                               blockSize=None,
-                                               stepSize=None)
-                                               
+                                               samplerate=self.samplerate(),
+                                               blocksize=None,
+                                               stepsize=None)
+
         # Set Data
         result.data = numpy.round(numpy.mean(100*self.values),3)
         return AnalyzerResultContainer(result)
index bf061f405eb0ff49768086d62ae7a689e3df52fe..eef9085e9cad17da067a100ac225f1f28fd75fae 100644 (file)
@@ -59,21 +59,21 @@ class Level(Processor):
 
     def results(self):
         # Max level
-        #  FIXME : blockSize and stepSize are not appropriate here
+        #  FIXME : blocksize and stepsize are not appropriate here
         attr = AnalyzerAttributes(id="max_level",
                                   name="Max level",
                                   unit = "dBFS",
-                                  sampleRate=self.samplerate()) 
+                                  samplerate=self.samplerate())
         data = numpy.round(20*numpy.log10(self.max_value), 3)
         max_level = AnalyzerResult(data, attr)
-        
+
         # RMS level
-        #  FIXME : blockSize and stepSize are not appropriate here
+        #  FIXME : blocksize and stepsize are not appropriate here
         attr = AnalyzerAttributes(id="rms_level",
                                   name="RMS level",
                                   unit="dBFS",
-                                  sampleRate=self.samplerate())
+                                  samplerate=self.samplerate())
         data = numpy.round(20*numpy.log10(numpy.sqrt(numpy.mean(self.mean_values))), 3)
         rms_level = AnalyzerResult(data, attr)
-        
+
         return AnalyzerResultContainer([max_level, rms_level])
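
The Level analyzer converts its accumulated peak and mean-square values to dBFS with 20*log10; the same conversion shown standalone on a hypothetical block of mono samples in [-1, 1] (the processor itself accumulates max_value and mean_values during process()):

    import numpy

    samples = numpy.array([0.5, -0.25, 0.1])  # hypothetical mono samples
    max_level = numpy.round(20 * numpy.log10(numpy.abs(samples).max()), 3)  # -6.021 dBFS
    rms_level = numpy.round(20 * numpy.log10(numpy.sqrt(numpy.mean(samples ** 2))), 3)
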
index 4887920b938e903cab81c64100555cf8b35bc3d1..4914cd48d52d48fcd7eac99db1a0f34d395bda80 100644 (file)
@@ -19,7 +19,7 @@
 
 # Author : Thomas Fillon <thomas@parisson.com>
 """
-Module Yaafe Analyzer 
+Module Yaafe Analyzer
 Created on Thu Jun 13 16:05:02 2013
 
 @author: Thomas Fillon
@@ -27,13 +27,13 @@ Created on Thu Jun 13 16:05:02 2013
 from timeside.core import Processor, implements, interfacedoc, FixedSizeInputAdapter
 from timeside.analyzer.core import *
 from timeside.api import IValueAnalyzer
-#
 from yaafelib import *
-#
 import numpy
 
+
 class Yaafe(Processor):
     implements(IValueAnalyzer)
+
     def __init__(self, yaafeSpecification):
         # Check arguments
         if isinstance(yaafeSpecification,DataFlow):
@@ -43,7 +43,7 @@ class Yaafe(Processor):
             self.dataFlow = self.featurePlan.getDataFlow()
         else:
             raise TypeError("'%s' Type must be either '%s' or '%s'" % (str(yaafeSpecification),str(DataFlow),str(FeaturePlan)))
-        
+
     @interfacedoc
     def setup(self, channels=None, samplerate=None, blocksize=None, totalframes=None):
         super(Yaafe, self).setup(channels, samplerate, blocksize, totalframes)
@@ -51,6 +51,8 @@ class Yaafe(Processor):
         self.yaafe_engine = Engine()
         self.yaafe_engine.load(self.dataFlow)
         self.yaafe_engine.reset()
+        self.samplerate = samplerate
+        self.blocksize = blocksize
 
     @staticmethod
     @interfacedoc
@@ -64,47 +66,44 @@ class Yaafe(Processor):
 
     def process(self, frames, eod=False):
         # do process things...
-        # Downmixing to mono and convert to float64 for compatibility with Yaafe       
+        # Downmixing to mono and convert to float64 for compatibility with Yaafe
         yaafe_frames = frames.sum(axis=-1,dtype=numpy.float64) / frames.shape[-1]
-        # Reshape for compatibility with Yaafe input format        
-        yaafe_frames.shape = (1,yaafe_frames.shape[0]) 
+        # Reshape for compatibility with Yaafe input format
+        yaafe_frames.shape = (1,yaafe_frames.shape[0])
         # write audio array on 'audio' input
-        self.yaafe_engine.writeInput('audio',yaafe_frames) 
-        # process available data        
-        self.yaafe_engine.process() 
+        self.yaafe_engine.writeInput('audio',yaafe_frames)
+        # process available data
+        self.yaafe_engine.process()
         if eod:
             # flush yaafe engine to process remaining data
-            self.yaafe_engine.flush() 
-           
+            self.yaafe_engine.flush()
+
         return frames, eod
 
     def results(self):
         # Get back current container
         container = AnalyzerResultContainer()
         # Get feature extraction results from yaafe
-        map_keys = {'sampleRate': 'sampleRate',
-                    'frameLength': 'blockSize',
-                    'sampleStep': 'stepSize', 
-                    'parameters': 'parameters', 
-                    }
         featNames = self.yaafe_engine.getOutputs().keys()
         for featName in featNames:
-            # Map Yaafe attributes into AnalyzerResults dict
-            res_dict = {map_keys[name]: self.yaafe_engine.getOutputs()['mfcc'][name] for name in map_keys.keys()}
-            # Define ID fields            
-            res_dict['id'] = 'yaafe_' + featName
-            res_dict['name'] = 'Yaafe ' + featName
-            res_dict['unit'] = ''
-            # create AnalyzerResult and set its attributes
-            result = AnalyzerResult(attributes=res_dict)
+            # Define ID fields
+            id = 'yaafe_' + featName
+            name = 'Yaafe ' + featName
+            unit = ''
+
             # Get results from Yaafe engine
-            result.data = self.yaafe_engine.readOutput(featName)  # Read Yaafe Results       
+            result = AnalyzerResult()
+            result.attributes = AnalyzerAttributes(id = id,
+                                      name = name,
+                                      unit = unit,
+                                      samplerate = self.samplerate,
+                                      blocksize = self.blocksize,
+                                      stepsize = None)
+
+            result.data = self.yaafe_engine.readOutput(featName)  # Read Yaafe Results
             # Store results in Container
             if len(result.data):
                 container.add_result(result)
-        
-        return container
-
-
 
+        return container
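
The rewritten results() no longer maps Yaafe's per-output metadata (sampleRate, frameLength, sampleStep) into the attributes; it reuses the samplerate and blocksize stored at setup() time. An end-to-end sketch, assuming TimeSide's decoder-to-analyzer pipe syntax and the import paths implied above (the source file is hypothetical):

    from timeside.decoder import FileDecoder        # import path assumed
    from timeside.analyzer.yaafe import Yaafe
    from yaafelib import FeaturePlan

    fp = FeaturePlan(sample_rate=16000)             # should match the decoded stream's rate
    # Yaafe's own feature-spec strings keep their camelCase keys
    fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256')

    decoder = FileDecoder('samples/sweep.wav')      # hypothetical source file
    analyzer = Yaafe(fp)
    (decoder | analyzer).run()                      # pipeline syntax assumed from TimeSide's API
    container = analyzer.results()                  # one AnalyzerResult per non-empty Yaafe output
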