git.parisson.com Git - timeside.git/commitdiff
Fixes #24
author Thomas Fillon <thomas@parisson.com>
Tue, 27 May 2014 10:40:13 +0000 (12:40 +0200)
committer Thomas Fillon <thomas@parisson.com>
Tue, 27 May 2014 10:40:13 +0000 (12:40 +0200)
timeside/analyzer/core.py
timeside/server/models.py

index 85fda63140e7389d017a6f905250bb235486a1df..8c00984cb569075baa422d758fd881071fcca948 100644 (file)
@@ -208,7 +208,8 @@ class IdMetadata(MetadataObject):
             date and time in ISO  8601 format YYYY-MM-DDTHH:MM:SS
         version : str
         author : str
-        uuid : str
+        proc_uuid : str
+        res_uuid : str
     '''
     # TODO :
     # - (long) description --> to be put in the Processor API
@@ -221,7 +222,8 @@ class IdMetadata(MetadataObject):
                                   ('date', None),
                                   ('version', None),
                                   ('author', None),
-                                  ('uuid', None)])
+                                  ('proc_uuid', None),
+                                  ('res_uuid', None)])
 
     def __setattr__(self, name, value):
         if value is None:
@@ -617,7 +619,7 @@ class AnalyzerResult(MetadataObject):
 
     def to_hdf5(self, h5_file):
         # Save results in HDF5 Dataset
-        group = h5_file.create_group(self.id_metadata.uuid)
+        group = h5_file.create_group(self.id_metadata.res_uuid)
         group.attrs['data_mode'] = self.__getattribute__('data_mode')
         group.attrs['time_mode'] = self.__getattribute__('time_mode')
         for key in self.keys():
@@ -894,12 +896,12 @@ class AnalyzerResultContainer(dict):
 
         # Update result uuid by adding a suffix uuid
         # It enables dealing with multiple results for the same processor uuid
-        uuid = analyzer_result.id_metadata.uuid
+        uuid = analyzer_result.id_metadata.proc_uuid
         count = 0
         for res_uuid in self.keys():
             count += res_uuid.startswith(uuid)
         res_uuid = '-'.join([uuid, format(count, '02x')])
-        analyzer_result.id_metadata.uuid = res_uuid
+        analyzer_result.id_metadata.res_uuid = res_uuid
 
         self.__setitem__(res_uuid, analyzer_result)
 
@@ -1127,7 +1129,7 @@ class Analyzer(Processor):
         result.id_metadata.id = self.id()
         result.id_metadata.name = self.name()
         result.id_metadata.unit = self.unit()
-        result.id_metadata.uuid = self.uuid()
+        result.id_metadata.proc_uuid = self.uuid()
 
         result.audio_metadata.uri = self.mediainfo()['uri']
         result.audio_metadata.sha1 = self.mediainfo()['sha1']
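
The change above splits the old single uuid field into the processor's own uuid (proc_uuid) and the per-result uuid (res_uuid) built in AnalyzerResultContainer. The following is a minimal standalone sketch of that suffixing scheme, not part of the commit; the uuid value and the existing keys are hypothetical:

    proc_uuid = 'a1b2c3d4'                           # hypothetical processor uuid
    existing_keys = ['a1b2c3d4-00', 'a1b2c3d4-01']   # results already in the container
    # Count the results that already belong to this processor...
    count = sum(key.startswith(proc_uuid) for key in existing_keys)
    # ...and append a two-digit hex suffix, as in the hunk above.
    res_uuid = '-'.join([proc_uuid, format(count, '02x')])
    print(res_uuid)  # 'a1b2c3d4-02'
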
index 7948f86eb351abe841583a0d0cb86ca24e22b480..0b39b646b7c3b173186d07949d9158a2fab38a5c 100644 (file)
@@ -244,7 +244,6 @@ class Task(BaseResource):
                 else:
                     proc = proc()
                 proc.set_parameters(preset.parameters)
-                print proc.get_parameters()
                 presets[preset] = proc
                 pipe = pipe | proc
 
@@ -254,25 +253,21 @@ class Task(BaseResource):
             if not item.hdf5:
                 item.hdf5 =  path + str(self.experience.uuid) + '.hdf5'
                 item.save()
-            print pipe
             pipe.run()
             item.lock_setter(True)
-            print item.hdf5.path
             pipe.results.to_hdf5(item.hdf5.path)
             item.lock_setter(False)
 
             for preset in presets.keys():
                 proc = presets[preset]
                 if proc.type == 'analyzer':
-                    for processor_id in proc.results.keys():
-                        parameters = proc.results[processor_id].parameters
+                    for result_id in proc.results.keys():
+                        parameters = proc.results[result_id].parameters
                         preset, c = Preset.objects.get_or_create(processor=preset.processor, parameters=unicode(parameters))
                         result, c = Result.objects.get_or_create(preset=preset, item=item)
                         result.hdf5 = path + str(result.uuid) + '.hdf5'
-                        print result.hdf5
                         proc.results.to_hdf5(result.hdf5.path)
                         result.status_setter(_DONE)
-                        print '*****************DONE*****************'
                 elif proc.type == 'grapher':
                     parameters = {}
                     result, c = Result.objects.get_or_create(preset=preset, item=item)
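
With res_uuid now used as the HDF5 group name in to_hdf5(), the file written by pipe.results.to_hdf5(item.hdf5.path) holds one group per result. A minimal sketch, assuming h5py and a hypothetical file path, of how those groups can be inspected; the attribute names 'data_mode' and 'time_mode' come from the core.py hunk above:

    import h5py

    # Hypothetical path; any file written by AnalyzerResultContainer.to_hdf5() would do.
    with h5py.File('item_results.hdf5', 'r') as h5:
        for res_uuid, group in h5.items():
            # Each group is keyed by the suffixed result uuid, e.g. 'a1b2c3d4-00'.
            print('%s: %s' % (res_uuid, dict(group.attrs)))
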