EMAIL_HOST = 'localhost'
DEFAULT_FROM_EMAIL = 'webmaster@parisson.com'
--
- FILE_UPLOAD_TEMP_DIR = '/tmp'
'document_root': settings.TELEMETA_CACHE_DIR,}),
url(r'^', include('jqchat.urls')),
- )
+ )
- url(r'^__debug__/', include(debug_toolbar.urls)),)
+ if settings.DEBUG:
+     import debug_toolbar
+     urlpatterns += patterns('',
+         url(r'^__debug__/', include(debug_toolbar.urls)),)
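The __debug__ URL hook above only takes effect when the toolbar app itself is
enabled. A minimal sketch of the matching settings, assuming django-debug-toolbar
of the same era (values are illustrative, not the actual Telemeta settings):

    DEBUG = True
    INSTALLED_APPS += ('debug_toolbar',)
    INTERNAL_IPS = ('127.0.0.1',)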
def get_object(self):
    return MediaCollection.objects.get(public_id=self.kwargs['public_id'])

- def get(self, request, *args, **kwargs):
+ def get_stream(self, request, *args, **kwargs):
+     """
+     Stream a ZIP file of collection data
+     without loading the whole file into memory.
+     Based on ZipStream
+     """
+     from telemeta.views import MarkerView
+     from telemeta.backup import CollectionSerializer
+     from django.http import HttpResponse
+     import json
+     import zipstream
+
+     z = zipstream.ZipFile()
+     collection = self.get_object()
+     z.write(collection.code)
+
+     # add each item's media file to the streamed archive
+     for item in collection.items.all():
+         z.write(item.file.path)
+
+     try:
+         # StreamingHttpResponse only exists from Django 1.5 onwards
+         from django.http import StreamingHttpResponse
+         response = StreamingHttpResponse(z, content_type='application/zip')
+     except ImportError:
+         response = HttpResponse(z, content_type='application/zip')
+
+     response['Content-Disposition'] = "attachment; filename=%s.%s" % \
+                                       (collection.code, 'zip')
+     return response
+
+ @method_decorator(login_required)
+ def dispatch(self, *args, **kwargs):
+     return super(CollectionPackageView, self).dispatch(*args, **kwargs)
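For reference, a zipstream.ZipFile is itself an iterable of compressed chunks,
which is why it can be handed directly to StreamingHttpResponse. A standalone
sketch of that behaviour, assuming the python-zipstream package (paths are
illustrative):

    import zipstream

    z = zipstream.ZipFile()
    z.write('/path/to/item.wav', arcname='item.wav')

    # chunks are produced lazily, only as the archive is consumed
    with open('/tmp/archive.zip', 'wb') as f:
        for chunk in z:
            f.write(chunk)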
-
-
-
"""
Create a ZIP file on disk and transmit it in chunks of 8KB,
without loading the whole file into memory. A similar approach can
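A sketch of the on-disk variant this docstring describes, assuming a temporary
file and the standard library's FileWrapper; the helper name and the 8KB block
size are illustrative, not the actual implementation:

    import tempfile
    import zipfile
    from wsgiref.util import FileWrapper
    from django.http import HttpResponse

    def send_zip(paths):
        # build the whole archive in a temporary file on disk
        tmp = tempfile.TemporaryFile()
        archive = zipfile.ZipFile(tmp, 'w', zipfile.ZIP_DEFLATED)
        for path in paths:
            archive.write(path)
        archive.close()
        length = tmp.tell()
        tmp.seek(0)
        # FileWrapper feeds the response in 8KB blocks
        response = HttpResponse(FileWrapper(tmp, 8192),
                                content_type='application/zip')
        response['Content-Length'] = length
        response['Content-Disposition'] = 'attachment; filename=package.zip'
        return response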
def item_analyze(self, item):
    analyses = MediaItemAnalysis.objects.filter(item=item)
-
+     mime_type = ''
+
    if analyses:
        for analysis in analyses:
            if not item.approx_duration and analysis.analyzer_id == 'duration':
                analyzer_id='duration', unit='s',
                value=unicode(datetime.timedelta(0, decoder.input_duration)))
                analysis.save()
-
+
    for analyzer in analyzers_sub:
-         value = analyzer.result()
-         analysis = MediaItemAnalysis(item=item, name=analyzer.name(),
-                                      analyzer_id=analyzer.id(),
-                                      unit=analyzer.unit(), value=str(value))
-         analysis.save()
+         # each analyzer now exposes a container of result objects
+         for key in analyzer.results.keys():
+             result = analyzer.results[key]
+             value = result.data_object.value
+             # unwrap single-element arrays to a scalar value
+             if value.shape[0] == 1:
+                 value = value[0]
+             analysis = MediaItemAnalysis(item=item, name=result.name,
+                                          analyzer_id=result.id, unit=result.unit,
+                                          value=unicode(value))
+             analysis.save()
-# FIXME: parse tags on first load
+     analyses = MediaItemAnalysis.objects.filter(item=item)
+
+# TODO: parse tags on first load
# tags = decoder.tags
-     return mime_type
+     return analyses
def item_analyze_xml(self, request, public_id):
    item = MediaItem.objects.get(public_id=public_id)