class TelemetaBaseMixin(object):
    # Base mixin holding the shared filesystem cache handles used by
    # Telemeta views/commands.
    # Root of the Django media tree; settings.MEDIA_ROOT must be defined
    # (getattr with no default raises AttributeError otherwise).
    MEDIA_ROOT = getattr(settings, 'MEDIA_ROOT')
    # Default cache directory, nested under the media root.
    CACHE_DIR = os.path.join(MEDIA_ROOT, 'cache')
    # Per-item computed data cache; location overridable via
    # settings.TELEMETA_DATA_CACHE_DIR.
    cache_data = TelemetaCache(getattr(settings, 'TELEMETA_DATA_CACHE_DIR', CACHE_DIR))
    # Exported/transcoded media cache; overridable via
    # settings.TELEMETA_EXPORT_CACHE_DIR, defaults to <cache>/export.
    cache_export = TelemetaCache(getattr(settings, 'TELEMETA_EXPORT_CACHE_DIR', os.path.join(CACHE_DIR, 'export')))
    # Temporary upload area; overridable via settings.FILE_UPLOAD_TEMP_DIR,
    # defaults to <media>/tmp.
    cache_tmp = TelemetaCache(getattr(settings, 'FILE_UPLOAD_TEMP_DIR', os.path.join(MEDIA_ROOT, 'tmp')))
class Command(BaseCommand): help = "Cleanup DB : multiple analyses, data cache, export cache, etc.." args = "cache" cache_data = TelemetaCache(settings.TELEMETA_DATA_CACHE_DIR) cache_export = TelemetaCache(settings.TELEMETA_EXPORT_CACHE_DIR) def handle(self, *args, **options): items = MediaItem.objects.all() a_counter = 0 print 'cleaning multiple analyses per item...' for item in items: if 'cache' in args: print 'cleaning cache...' self.cache_data.delete_item_data(item.code) self.cache_export.delete_item_data(item.code) analyses = MediaItemAnalysis.objects.filter(item=item) ids = [] for analysis in analyses: id = analysis.analyzer_id if id in ids: print 'item : ' + item.code + ' analyzer_id : ' + id analysis.delete() a_counter += 1 else: ids.append(id) print "Done, cleaned %s analyses" % str(a_counter)
class Command(BaseCommand): help = "Cleanup DB : multiple analyses, data cache, export cache, etc.." args = "None" cache_data = None cache_export = None def handle(self, *args, **options): if 'cache' in args: self.cache_data = TelemetaCache(settings.TELEMETA_DATA_CACHE_DIR) self.cache_export = TelemetaCache(settings.TELEMETA_EXPORT_CACHE_DIR) print "Cleaning all cache..." items = MediaItem.objects.all() a_counter = 0 print 'Cleaning multiple analyses per item...' for item in items: if self.cache_data and self.cache_export: self.cache_data.delete_item_data(item.code) self.cache_export.delete_item_data(item.code) analyses = MediaItemAnalysis.objects.filter(item=item) ids = [] for analysis in analyses: id = analysis.analyzer_id if id in ids: print 'item : ' + item.code + ' analyzer_id : ' + id analysis.delete() a_counter += 1 else: ids.append(id) print "Done, cleaned %s analyses" % str(a_counter)
def handle(self, *args, **options):
    # Management-command entry point: deletes duplicate analyses for every
    # media item; when the literal argument 'cache' is present, also clears
    # each item's data and export caches.
    if 'cache' in args:
        self.cache_data = TelemetaCache(settings.TELEMETA_DATA_CACHE_DIR)
        self.cache_export = TelemetaCache(
            settings.TELEMETA_EXPORT_CACHE_DIR)
        print "Cleaning all cache..."
    items = MediaItem.objects.all()
    a_counter = 0  # number of duplicate analyses deleted
    print 'Cleaning multiple analyses per item...'
    for item in items:
        # Both caches are set only when 'cache' was requested above.
        if self.cache_data and self.cache_export:
            self.cache_data.delete_item_data(item.code)
            self.cache_export.delete_item_data(item.code)
        analyses = MediaItemAnalysis.objects.filter(item=item)
        ids = []
        for analysis in analyses:
            id = analysis.analyzer_id
            if id in ids:
                # A second analysis with the same analyzer_id for this
                # item is a duplicate: delete it and count it.
                print 'item : ' + item.code + ' analyzer_id : ' + id
                analysis.delete()
                a_counter += 1
            else:
                ids.append(id)
    print "Done, cleaned %s analyses" % str(a_counter)
class Command(BaseCommand): help = "Test: download and import a test item" args = "absolute paths of a local audio files" code = 'test' title = 'test' urls = ['http://files.parisson.com/telemeta/tests/media/sweep.mp3', 'http://files.parisson.com/telemeta/tests/media/sweep.wav', 'http://files.parisson.com/telemeta/tests/media/test.ogg', 'http://files.parisson.com/telemeta/tests/media/test.flac', 'http://files.parisson.com/telemeta/tests/media/test4.mp3', 'http://files.parisson.com/telemeta/tests/media/test5.wav', 'http://files.parisson.com/telemeta/tests/media/test6.wav'] cache_data = TelemetaCache(settings.TELEMETA_DATA_CACHE_DIR) cache_export = TelemetaCache(settings.TELEMETA_EXPORT_CACHE_DIR) def handle(self, *args, **options): if args: self.urls = [] for file in args: self.urls.append('file://' + file) collections = MediaCollection.objects.filter(code=self.code) if not collections: collection = MediaCollection(code=self.code, title=self.title) collection.public_access = 'full' collection.save() else: collection = collections[0] for url in self.urls: code = url.split('/')[-1] code = code.replace(' ', '_') items = MediaItem.objects.filter(code=code) if not items: item = MediaItem(collection=collection, code=code, title=code) item.save() else: print 'cleanup' item = items[0] self.cache_data.delete_item_data(code) self.cache_export.delete_item_data(code) flags = MediaItemTranscodingFlag.objects.filter(item=item) analyses = MediaItemAnalysis.objects.filter(item=item) for flag in flags: flag.delete() for analysis in analyses: analysis.delete() print 'downloading: ' + url file = urllib.urlopen(url) file_content = ContentFile(file.read()) item.file.save(code, file_content) item.public_access = 'full' item.save() print 'item created: ' + code
def __init__(self, root_dir, dest_dir, log_file):
    """Set up directories, logger, cache and timeside processors.

    root_dir -- base media directory; items live under root_dir + 'items'
    dest_dir -- destination directory for rendered images / cache
    log_file -- path handed to the Telemeta Logger
    """
    from telemeta.cache import TelemetaCache as Cache
    from telemeta.util.logger import Logger
    self.media_item_dir = 'items'
    self.root_dir = root_dir + 'items'
    self.dest_dir = dest_dir
    # Fix: the original assigned logger/counter twice and set threads=1 and
    # force=0 only to overwrite them from the scheme below; the dead first
    # assignments (including a second Logger(log_file) construction) are
    # removed — final attribute values are unchanged.
    self.logger = Logger(log_file)
    self.counter = 0
    self.cache = Cache(self.dest_dir)
    # Rendering parameters all come from the grapher scheme.
    self.scheme = GrapherScheme()
    self.width = self.scheme.width
    self.height = self.scheme.height
    self.bg_color = self.scheme.bg_color
    self.color_scheme = self.scheme.color_scheme
    self.force = self.scheme.force
    self.threads = self.scheme.threads
    self.collection_name = 'awdio'
    self.collection = self.set_collection(self.collection_name)
    self.analyzers = timeside.core.processors(timeside.api.IAnalyzer)
    self.grapher = timeside.grapher.WaveformAwdio(width=self.width,
                                                  height=self.height,
                                                  bg_color=self.bg_color,
                                                  color_scheme=self.color_scheme)
class TelemetaPreprocessImport(object):
    # Pre-processes an uploaded media file (CGI-style): copies it into the
    # items directory, renders a waveform image, runs timeside analyzers,
    # caches the results and registers a MediaItem in the database.

    def __init__(self, root_dir, dest_dir, log_file):
        # root_dir: base media directory (items live under root_dir + 'items')
        # dest_dir: destination for rendered images / analyzer cache
        # log_file: path handed to the Telemeta Logger
        from telemeta.cache import TelemetaCache as Cache
        from telemeta.util.logger import Logger
        self.media_item_dir = 'items'
        self.root_dir = root_dir + 'items'
        self.dest_dir = dest_dir
        # NOTE(review): threads/force/logger/counter are assigned twice in
        # this constructor; the later assignments below win.
        self.threads = 1
        self.logger = Logger(log_file)
        self.counter = 0
        self.force = 0
        self.cache = Cache(self.dest_dir)
        # Rendering parameters come from the grapher scheme.
        self.scheme = GrapherScheme()
        self.width = self.scheme.width
        self.height = self.scheme.height
        self.bg_color = self.scheme.bg_color
        self.color_scheme = self.scheme.color_scheme
        self.force = self.scheme.force
        self.threads = self.scheme.threads
        self.logger = Logger(log_file)
        self.counter = 0
        self.collection_name = 'awdio'
        self.collection = self.set_collection(self.collection_name)
        self.analyzers = timeside.core.processors(timeside.api.IAnalyzer)
        self.grapher = timeside.grapher.WaveformAwdio(width=self.width,
                                                      height=self.height,
                                                      bg_color=self.bg_color,
                                                      color_scheme=self.color_scheme)

    def set_collection(self, collection_name):
        # Return the MediaCollection with the given code, creating (and
        # logging) it first if it does not exist yet.
        import telemeta.models
        collections = telemeta.models.media.MediaCollection.objects.filter(code=collection_name)
        if not collections:
            c = telemeta.models.media.MediaCollection(code=collection_name)
            c.title = collection_name
            c.save()
            msg = 'added'
            self.logger.logger.info(collection_name, msg)
            collection = c
        else:
            collection = collections[0]
        return collection

    def process(self):
        # Main CGI entry point.
        # NOTE(review): relies on module-level globals not visible here —
        # 'fs' (presumably a cgi.FieldStorage) and 'orig_media_dir';
        # confirm against the surrounding script.
        import telemeta.models
        keys = fs.keys()
        if keys[0] == 'file':
            filename = fs['file'].value
            media_orig = orig_media_dir + os.sep + filename
            media = self.root_dir + os.sep + filename
            # Copy the uploaded file into the items directory once and make
            # it world-readable.
            if not os.path.exists(media):
                shutil.copy(media_orig, media)
                os.system('chmod 644 ' + media)
            name, ext = os.path.splitext(filename)
            size = str(self.width) + '_' + str(self.height)
            # Cache keys: one waveform PNG per scheme/size, one analyzer XML.
            image_name = name + '.' + self.scheme.id + '.' + size + '.png'
            image = self.dest_dir + os.sep + image_name
            xml = name + '.xml'
            if not self.cache.exists(image_name) or not self.cache.exists(xml):
                mess = 'Processing ' + media
                self.logger.logger.info(mess)
                # CGI response header, then plain-text progress messages.
                print "Content-type: text/plain\n"
                print mess
                # Build one timeside pipe: decoder -> grapher -> analyzers.
                decoder = timeside.decoder.FileDecoder(media)
                pipe = decoder | self.grapher
                analyzers = []
                analyzers_sub = []
                for analyzer in self.analyzers:
                    subpipe = analyzer()
                    analyzers_sub.append(subpipe)
                    pipe = pipe | subpipe
                pipe.run()
                mess = 'Rendering ' + image
                self.logger.logger.info(mess)
                self.grapher.render(output=image)
                mess = 'Frames / Pixel = ' + str(self.grapher.graph.samples_per_pixel)
                self.logger.logger.info(mess)
                # Collect each analyzer's result; durations are converted to
                # a timedelta for readable serialization.
                for analyzer in analyzers_sub:
                    value = analyzer.result()
                    if analyzer.id() == 'duration':
                        value = datetime.timedelta(0,value)
                    analyzers.append({'name':analyzer.name(),
                                      'id':analyzer.id(),
                                      'unit':analyzer.unit(),
                                      'value':str(value)})
                self.cache.write_analyzer_xml(analyzers, xml)
                # Register the item in the database if it is not known yet.
                item = telemeta.models.media.MediaItem.objects.filter(code=name)
                if not item:
                    item = telemeta.models.media.MediaItem(collection=self.collection, code=name)
                    item.title = name
                    item.file = self.media_item_dir + os.sep + filename
                    item.save()
                    msg = 'added item : ' + filename
                    self.logger.logger.info(self.collection_name, msg)
                # Drop pipeline references before returning.
                pipe = 0
                decoder = 0
                print "OK"
                # NOTE(review): disabled error-handling branch kept as-is.
                #except:
                #pipe = 0
                #decoder = 0
                #mess = 'Could NOT process : ' + media
                #self.logger.logger.error(mess)
                #print mess
            else:
                # Both cache entries exist: nothing to recompute.
                mess = "Nothing to do with file : " + media
                self.logger.logger.info(mess)
                print "Content-type: text/plain\n"
                print mess
        else:
            print "Content-type: text/plain\n"
            print "No file given !"
class Command(BaseCommand): args = "<media_file1 [media_file2 ...]>" help = "Download and import a media item" option_list = BaseCommand.option_list + ( make_option('--collection-code', action='store', dest='code', default='default', metavar='<code>', help='collection code'), make_option('--collection-title', action='store', dest='title', default='default', metavar='<title>', help='collection title'), ) cache_data = TelemetaCache(settings.TELEMETA_DATA_CACHE_DIR) cache_export = TelemetaCache(settings.TELEMETA_EXPORT_CACHE_DIR) urls = [] def handle(self, *args, **options): if len(args) < 1: return if options['title']: self.title = options['title'] if options['code']: self.code = options['code'] for file in args: self.urls.append('file://' + file) collections = MediaCollection.objects.filter(code=self.code) if not collections: # create a new collection collection = MediaCollection(code=self.code, title=self.title) collection.public_access = 'full' collection.save() else: collection = collections[0] for url in self.urls: basename = os.path.basename(url) code = slugify(basename) title = beautify(basename) items = MediaItem.objects.filter(code=code) if not items: item = MediaItem(collection=collection, code=code, title=title) item.save() else: print 'cleaning up', code item = items[0] self.cache_data.delete_item_data(code) self.cache_export.delete_item_data(code) flags = MediaItemTranscodingFlag.objects.filter(item=item) analyses = MediaItemAnalysis.objects.filter(item=item) for flag in flags: flag.delete() for analysis in analyses: analysis.delete() print 'fetching: ' + url file = urllib.urlopen(url) file_content = ContentFile(file.read()) item.title = title item.file.save(code, file_content) item.public_access = 'full' item.save() print 'item created: ', collection, code print 'done importing', len(self.urls), 'items'