def track_unicode_special(DBSession, commit):
    """Fixture-style generator: create a Track tagged with unicode/special
    characters, yield it, then delete the track and its tags on teardown.
    """
    tag_strings = (
        'title:UnicodeAssention',
        'from:Hack//Sign',
        'artist:こ',
    )

    def _make_tag(tag_string):
        # get_tag creates the tag on demand; register it with the session.
        new_tag = get_tag(tag_string, create_if_missing=True)
        DBSession.add(new_tag)
        return new_tag

    created_tags = tuple(map(_make_tag, tag_strings))
    commit()

    unicode_track = Track()
    unicode_track.id = 'x999'
    unicode_track.duration = 120
    unicode_track.tags[:] = created_tags
    unicode_track.source_filename = 'unicode_special'
    DBSession.add(unicode_track)
    commit()

    yield unicode_track

    # Teardown: remove the track first, then every tag it referenced.
    DBSession.delete(unicode_track)
    for created_tag in created_tags:
        DBSession.delete(created_tag)
    commit()
def create_test_track(id=None, duration=None, tags=(), attachments=(), lyrics=None, source_filename=None):
    """Build and session-register a Track for tests.

    Any field left unset gets a plausible default (random id / duration).
    ``tags`` are tag strings resolved via ``get_tag``; each entry of
    ``attachments`` is either an attachment-like object (has ``location``
    and ``type``) or a filename fragment used to look one up in the DB.
    """
    def _resolve_tag(tag_string):
        return get_tag(tag_string, create_if_missing=True)

    def _resolve_attachment(attachment):
        # Attachment-like objects go straight to create_attachment;
        # anything else is treated as a filename fragment to query for.
        if hasattr(attachment, 'location') and hasattr(attachment, 'type'):
            return create_attachment(attachment)
        pattern = '%%{0}%%'.format(attachment)
        return DBSession.query(Attachment).filter(Attachment.location.like(pattern)).one()

    track = Track()
    track.id = id or random_string(10)
    track.duration = duration or random.randint(60, 360)
    track.tags.extend(_resolve_tag(tag) for tag in tags)
    track.attachments.extend(_resolve_attachment(attachment) for attachment in attachments)
    track.lyrics = lyrics if lyrics else ''
    track.source_filename = source_filename
    DBSession.add(track)
    return track
def track_unicode_special(DBSession, commit):
    """Yield a committed Track carrying unicode-heavy tags; clean up after use."""
    TAG_STRINGS = (
        'title:UnicodeAssention',
        'from:Hack//Sign',
        'artist:こ',
    )

    # Create (or fetch) each tag and attach it to the session.
    tag_list = []
    for tag_string in TAG_STRINGS:
        tag = get_tag(tag_string, create_if_missing=True)
        DBSession.add(tag)
        tag_list.append(tag)
    tag_objs = tuple(tag_list)
    commit()

    track = Track()
    track.id = 'x999'
    track.duration = 120
    track.tags[:] = tag_objs
    track.source_filename = 'unicode_special'
    DBSession.add(track)
    commit()

    yield track

    # Teardown phase: delete the track, then the tags created above.
    DBSession.delete(track)
    for tag_obj in tag_objs:
        DBSession.delete(tag_obj)
    commit()
def create_test_track(id=None, duration=None, tags=(), attachments=(), lyrics=None, source_filename=None):
    """Build a Track for tests, filling unset fields with plausible defaults.

    ``tags`` are tag strings resolved via ``get_tag``; ``attachments`` are
    filename fragments matched against ``Attachment.location`` in the DB.

    Fixes over the previous version:
    - mutable default arguments (``tags=[]``, ``attachments=[]``) replaced
      with immutable tuples to avoid cross-call sharing;
    - list comprehensions used purely for side effects replaced with loops;
    - ``track.lyrics.append(lyrics)`` replaced with a direct assignment —
      ``lyrics`` is a string field (the sibling create_test_track assigns
      ``lyrics or ''``), so ``.append`` would raise AttributeError.
    """
    def _get_tag(tag):
        return get_tag(tag, create_if_missing=True)

    def _get_attachment(filename):
        # Look up the single existing attachment whose location contains
        # the given filename fragment.
        return DBSession.query(Attachment).filter(
            Attachment.location.like('%%{0}%%'.format(filename))).one()

    track = Track()
    track.id = id if id else random_string(10)
    track.duration = duration if duration else random.randint(60, 360)
    for tag in tags:
        track.tags.append(_get_tag(tag))
    for attachment in attachments:
        track.attachments.append(_get_attachment(attachment))
    track.lyrics = lyrics or ''
    track.source_filename = source_filename
    return track
def track_import_post(request):
    """Import tracks described in the JSON request body.

    Tracks whose id already exists are skipped with a warning. Each new
    track gets its attachments and tags populated, duplicate tags are
    dropped, and the track-list version counter is bumped after commit.

    Fix: the duplicate-tag cleanup previously iterated a *lazy* generator
    over ``track.tags`` while calling ``track.tags.remove(...)`` on the
    same list — mutating a list during iteration can skip elements, and
    tags present three or more times were not fully deduplicated. The tag
    list is now rebuilt in one order-preserving pass.
    """
    existing_track_ids = _existing_tracks_dict().keys()
    for track_dict in _get_json_request(request):
        if track_dict['id'] in existing_track_ids:
            log.warning('Exists: {source_filename} - {id}'.format(**track_dict))
            continue
        log.info('Import: {source_filename} - {id}'.format(**track_dict))
        track = Track()
        track.id = track_dict['id']
        track.source_filename = track_dict['source_filename']
        track.duration = track_dict['duration']
        track.lyrics = track_dict['lyrics']
        # Attachments
        for attachment_dict in track_dict['attachments']:
            assert attachment_dict['type'] in ATTACHMENT_TYPES
            attachment = Attachment()
            attachment.type = attachment_dict['type']
            attachment.location = attachment_dict['location']
            track.attachments.append(attachment)
        # Tags
        for tag_string in track_dict['tags']:
            tag = get_tag(tag_string, create_if_missing=True)
            if tag:
                track.tags.append(tag)
            elif tag_string:
                log.warning('null tag %s', tag_string)
        # Drop duplicate tags: rebuild the list keeping only the first
        # occurrence of each tag (no mutation while iterating).
        seen_tags = []
        for tag in track.tags:
            if tag in seen_tags:
                log.warning('Unneeded duplicate tag found %s in %s', tag, track.source_filename)
            else:
                seen_tags.append(tag)
        track.tags[:] = seen_tags
        DBSession.add(track)
    commit()
    request.registry.settings['karakara.tracks.version'] += 1
    return action_ok()
def track_import_post(request):
    """Import tracks from the JSON request payload.

    Skips ids that already exist, builds each new Track (attachments,
    tags, lyrics), removes duplicate tags, commits once at the end, and
    increments the ``karakara.tracks.version`` counter.

    Fix: duplicate-tag removal used a lazy generator over ``track.tags``
    while removing from that same list, i.e. mutation during iteration —
    elements could be skipped and triple-or-more duplicates survived.
    Duplicates are now eliminated in a single order-preserving rebuild.
    """
    existing_track_ids = _existing_tracks_dict().keys()
    for track_dict in _get_json_request(request):
        if track_dict['id'] in existing_track_ids:
            log.warning('Exists: {source_filename} - {id}'.format(**track_dict))
            continue
        log.info('Import: {source_filename} - {id}'.format(**track_dict))
        track = Track()
        track.id = track_dict['id']
        track.source_filename = track_dict['source_filename']
        track.duration = track_dict['duration']
        track.lyrics = track_dict['lyrics']
        # Attachments
        for attachment_dict in track_dict['attachments']:
            assert attachment_dict['type'] in ATTACHMENT_TYPES
            attachment = Attachment()
            attachment.type = attachment_dict['type']
            attachment.location = attachment_dict['location']
            track.attachments.append(attachment)
        # Tags
        for tag_string in track_dict['tags']:
            tag = get_tag(tag_string, create_if_missing=True)
            if tag:
                track.tags.append(tag)
            elif tag_string:
                log.warning('null tag %s', tag_string)
        # Deduplicate tags safely: collect first occurrences, warn on the
        # extras, then assign the cleaned list back in one slice write.
        unique_tags = []
        for tag in track.tags:
            if tag not in unique_tags:
                unique_tags.append(tag)
            else:
                log.warning('Unneeded duplicate tag found %s in %s', tag, track.source_filename)
        track.tags[:] = unique_tags
        DBSession.add(track)
    commit()
    request.registry.settings['karakara.tracks.version'] += 1
    return action_ok()
def import_track(self, name):
    """Import a single named media item as a Track.

    Returns True when a new Track was committed, False when a track with
    the same source hash already exists.

    Raises:
        TrackNotProcesedException: metadata has no source hash yet
            (the item was never processed).
        TrackMissingProcessedFiles: the source was processed but some
            derived files are missing; the item is flagged for re-encode.
    """
    log.debug('Attemping: %s', name)
    self.meta_manager.load(name)
    m = self.meta_manager.get(name)
    # No source hash means the processing pipeline never ran for this item.
    if not m.source_hash:
        raise TrackNotProcesedException()
    if self._missing_files(m.processed_files):
        # If we are missing any files but we have a source hash,
        # we may have some of the derived media missing.
        # Explicity mark the item for reencoding
        if PENDING_ACTION['encode'] not in m.pending_actions:  # Feels clunky to manage this as a list? maybe a set?
            m.pending_actions.append(PENDING_ACTION['encode'])
            self.meta_manager.save(name)  # Feels clunky
        # `id` is the source hash when already known to the track set,
        # otherwise the `and` short-circuits to False.
        raise TrackMissingProcessedFiles(id=m.source_hash in self.exisiting_track_ids and m.source_hash)
    # Already imported under this hash — nothing to do.
    if m.source_hash in self.exisiting_track_ids:
        log.debug('Exists: %s', name)
        return False
    log.info('Import: %s', name)
    track = Track()
    track.id = m.source_hash
    track.source_filename = name
    track.duration = m.source_details.get('duration')
    # Delegate per-aspect population to the sibling helpers.
    self._add_attachments(track, m.processed_files)
    self._add_lyrics(track, m.processed_files.get('srt'))
    self._add_tags(track, m.processed_files.get('tags'))
    DBSession.add(track)
    commit()
    self.exisiting_track_ids.add(m.source_hash)  # HACK!! .. we should not have duplicate hashs's in the source set. This is a temp patch
    return True
def create_test_track(id=None, duration=None, tags=(), attachments=(), lyrics=None, source_filename=None):
    """Construct a Track test fixture and add it to the session.

    Unset id/duration fields are randomized. Tag strings are resolved
    (created if missing); attachments may be attachment-like objects or
    filename fragments to query for.
    """
    def _tag_for(tag_string):
        return get_tag(tag_string, create_if_missing=True)

    def _attachment_for(attachment):
        has_attachment_shape = hasattr(attachment, 'location') and hasattr(attachment, 'type')
        if has_attachment_shape:
            # Already attachment-like: build a real Attachment from it.
            return create_attachment(attachment)
        # Otherwise treat it as a filename fragment and fetch the match.
        query = DBSession.query(Attachment).filter(Attachment.location.like('%%{0}%%'.format(attachment)))
        return query.one()

    track = Track()
    if id:
        track.id = id
    else:
        track.id = random_string(10)
    if duration:
        track.duration = duration
    else:
        track.duration = random.randint(60, 360)
    for tag_string in tags:
        track.tags.append(_tag_for(tag_string))
    for attachment in attachments:
        track.attachments.append(_attachment_for(attachment))
    track.lyrics = lyrics or ''
    track.source_filename = source_filename
    DBSession.add(track)
    return track