def create_test_track(id=None, duration=None, tags=(), attachments=(), lyrics=None, source_filename=None):
    """
    Build a Track populated with the given test data and add it to DBSession.

    Args:
        id: explicit track id; a random 10-char string is generated when falsy.
        duration: length in seconds; random 60-360 when falsy.
        tags: iterable of tag strings, resolved (creating on demand) via get_tag.
        attachments: iterable of either attachment-description objects
            (anything exposing .location and .type) or filename fragments used
            to look up exactly one existing Attachment row.
        lyrics: optional lyrics text; stored as '' when falsy.
        source_filename: recorded verbatim on the track.

    Returns:
        The new Track, added to DBSession but not committed.
    """
    def _get_tag(tag):
        # Create-on-demand so test data does not need pre-seeded tags.
        return get_tag(tag, create_if_missing=True)

    def _get_attachment(attachment):
        if hasattr(attachment, 'location') and hasattr(attachment, 'type'):
            return create_attachment(attachment)
        # BUGFIX: the LIKE pattern was '%%{0}%%' - a leftover from %-style
        # formatting. str.format() does NOT collapse '%%', so the SQL pattern
        # carried doubled wildcards. '%{0}%' is the intended single-wildcard
        # pattern (matches the same rows, without the confusion).
        return DBSession.query(Attachment).filter(
            Attachment.location.like('%{0}%'.format(attachment))).one()

    track = Track()
    track.id = id if id else random_string(10)
    track.duration = duration if duration else random.randint(60, 360)
    for tag in tags:
        track.tags.append(_get_tag(tag))
    for attachment in attachments:
        track.attachments.append(_get_attachment(attachment))
    track.lyrics = lyrics or ''
    track.source_filename = source_filename
    DBSession.add(track)
    return track
def tracks(request, DBSession, commit, tags, attachments, lyrics):
    """
    Fixture: create four test tracks (t1, t2, t3 and the 'xxx' wildcard)
    whose tags, attachments and lyrics come from the sibling fixtures.

    Returns the list of created Track objects after committing.
    """
    tracks_data = [
        {
            'id': "t1",
            'duration': 100,
            'tags': [
                'title :Test Track 1',
                #'description:Test track for the KaraKara system with キ',
                'opening', 'male', 'jp', 'anime', 'jpop',
                'series X',
            ],
            'attachments': ['image1', 'preview1'],
            'lyrics': lyrics[0],
        },
        {
            'id': "t2",
            'duration': 200,
            'tags': [
                'title :Test Track 2',
                #'description:Test track for the KaraKara system with キ'
                'ending', 'female', 'en', 'anime',
                'series X',
            ],
            'attachments': ['image2', 'preview2'],
            'lyrics': lyrics[1],
        },
        {
            'id': "t3",
            'duration': 300,
            'tags': [
                'title :Test Track 3 キ',
                #'description:Test track for the KaraKara system with キ',
                'ending', 'female', 'en', 'jpop',
                'series Y',
            ],
            'attachments': ['image3', 'preview3'],
        },
        {
            'id': "xxx",
            'duration': 400,
            'tags': [
                'title :Wildcard',
                'fr',
            ],
            'attachments': [],
        },
    ]

    tracks = []  # Keep tabs on all tracks generated
    for track_data in tracks_data:
        track = create_test_track(**track_data)
        # NOTE(review): create_test_track already adds the track to the
        # session; this second add looks redundant but is harmless - confirm.
        DBSession.add(track)
        tracks.append(track)

    def finalizer():
        # Cleanup intentionally disabled; tracks persist for the session.
        pass
        #for track in tracks:
        #    DBSession.delete(track)
        #commit()
    request.addfinalizer(finalizer)

    commit()
    return tracks
def tags(request):
    """Fixture: ensure the three basic 'from:series' tags exist and return them."""
    tag_strings = (
        'from:series X',
        'from:series Y',
        'from:series Z',
    )
    created_tags = []
    for tag_string in tag_strings:
        tag_obj = get_tag(tag_string, create_if_missing=True)
        DBSession.add(tag_obj)
        created_tags.append(tag_obj)

    def finalizer():
        # Teardown intentionally does nothing; tags persist for the session.
        pass
        #for tag in tags:
        #    DBSession.delete(tag)
        #commit()
    #request.addfinalizer(finalizer)

    commit()
    return created_tags
def attachments(request):  #, DBSession, commit
    """Fixture: seed the Attachment table with a fixed set of test media files."""
    attachments_data = [
        ('test/preview1.3gp', 'preview'),
        ('test/preview2.flv', 'preview'),
        ('test/preview3.mp4', 'preview'),
        ('test/image1.jpg', 'thumbnail'),
        ('test/image2.jpg', 'thumbnail'),
        ('test/image3.png', 'thumbnail'),
        ('test/processed.mpg', 'video'),
        ('test/subtitles.ssa', 'subtitle'),
    ]
    created = []
    for location, attachment_type in attachments_data:
        record = Attachment()
        record.location = location
        record.type = attachment_type
        DBSession.add(record)
        created.append(record)

    def finalizer():
        # Cleanup intentionally disabled; rows persist for the test session.
        pass
        #for attachment in attachments:
        #    DBSession.delete(attachment)
        #commit()
    request.addfinalizer(finalizer)

    commit()
    return created
def test_list(app, users, tracks):
    """Every track's source filename should appear on the community list page."""
    login(app)
    response = app.get('/comunity/list')
    for track in tracks:
        # Re-attach to the session; the fixture object may be detached here.
        DBSession.add(track)
        assert track.source_filename in response.text
    logout(app)
def test_list(app, users, tracks):
    """The community list view lists each known track by its source filename."""
    login(app)
    page = app.get('/comunity/list')
    for track in tracks:
        DBSession.add(track)  # re-attach detached fixture object to the session
        assert track.source_filename in page.text
    logout(app)
def login(app, name='TestUser'):
    """
    Log *name* into the community interface via a mocked social-login provider.

    Looks up the user's stored SocialToken, patches the login-provider
    registry with a mock that accepts exactly that token, then performs the
    login request and asserts the mock was used.

    Args:
        app: WebTest-style application under test.
        name: ComunityUser.name to log in as (must already exist in the DB).
    """
    # The user's first stored token drives the mock provider below.
    social_token = DBSession.query(ComunityUser).filter(
        ComunityUser.name == name).one().tokens[0]

    class MockLoginProvider(ILoginProvider):
        # This token should match [email protected]
        provider_token = ProviderToken(
            social_token.provider, token=social_token.token, response={})

        @property
        def html_include(self):
            # Marker script asserted on below, proving the mock was rendered.
            return """<script>console.log('MockLoginProvider html_include');</script>"""

        def verify_cridentials(self, request):  # (sic: project spelling)
            assert request.params.get('token') == 'token'
            return self.provider_token

        def aquire_additional_user_details(self, provider_token):  # (sic)
            assert provider_token == self.provider_token
            return social_token.data

    from karakara.views.comunity_login import social_login
    # Swap in the mock provider only for the duration of the login request.
    with patch.dict(social_login.login_providers, {'test_provider': MockLoginProvider()}):
        response = app.get('/comunity/login?token=token')
        assert name in response.text
        assert 'MockLoginProvider' in response.text
def track_import_post(request):
    """
    Import tracks from the JSON request body.

    Tracks whose id already exists are skipped. Each new track gets its
    attachments and tags attached, duplicate tags are dropped (keeping the
    first occurrence), and the track-list version counter is bumped.

    Returns:
        action_ok() on completion.
    """
    existing_track_ids = _existing_tracks_dict().keys()
    for track_dict in _get_json_request(request):
        if track_dict['id'] in existing_track_ids:
            log.warning('Exists: {source_filename} - {id}'.format(**track_dict))
            continue
        log.info('Import: {source_filename} - {id}'.format(**track_dict))
        track = Track()
        track.id = track_dict['id']
        track.source_filename = track_dict['source_filename']
        track.duration = track_dict['duration']
        track.lyrics = track_dict['lyrics']

        # Attachments
        for attachment_dict in track_dict['attachments']:
            assert attachment_dict['type'] in ATTACHMENT_TYPES
            attachment = Attachment()
            attachment.type = attachment_dict['type']
            attachment.location = attachment_dict['location']
            track.attachments.append(attachment)

        # Tags
        for tag_string in track_dict['tags']:
            tag = get_tag(tag_string, create_if_missing=True)
            if tag:
                track.tags.append(tag)
            elif tag_string:
                log.warning('null tag %s', tag_string)

        # BUGFIX: the previous code removed items from track.tags while
        # iterating a generator over that same list, which skips elements
        # (mutation during iteration) and could leave or over-remove
        # duplicates. Rebuild the list keeping the first occurrence instead.
        unique_tags = []
        for tag in track.tags:
            if tag in unique_tags:
                log.warning('Unneeded duplicate tag found %s in %s', tag, track.source_filename)
            else:
                unique_tags.append(tag)
        track.tags[:] = unique_tags

        DBSession.add(track)
    commit()

    request.registry.settings['karakara.tracks.version'] += 1
    return action_ok()
def tracks_volume(request):
    """
    Fixture: 15 throwaway tracks tagged 'test', deleted again on teardown.

    Returns the list of created Track objects after committing.
    """
    tracks = [create_test_track(tags=['test']) for track_num in range(15)]
    # IDIOM FIX: was `[DBSession.add(track) for track in tracks]` - a list
    # comprehension used purely for its side effect. A plain loop states the
    # intent. (create_test_track already adds each track to the session, so
    # this re-add is a harmless no-op safeguard.)
    for track in tracks:
        DBSession.add(track)

    def finalizer():
        # Remove the volume tracks so later tests see a clean table.
        for track in tracks:
            DBSession.delete(track)
        commit()
    request.addfinalizer(finalizer)

    commit()
    return tracks
def track_unicode_special(DBSession, commit):
    """
    Yield a committed track whose tags contain non-ASCII characters,
    then remove the track and its tags on teardown.
    """
    tag_strings = (
        'title:UnicodeAssention',
        'from:Hack//Sign',
        'artist:こ',
    )

    tag_objs = []
    for tag_string in tag_strings:
        tag_obj = get_tag(tag_string, create_if_missing=True)
        DBSession.add(tag_obj)
        tag_objs.append(tag_obj)
    tag_objs = tuple(tag_objs)
    commit()

    track = Track()
    track.id = 'x999'
    track.duration = 120
    track.tags[:] = tag_objs
    track.source_filename = 'unicode_special'
    DBSession.add(track)
    commit()

    yield track

    # Teardown: remove the track first, then each of its tags.
    DBSession.delete(track)
    for tag_obj in tag_objs:
        DBSession.delete(tag_obj)
    commit()
def track_import_post(request):
    """
    Import tracks from the JSON request body (duplicate of the other
    track_import_post definition; kept consistent with the same fix).

    Existing ids are skipped; new tracks receive attachments and de-duplicated
    tags; finally the track-list version counter is incremented.

    Returns:
        action_ok() on completion.
    """
    existing_track_ids = _existing_tracks_dict().keys()
    for track_dict in _get_json_request(request):
        if track_dict['id'] in existing_track_ids:
            log.warning('Exists: {source_filename} - {id}'.format(**track_dict))
            continue
        log.info('Import: {source_filename} - {id}'.format(**track_dict))
        track = Track()
        track.id = track_dict['id']
        track.source_filename = track_dict['source_filename']
        track.duration = track_dict['duration']
        track.lyrics = track_dict['lyrics']

        # Attachments
        for attachment_dict in track_dict['attachments']:
            assert attachment_dict['type'] in ATTACHMENT_TYPES
            attachment = Attachment()
            attachment.type = attachment_dict['type']
            attachment.location = attachment_dict['location']
            track.attachments.append(attachment)

        # Tags
        for tag_string in track_dict['tags']:
            tag = get_tag(tag_string, create_if_missing=True)
            if tag:
                track.tags.append(tag)
            elif tag_string:
                log.warning('null tag %s', tag_string)

        # BUGFIX: previously removed items from track.tags while iterating a
        # generator over the same list (mutation during iteration). Rebuild
        # the list keeping only the first occurrence of each tag.
        unique_tags = []
        for tag in track.tags:
            if tag in unique_tags:
                log.warning('Unneeded duplicate tag found %s in %s', tag, track.source_filename)
            else:
                unique_tags.append(tag)
        track.tags[:] = unique_tags

        DBSession.add(track)
    commit()

    request.registry.settings['karakara.tracks.version'] += 1
    return action_ok()
def consume(self):
    """
    Mark the caller's currently-valid priority token as used.

    Returns:
        True if an unused, currently-valid token owned by this session was
        found and consumed; False when no such token exists.
    """
    try:
        # NOTE: '== False' (rather than 'not ...' / 'is False') is required:
        # SQLAlchemy overloads '==' on columns to build the SQL expression.
        token = DBSession.query(PriorityToken) \
            .filter(PriorityToken.used == False) \
            .filter(PriorityToken.session_owner == self.user_id) \
            .filter(PriorityToken.valid_start <= now(), PriorityToken.valid_end > now()) \
            .one()
        token.used = True
        # Token is single-use; drop the client's cookie so it is not re-sent.
        self.request.response.delete_cookie('priority_token')
        log.debug('priority_token consumed')
        return True
    except NoResultFound:
        return False
def random_images(request):
    """
    Return up to `count` (default 100) randomly ordered thumbnail locations.

    The player interface titlescreen can be populated with random thumbnails
    from the system. This is a nice showcase.
    Not optimised as this is rarely called.
    """
    import random
    from karakara.model import DBSession
    from karakara.model.model_tracks import Attachment

    rows = DBSession.query(Attachment.location).filter(Attachment.type == 'thumbnail').all()
    random.shuffle(rows)
    locations = [row[0] for row in rows]
    limit = int(request.params.get('count', 0) or 100)
    return action_ok(data={'thumbnails': locations[0:limit]})
def users(request):
    """
    Fixture: create two community users - an approved 'TestUser' and an
    unapproved 'UnknownUser' - each carrying one social token for the mock
    'test_provider'. Returns the list of created users after committing.
    """
    users = []

    # Approved user - expected to be able to log in.
    user = ComunityUser()
    user.name = 'TestUser'
    user.email = '*****@*****.**'
    user.approved = True
    token = SocialToken()
    token.token = 'abcdefg'
    token.provider = 'test_provider'
    token.data = {'avatar_url': 'avatar1.png'}
    user.tokens.append(token)
    DBSession.add(user)
    users.append(user)

    # Unapproved user - expected to be rejected by approval checks.
    user = ComunityUser()
    user.name = 'UnknownUser'
    user.email = '*****@*****.**'
    user.approved = False
    token = SocialToken()
    token.token = '1234567'
    token.provider = 'test_provider'
    token.data = {'avatar_url': 'avatar2.png'}
    user.tokens.append(token)
    DBSession.add(user)
    users.append(user)

    def finalizer():
        # Cleanup intentionally disabled; users persist for the session.
        pass
        #for user in users:
        #    DBSession.delete(tag)
        #commit()
    #request.addfinalizer(finalizer)

    commit()
    return users
def tags(request):  #, DBSession, commit
    """Fixture: create (or fetch) the three basic series tags and commit them."""
    tags = []
    for tag_data in ['from:series X', 'from:series Y', 'from:series Z']:
        tag = get_tag(tag_data, create_if_missing=True)
        tags.append(tag)
        DBSession.add(tag)

    def finalizer():
        # No-op teardown; the created tags are left in place.
        pass
        #for tag in tags:
        #    DBSession.delete(tag)
        #commit()
    #request.addfinalizer(finalizer)

    commit()
    return tags
def random_images(request):
    """
    Return up to `count` (default 100) random image attachment locations.

    The player titlescreen uses these as a showcase; not optimised since the
    endpoint is rarely called.
    """
    import random
    from karakara.model import DBSession
    from karakara.model.model_tracks import Attachment

    # TODO: use serach.restrict_trags to get the images for the current event
    image_rows = DBSession.query(Attachment.location).filter(Attachment.type == 'image').all()
    random.shuffle(image_rows)
    image_locations = [row[0] for row in image_rows]
    requested = int(request.params.get('count', 0) or 100)
    return action_ok(data={'images': image_locations[:requested]})
def import_track(self, name):
    """
    Import a single named media item into the Track table.

    Returns:
        True if a new Track row was created; False when a track with the
        same source hash already exists.

    Raises:
        TrackNotProcesedException: the meta has no source_hash yet.
        TrackMissingProcessedFiles: derived/processed files are absent; the
            exception's `id` carries the existing track id when one exists.
    """
    log.debug('Attemping: %s', name)
    self.meta_manager.load(name)
    m = self.meta_manager.get(name)
    if not m.source_hash:
        raise TrackNotProcesedException()
    if self._missing_files(m.processed_files):
        # If we are missing any files but we have a source hash,
        # we may have some of the derived media missing.
        # Explicity mark the item for reencoding
        if PENDING_ACTION['encode'] not in m.pending_actions:  # Feels clunky to manage this as a list? maybe a set?
            m.pending_actions.append(PENDING_ACTION['encode'])
            self.meta_manager.save(name)  # Feels clunky
        # `id` is the existing track id when this hash is already in the DB,
        # otherwise False (the `and` short-circuit).
        raise TrackMissingProcessedFiles(id=m.source_hash in self.exisiting_track_ids and m.source_hash)
    if m.source_hash in self.exisiting_track_ids:
        log.debug('Exists: %s', name)
        return False
    log.info('Import: %s', name)
    track = Track()
    track.id = m.source_hash
    track.source_filename = name
    track.duration = m.source_details.get('duration')
    self._add_attachments(track, m.processed_files)
    self._add_lyrics(track, m.processed_files.get('srt'))
    self._add_tags(track, m.processed_files.get('tags'))
    DBSession.add(track)
    commit()
    self.exisiting_track_ids.add(m.source_hash)  # HACK!! .. we should not have duplicate hashs's in the source set. This is a temp patch
    return True
def create_test_track(id=None, duration=None, tags=(), attachments=(), lyrics=None, source_filename=None):
    """
    Build a Track from the given test data and add it to DBSession.

    Duplicate of the other create_test_track definition; carries the same
    LIKE-pattern fix for consistency.

    Args:
        id: explicit track id; random 10-char string when falsy.
        duration: seconds; random 60-360 when falsy.
        tags: tag strings resolved via get_tag (created on demand).
        attachments: description objects (with .location/.type) or filename
            fragments matched against exactly one Attachment row.
        lyrics: optional lyrics text; '' when falsy.
        source_filename: stored verbatim.

    Returns:
        The new Track (added to the session, not committed).
    """
    def _get_tag(tag):
        return get_tag(tag, create_if_missing=True)

    def _get_attachment(attachment):
        if hasattr(attachment, 'location') and hasattr(attachment, 'type'):
            return create_attachment(attachment)
        # BUGFIX: '%%{0}%%' doubled the SQL LIKE wildcards (str.format does
        # not collapse '%%'); '%{0}%' is the intended pattern.
        return DBSession.query(Attachment).filter(Attachment.location.like('%{0}%'.format(attachment))).one()

    track = Track()
    track.id = id if id else random_string(10)
    track.duration = duration if duration else random.randint(60, 360)
    for tag in tags:
        track.tags.append(_get_tag(tag))
    for attachment in attachments:
        track.attachments.append(_get_attachment(attachment))
    track.lyrics = lyrics or ''
    track.source_filename = source_filename
    DBSession.add(track)
    return track
def random_images(request):
    """
    Shuffle all image attachment locations and return the first `count`
    (default 100). Deliberately unoptimised; this endpoint is rarely hit.
    """
    import random
    from karakara.model import DBSession
    from karakara.model.model_tracks import Attachment

    # TODO: use serach.restrict_trags to get the images for the current event
    rows = DBSession.query(Attachment.location).filter(Attachment.type == 'image').all()
    random.shuffle(rows)
    locations = [row[0] for row in rows]
    count = int(request.params.get('count', 0) or 100)
    return action_ok(data={'images': locations[0:count]})
def import_media(**kwargs):
    """
    Import every processed media item into the Track DB and prune stale rows.

    kwargs are forwarded to MetaManagerExtended (paths etc.).

    Returns:
        stats: dict of name collections describing what was imported,
        skipped, removed or aborted during this run.
    """
    stats = dict(meta_set=set(), meta_imported=set(), meta_unprocessed=set(), db_removed=list(), missing_processed_deleted=set(), missing_processed_aborted=set(), db_start=set(), meta_hash_matched_db_hash=set())

    def get_db_track_names():
        return set(t.source_filename for t in DBSession.query(Track.source_filename))

    meta_manager = MetaManagerExtended(**kwargs)
    importer = TrackImporter(meta_manager=meta_manager)
    stats['db_start'] = get_db_track_names()

    meta_manager.load_all()  # mtime=epoc(last_update())
    meta_processed_track_ids = set(meta_manager.source_hashs)
    stats['meta_set'] = set(m.name for m in meta_manager.meta_items if m.source_hash)

    for name in progress_bar(meta_manager.meta.keys()):
        try:
            if importer.import_track(name):
                stats['meta_imported'].add(name)
            else:
                stats['meta_hash_matched_db_hash'].add(name)
        except TrackNotProcesedException:
            log.debug('Unprocessed (no source_hash): %s', name)
            stats['meta_unprocessed'].add(name)
        except TrackMissingProcessedFiles as ex:
            if ex.id:
                # A stale DB row exists for this hash - drop it and move on.
                log.warning('Missing (processed files) delete existing: %s', name)
                delete_track(ex.id)
                commit()
                stats['missing_processed_deleted'].add(name)
            else:
                log.warning('Missing (processed files) abort import: %s', name)
                stats['missing_processed_aborted'].add(name)

    # Prune DB rows whose source hash no longer appears in the processed meta.
    for unneeded_track_id in importer.exisiting_track_ids - meta_processed_track_ids:
        log.warning('Remove: %s', unneeded_track_id)
        stats['db_removed'].append(DBSession.query(Track).get(unneeded_track_id).source_filename or unneeded_track_id)
        delete_track(unneeded_track_id)
    commit()

    stats['db_end'] = get_db_track_names()
    #assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']  # TODO! Reinstate this
    return stats
def login(app, name='TestUser'):
    """
    Log *name* in through a patched mock social-login provider.

    Duplicate of the other login helper: fetches the user's stored token,
    installs a MockLoginProvider that accepts it, performs the login request
    and asserts the mock was used.
    """
    social_token = DBSession.query(ComunityUser).filter(ComunityUser.name == name).one().tokens[0]

    class MockLoginProvider(ILoginProvider):
        # this token should match [email protected]
        provider_token = ProviderToken(social_token.provider, token=social_token.token, response={})

        @property
        def html_include(self):
            # Marker asserted on below to prove the mock rendered.
            return """<script>console.log('MockLoginProvider html_include');</script>"""

        def verify_cridentials(self, request):  # (sic: project spelling)
            assert request.params.get('token') == 'token'
            return self.provider_token

        def aquire_additional_user_details(self, provider_token):  # (sic)
            assert provider_token == self.provider_token
            return social_token.data

    from karakara.views.comunity_login import social_login
    # Patch only for the duration of the login request.
    with patch.dict(social_login.login_providers, {'test_provider': MockLoginProvider()}):
        response = app.get('/comunity/login?token=token')
        assert name in response.text
        assert 'MockLoginProvider' in response.text
def queue(request, DBSession, commit, cache_store):
    """
    Fixture: yield the id of a freshly created, password-protected queue.

    Creates a Queue plus a 'karakara.private.password' QueueSetting (both
    keyed and valued by the queue id), commits, and invalidates the cache so
    the new queue is visible to the app under test.
    """
    QUEUE_ID = 'qtest'
    queue = Queue(id=QUEUE_ID)
    DBSession.add(queue)
    queue_setting = QueueSetting()
    queue_setting.queue_id = QUEUE_ID
    queue_setting.key = 'karakara.private.password'
    queue_setting.value = QUEUE_ID
    DBSession.add(queue_setting)
    commit()
    cache_store.invalidate()
    yield QUEUE_ID
    # NOTE(review): teardown deletes the queue but not queue_setting, and no
    # commit follows - confirm surrounding fixtures flush/cascade this.
    DBSession.delete(queue)
def queue(request, DBSession, commit):
    """
    Fixture (older variant using the module-level `cache`): yield the id of
    a freshly created, password-protected queue.
    """
    QUEUE_ID = 'qtest'
    queue = Queue(id=QUEUE_ID)
    DBSession.add(queue)
    queue_setting = QueueSetting()
    queue_setting.queue_id = QUEUE_ID
    queue_setting.key = 'karakara.private.password'
    queue_setting.value = QUEUE_ID
    DBSession.add(queue_setting)
    commit()
    cache.invalidate()
    yield QUEUE_ID
    # NOTE(review): queue_setting is not deleted and no commit follows the
    # delete - confirm surrounding fixtures flush/cascade this.
    DBSession.delete(queue)
def _get_attachment(attachment):
    """
    Resolve *attachment* to an Attachment row.

    An object exposing .location and .type is treated as a description and
    created; anything else is treated as a filename fragment and looked up
    with a LIKE query that must match exactly one row (raises otherwise).
    """
    if hasattr(attachment, 'location') and hasattr(attachment, 'type'):
        return create_attachment(attachment)
    # BUGFIX: '%%{0}%%' is a leftover from %-formatting; str.format leaves
    # '%%' untouched, producing doubled SQL wildcards. '%{0}%' is the
    # intended single-wildcard pattern.
    return DBSession.query(Attachment).filter(Attachment.location.like('%{0}%'.format(attachment))).one()
def finalizer():
    # Teardown: remove every attachment created by the enclosing fixture
    # and persist the deletions.
    for attachment in attachments:
        DBSession.delete(attachment)
    commit()
def finalizer():
    # Teardown: remove every mock track created by the enclosing fixture
    # and persist the deletions.
    for track in mock_tracks:
        DBSession.delete(track)
    commit()
def create_attachment(attachment_description):
    """Create, register and return an Attachment from a description object."""
    new_attachment = Attachment()
    new_attachment.location = attachment_description.location
    new_attachment.type = attachment_description.type
    DBSession.add(new_attachment)
    return new_attachment
def del_track(track_id):
    """Delete the track row with *track_id* (if any) and commit."""
    matching = DBSession.query(Track).filter(Track.id == track_id)
    matching.delete()
    commit()
def _existing_tracks_dict():
    """Map every existing track id to its source filename."""
    rows = DBSession.query(Track.id, Track.source_filename)
    return dict((row.id, row.source_filename) for row in rows)
def issue(self): priority_window = self.settings.get( 'karakara.queue.add.limit.priority_window') # TODO: Depreciate this priority_window settings # This can be auto-calculated from the current average track length in the queue # Aquire most recent priority token - if most recent token in past, set recent token to now try: latest_token = DBSession.query(PriorityToken).filter( PriorityToken.used == False).order_by( PriorityToken.valid_end.desc()).limit(1).one() latest_token_end = latest_token.valid_end except NoResultFound: latest_token_end = None if not latest_token_end or latest_token_end < now(): # When issueing the first priority token latest_token_end = now( ) + priority_window # get_queue_duration(request) # Adding entire queue here was unnessisary. # Do not issue tokens past the end of the event event_end = self.settings.get('karakara.event.end') if event_end and latest_token_end > event_end: # Unable to issue token as event end log.debug('priority_token rejected - event end') return TOKEN_ISSUE_ERROR.EVENT_END # TODO: possibly depricate this - we can just keep staking tokens until the end of the event priority_token_limit = self.settings.get( 'karakara.queue.add.limit.priority_token') if priority_token_limit and latest_token_end > now( ) + priority_token_limit: # Unable to issue token as priority tokens are time limited log.debug('priority_token rejected - token limit') return TOKEN_ISSUE_ERROR.TOKEN_LIMIT # TODO: Look at the last priority tokens created and raise a warning if the token is likely to pass beyond the end of the event. 
# Do not issue another priority_token if current user already has a priority_token try: priority_token = DBSession.query(PriorityToken) \ .filter(PriorityToken.used==False) \ .filter(PriorityToken.session_owner==self.user_id) \ .filter(PriorityToken.valid_end>now()) \ .one() if priority_token: log.debug('priority_token rejected - existing token') return TOKEN_ISSUE_ERROR.TOKEN_ISSUED except NoResultFound: pass # Issue the new token priority_token = PriorityToken() priority_token.session_owner = self.user_id priority_token.valid_start = latest_token_end priority_token.valid_end = latest_token_end + priority_window DBSession.add(priority_token) # TODO: replace with new one in lib #request.response.set_cookie('priority_token', json_cookie); # WebOb.set_cookie mangles the cookie with m.serialize() - so I rolled my own set_cookie priority_token_dict = priority_token.to_dict() priority_token_dict.update({ 'server_datetime': now( ), # The client datetime and server datetime may be out. we need to return the server time so the client can calculate the difference }) json_cookie = json.dumps(priority_token_dict, default=json_object_handler) self.request.response.headerlist.append( ('Set-Cookie', 'priority_token={0}; Path=/'.format(json_cookie))) #self.request.response.set_cookie(name='priority_token', value=json_cookie, path='/', overwrite=True) # This method butchers the json and cannot be used log.debug('priority_token issued') return priority_token
def get_db_track_names():
    """Return the set of source filenames for every track currently in the DB."""
    return {t.source_filename for t in DBSession.query(Track.source_filename)}
def band_tracks(DBSession, commit):
    """
    Seed the DB with a small set of 'band' demo tracks (ids prefixed 'band_').

    Each entry supplies a tag dict; missing keys are filled from DEFAULT_TAGS
    via ChainMap. Tracks already present (matched by id) are left untouched,
    making this fixture idempotent across runs.
    """
    data = [
        {
            'id': 'go',
            'tags': {
                'title': 'Go!!!',
                'category': 'anime',
                'from': 'Naruto',
                'artist': 'Flow',
                'lang': 'jp',
                'use': 'opening',
            },
        },
        {
            'id': 'power_rangers',
            'tags': {
                'title': 'Go Go Power Rangers',
                'category': 'cartoon',
                'from': 'Mighty Morphing Power Rangers',
                'lang': 'en',
                'artist': 'Ron Wasserman',
            },
        },
        {
            'id': 'reignite',
            'tags': {
                'title': 'Reignite',
                'category': 'game',
                'from': 'Mass Effect',
                'lang': 'en',
                'use': 'cover',
                'artist': 'Malukah',
            },
        },
        {
            'id': 'alchemy',
            'tags': {
                'title': 'Alchemy',
                'category': 'anime',
                'from': 'Angel Beats',
                'use': 'insert',
                'artist': 'Girls Dead Monster',
                'lang': 'jp',
            },
        },
        {
            'id': 'god_knows',
            'tags': {
                'title': 'God Knows',
                'category': 'anime',
                'from': 'The Melancholy of Haruhi Suzumiya',
                'artist': 'Satoru Kosaki',
                'lang': 'jp',
            }
        },
        {
            'id': 'lagann',
            'tags': {
                'title': 'Sorairo Days',
                'category': 'anime',
                'from': 'Gurren Lagann',
                'artist': 'Iwasaki Taku',
                'lang': 'jp',
                'use': 'opening',
            }
        },
    ]
    for d in data:
        _id = "band_{0}".format(d['id'])
        track = get_track(_id)
        if not track:
            # Only create when absent - keeps repeated runs idempotent.
            track = Track()
            track.id = _id
            track.duration = 300
            track.tags = [
                get_tag(tag, parent=parent, create_if_missing=True)
                for parent, tag in ChainMap(d['tags'], DEFAULT_TAGS).items()
            ]
            #track.attachments = attachments
            DBSession.add(track)
    commit()
def band_tracks(DBSession, commit):
    """
    Seed the DB with the 'band' demo tracks (ids prefixed 'band_').

    Duplicate of the other band_tracks definition. Tag dicts are overlaid on
    DEFAULT_TAGS via ChainMap; existing tracks are skipped (idempotent).
    """
    data = [
        {
            'id': 'go',
            'tags': {
                'title': 'Go!!!',
                'category': 'anime',
                'from': 'Naruto',
                'artist': 'Flow',
                'lang': 'jp',
                'use': 'opening',
            },
        },
        {
            'id': 'power_rangers',
            'tags': {
                'title': 'Go Go Power Rangers',
                'category': 'cartoon',
                'from': 'Mighty Morphing Power Rangers',
                'lang': 'en',
                'artist': 'Ron Wasserman',
            },
        },
        {
            'id': 'reignite',
            'tags': {
                'title': 'Reignite',
                'category': 'game',
                'from': 'Mass Effect',
                'lang': 'en',
                'use': 'cover',
                'artist': 'Malukah',
            },
        },
        {
            'id': 'alchemy',
            'tags': {
                'title': 'Alchemy',
                'category': 'anime',
                'from': 'Angel Beats',
                'use': 'insert',
                'artist': 'Girls Dead Monster',
                'lang': 'jp',
            },
        },
        {
            'id': 'god_knows',
            'tags': {
                'title': 'God Knows',
                'category': 'anime',
                'from': 'The Melancholy of Haruhi Suzumiya',
                'artist': 'Satoru Kosaki',
                'lang': 'jp',
            }
        },
        {
            'id': 'lagann',
            'tags': {
                'title': 'Sorairo Days',
                'category': 'anime',
                'from': 'Gurren Lagann',
                'artist': 'Iwasaki Taku',
                'lang': 'jp',
                'use': 'opening',
            }
        },
    ]
    for d in data:
        _id = "band_{0}".format(d['id'])
        track = get_track(_id)
        if not track:
            # Only create when absent - keeps repeated runs idempotent.
            track = Track()
            track.id = _id
            track.duration = 300
            track.tags = [get_tag(tag, parent=parent, create_if_missing=True) for parent, tag in ChainMap(d['tags'], DEFAULT_TAGS).items()]
            #track.attachments = attachments
            DBSession.add(track)
    commit()
def _existing_tracks_dict():
    """Return {track_id: source_filename} for all tracks currently in the DB."""
    return {
        row.id: row.source_filename
        for row in DBSession.query(Track.id, Track.source_filename)
    }
def _get_attachment(filename):
    """
    Look up the single Attachment whose location contains *filename*.

    Raises if zero or multiple rows match (Query.one()).
    """
    # BUGFIX: '%%{0}%%' doubled the SQL LIKE wildcards - str.format() does
    # not collapse '%%'. '%{0}%' is the intended single-wildcard pattern.
    return DBSession.query(Attachment).filter(Attachment.location.like('%{0}%'.format(filename))).one()
def __init__(self, meta_manager=None):  # , path_meta=None, path_processed=None, **kwargs
    """
    Cache existing track ids and processed-file paths for fast duplicate /
    missing-file checks during import.

    Args:
        meta_manager: object providing processed_files_manager.path
            (e.g. MetaManagerExtended) - assumed non-None here; TODO confirm.
    """
    self.meta_manager = meta_manager  #or MetaManager(path_meta)
    # Snapshot of all track ids currently in the DB (grown as imports happen).
    # (sic: 'exisiting' misspelling is part of the established attribute name)
    self.exisiting_track_ids = set(t.id for t in DBSession.query(Track.id))
    # Relative paths of every file already under the processed directory.
    self.existing_files_lookup = set(f.relative for f in fast_scan(self.meta_manager.processed_files_manager.path))
def _create_tag(tag_data):
    """Fetch-or-create the tag for *tag_data*, register it with the session, and return it."""
    new_tag = get_tag(tag_data, create_if_missing=True)
    DBSession.add(new_tag)
    return new_tag
def __init__(self, meta_manager=None, path_meta=None, path_processed=None, **kwargs):
    """
    Older-style importer init: accept (or build) the meta manager, build the
    processed-files manager, and snapshot the existing track ids.

    Args:
        meta_manager: pre-built manager; when None, one is created from path_meta.
        path_meta: path used to build a MetaManager when meta_manager is None.
        path_processed: path for the ProcessedFilesManager.
        **kwargs: ignored extras (kept for caller compatibility).
    """
    self.meta = meta_manager or MetaManager(path_meta)
    self.processed_files_manager = ProcessedFilesManager(path_processed)
    # Track ids already in the DB; used to skip re-imports.
    # (sic: 'exisiting' misspelling is the established attribute name)
    self.exisiting_track_ids = set(t.id for t in DBSession.query(Track.id))
def add_track(track_id):
    """Insert a bare Track row with the given id and commit immediately."""
    new_track = Track()
    new_track.id = track_id
    DBSession.add(new_track)
    commit()
def import_media(**kwargs):
    """
    Import all processed media into the track DB (older variant).

    - hash and identify primary key for track
    - import tags
    - import subtiles
    - cleanup db - any sources we don't have the actual processed files for
      - prune and remove from db - check this removes unnneeded attachments

    Returns a stats dict:
        meta_imported: names imported this session
        meta_set: names in the processed meta dataset that have a source hash
        db_start: track names in the db before this import
        meta_unprocessed: meta exists, but not encoded yet (no source_hash)
        missing_processed_deleted / missing_processed_aborted: some processed
            files were missing (stale row deleted / import aborted)
        db_removed: db entries with no matching processed meta
        db_end: all track names in the db after this import
        meta_hash_matched_db_hash: meta tracks whose hash already existed
    """
    stats = dict(meta_set=set(), meta_imported=set(), meta_unprocessed=set(), db_removed=list(), missing_processed_deleted=set(), missing_processed_aborted=set(), db_start=set(), meta_hash_matched_db_hash=set())

    def get_db_track_names():
        return set(t.source_filename for t in DBSession.query(Track.source_filename))

    meta = MetaManager(kwargs['path_meta'])
    importer = TrackImporter(meta_manager=meta, path_processed=kwargs['path_processed'])
    stats['db_start'] = get_db_track_names()

    meta.load_all()  # mtime=epoc(last_update())
    meta_processed_track_ids = set(m.source_hash for m in meta.meta.values() if m.source_hash)
    stats['meta_set'] = set(m.name for m in meta.meta.values() if m.source_hash)

    for name in meta.meta.keys():
        try:
            if importer.import_track(name):
                stats['meta_imported'].add(name)
            else:
                stats['meta_hash_matched_db_hash'].add(name)
        except TrackNotProcesedException:
            log.debug('Unprocessed (no source_hash): %s', name)
            stats['meta_unprocessed'].add(name)
        except TrackMissingProcessedFiles as ex:
            if ex.id:
                # NOTE(review): log.warn is deprecated in favour of log.warning.
                log.warn('Missing (processed files) delete existing: %s', name)
                delete_track(ex.id)
                stats['missing_processed_deleted'].add(name)
            else:
                log.warn('Missing (processed files) abort import: %s', name)
                stats['missing_processed_aborted'].add(name)

    # Prune db rows whose source hash no longer appears in the processed meta.
    for unneeded_track_id in importer.exisiting_track_ids - meta_processed_track_ids:
        log.warn('Remove: %s', unneeded_track_id)
        stats['db_removed'].append(DBSession.query(Track).get(unneeded_track_id).source_filename or unneeded_track_id)
        delete_track(unneeded_track_id)
    commit()

    stats['db_end'] = get_db_track_names()
    # Sanity: everything now in the db was either imported or hash-matched.
    assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']
    return stats