Example #1
def tracks(request, DBSession, commit, tags, attachments, lyrics):
    tracks_data = [
        {
            'id': "t1",
            'duration': 100,
            'tags': [
                'title      :Test Track 1',
                #'description:Test track for the KaraKara system with キ',
                'opening', 'male', 'jp', 'anime', 'jpop', 'series X',
            ],
            'attachments': ['image1', 'preview1'],
            'lyrics': lyrics[0],
        },
        {
            'id': "t2",
            'duration': 200,
            'tags': [
                'title      :Test Track 2',
                #'description:Test track for the KaraKara system with キ'
                'ending', 'female', 'en', 'anime', 'series X',
            ],
            'attachments': ['image2', 'preview2'],
            'lyrics': lyrics[1],
        },
        {
            'id': "t3",
            'duration': 300,
            'tags': [
                'title      :Test Track 3 キ',
                #'description:Test track for the KaraKara system with キ',
                'ending', 'female', 'en', 'jpop', 'series Y',
            ],
            'attachments': ['image3', 'preview3'],
        },
        {
            'id': "xxx",
            'duration': 400,
            'tags': [
                'title      :Wildcard',
                'fr',
            ],
            'attachments': [],
        },
    ]
    
    tracks = [] # Keep tabs on all tracks generated 
    for track_data in tracks_data:
        track = create_test_track(**track_data)
        DBSession.add(track)
        tracks.append(track)

    def finalizer():
        pass
        #for track in tracks:
        #    DBSession.delete(track)
        #commit()
    request.addfinalizer(finalizer)
    
    commit()
    return tracks
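A minimal usage sketch for this fixture, assuming it is registered with pytest under the name `tracks` (the decorator is not shown in the listing) and that `create_test_track` carries the `id` values through to the resulting Track objects:

def test_tracks_fixture_seeds_four_tracks(tracks):
    # tracks_data above defines four entries: t1, t2, t3 and the wildcard track.
    assert len(tracks) == 4
    assert {track.id for track in tracks} == {'t1', 't2', 't3', 'xxx'}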
Example #2
def tags(request):
    """
    Basic category tags for 3 test series
    """
    tags_data = [
        'from:series X',
        'from:series Y',
        'from:series Z',
    ]
    tags = []
    for tag_data in tags_data:
        tag = get_tag(tag_data, create_if_missing=True)
        DBSession.add(tag)
        tags.append(tag)

    def finalizer():
        pass
        #for tag in tags:
        #    DBSession.delete(tag)
        #commit()

    #request.addfinalizer(finalizer)

    commit()
    return tags
Example #3
def attachments(request):
    """
    Mock attachments
    """
    attachments_data = (
        AttachmentDescription('test/preview1.3gp', 'preview'),
        AttachmentDescription('test/preview2.flv', 'preview'),
        AttachmentDescription('test/preview3.mp4', 'preview'),
        AttachmentDescription('test/image1.jpg', 'image'),
        AttachmentDescription('test/image2.jpg', 'image'),
        AttachmentDescription('test/image3.png', 'image'),
        AttachmentDescription('test/processed.mpg', 'video'),
        AttachmentDescription('test/subtitles.srt', 'srt'),
    )
    mock_attachments = tuple(create_attachment(attachment) for attachment in attachments_data)

    def finalizer():
        pass
        #for attachment in attachments:
        #    DBSession.delete(attachment)
        #commit()
    request.addfinalizer(finalizer)

    commit()
    return mock_attachments
Example #4
def attachments(request): #, DBSession, commit
    """
    """
    attachments_data = [
        ('test/preview1.3gp' , 'preview'  ),
        ('test/preview2.flv' , 'preview'  ),        
        ('test/preview3.mp4' , 'preview'  ),
        ('test/image1.jpg'   , 'thumbnail'),
        ('test/image2.jpg'   , 'thumbnail'),
        ('test/image3.png'   , 'thumbnail'),
        ('test/processed.mpg', 'video'    ),
        ('test/subtitles.ssa', 'subtitle' ),
    ]
    attachments = []
    for attachment_location, attachment_type in attachments_data:
        attachment = Attachment()
        attachment.location = attachment_location
        attachment.type     = attachment_type
        DBSession.add(attachment)
        attachments.append(attachment)
    
    def finalizer():
        pass
        #for attachment in attachments:
        #    DBSession.delete(attachment)
        #commit()
    request.addfinalizer(finalizer)
        
    commit()
    return attachments
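A hedged usage sketch, assuming the function above is exposed as a pytest fixture named `attachments` and that the Attachment rows keep the `type` values assigned above:

def test_attachments_fixture_covers_all_types(attachments):
    # Eight attachments are created: three previews, three thumbnails, one video, one subtitle.
    assert len(attachments) == 8
    assert {attachment.type for attachment in attachments} == {'preview', 'thumbnail', 'video', 'subtitle'}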
Example #5
def attachments(request):
    """
    Mock attachments
    """
    attachments_data = (
        AttachmentDescription('test/preview1.3gp', 'preview'),
        AttachmentDescription('test/preview2.flv', 'preview'),
        AttachmentDescription('test/preview3.mp4', 'preview'),
        AttachmentDescription('test/image1.jpg', 'image'),
        AttachmentDescription('test/image2.jpg', 'image'),
        AttachmentDescription('test/image3.png', 'image'),
        AttachmentDescription('test/processed.mpg', 'video'),
        AttachmentDescription('test/subtitles.srt', 'srt'),
    )
    mock_attachments = tuple(create_attachment(attachment) for attachment in attachments_data)

    def finalizer():
        pass
        #for attachment in attachments:
        #    DBSession.delete(attachment)
        #commit()
    request.addfinalizer(finalizer)

    commit()
    return mock_attachments
Example #6
def tracks_volume(request):
    tracks = [create_test_track(tags=['test']) for track_num in range(15)]
    for track in tracks:
        DBSession.add(track)
    
    def finalizer():
        for track in tracks:
            DBSession.delete(track)
        commit()
    request.addfinalizer(finalizer)
    
    commit()
    return tracks
Example #7
def tracks_volume(request):
    """
    Create 15 random tracks to test against a larger list
    """
    mock_tracks = tuple(create_test_track(tags=['test']) for track_num in range(15))

    def finalizer():
        for track in mock_tracks:
            DBSession.delete(track)
        commit()
    request.addfinalizer(finalizer)

    commit()
    cache.invalidate()
    return mock_tracks
Example #8
def tracks_volume(request):
    """
    Create 15 random tracks to test against a larger list
    """
    mock_tracks = tuple(create_test_track(tags=['test']) for track_num in range(15))

    def finalizer():
        for track in mock_tracks:
            DBSession.delete(track)
        commit()
    request.addfinalizer(finalizer)

    commit()
    cache.invalidate()
    return mock_tracks
Example #9
def queue(request, DBSession, commit, cache_store):
    QUEUE_ID = 'qtest'

    queue = Queue(id=QUEUE_ID)
    DBSession.add(queue)

    queue_setting = QueueSetting()
    queue_setting.queue_id = QUEUE_ID
    queue_setting.key = 'karakara.private.password'
    queue_setting.value = QUEUE_ID
    DBSession.add(queue_setting)

    commit()
    cache_store.invalidate()
    yield QUEUE_ID
    DBSession.delete(queue)
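Because this fixture uses `yield`, the code after the `yield` runs as teardown once the consuming test has finished. A minimal sketch of such a test, assuming the fixture is registered with pytest as `queue`:

def test_queue_fixture_yields_the_queue_id(queue):
    # The fixture yields the plain id string; the Queue row itself is deleted in teardown.
    assert queue == 'qtest'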
Example #10
def queue(request, DBSession, commit):
    QUEUE_ID = 'qtest'

    queue = Queue(id=QUEUE_ID)
    DBSession.add(queue)

    queue_setting = QueueSetting()
    queue_setting.queue_id = QUEUE_ID
    queue_setting.key = 'karakara.private.password'
    queue_setting.value = QUEUE_ID
    DBSession.add(queue_setting)

    commit()
    cache.invalidate()
    yield QUEUE_ID
    DBSession.delete(queue)
Example #11
def import_media(**kwargs):
    """
    """
    stats = dict(meta_set=set(), meta_imported=set(), meta_unprocessed=set(), db_removed=list(), missing_processed_deleted=set(), missing_processed_aborted=set(), db_start=set(), meta_hash_matched_db_hash=set())

    def get_db_track_names():
        return set(t.source_filename for t in DBSession.query(Track.source_filename))

    meta_manager = MetaManagerExtended(**kwargs)
    importer = TrackImporter(meta_manager=meta_manager)
    stats['db_start'] = get_db_track_names()

    meta_manager.load_all()  # mtime=epoc(last_update())

    meta_processed_track_ids = set(meta_manager.source_hashs)
    stats['meta_set'] = set(m.name for m in meta_manager.meta_items if m.source_hash)

    for name in progress_bar(meta_manager.meta.keys()):
        try:
            if importer.import_track(name):
                stats['meta_imported'].add(name)
            else:
                stats['meta_hash_matched_db_hash'].add(name)
        except TrackNotProcesedException:
            log.debug('Unprocessed (no source_hash): %s', name)
            stats['meta_unprocessed'].add(name)
        except TrackMissingProcessedFiles as ex:
            if ex.id:
                log.warning('Missing (processed files) delete existing: %s', name)
                delete_track(ex.id)
                commit()
                stats['missing_processed_deleted'].add(name)
            else:
                log.warning('Missing (processed files) abort import: %s', name)
                stats['missing_processed_aborted'].add(name)

    for unneeded_track_id in importer.exisiting_track_ids - meta_processed_track_ids:
        log.warning('Remove: %s', unneeded_track_id)
        stats['db_removed'].append(DBSession.query(Track).get(unneeded_track_id).source_filename or unneeded_track_id)
        delete_track(unneeded_track_id)
        commit()

    stats['db_end'] = get_db_track_names()

    #assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']  # TODO! Reinstate this

    return stats
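A sketch of how a caller might summarise the returned stats, assuming only the keys populated above (every value is a set of names, except db_removed, which is a list):

def summarise_import_stats(stats):
    # Reduce each bucket of source filenames to a plain count for logging or reporting.
    return {key: len(value) for key, value in stats.items()}

# Hypothetical output: {'meta_imported': 12, 'meta_unprocessed': 3, 'db_removed': 1, ...}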
Example #12
def track_import_post(request):
    existing_track_ids = _existing_tracks_dict().keys()

    for track_dict in _get_json_request(request):
        if track_dict['id'] in existing_track_ids:
            log.warning(
                'Exists: {source_filename} - {id}'.format(**track_dict))
            continue

        log.info('Import: {source_filename} - {id}'.format(**track_dict))
        track = Track()
        track.id = track_dict['id']
        track.source_filename = track_dict['source_filename']
        track.duration = track_dict['duration']
        track.lyrics = track_dict['lyrics']

        # Attachments
        for attachment_dict in track_dict['attachments']:
            assert attachment_dict['type'] in ATTACHMENT_TYPES
            attachment = Attachment()
            attachment.type = attachment_dict['type']
            attachment.location = attachment_dict['location']
            track.attachments.append(attachment)

        # Tags
        for tag_string in track_dict['tags']:
            tag = get_tag(tag_string, create_if_missing=True)
            if tag:
                track.tags.append(tag)
            elif tag_string:
                log.warning('null tag %s', tag_string)
        for duplicate_tag in (tag for tag in track.tags
                              if track.tags.count(tag) > 1):
            log.warning('Unneeded duplicate tag found %s in %s', duplicate_tag,
                        track.source_filename)
            track.tags.remove(duplicate_tag)

        DBSession.add(track)
        commit()

    request.registry.settings['karakara.tracks.version'] += 1
    return action_ok()
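An aside on the duplicate-tag loop above: it removes items from track.tags while a generator is still iterating over that same list, which can skip elements when several duplicates sit together. A sketch of an order-preserving dedup that builds a fresh list instead (a hypothetical helper, not part of the listing):

def dedupe_preserving_order(tags):
    # Keep the first occurrence of each tag; never mutate the list being iterated.
    seen = set()
    unique = []
    for tag in tags:
        if tag not in seen:
            seen.add(tag)
            unique.append(tag)
    return unique

# track.tags[:] = dedupe_preserving_order(track.tags)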
Example #13
def track_unicode_special(DBSession, commit):
    tags_data = (
        'title:UnicodeAssention',
        'from:Hack//Sign',
        'artist:こ',
    )
    def _create_tag(tag_data):
        tag = get_tag(tag_data, create_if_missing=True)
        DBSession.add(tag)
        return tag
    tag_objs = tuple(_create_tag(tag) for tag in tags_data)
    commit()

    track = Track()
    track.id = 'x999'
    track.duration = 120
    track.tags[:] = tag_objs
    track.source_filename = 'unicode_special'

    DBSession.add(track)
    commit()

    yield track

    DBSession.delete(track)
    for tag_obj in tag_objs:
        DBSession.delete(tag_obj)
    commit()
Example #14
def track_unicode_special(DBSession, commit):
    tags_data = (
        'title:UnicodeAssention',
        'from:Hack//Sign',
        'artist:こ',
    )
    def _create_tag(tag_data):
        tag = get_tag(tag_data, create_if_missing=True)
        DBSession.add(tag)
        return tag
    tag_objs = tuple(_create_tag(tag) for tag in tags_data)
    commit()

    track = Track()
    track.id = 'x999'
    track.duration = 120
    track.tags[:] = tag_objs
    track.source_filename = 'unicode_special'

    DBSession.add(track)
    commit()

    yield track

    DBSession.delete(track)
    for tag_obj in tag_objs:
        DBSession.delete(tag_obj)
    commit()
Example #15
def track_import_post(request):
    existing_track_ids = _existing_tracks_dict().keys()

    for track_dict in _get_json_request(request):
        if track_dict['id'] in existing_track_ids:
            log.warning('Exists: {source_filename} - {id}'.format(**track_dict))
            continue

        log.info('Import: {source_filename} - {id}'.format(**track_dict))
        track = Track()
        track.id = track_dict['id']
        track.source_filename = track_dict['source_filename']
        track.duration = track_dict['duration']
        track.lyrics = track_dict['lyrics']

        # Attachments
        for attachment_dict in track_dict['attachments']:
            assert attachment_dict['type'] in ATTACHMENT_TYPES
            attachment = Attachment()
            attachment.type = attachment_dict['type']
            attachment.location = attachment_dict['location']
            track.attachments.append(attachment)

        # Tags
        for tag_string in track_dict['tags']:
            tag = get_tag(tag_string, create_if_missing=True)
            if tag:
                track.tags.append(tag)
            elif tag_string:
                log.warning('null tag %s', tag_string)
        for duplicate_tag in (tag for tag in track.tags if track.tags.count(tag) > 1):
            log.warning('Unneeded duplicate tag found %s in %s', duplicate_tag, track.source_filename)
            track.tags.remove(duplicate_tag)

        DBSession.add(track)
        commit()

    request.registry.settings['karakara.tracks.version'] += 1
    return action_ok()
Example #16
def users(request):
    """
    """
    users = []

    user = ComunityUser()
    user.name = 'TestUser'
    user.email = '*****@*****.**'
    user.approved = True
    token = SocialToken()
    token.token = 'abcdefg'
    token.provider = 'test_provider'
    token.data = {'avatar_url': 'avatar1.png'}
    user.tokens.append(token)
    DBSession.add(user)
    users.append(user)

    user = ComunityUser()
    user.name = 'UnknownUser'
    user.email = '*****@*****.**'
    user.approved = False
    token = SocialToken()
    token.token = '1234567'
    token.provider = 'test_provider'
    token.data = {'avatar_url': 'avatar2.png'}
    user.tokens.append(token)
    DBSession.add(user)
    users.append(user)

    def finalizer():
        pass
        #for user in users:
        #    DBSession.delete(tag)
        #commit()
    #request.addfinalizer(finalizer)

    commit()
    return users
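A usage sketch, assuming this is a pytest fixture named `users` and that the `approved` flag is persisted as set above:

def test_users_fixture_flags_approval(users):
    # Two users are created: TestUser (approved) and UnknownUser (not approved).
    assert {user.name for user in users if user.approved} == {'TestUser'}
    assert {user.name for user in users if not user.approved} == {'UnknownUser'}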
Example #17
def tags(request): #, DBSession, commit
    """
    """
    tags_data = [
        'from:series X',
        'from:series Y',
        'from:series Z',
    ]
    tags = []
    for tag_data in tags_data:
        tag = get_tag(tag_data, create_if_missing=True)#Tag(tag_data, create_if_missing=True)
        DBSession.add(tag)
        tags.append(tag)
    
    def finalizer():
        pass
        #for tag in tags:
        #    DBSession.delete(tag)
        #commit()
    #request.addfinalizer(finalizer)
    
    commit()
    return tags
Example #18
    def import_track(self, name):
        log.debug('Attempting: %s', name)

        self.meta_manager.load(name)
        m = self.meta_manager.get(name)

        if not m.source_hash:
            raise TrackNotProcesedException()

        if self._missing_files(m.processed_files):
            # If we are missing any files but we have a source hash,
            # we may have some of the derived media missing.
            # Explicitly mark the item for re-encoding
            if PENDING_ACTION['encode'] not in m.pending_actions:  # Feels clunky to manage this as a list? maybe a set?
                m.pending_actions.append(PENDING_ACTION['encode'])
                self.meta_manager.save(name)  # Feels clunky
            raise TrackMissingProcessedFiles(id=m.source_hash in self.exisiting_track_ids and m.source_hash)

        if m.source_hash in self.exisiting_track_ids:
            log.debug('Exists: %s', name)
            return False

        log.info('Import: %s', name)
        track = Track()
        track.id = m.source_hash
        track.source_filename = name
        track.duration = m.source_details.get('duration')

        self._add_attachments(track, m.processed_files)
        self._add_lyrics(track, m.processed_files.get('srt'))
        self._add_tags(track, m.processed_files.get('tags'))

        DBSession.add(track)
        commit()
        self.exisiting_track_ids.add(m.source_hash)  # HACK!! .. we should not have duplicate hashes in the source set. This is a temp patch

        return True
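A hedged aside on the TrackMissingProcessedFiles line above: the expression `m.source_hash in self.exisiting_track_ids and m.source_hash` evaluates to the hash when a track with that hash is already in the db (so the caller can delete and re-import it) and to False otherwise, which is what the `if ex.id:` branch in the import_media examples checks. Written out longhand with hypothetical values:

source_hash = 'abc123'                      # hypothetical hash
existing_track_ids = {'abc123', 'def456'}   # hypothetical known ids

track_id = source_hash if source_hash in existing_track_ids else False
assert track_id == (source_hash in existing_track_ids and source_hash)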
Example #19
def finalizer():
    for track in mock_tracks:
        DBSession.delete(track)
    commit()
Example #20
def finalizer():
    for attachment in attachments:
        DBSession.delete(attachment)
    commit()
Example #21
def del_track(track_id):
    DBSession.query(Track).filter(Track.id == track_id).delete()
    commit()
Example #22
def del_track(track_id):
    DBSession.query(Track).filter(Track.id == track_id).delete()
    commit()
Example #23
def tracks(request, DBSession, commit, tags, attachments):
    """
    4 test tracks with various unicode characters, lyrics, attachments, tags
    """
    tracks_data = [
        {
            'id': "t1",
            'duration': 60,  # 1min
            'tags': [
                'title      :Test Track 1',
                #'description:Test track for the KaraKara system with キ',
                'opening', 'male', 'jp', 'anime', 'jpop', 'series X',
            ],
            'attachments': ['image1', 'preview1', 'processed'],
            'lyrics': 'ここにいくつかのテキストです。',
            'source_filename': 'track1source',
        },
        {
            'id': "t2",
            'duration': 120,  # 2min
            'tags': [
                'title      :Test Track 2',
                #'description:Test track for the KaraKara system with キ'
                'ending', 'female', 'en', 'anime', 'series X',
            ],
            'attachments': ['image2', 'preview2'],
            'lyrics': 'Line1\nLine2\nLine3\nLine4\näöü"',
            'source_filename': 'track2source',
        },
        {
            'id': "t3",
            'duration': 240,  # 4min
            'tags': [
                'title      :Test Track 3 キ',
                #'description:Test track for the KaraKara system with キ',
                'ending', 'female', 'en', 'jpop', 'series Y',
            ],
            'attachments': ['image3', 'preview3'],
            'source_filename': 'track3source',
        },
        {
            'id': "xxx",
            'duration': 300,  # 5min
            'tags': [
                'title      :Wildcard',
                'lang:fr',
            ],
            'attachments': [],
            'source_filename': 'wildcardsource',
        },
    ]

    mock_tracks = tuple(create_test_track(**track_data) for track_data in tracks_data)  # Keep tabs on all tracks generated

    def finalizer():
        pass
        #for track in tracks:
        #    DBSession.delete(track)
        #commit()
    request.addfinalizer(finalizer)

    commit()
    cache.invalidate()
    return mock_tracks
Example #24
def finalizer():
    for track in mock_tracks:
        DBSession.delete(track)
    commit()
Example #25
def band_tracks(DBSession, commit):

    data = [
        {
            'id': 'go',
            'tags': {
                'title': 'Go!!!',
                'category': 'anime',
                'from': 'Naruto',
                'artist': 'Flow',
                'lang': 'jp',
                'use': 'opening',
            },
        },
        {
            'id': 'power_rangers',
            'tags': {
                'title': 'Go Go Power Rangers',
                'category': 'cartoon',
                'from': 'Mighty Morphing Power Rangers',
                'lang': 'en',
                'artist': 'Ron Wasserman',
            },
        },
        {
            'id': 'reignite',
            'tags': {
                'title': 'Reignite',
                'category': 'game',
                'from': 'Mass Effect',
                'lang': 'en',
                'use': 'cover',
                'artist': 'Malukah',
            },
        },
        {
            'id': 'alchemy',
            'tags': {
                'title': 'Alchemy',
                'category': 'anime',
                'from': 'Angel Beats',
                'use': 'insert',
                'artist': 'Girls Dead Monster',
                'lang': 'jp',
            },
        },
        {
            'id': 'god_knows',
            'tags': {
                'title': 'God Knows',
                'category': 'anime',
                'from': 'The Melancholy of Haruhi Suzumiya',
                'artist': 'Satoru Kosaki',
                'lang': 'jp',
            }
        },
        {
            'id': 'lagann',
            'tags': {
                'title': 'Sorairo Days',
                'category': 'anime',
                'from': 'Gurren Lagann',
                'artist': 'Iwasaki Taku',
                'lang': 'jp',
                'use': 'opening',
            }
        },
    ]

    for d in data:
        _id = "band_{0}".format(d['id'])

        track = get_track(_id)
        if not track:
            track = Track()
            track.id = _id
            track.duration = 300
            track.tags = [get_tag(tag, parent=parent, create_if_missing=True) for parent, tag in ChainMap(d['tags'], DEFAULT_TAGS).items()]
            #track.attachments = attachments
            DBSession.add(track)

    commit()
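The tag merge above relies on collections.ChainMap: per-track values in d['tags'] take precedence, and DEFAULT_TAGS only fills in keys the track does not set. A small illustration with made-up defaults (the real DEFAULT_TAGS is not shown in the listing):

from collections import ChainMap

DEFAULT_TAGS = {'vocaltrack': 'band', 'lang': 'en'}   # hypothetical defaults
track_tags = {'title': 'Go!!!', 'lang': 'jp'}

merged = dict(ChainMap(track_tags, DEFAULT_TAGS))
# The track's 'lang' wins; 'vocaltrack' is filled in from the defaults.
assert merged == {'title': 'Go!!!', 'lang': 'jp', 'vocaltrack': 'band'}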
Example #26
def tracks(request, DBSession, commit, tags, attachments):
    """
    4 test tracks with various unicode characters, lyrics, attachments, tags
    """
    tracks_data = [
        {
            'id': "t1",
            'duration': 60,  # 1min
            'tags': [
                'title      :Test Track 1',
                #'description:Test track for the KaraKara system with キ',
                'opening', 'male', 'jp', 'anime', 'jpop', 'series X',
            ],
            'attachments': ['image1', 'preview1', 'processed'],
            'lyrics': 'ここにいくつかのテキストです。',
            'source_filename': 'track1source',
        },
        {
            'id': "t2",
            'duration': 120,  # 2min
            'tags': [
                'title      :Test Track 2',
                #'description:Test track for the KaraKara system with キ'
                'ending', 'female', 'en', 'anime', 'series X',
            ],
            'attachments': ['image2', 'preview2'],
            'lyrics': 'Line1\nLine2\nLine3\nLine4\näöü"',
            'source_filename': 'track2source',
        },
        {
            'id': "t3",
            'duration': 240,  # 4min
            'tags': [
                'title      :Test Track 3 キ',
                #'description:Test track for the KaraKara system with キ',
                'ending', 'female', 'en', 'jpop', 'series Y',
            ],
            'attachments': ['image3', 'preview3'],
            'source_filename': 'track3source',
        },
        {
            'id': "xxx",
            'duration': 300,  # 5min
            'tags': [
                'title      :Wildcard',
                'lang:fr',
            ],
            'attachments': [],
            'source_filename': 'wildcardsource',
        },
    ]

    mock_tracks = tuple(create_test_track(**track_data) for track_data in tracks_data)  # Keep tabs on all tracks generated

    def finalizer():
        pass
        #for track in tracks:
        #    DBSession.delete(track)
        #commit()
    request.addfinalizer(finalizer)

    commit()
    cache.invalidate()
    return mock_tracks
Example #27
def add_track(track_id):
    track = Track()
    track.id = track_id
    DBSession.add(track)
    commit()
Example #28
def add_track(track_id):
    track = Track()
    track.id = track_id
    DBSession.add(track)
    commit()
Example #29
def import_media(**kwargs):
    """
     - hash and identify primary key for track
     - import tags
     - import subtiles
     - cleanup db - any sources we don't have the actual processed files for - prune and remove from db
       - check this removes unnneeded attachments

    stats description:
        : number of tracks imported this session
        : the total number of tracks in the processed meta dataset
        : the number of track in the db before this import operation was performed
        : meta exists, but the processed data has not been encoded yet
        : some source files were missing, making it impossible to use
        : no matching processed meta paired with db entry at all
        db_end: the total tracks in the db at the end of this import operation
        meta_hash_matched_db_hash: The number of meta tracks that matched existing hash in the db
    """
    stats = dict(meta_set=set(), meta_imported=set(), meta_unprocessed=set(), db_removed=list(), missing_processed_deleted=set(), missing_processed_aborted=set(), db_start=set(), meta_hash_matched_db_hash=set())

    def get_db_track_names():
        return set(t.source_filename for t in DBSession.query(Track.source_filename))

    meta = MetaManager(kwargs['path_meta'])
    importer = TrackImporter(meta_manager=meta, path_processed=kwargs['path_processed'])
    stats['db_start'] = get_db_track_names()

    meta.load_all()  # mtime=epoc(last_update())

    meta_processed_track_ids = set(m.source_hash for m in meta.meta.values() if m.source_hash)
    stats['meta_set'] = set(m.name for m in meta.meta.values() if m.source_hash)

    for name in meta.meta.keys():
        try:
            if importer.import_track(name):
                stats['meta_imported'].add(name)
            else:
                stats['meta_hash_matched_db_hash'].add(name)
        except TrackNotProcesedException:
            log.debug('Unprocessed (no source_hash): %s', name)
            stats['meta_unprocessed'].add(name)
        except TrackMissingProcessedFiles as ex:
            if ex.id:
                log.warn('Missing (processed files) delete existing: %s', name)
                delete_track(ex.id)
                stats['missing_processed_deleted'].add(name)
            else:
                log.warn('Missing (processed files) abort import: %s', name)
                stats['missing_processed_aborted'].add(name)

    for unneeded_track_id in importer.exisiting_track_ids - meta_processed_track_ids:
        log.warn('Remove: %s', unneeded_track_id)
        stats['db_removed'].append(DBSession.query(Track).get(unneeded_track_id).source_filename or unneeded_track_id)
        delete_track(unneeded_track_id)
    commit()

    stats['db_end'] = get_db_track_names()

    assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']

    return stats
Example #30
def band_tracks(DBSession, commit):

    data = [
        {
            'id': 'go',
            'tags': {
                'title': 'Go!!!',
                'category': 'anime',
                'from': 'Naruto',
                'artist': 'Flow',
                'lang': 'jp',
                'use': 'opening',
            },
        },
        {
            'id': 'power_rangers',
            'tags': {
                'title': 'Go Go Power Rangers',
                'category': 'cartoon',
                'from': 'Mighty Morphing Power Rangers',
                'lang': 'en',
                'artist': 'Ron Wasserman',
            },
        },
        {
            'id': 'reignite',
            'tags': {
                'title': 'Reignite',
                'category': 'game',
                'from': 'Mass Effect',
                'lang': 'en',
                'use': 'cover',
                'artist': 'Malukah',
            },
        },
        {
            'id': 'alchemy',
            'tags': {
                'title': 'Alchemy',
                'category': 'anime',
                'from': 'Angel Beats',
                'use': 'insert',
                'artist': 'Girls Dead Monster',
                'lang': 'jp',
            },
        },
        {
            'id': 'god_knows',
            'tags': {
                'title': 'God Knows',
                'category': 'anime',
                'from': 'The Melancholy of Haruhi Suzumiya',
                'artist': 'Satoru Kosaki',
                'lang': 'jp',
            }
        },
        {
            'id': 'lagann',
            'tags': {
                'title': 'Sorairo Days',
                'category': 'anime',
                'from': 'Gurren Lagann',
                'artist': 'Iwasaki Taku',
                'lang': 'jp',
                'use': 'opening',
            }
        },
    ]

    for d in data:
        _id = "band_{0}".format(d['id'])

        track = get_track(_id)
        if not track:
            track = Track()
            track.id = _id
            track.duration = 300
            track.tags = [
                get_tag(tag, parent=parent, create_if_missing=True)
                for parent, tag in ChainMap(d['tags'], DEFAULT_TAGS).items()
            ]
            #track.attachments = attachments
            DBSession.add(track)

    commit()