def scan(self):
    """Enumerate every file beneath this manager's root path."""
    root = self.path
    return fast_scan(root)
示例#2
0
 def files(self):
     """Return the '.json' entries found under self.path.

     NOTE(review): an mtime filter (f.stat().st_mtime >= mtime) was
     apparently contemplated here — confirm whether it is still wanted.
     """
     def _is_json(entry):
         return entry.name.endswith('.json')
     return fast_scan(self.path, search_filter=_is_json)
示例#3
0
def import_media(**kwargs) -> dict:
    """Synchronise processed media metadata with a remote track API.

    Loads all metadata through MetaManagerExtended, POSTs every
    importable track to the API one at a time, queues deletions for
    tracks whose processed files are missing (when an id is known) and
    for existing tracks no longer present in the processed set, then
    issues one DELETE for the whole batch.

    kwargs must contain 'api_host' (forwarded to _track_api); the
    remaining keys are passed to MetaManagerExtended(**kwargs) —
    confirm the expected keys against callers.

    Returns the `stats` dict of bookkeeping sets/lists describing what
    was imported, skipped, or removed (plus 'db_end', the API's final
    track values).
    """
    # Bookkeeping for the outcome of this run; returned to the caller.
    stats = dict(meta_set=set(),
                 meta_imported=set(),
                 meta_unprocessed=set(),
                 db_removed=list(),
                 missing_processed_deleted=set(),
                 missing_processed_aborted=set(),
                 db_start=set(),
                 meta_hash_matched_db_hash=set())

    # Bind the API helper to the configured host once; called with no
    # args it GETs the current track listing.
    track_api = partial(_track_api, kwargs['api_host'])

    meta_manager = MetaManagerExtended(**kwargs)
    meta_manager.load_all()  # mtime=epoc(last_update())
    processed_track_ids = set(meta_manager.source_hashs)
    # Relative paths of every file already present on disk under the
    # processed-files root.
    processed_files_lookup = set(
        f.relative
        for f in fast_scan(meta_manager.processed_files_manager.path))
    existing_tracks = track_api()['data']['tracks']
    # NOTE(review): dict keys view — set operations (difference below)
    # work on it directly.
    existing_track_ids = existing_tracks.keys()

    generate_track_dict = partial(
        _generate_track_dict,
        meta_manager=meta_manager,
        existing_track_ids=existing_track_ids,
        processed_files_lookup=processed_files_lookup)

    stats['db_start'] = set(existing_tracks.values())
    stats['meta_set'] = set(m.name for m in meta_manager.meta_items
                            if m.source_hash)

    # tracks_to_add is only reported in the summary log below; the
    # batch-POST path is disabled (see commented lines) in favour of
    # per-track POSTs inside the loop.
    tracks_to_add = []
    track_ids_to_delete = []

    # NOTE(review): 'Processed{...}' is missing a colon after
    # 'Processed' — cosmetic log typo, left untouched here.
    log.info(
        f'Importing tracks - Existing:{len(existing_track_ids)} Processed{len(processed_track_ids)}'
    )
    for name in progress_bar(meta_manager.meta.keys()):
        try:
            # Returns a falsy value when the meta hash already matches
            # the DB hash (nothing to import) — see the else branch.
            track = generate_track_dict(name)
            if track:
                stats['meta_imported'].add(name)
                #tracks_to_add.append(track)
                track_api([track], method='POST')
            else:
                stats['meta_hash_matched_db_hash'].add(name)
        except TrackNotProcesedException:
            log.debug('Unprocessed (no source_hash): %s', name)
            stats['meta_unprocessed'].add(name)
        except TrackMissingProcessedFiles as ex:
            # Processed files are gone: if the track already exists in
            # the DB (ex.id set) schedule its deletion, otherwise just
            # abort this track's import.
            if ex.id:
                log.warning('Missing (processed files) delete existing: %s',
                            name)
                track_ids_to_delete.append(ex.id)
                stats['missing_processed_deleted'].add(name)
            else:
                log.warning('Missing (processed files) abort import: %s', name)
                stats['missing_processed_aborted'].add(name)

    # Existing DB tracks with no corresponding processed source hash
    # are stale — schedule them for deletion too.
    for unneeded_track_id in existing_track_ids - processed_track_ids:
        log.warning('Remove: %s', unneeded_track_id)
        stats['db_removed'].append(existing_tracks[unneeded_track_id])
        track_ids_to_delete.append(unneeded_track_id)

    log.info(
        f"""{kwargs['api_host']} -> Add:{len(tracks_to_add)} Delete:{len(track_ids_to_delete)}"""
    )
    #track_api(tracks_to_add, method='POST')
    track_api(track_ids_to_delete, method='DELETE')

    # Re-fetch the API state after all mutations for the report.
    stats['db_end'] = track_api()['data']['tracks'].values()

    #assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']  # TODO! Reinstate this
    return stats
示例#4
0
 def __init__(self, meta_manager=None):  # , path_meta=None, path_processed=None, **kwargs
     """Snapshot the current DB track ids and on-disk processed files."""
     self.meta_manager = meta_manager #or MetaManager(path_meta)
     # NOTE(review): 'exisiting' is misspelled, but the attribute name
     # is public API — renaming it would break external readers.
     self.exisiting_track_ids = {row.id for row in DBSession.query(Track.id)}
     processed_root = self.meta_manager.processed_files_manager.path
     self.existing_files_lookup = {entry.relative for entry in fast_scan(processed_root)}
示例#5
0
 def files(self):
     """Scan self.path, keeping only entries whose name ends in '.json'.

     NOTE(review): the original comment hints at an unused mtime check
     (f.stat().st_mtime >= mtime) — confirm whether it should apply.
     """
     def _json_only(candidate):
         return candidate.name.endswith('.json')

     return fast_scan(self.path, search_filter=_json_only)
 def scan(self):
     """Walk self.path and return every file entry fast_scan finds."""
     return fast_scan(
         self.path)
示例#7
0
 def files(self):
     """Return all file entries discovered under self.path."""
     scan_root = self.path
     return fast_scan(scan_root)
 def files(self):
     """List every file below this object's path via fast_scan."""
     return fast_scan(
         self.path,
     )