Example #1
    def start(self, location, meta_type, folder, dryrun):
        dryrun = (dryrun == 'true')

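        # Resolve the root bundle directory for the selected location.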
        if location == 'Metadata':
            root_path = os.path.join(ModelSetting.get('base_path_metadata'), meta_type)
        elif location == 'Media':
            root_path = os.path.join(ModelSetting.get('base_path_media'), 'localhost')
       
        if folder == 'all':
            folders = os.listdir(root_path)
        else:
            folders = [folder]
        
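        # Connect directly to the Plex library database.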
        db_file = ModelSetting.get('base_path_db')
        con = sqlite3.connect(db_file)
        cur = con.cursor()

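        # Progress counters reported back to the caller while the scan runs.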
        status = {'is_working':'run', 'remove_count' : 0, 'remove_size':0, 'count':0, 'current':0}


        for folder in folders:
            folder_path = os.path.join(root_path, folder)
            if os.path.exists(folder_path) == False:
                continue

            bundle_list = os.listdir(folder_path)
            status['count'] += len(bundle_list)
            for bundle in bundle_list:
                try:
                    if ModelSetting.get_bool('clear_bundle_task_stop_flag'):
                        return 'stop'
                    time.sleep(0.05)
                    status['current'] += 1
                    data = {'folder':folder, 'bundle':bundle, 'status':status}
                    bundle_path = os.path.join(folder_path, bundle)
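                    # The bundle encodes the hash: the parent folder is its first character, the file name is the rest.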
                    hash_value = folder + bundle.split('.')[0]
                    if location == 'Metadata':
                        ce = con.execute('SELECT * FROM metadata_items WHERE hash = ?', (hash_value,))
                    else:
                        ce = con.execute('SELECT * FROM media_parts WHERE hash = ?', (hash_value,))
                    ce.row_factory = dict_factory
                    fetch = ce.fetchall()
                    if len(fetch) == 1:
                        if location == 'Metadata':
                            data['title'] = fetch[0]['title']
                        else:
                            data['file'] = fetch[0]['file']
                    elif len(fetch) == 0:
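                        # No DB row matches this bundle: it is orphaned, so count its size and remove it.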
                        tmp = ToolBaseFile.size(start_path=bundle_path)
                        data['remove'] = tmp
                        status['remove_size'] += tmp
                        status['remove_count'] += 1
                        if dryrun == False:
                            ToolBaseFile.rmtree(bundle_path)
                    if app.config['config']['use_celery']:
                        self.update_state(state='PROGRESS', meta=data)
                    else:
                        self.receive_from_task(data, celery=False)
                except Exception as e:
                    logger.error(f'Exception: {e}')
                    logger.error(traceback.format_exc())
        return 'wait'
Example #2
    def show_process(data, con, cur):

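        # Locate the show's metadata bundle from its hash and record its total size.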
        data['meta'] = {'remove': 0}
        data['meta']['metapath'] = os.path.join(
            ModelSetting.get('base_path_metadata'), 'TV Shows',
            data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")

        data['meta']['total'] = ToolBaseFile.size(
            start_path=data['meta']['metapath'])
        if data['command'] == 'start0':
            return
        combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents',
                                        '_combined', 'Info.xml')
        if os.path.exists(combined_xmlpath) == False:
            return
        data['use_filepath'] = []
        data['remove_filepath'] = []
        data['seasons'] = {}
        data['media'] = {'total': 0, 'remove': 0}
        ret = Task.xml_analysis(combined_xmlpath, data, data)
        if ret == False:
            logger.warning(f"{data['db']['title']} show analysis failed")
            return

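        # Walk the show's seasons (metadata_type 3) and episodes (metadata_type 4), analysing each _combined xml.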
        season_cs = con.execute(
            'SELECT * FROM metadata_items WHERE metadata_type = 3 and parent_id = ? ORDER BY "index"',
            (data['db']['id'], ))
        season_cs.row_factory = dict_factory
        for season in season_cs.fetchall():
            episode_cs = con.execute(
                'SELECT * FROM metadata_items WHERE metadata_type = 4 and parent_id = ? ORDER BY "index"',
                (season['id'], ))
            episode_cs.row_factory = dict_factory

            for episode in episode_cs.fetchall():
                season_index = season['index']
                episode_index = episode['index']
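                # index -1 means a date-based episode; key it by its air date instead.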
                if episode['index'] == -1:
                    if episode['available_at'] is not None:
                        episode_index = episode['available_at'].split(' ')[0]
                    else:
                        episode_index = episode[
                            'originally_available_at'].split(' ')[0]
                    #season_index = episode_index.split('-')[0]
                if season_index not in data['seasons']:
                    data['seasons'][season_index] = {'db': season}
                    combined_xmlpath = os.path.join(data['meta']['metapath'],
                                                    'Contents', '_combined',
                                                    'seasons',
                                                    f"{season_index}.xml")
                    ret = Task.xml_analysis(combined_xmlpath,
                                            data['seasons'][season_index],
                                            data)
                    if ret == False:
                        logger.warning(combined_xmlpath)
                        logger.warning(
                            f"{data['db']['title']} season analysis failed : season_index - {season_index}"
                        )
                        #logger.warning(combined_xmlpath)
                        #return
                    data['seasons'][season_index]['episodes'] = {}
                data['seasons'][season_index]['episodes'][episode_index] = {
                    'db': episode
                }
                combined_xmlpath = os.path.join(data['meta']['metapath'],
                                                'Contents', '_combined',
                                                'seasons', f"{season_index}",
                                                "episodes",
                                                f"{episode_index}.xml")
                ret = Task.xml_analysis(
                    combined_xmlpath,
                    data['seasons'][season_index]['episodes'][episode_index],
                    data,
                    is_episode=True)
                if ret == False:
                    logger.warning(combined_xmlpath)
                    #logger.warning(d(episode))
                    logger.warning(f"{data['db']['title']} episode analysis failed")
                    #del data['seasons'][season_index]['episodes'][episode_index]
                    #return

        #logger.warning(d(data['use_filepath']))
        #logger.warning(d(data))

        query = ""

        if data['command'] in ['start22', 'start3', 'start4']:
            # show -> http URLs
            sql = 'UPDATE metadata_items SET '
            if data['process']['poster']['url'] != '':
                sql += ' user_thumb_url = "{}", '.format(
                    data['process']['poster']['url'])
                try:
                    data['use_filepath'].remove(
                        data['process']['poster']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        data['process']['poster']['realpath'])
                except:
                    pass
            if data['process']['art']['url'] != '':
                sql += ' user_art_url = "{}", '.format(
                    data['process']['art']['url'])
                try:
                    data['use_filepath'].remove(
                        data['process']['art']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        data['process']['art']['realpath'])
                except:
                    pass
            if data['process']['banner']['url'] != '':
                sql += ' user_banner_url = "{}", '.format(
                    data['process']['banner']['url'])
                try:
                    data['use_filepath'].remove(
                        data['process']['banner']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        data['process']['banner']['realpath'])
                except:
                    pass
            if data['process']['theme']['url'] != '':
                sql += ' user_music_url = "{}", '.format(
                    data['process']['theme']['url'])

            if sql != 'UPDATE metadata_items SET ':
                sql = sql.strip().rstrip(',')
                sql += '  WHERE id = {} ;\n'.format(data['db']['id'])
                query += sql

            for season_index, season in data['seasons'].items():
                if 'process' not in season:
                    continue
                sql = 'UPDATE metadata_items SET '
                if season['process']['poster']['url'] != '':
                    sql += ' user_thumb_url = "{}", '.format(
                        season['process']['poster']['url'])
                    try:
                        data['use_filepath'].remove(
                            season['process']['poster']['localpath'])
                    except:
                        pass
                    try:
                        data['use_filepath'].remove(
                            season['process']['poster']['realpath'])
                    except:
                        pass
                if season['process']['art']['url'] != '':
                    sql += ' user_art_url = "{}", '.format(
                        season['process']['art']['url'])
                    try:
                        data['use_filepath'].remove(
                            season['process']['art']['localpath'])
                    except:
                        pass
                    try:
                        data['use_filepath'].remove(
                            season['process']['art']['realpath'])
                    except:
                        pass
                if season['process']['banner']['url'] != '':
                    sql += ' user_banner_url = "{}", '.format(
                        season['process']['banner']['url'])
                    try:
                        data['use_filepath'].remove(
                            season['process']['banner']['localpath'])
                    except:
                        pass
                    try:
                        data['use_filepath'].remove(
                            season['process']['banner']['realpath'])
                    except:
                        pass
                if sql != 'UPDATE metadata_items SET ':
                    sql = sql.strip().rstrip(',')
                    sql += '  WHERE id = {} ;\n'.format(season['db']['id'])
                    query += sql

        if data['command'] in ['start21', 'start22', 'start3', 'start4']:

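            # Collect each episode's media-part thumbnails, then rewrite user_thumb_url in the database.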
            for season_index, season in data['seasons'].items():
                for episode_index, episode in season['episodes'].items():
                    #logger.warning(episode['process']['thumb'])
                    media_item_cs = con.execute(
                        'SELECT * FROM media_items WHERE metadata_item_id = ? ORDER BY id',
                        (episode['db']['id'], ))
                    media_item_cs.row_factory = dict_factory
                    episode['media_list'] = []

                    for media_item in media_item_cs.fetchall():
                        media_part_cs = con.execute(
                            'SELECT * FROM media_parts WHERE media_item_id = ? ORDER BY id',
                            (media_item['id'], ))
                        media_part_cs.row_factory = dict_factory
                        for media_part in media_part_cs.fetchall():
                            media_hash = media_part['hash']
                            #logger.warning(f"  file : {media_part['file']} {media_hash}")
                            mediapath = os.path.join(
                                ModelSetting.get('base_path_media'),
                                'localhost', media_hash[0],
                                f"{media_hash[1:]}.bundle", 'Contents',
                                'Thumbnails', 'thumb1.jpg')
                            if os.path.exists(mediapath):
                                #logger.warning("media thumbnail exists")
                                episode['media_list'].append(mediapath)
                                data['media']['total'] += os.path.getsize(mediapath)
                                #data['remove_size'] += os.stat(mediapath).st_size
                                #os.remove(mediapath)
                                #media://0/10c056239442666d0931c90996ff69673861d95.bundle/Contents/Thumbnails/thumb1.jpg
                    # 2021-11-01
                    # Step 4: upload the media file to Discord and replace the url with it.
                    #
                    if data['command'] == 'start4' and episode['process'][
                            'thumb']['db_type'] == 'media':
                        localpath = os.path.join(
                            ModelSetting.get('base_path_media'), 'localhost',
                            episode['process']['thumb']['db'].replace(
                                'media://', ''))
                        if localpath[0] != '/':
                            localpath = localpath.replace('/', '\\')
                        if os.path.exists(localpath):
                            if data['dryrun'] == False:
                                discord_url = ToolExpandDiscord.discord_proxy_image_localfile(
                                    localpath)
                                if discord_url is not None:
                                    episode['process']['thumb'][
                                        'url'] = discord_url
                                    logger.warning(discord_url)
                        else:
                            #logger.warning(episode)
                            logger.warning(f"no thumbnail 1: {episode['db']['id']}")
                            PlexWebHandle.analyze_by_id(episode['db']['id'])
                    if data['command'] == 'start4' and episode['process'][
                            'thumb']['db'] == '':
                        logger.warning(f"no thumbnail, analyze 2: {episode['db']['id']}")
                        PlexWebHandle.analyze_by_id(episode['db']['id'])

                    if episode['process']['thumb']['url'] != '':
                        query += f'UPDATE metadata_items SET user_thumb_url = "{episode["process"]["thumb"]["url"]}" WHERE id = {episode["db"]["id"]};\n'
                        try:
                            data['use_filepath'].remove(
                                episode['process']['thumb']['localpath'])
                        except:
                            pass
                        try:
                            data['use_filepath'].remove(
                                episode['process']['thumb']['realpath'])
                        except:
                            pass
                        if data['command'] in ['start3', 'start4']:
                            for mediafilepath in episode['media_list']:
                                if os.path.exists(mediafilepath):
                                    data['media']['remove'] += os.path.getsize(mediafilepath)
                                    if data['dryrun'] == False:
                                        os.remove(mediafilepath)
                    elif episode['process']['thumb']['db'] == '':
                        if len(episode['media_list']) > 0:
                            tmp = f"media://{episode['media_list'][0].split('localhost/')[1]}"
                            query += f'UPDATE metadata_items SET user_thumb_url = "{tmp}" WHERE id = {episode["db"]["id"]};\n'

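                    # Prune directories that became empty under the media bundles' Contents folders.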
                    if data['dryrun'] == False and data['command'] in [
                            'start3', 'start4'
                    ]:
                        for mediafilepath in episode['media_list']:
                            content_folder = os.path.dirname(
                                os.path.dirname(mediafilepath))
                            for base, folders, files in os.walk(
                                    content_folder):
                                if not folders and not files:
                                    os.removedirs(base)

        #logger.error(data['command'])
        #logger.error(query)
        if query != '' and data['dryrun'] == False:
            PlexDBHandle.execute_query(query)

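        # Delete every file in the metadata bundle that the analysis above did not mark as in use.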
        #logger.error(data['meta']['remove'] )
        for base, folders, files in os.walk(data['meta']['metapath']):
            for f in files:
                data['file_count'] += 1
                filepath = os.path.join(base, f)
                #if filepath.find('themes') == -1:
                #    continue
                if filepath not in data['use_filepath']:
                    if os.path.exists(filepath):
                        data['remove_count'] += 1
                        if filepath not in data['remove_filepath']:
                            data['remove_filepath'].append(filepath)
                        if os.path.islink(filepath) == False:
                            data['meta']['remove'] += os.path.getsize(filepath)
                        #logger.error(filepath)
                        if data['dryrun'] == False:
                            os.remove(filepath)

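        # Prune directories left empty by the removals above.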
        for base, folders, files in os.walk(data['meta']['metapath']):
            if not folders and not files:
                os.removedirs(base)

        if data['command'] == 'start1':
            return
Example #3
    def analysis(data, con, cur):
        #logger.warning(f"analysis start : {data['db']['title']}")

        Task.thumb_process(data)

        if data['command'] == 'start1':
            return
        
        # Step 2: set the URL for each TAG, keep only the xml files and remove the rest
        if data['dryrun'] == False:
            #sql = 'UPDATE metadata_items SET user_thumb_url = "{}", user_art_url = "{}", user_banner_url = "{}" WHERE id = {} ;'.format(
            #    data['process']['poster']['url'],
            #   data['process']['art']['url'],
            #    data['process']['banner']['url'],
            #    data['db']['id']
            #)
            if 'poster' not in data['process']:
                return
            sql = 'UPDATE metadata_items SET '
            if data['process']['poster']['url'] != '':
                sql += ' user_thumb_url = "{}", '.format(data['process']['poster']['url'])
            if data['process']['art']['url'] != '':
                sql += ' user_art_url = "{}", '.format(data['process']['art']['url'])
            if data['process']['banner']['url'] != '':
                sql += ' user_banner_url = "{}", '.format(data['process']['banner']['url'])
            if sql != 'UPDATE metadata_items SET ':
                sql = sql.strip().rstrip(',')
                sql += '  WHERE id = {} ;'.format(data['db']['id'])
                sql_filepath = os.path.join(path_data, 'tmp', f"movie_{data['db']['id']}.sql")
                PlexDBHandle.execute_query(sql, sql_filepath=sql_filepath)
        
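        # Trim Contents: remove artwork folders under _combined and delete every folder that is not _combined.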
        c_metapath = os.path.join(data['meta']['metapath'], 'Contents')  
        if os.path.exists(c_metapath):
                          
            for f in os.listdir(c_metapath):
                _path = os.path.join(c_metapath, f)
                if f == '_combined':
                    for tag, value in TAG.items():
                        tag_path = os.path.join(_path, value[1])
                        if os.path.exists(tag_path):
                            if data['dryrun'] == False:
                                data['meta']['remove'] += ToolBaseFile.size(start_path=tag_path)
                                ToolBaseFile.rmtree(tag_path)
                            
                    tmp = os.path.join(_path, 'extras')
                    if os.path.exists(tmp) and len(os.listdir(tmp)) == 0:
                        if data['dryrun'] == False:
                            ToolBaseFile.rmtree(tmp)
                    tmp = os.path.join(_path, 'extras.xml')
                    if os.path.exists(tmp):
                        data['meta']['remove'] += os.path.getsize(tmp)
                        if data['dryrun'] == False:
                            os.remove(tmp)
                else:
                    tmp = ToolBaseFile.size(start_path=_path)
                    if data['dryrun'] == False:
                        data['meta']['remove'] += tmp
                        ToolBaseFile.rmtree(_path)
                    else:
                        if f == '_stored':
                            data['meta']['remove'] += tmp

        if data['command'] == 'start2':
            return

        

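        # Step 3: for each media part bundle, delete generated thumb/art images the DB no longer points at.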
        media_ce = con.execute('SELECT user_thumb_url, user_art_url, media_parts.file, media_parts.hash FROM metadata_items, media_items, media_parts WHERE metadata_items.id = media_items.metadata_item_id AND media_items.id = media_parts.media_item_id AND metadata_items.id = ?;', (data['db']['id'],))
        media_ce.row_factory = dict_factory
        data['media'] = {'total':0, 'remove':0}

        for item in media_ce.fetchall():
            #logger.warning(d(item))
            if item['hash'] == '':
                continue
            mediapath = os.path.join(ModelSetting.get('base_path_media'), 'localhost', item['hash'][0], f"{item['hash'][1:]}.bundle")
            if os.path.exists(mediapath) == False:
                continue
            data['media']['total'] += ToolBaseFile.size(start_path=mediapath)
            if item['user_thumb_url'].startswith('media') == False:
                img = os.path.join(mediapath, 'Contents', 'Thumbnails', 'thumb1.jpg')
                if os.path.exists(img):
                    data['media']['remove'] += os.path.getsize(img)
                    if data['dryrun'] == False:
                        os.remove(img)
            if item['user_art_url'].startswith('media') == False:
                img = os.path.join(mediapath, 'Contents', 'Art', 'art1.jpg')
                if os.path.exists(img):
                    data['media']['remove'] += os.path.getsize(img)
                    if data['dryrun'] == False:
                        os.remove(img)
Example #4
    def artist_process(data, con, cur):

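        # Locate the artist's metadata bundle from its hash and record its total size.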
        data['meta'] = {'remove': 0}
        data['meta']['metapath'] = os.path.join(
            ModelSetting.get('base_path_metadata'), 'Artists',
            data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")

        data['meta']['total'] = ToolBaseFile.size(
            start_path=data['meta']['metapath'])
        if data['command'] == 'start0':
            return
        combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents',
                                        '_combined', 'Info.xml')
        if os.path.exists(combined_xmlpath) == False:
            return
        data['use_filepath'] = []
        data['remove_filepath'] = []
        data['albums'] = {}
        ret = Task.xml_analysis(combined_xmlpath, data)
        if ret == False:
            logger.warning(f"{data['db']['title']} artist analysis failed")
            return

        # 2022-05-11: every album has index 1.
        # Tracks are in order.
        #album_cs = con.execute('SELECT * FROM metadata_items WHERE metadata_type = 9 and parent_id = ? ORDER BY "index"', (data['db']['id'],))
        album_cs = con.execute(
            'SELECT * FROM metadata_items WHERE metadata_type = 9 and parent_id = ?',
            (data['db']['id'], ))
        album_cs.row_factory = dict_factory
        data['albums'] = []
        for album in album_cs.fetchall():
            #album_index = album['index']
            #logger.warning(album_index)

            #if album_index not in data['albums']:

            #data['albums'][album_index] = {'db':album, 'use_filepath':[], 'remove_filepath':[]}
            album_data = {
                'db': album,
                'use_filepath': [],
                'remove_filepath': []
            }
            album_data['meta'] = {'remove': 0}
            album_data['meta']['metapath'] = os.path.join(
                ModelSetting.get('base_path_metadata'), 'Albums',
                album_data['db']['hash'][0],
                f"{album_data['db']['hash'][1:]}.bundle")
            data['meta']['total'] += ToolBaseFile.size(
                start_path=album_data['meta']['metapath'])

            combined_xmlpath = os.path.join(album_data['meta']['metapath'],
                                            'Contents', '_combined',
                                            'Info.xml')

            ret = Task.xml_analysis(combined_xmlpath, album_data)
            if ret == False:
                logger.warning(combined_xmlpath)
                logger.warning(f"{album_data['db']['title']} album analysis failed")
            else:
                data['albums'].append(album_data)

        query = ""

        #logger.debug(d(data))

        if data['command'] == 'start2':
            # show -> http URLs
            sql = 'UPDATE metadata_items SET '
            if data['process']['poster']['url'] != '':
                sql += ' user_thumb_url = "{}", '.format(
                    data['process']['poster']['url'])
                try:
                    data['use_filepath'].remove(
                        data['process']['poster']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        data['process']['poster']['realpath'])
                except:
                    pass
            if data['process']['art']['url'] != '':
                sql += ' user_art_url = "{}", '.format(
                    data['process']['art']['url'])
                try:
                    data['use_filepath'].remove(
                        data['process']['art']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        data['process']['art']['realpath'])
                except:
                    pass

            if sql != 'UPDATE metadata_items SET ':
                sql = sql.strip().rstrip(',')
                sql += '  WHERE id = {} ;\n'.format(data['db']['id'])
                query += sql

            for album in data['albums']:
                if 'process' not in album:
                    continue
                sql = 'UPDATE metadata_items SET '
                # Files created by localmedia already have the url set.
                #if album['process']['poster']['url'] != '':
                # 2022-05-11: album posters generated from tags are uploaded to Discord and set.
                if album['process']['poster']['url'] != '' and album[
                        'process']['poster']['url'].startswith(
                            'http') == False:
                    if 'localpath' in album['process']['poster'] and album[
                            'process']['poster']['localpath'] != '':
                        localpath = album['process']['poster']['localpath']
                        if localpath[0] != '/':
                            localpath = localpath.replace('/', '\\')
                        if os.path.exists(localpath):
                            if data['dryrun'] == False:
                                discord_url = ToolExpandDiscord.discord_proxy_image_localfile(
                                    localpath)
                                if discord_url is not None:
                                    album['process']['poster'][
                                        'url'] = discord_url
                                    logger.warning(discord_url)

                if album['process']['poster']['url'].startswith('http'):
                    sql += ' user_thumb_url = "{}", '.format(
                        album['process']['poster']['url'])
                    try:
                        data['use_filepath'].remove(
                            album['process']['poster']['localpath'])
                    except:
                        pass
                    try:
                        data['use_filepath'].remove(
                            album['process']['poster']['realpath'])
                    except:
                        pass

                if album['process']['art']['url'] != '':
                    sql += ' user_art_url = "{}", '.format(
                        album['process']['art']['url'])
                    try:
                        data['use_filepath'].remove(
                            album['process']['art']['localpath'])
                    except:
                        pass
                    try:
                        data['use_filepath'].remove(
                            album['process']['art']['realpath'])
                    except:
                        pass
                if sql != 'UPDATE metadata_items SET ':
                    sql = sql.strip().rstrip(',')
                    sql += '  WHERE id = {} ;\n'.format(album['db']['id'])
                    query += sql

        #logger.error(data['command'])
        #logger.error(query)
        if query != '' and data['dryrun'] == False:
            PlexDBHandle.execute_query(query)

        #logger.warning(data['meta']['remove'] )

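        # Remove unreferenced files from the artist bundle, then from each album bundle, and prune empty directories.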
        for base, folders, files in os.walk(data['meta']['metapath']):
            for f in files:
                data['file_count'] += 1
                filepath = os.path.join(base, f)
                if filepath not in data['use_filepath']:
                    if os.path.islink(filepath) and os.path.exists(
                            filepath) == False:
                        os.remove(filepath)
                    elif os.path.exists(filepath):
                        data['remove_count'] += 1
                        if filepath not in data['remove_filepath']:
                            data['remove_filepath'].append(filepath)
                        if os.path.islink(filepath) == False:
                            data['meta']['remove'] += os.path.getsize(filepath)
                        if data['dryrun'] == False:
                            os.remove(filepath)

        for album in data['albums']:
            for base, folders, files in os.walk(album['meta']['metapath']):
                for f in files:
                    #logger.warning(data['file_count'])
                    #logger.warning(f)
                    data['file_count'] += 1
                    filepath = os.path.join(base, f)
                    if filepath not in album['use_filepath']:
                        if os.path.islink(filepath) and os.path.exists(
                                filepath) == False:
                            os.remove(filepath)
                        elif os.path.exists(filepath):
                            data['remove_count'] += 1
                            if filepath not in album['remove_filepath']:
                                data['remove_filepath'].append(filepath)
                            if os.path.islink(filepath) == False:
                                data['meta']['remove'] += os.path.getsize(
                                    filepath)
                            if data['dryrun'] == False:
                                os.remove(filepath)
                    else:
                        data['use_filepath'].append(filepath)
        for base, folders, files in os.walk(data['meta']['metapath']):
            if not folders and not files:
                os.removedirs(base)
        for album in data['albums']:
            for base, folders, files in os.walk(album['meta']['metapath']):
                if not folders and not files:
                    os.removedirs(base)
Example #5
    def thumb_process(data):
        data['meta'] = {'remove':0}
        #logger.warning(data['db'])
        if data['db']['metadata_type'] == 1:
            data['meta']['metapath'] = os.path.join(ModelSetting.get('base_path_metadata'), 'Movies', data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
            combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents', '_combined', 'Info.xml')
        elif data['db']['metadata_type'] == 2:
            data['meta']['metapath'] = os.path.join(ModelSetting.get('base_path_metadata'), 'TV Shows', data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
            combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents', '_combined', 'Info.xml')
        else:
            # metadata_type other than movie/show is not handled here.
            return

        data['meta']['total'] = ToolBaseFile.size(start_path=data['meta']['metapath'])
        if data['command'] == 'start0':
            return
        if os.path.exists(combined_xmlpath) == False:
            return

        Task.xml_analysis(combined_xmlpath, data)
    
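        # For each TAG, record the URL stored in the DB and match it to a url from the _combined Info.xml.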
        data['process'] = {}
        for tag, value in TAG.items():
            data['process'][tag] = {
                'db' : data['db'][f'user_{value[0]}_url'],
                'db_type' : '', 
                'url' : '',
                'filename' : '',
                'location' : '',
            }

        for tag, value in TAG.items():
            if data['process'][tag]['db'] != '':
                data['process'][tag]['db_type'] = data['process'][tag]['db'].split('//')[0]
                data['process'][tag]['filename'] = data['process'][tag]['db'].split('/')[-1]
                for item in data['info'][value[1]]:
                    if data['process'][tag]['filename'] == item['filename']:
                        data['process'][tag]['url'] = item['url']
                        break

        #logger.error(d(data['process']))
        # Step 1.
        # In _combined ... _stored
        
        not_remove_filelist = []
        c_metapath = os.path.join(data['meta']['metapath'], 'Contents')
        if os.path.exists(c_metapath):
            for f in os.listdir(c_metapath):
                _path = os.path.join(c_metapath, f)
                # On Windows the data sits directly in _combined, so always delete?
                if f == '_stored':
                    tmp = ToolBaseFile.size(start_path=_path)
                    data['meta']['stored'] = tmp
                    if platform.system() == 'Windows':
                        data['meta']['remove'] += tmp
                        if data['dryrun'] == False:
                            ToolBaseFile.rmtree(_path)
                elif f == '_combined':
                    for tag, value in TAG.items():
                        tag_path = os.path.join(_path, value[1])
                        #logger.warning(tag_path)
                        if os.path.exists(tag_path) == False:
                            continue
                        for img_file in os.listdir(tag_path):
                            img_path = os.path.join(tag_path, img_file)
                            if os.path.islink(img_path):
                                if os.path.realpath(img_path).find('_stored') == -1:
                                    # Not a link to a stored file, so delete it.
                                    # The url saved in the db sometimes points at an agent folder instead of _stored.
                                    #logger.warning(img_file)
                                    if img_file == data['process'][tag]['filename']:
                                        logger.error(data['process'][tag]['filename'])
                                        not_remove_filelist.append(data['process'][tag]['filename'])
                                        continue
                                    if data['dryrun'] == False:# and os.path.exists(img_path) == True:
                                        os.remove(img_path)
                            else: # Windows
                                if img_file != data['process'][tag]['filename']:
                                    # Not the stored file, so delete it.
                                    data['meta']['remove'] += os.path.getsize(img_path)
                                    if data['dryrun'] == False and os.path.exists(img_path) == True:
                                        os.remove(img_path)
                    
            #if len(not_remove_filelist) == 0:
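            # Remove every folder under Contents except _stored and _combined.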
            for f in os.listdir(c_metapath):
                _path = os.path.join(c_metapath, f)
                if f == '_stored' or f == '_combined':
                    continue
                tmp = ToolBaseFile.size(start_path=_path)
                data['meta']['remove'] += tmp
                if data['dryrun'] == False:
                    ToolBaseFile.rmtree(_path)
        #else:
        if not_remove_filelist:
            logger.error(not_remove_filelist)