Example #1
def main(args):
    global SHOWS, EXCEPTIONS
    unlock()
    parse_args(args)
    data = load_json(environ[CONF_FILE])
    save_json(data, 'data/save_tree.json')
    SHOWS = load_shows()
    EXCEPTIONS = load_json(EXCEPTIONS_FILE)
    if 'title_match' not in EXCEPTIONS:
        EXCEPTIONS['title_match'] = []
    if SHOWS is None:
        save_json({'shows_locked': True}, environ[OUT_FILE])
        print('shows locked')
        return
    queue = syncer.QUEUE
    queue_errors(data['errors'], queue)
    load_all(data['shows'], queue)
    save_json(EXCEPTIONS, EXCEPTIONS_FILE)
    save_queue(queue)
    report = [file.get_report() for file in queue]
    print(dumps(report, indent=4, sort_keys=True))
    save_shows(SHOWS)

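    # local import, presumably to avoid a circular dependency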
    import file_tree
    file_tree.main(out_file=environ[OUT_FILE])
Example #2
def main(args):
    global SHOWS
    parse_args(args)
    data = load_json(environ[CONF_FILE])
    save_json(data, 'data/update_save.json')
    SHOWS = load_shows()

    if SHOWS is None:
        save_json({'error': 'Shows locked'}, environ[OUT_FILE])
        print('shows locked')
        return
    for s in data:
        show = SHOWS[s['series_name_unchanged']]
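        # debug output for a single hard-coded show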
        if s[SERIES_NAME] == '13 Reasons Why':
            print(s)
        if s['changed']:
            show.name_needed = s[NAME_NEEDED]
            show.status = s[STATUS]
            show.premiere = s[PREMIERE]
            show.final = s[FINAL]
            show.tvdb_id = s[TVDB_ID]
            show.genre1 = s['genre1']
            show.genre2 = s['genre2']

        if s['series_name_unchanged'] != s[SERIES_NAME]:
            update_location(show, s[SERIES_NAME])
            SHOWS.pop(s['series_name_unchanged'], None)
            show.series_name = s[SERIES_NAME]
            SHOWS[show.series_name] = show

    update_prep.SHOWS = SHOWS
    save_json(update_prep.prep_data(), environ[OUT_FILE])
    save_shows(SHOWS)
Example #3
def main(args):
    global SHOWS
    SHOWS = load_shows(read_only=True)
    parse_args(args)
    update = prep_data()

    save_json(update, environ[OUT_FILE])
Example #4
def main(args='', date=''):
    global SHOWS
    if args:
        parse_args(args)
        date = load_json(os.environ[CONF_FILE])['date']

    SHOWS = load_shows()
    if SHOWS is None:
        save_json({'error': 'Shows locked'}, os.environ[OUT_FILE])
        print('shows locked')
        return
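    # restore: delete the current asset files (except lock/test files), then copy the files from the chosen backup folder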
    folder = os.path.join(BACKUP_DIR, date)
    file_list = os.listdir(ASSETS)
    for f in file_list:
        if 'lock' in f or 'test' in f:
            continue
        file = os.path.join(ASSETS, f)
        if os.path.isfile(file):
            try:
                os.remove(file)
                wait_on_delete(file)
            except Exception:
                pass
    file_list = os.listdir(folder)
    for f in file_list:
        try:
            copyfile(os.path.join(folder, f), os.path.join(ASSETS, f))
        except OSError:
            pass

    unlock()
    if args:
        save_json({'done': True}, os.environ[OUT_FILE])
Example #5
def main(args):
    parse_args(args)
    conf = load_json(environ[CONF_FILE])
    # conf = {'series_name': 'Doctor Who', 'level': 'title'}
    shows = load_shows(read_only=True)
    show: Series = shows[conf['series_name']]
    episodes, total = get_all_episodes(show, conf['level'])
    save_json({'episodes': episodes, 'total': total}, environ[OUT_FILE])
Example #6
def main(args):
    parse_args(args)
    data = load_json(os.environ[CONF_FILE])
    save_json(data, 'data/batch_match.json')
    file_list = []
    for u in data['units']:
        if not u['select']:
            continue
        location = os.path.join(FILE_DIR, u['text'])
        if os.path.isfile(location):
            file = prep_file(location, '')
            if file:
                file_list.append(file)
        else:
            for root, dirs, files in os.walk(location):
                for name in files:
                    file = prep_file(name, root)
                    if file:
                        file_list.append(file)

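    # apply the configured regex patterns to extract season/episode numbers from the file paths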
    for reg in data['regex']:
        if not reg['regex']:
            continue
        pattern = re.compile(reg['regex'])
        for file in file_list:
            match = re.findall(pattern, file.old_location)
            if match:
                try:
                    file.s_nr = int(match[0][reg['s_start']:reg['s_end']])
                    file.e_nr = int(match[0][reg['e_start']:reg['e_end']])
                except (IndexError, ValueError):
                    continue
    output = {'files': []}
    for f in file_list:
        output['files'].append({
            'location': f.old_location.split(os.sep, 2 + MAC_OFFSET)[2 + MAC_OFFSET],
            'title': '',
            'title2': '',
            'title3': '',
            's_nr': f.s_nr,
            'e_nr': f.e_nr,
            'episode_option': 'Single',
            'sub': f.subs
        })

    output.update({SERIES_NAME: '',
                   TVDB_ID: '',
                   PREMIERE: '',
                   FINAL: '',
                   STATUS: '',
                   'anime': False,
                   NAME_NEEDED: True,
                   })

    save_json(output, os.environ[OUT_FILE])
    return output
Example #7
def main(args=None, out_file='data/tree_file_comb.json'):
    global SHOWS
    SHOWS = load_shows(read_only=True)
    if args:
        parse_args(args)
        out_file = environ[OUT_FILE]

    tree_file = load_all_parallel()

    save_json(tree_file, out_file)
    save_shows(SHOWS)
    return tree_file
Example #8
def main(args, out_file='data/load_logs.json'):
    if args:
        parse_args(args)
        out_file = environ[OUT_FILE]
    logs = [{
        'file': f[:-5],
        'data': load_json(path.join(LOG_DIR, f)),
        'opened': True
    } for f in listdir(LOG_DIR) if f.endswith('.json')]

    # for l in logs:
    #     if l['file'] == 'dsyncerlog':
    #         l['data'] = list(l['data'].values())
    print(logs)

    save_json(logs, out_file)
Example #9
def main(args):
    shows = load_shows(read_only=True)
    parse_args(args)

    shows = sorted(shows.values(), key=get_series_name)
    with multiprocessing.Pool(8) as p:
        missing_files = p.map(get_show_data, shows)
    missing_files = [item for sublist in missing_files for item in sublist]
    for i, entry in enumerate(missing_files):
        entry['key'] = i

    save_json({
        'files': missing_files,
        'info': 'No Missing Files'
    }, environ[OUT_FILE])
Example #10
def main(args):
    global SHOWS
    io_utlis.parse_args(args)
    data = io_utlis.load_json(environ[CONF_FILE])
    io_utlis.save_json(data, 'data/save_infofiles.json')

    exceptions = io_utlis.load_json(EXCEPTIONS_FILE)
    data = list(data.values())
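    # drop items flagged for deletion from the exceptions structure (values are either origin-keyed dicts or plain lists)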
    for d in data:
        for i in d['items']:
            if i['delete']:
                if isinstance(exceptions[d['key']], dict):
                    exceptions[d['key']][i['origin']].remove(i['text'])
                    if not exceptions[d['key']][i['origin']]:
                        del exceptions[d['key']][i['origin']]
                else:
                    exceptions[d['key']].remove(i['text'])
    io_utlis.save_json({'done': True}, environ[OUT_FILE])
    io_utlis.save_json(exceptions, EXCEPTIONS_FILE)
Example #11
def main(args):

    order = {
        'double': {
            'title': 'DoubleTitle',
            'order': 5
        },
        'lower': {
            'title': 'lowerCase',
            'order': 1
        },
        'upper': {
            'title': 'UpperCase',
            'order': 2
        },
        'lower_general': {
            'title': 'lowerGeneral',
            'order': 3
        },
        'part': {
            'title': 'Part',
            'order': 4
        },
        'title_match': {
            'title': 'TitleMatch',
            'order': 6
        }
    }
    parse_args(args)
    exceptions = load_json(EXCEPTIONS_FILE)

    files = [structure(k, v) for k, v in exceptions.items()]
    d = {}
    for f in files:
        k = next(iter(f))
        d[order[k]['order']] = {
            'items': f[k],
            'open': False,
            'title': order[k]['title'],
            'key': k
        }

    save_json(d, environ[OUT_FILE])
Example #12
def main(args):
    global SHOWS, DICTIONARY
    SHOWS = load_shows(read_only=True)
    parse_args(args)

    DICTIONARY = load_json(DICT_FILE)
    load_all()
    new_dict = sorted(set(NEW_DICT))
    if new_dict:
        save_json(new_dict, DICT_FILE)

    unique = sorted(UNIQUE.items(), key=sort_unique, reverse=True)
    unique_title = sorted(UNIQUE_TITLE.items(), key=sort_unique, reverse=True)
    unique_words_in_title = sorted(UNIQUE_WORDS_IN_TITLE.items(),
                                   key=sort_unique_words_in_title,
                                   reverse=True)
    print(unique)
    save_json({
        'words': WORDS,
        'info': 'Dictionary is up to date'
    }, environ[OUT_FILE])
Example #13
def main(args):
    global SHOWS
    parse_args(args)
    data = load_json(environ[CONF_FILE])
    save_json(data, 'data/save_words.json')
    SHOWS = load_shows()
    dictionary = load_json(DICT_FILE)

    if SHOWS is None:
        save_json({'shows_locked': True}, environ[OUT_FILE])
        print('shows locked')
        return

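    # add new words to the dictionary and rename files whose words were changed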
    for file in data['words']:
        changes = []
        for w in file['words']:
            if w['add'] and w['word'] not in dictionary:
                dictionary.append(w['word'])
            if w['changed']:
                changes.append([w['index'], w['word']])

        if changes:
            e = Episode(location=file['location'])
            old = file['file'].rsplit('.', 1)
            words = old[0].split(' ')
            for c in changes:
                words[c[0]] = c[1]
            words = list(filter(None, words))
            file['file'] = ' '.join(words)
            new_location = file['location'].replace(old[0], file['file'])
            try:
                move(file['location'], new_location)
                SHOWS[e.series_name].seasons[e.s_nr].episodes[
                    e.e_nr].set_location(new_location)
            except Exception as err:
                print('rename', err)
    if dictionary:
        save_json(dictionary, DICT_FILE)
    save_json({'done': True}, environ[OUT_FILE])
    save_shows(SHOWS)
Example #14
def main(args):
    global SHOWS

    parse_args(args)

    data = load_json(os.environ[CONF_FILE])
    save_json(data, 'data/batch_sync.json')
    SHOWS = load_shows()
    if SHOWS is None:
        save_json({'error': 'Shows locked'}, os.environ[OUT_FILE])
        print('shows locked')
        return
    show = prep(data)

    if show:
        sync_queue(show)
        update_summary()
        clean_up()
        SHOWS[show.series_name] = show
    save_json(REPORT, os.environ[OUT_FILE])
    save_shows(SHOWS)
    return REPORT
Example #15
def main(args):
    parse_args(args)
    backups_list = os.listdir(BACKUP_DIR)
    backups = {'backups': {}, 'selected': ''}
    for b in backups_list:
        b_split = b.split('_')
        timestamp = f'{b_split[0]} {b_split[1][:2]}:{b_split[1][2:4]}:{b_split[1][4:]} '
        folder = os.path.join(BACKUP_DIR, b)
        j_list = load_json_files(folder)
        size_kb = int(sum(os.path.getsize(f) for f in
                          (os.path.join(folder, d) for d in os.listdir(folder))
                          if os.path.isfile(f) and 'test' not in f) / 1024)
        backups['backups'][b] = {
            'key': b, 'content': j_list,
            'text': f'{timestamp}{size_kb} KB'}
        backups['selected'] = b
    current_files = load_json_files(ASSETS)
    current_size_kb = int(sum(os.path.getsize(f) for f in
                              (os.path.join(ASSETS, d) for d in os.listdir(ASSETS))
                              if os.path.isfile(f) and 'test' not in f) / 1024)
    backups['current'] = {'size': f'{current_size_kb} KB',
                          'content': current_files}
    save_json(backups, os.environ[OUT_FILE])
Example #16
def main(args):
    parse_args(args)
    unlock()
    config = load_json(environ[CONF_FILE])
    start = time()
    print('running', SERIES_DIR)
    if not backup.main():
        print('backup failed')
        exit(-2)
    else:
        print('backup successful')
    load_shows(reload=True)
    shows = load_files(SERIES_DIR)
    shows.update(load_files(ANIME_DIR))
    if config['reload_metadata']:
        shows = reload_metadata(shows)
    else:
        add_metadata(shows)
    save_shows(shows)

    print(time() - start)
    return shows
Example #17
def main(args=''):
    global SHOWS
    SHOWS = load_shows()

    if SHOWS is None:
        if args:
            save_json({'error': 'Shows locked'}, os.environ[OUT_FILE])
        print('shows locked')
        return False

    if args:
        parse_args(args)

    date = strftime("%Y%m%d_%H%M%S", gmtime())
    print(date)
    folder = os.path.join(BACKUP_DIR, date)
    os.makedirs(folder)
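    # copy the current asset files (except lock/test files) into the new backup folder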
    file_list = os.listdir(ASSETS)
    for f in file_list:
        if 'lock' in f or 'test' in f:
            continue
        file = os.path.join(ASSETS, f)
        if os.path.isfile(file):
            try:
                copyfile(file, os.path.join(folder, f))
            except OSError:
                pass

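    # delete the oldest backup folder once more than 10 exist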
    folder_list = sorted(os.listdir(BACKUP_DIR))
    if len(folder_list) > 10:
        recursive_delete(os.path.join(BACKUP_DIR, folder_list[0]))

    unlock()
    if args:
        save_json({'done': True}, os.environ[OUT_FILE])
    return date
Example #18
def main(args):
    global SHOWS
    parse_args(args)
    data = load_json(os.environ[CONF_FILE])
    save_json(data, 'data/syncer.json')
    SHOWS = load_shows()

    if SHOWS is None:
        save_json({'shows_locked': True}, os.environ[OUT_FILE])
        print('shows locked')
        return
    save_json(data, 'data/sync')
    files = []
    for entry in data:
        f = File(old_location=os.path.join(FILE_DIR, entry['location']),
                 sync=entry['sync'],
                 s_nr=entry['s_nr'],
                 e_nr=entry['e_nr'],
                 series_name=entry[SERIES_NAME],
                 title=entry['title'],
                 title2=entry['title2'],
                 title3=entry['title3'],
                 episode_option=entry['e_o']['s'],
                 override=entry['override'],
                 delete=entry['delete'],
                 subs=entry['subs'],
                 type_option=entry['t_o']['s'],
                 status=entry['status_o']['s'],
                 new_series=entry['new_series'],
                 name_needed=entry['name_o']['s'] == 'Name required',
                 tvdb_id=entry['tvdb_id'],
                 anime=entry['new_series'] and entry['anime_o']['s'] == 'Anime: Yes')

        if f.new_series:
            create_new_series(f)
        files.append(f)

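    # dispatch each file: deletions and ignored files are handled first, then movies and series episodes are queued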
    for file in files:
        if file.delete:
            QUEUE.append(file)
            continue
        if file.type_option == '[ignore]':
            ignore_file(file)
            continue
        if not file.sync:
            continue
        if file.type_option in ['HD', 'SD']:
            queue_movie(file)
            continue
        if file.type_option == 'Series':
            file.anime = SHOWS[file.series_name].anime
            queue_episode(file)
            continue

    sync_queue()
    clean_up()
    report = [file.get_report() for file in QUEUE]
    log = load_json(os.path.join(
        os.path.dirname(os.environ[OUT_FILE]), 'synclog'))
    if not log:
        log = []
    log.extend(report)
    save_json(report, os.environ[OUT_FILE])
    save_json(log, os.path.join(os.path.dirname(
        os.environ[OUT_FILE]), 'synclog'))
    print(json.dumps(report, indent=4, sort_keys=True))
    save_shows(SHOWS)
Example #19
def main(args):
    global SHOWS
    SHOWS = load_shows(read_only=True)
    parse_args(args)
    print(environ[OUT_FILE])
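    # collect per-show and overall statistics (status, extension, aspect ratio, quality)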
    stats = {
        'shows': [],
        'status': [],
        'extension': [],
        'ratio': [],
        'quality': []
    }
    for show in SHOWS.values():
        show_stats = {
            SERIES_NAME: show.series_name,
            'status': {
                show.status: 1
            },
            'premiere': show.premiere,
            'avg_e_per_s': 0,
            'final': show.final,
            'ratio': {},
            'extension': {},
            'duration': 0,
            'episodes': 0,
            'genre1': show.genre1,
            'genre2': show.genre2,
            'seasons': 0,
            'size': 0,
            'quality': {},
            'selected': '',
            'color': '',
            'result': False
        }

        if show.status not in stats['status']:
            stats['status'].append(show.status)

        for season in show.seasons.values():
            show_stats['seasons'] += 1

            # skip the latest season of a currently airing show when averaging
            if season.s_nr < len(show.seasons) or show.status != AIRING:
                show_stats['avg_e_per_s'] += len(season.episode_numbers)
            for episode in season.episodes.values():
                episode_option = 1 if episode.episode_option == SINGLE else 2 if episode.episode_option == DOUBLE else 3

                if not DEBUG and (episode.duration == 0 or episode.quality == ''):
                    episode.update_file_meta()

                show_stats['duration'] += episode.duration
                show_stats['episodes'] += episode_option
                show_stats['size'] += episode.size

                if episode.extension not in stats['extension']:
                    stats['extension'].append(episode.extension)
                if episode.extension in show_stats['extension']:
                    show_stats['extension'][
                        episode.extension] += episode_option
                else:
                    show_stats['extension'][episode.extension] = episode_option

                if episode.quality not in stats['quality']:
                    stats['quality'].append(episode.quality)
                if episode.quality in show_stats['quality']:
                    show_stats['quality'][episode.quality] += episode_option
                else:
                    show_stats['quality'][episode.quality] = episode_option

                if episode.ratio not in stats['ratio']:
                    stats['ratio'].append(episode.ratio)
                if episode.ratio in show_stats['ratio']:
                    show_stats['ratio'][episode.ratio] += episode_option
                else:
                    show_stats['ratio'][episode.ratio] = episode_option

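        # remap raw ratio values to named aspect ratios, dropping any that are unknown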
        keys = list(show_stats['ratio'].keys())
        for key in keys:
            try:
                show_stats['ratio'][
                    ASPECT_RATIOS[key]] = show_stats['ratio'][key]
            except KeyError:
                pass
            finally:
                show_stats['ratio'].pop(key, None)

        if show_stats['episodes']:
            show_stats['avg_duration'] = int(
                show_stats['duration'] / show_stats['episodes'] * 100) / 100.0
            show_stats['avg_size'] = int(
                show_stats['size'] / show_stats['episodes'] * 100) / 100.0
        else:
            show_stats['avg_duration'] = 0
            show_stats['avg_size'] = 0

        show_stats['duration'] = int(
            show_stats['duration'] / 60.0 * 100) / 100.0
        show_stats['size'] = int(show_stats['size'] / 1024.0 * 100) / 100.0
        try:
            show_stats['avg_e_per_s'] = int(
                show_stats['avg_e_per_s'] /
                (show_stats['seasons'] if not show.status == AIRING else
                 show_stats['seasons'] - 1) * 100) / 100.0
        except ZeroDivisionError:
            pass
        stats['shows'].append(show_stats)

    temp = []
    for key in stats['ratio']:
        try:
            temp.append(ASPECT_RATIOS[key])
        except KeyError:
            pass
    stats['ratio'] = temp

    save_json(stats, environ[OUT_FILE])
    return stats
Example #20
def main(args):
    parse_args(args)
    conf = load_json(environ[CONF_FILE])
    save_json(conf, 'data/sync_prep.json')
    shows = load_shows(read_only=True)
    file_list = []
    subs = []
    for root, dirs, files in walk(FILE_DIR):
        for name in files:
            if '[ignore]' in root or '[ignore]' in name:
                continue
            extension = name.split('.')[-1].lower()
            if extension in EXTENSIONS:
                if 'sample' in name.lower():
                    continue
                file_list.append(File(old_location=path.join(root, name)))
            if extension in SUBS:
                subs.append({'text': name, 'value': path.join(root, name)})

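    # build cleaned-up word lists for matching file paths against the names of non-ended series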
    series_names_words = []
    series_names = []
    series_n = sorted(list(shows.keys()))
    for n in series_n:
        if shows[n].status == ENDED:
            continue
        n1 = clean_up(n)
        series_names_words.append(n1)
        series_names.append(n)
        n2 = n.replace('\'', '')
        n2 = n2.replace('.', '')
        n2 = n2.replace(',', '')
        n2 = clean_up(n2)

        if set(n1) != set(n2):
            series_names.append(n)
            series_names_words.append(n2)
    print(series_names)

    if not conf['all']:
        remove_folders_with_multiple_files(file_list)

    for i, file in enumerate(file_list):
        file.file_id = i

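    # default patterns for 's01e02'- and '01x02'-style season/episode numbering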
    regex = [
        {
            "e_end": 6,
            "e_start": 4,
            "regex": "s[0-9]{2}e[0-9]{2}",
            "s_end": 3,
            "s_start": 1,
        },
        {
            "e_end": 5,
            "e_start": 3,
            "regex": "[0-9]{2}x[0-9]{2}",
            "s_end": 2,
            "s_start": 0,
        },
    ]
    for file in file_list:
        location = file.old_location.lower()
        for reg in regex:

            pattern = re.compile(reg['regex'])
            match = re.findall(pattern, location)
            if match:
                try:
                    file.s_nr = int(match[0][reg['s_start']:reg['s_end']])
                    file.e_nr = int(match[0][reg['e_start']:reg['e_end']])
                    break
                except IndexError:
                    continue
        for index, name in enumerate(series_names_words):
            if all(word in location for word in name):
                file.series_name = series_names[index]

    n_series = {}

    for n in list(shows.keys()):
        n_series[n] = {
            'tvdb_id': shows[n].tvdb_id,
            'name_needed': shows[n].name_needed
        }

    n_series['Series Name'] = 0

    output = {'shows': n_series,
              'files': [str(file) for file in file_list],
              'subs': subs}

    save_json(output, environ[OUT_FILE])
Example #21
def main(args):
    parse_args(args)
    shows = load_shows(read_only=True)
    names = list(shows.keys())
    save_json({'shows': names}, environ[OUT_FILE])