Beispiel #1
0
def main(args):
    """Save the show tree: load config and shows, process the sync queue,
    and print a JSON report of all queued files."""
    global SHOWS, EXCEPTIONS
    unlock()
    parse_args(args)
    data = load_json(environ[CONF_FILE])
    save_json(data, 'data/save_tree.json')
    SHOWS = load_shows()
    EXCEPTIONS = load_json(EXCEPTIONS_FILE)
    # Guarantee the 'title_match' bucket exists before anything appends to it.
    EXCEPTIONS.setdefault('title_match', [])
    if SHOWS is None:
        # Another process holds the shows lock; report and bail out.
        save_json({'shows_locked': True}, environ[OUT_FILE])
        print('shows locked')
        return
    queue = syncer.QUEUE
    queue_errors(data['errors'], queue)
    load_all(data['shows'], queue)
    save_json(EXCEPTIONS, EXCEPTIONS_FILE)
    save_queue(queue)
    report = [queued.get_report() for queued in queue]
    print(dumps(report, indent=4, sort_keys=True))
    save_shows(SHOWS)

    # Imported here rather than at module level -- presumably to avoid an
    # import cycle; confirm before hoisting.
    import file_tree
    file_tree.main(out_file=environ[OUT_FILE])
Beispiel #2
0
def main(args='', date=''):
    """Restore the assets folder from the backup taken on *date*.

    When *args* is given it is parsed and the date is read from the config
    file; otherwise the caller supplies *date* directly.
    """
    global SHOWS
    if args:
        parse_args(args)
        date = load_json(os.environ[CONF_FILE])['date']

    SHOWS = load_shows()
    if SHOWS is None:
        # Another process holds the shows lock; report and bail out.
        save_json({'error': 'Shows locked'}, os.environ[OUT_FILE])
        print('shows locked')
        return
    folder = os.path.join(BACKUP_DIR, date)
    # Remove current asset files (except lock/test files) before restoring.
    file_list = os.listdir(ASSETS)
    for f in file_list:
        if 'lock' in f or 'test' in f:
            continue
        file = os.path.join(ASSETS, f)
        if os.path.isfile(file):
            try:
                os.remove(file)
                wait_on_delete(file)
            except Exception:
                # Best effort; a bare 'except:' here also swallowed
                # KeyboardInterrupt/SystemExit, which must propagate.
                pass
    # Copy every backed-up file into the assets folder.
    file_list = os.listdir(folder)
    for f in file_list:
        try:
            copyfile(os.path.join(folder, f), os.path.join(ASSETS, f))
        except Exception:
            # Best effort: skip files that cannot be copied back.
            pass

    unlock()
    if args:
        save_json({'done': True}, os.environ[OUT_FILE])
Beispiel #3
0
def main(args):
    """Apply edited show metadata from the UI, handling renames, then save.

    Each entry in the config carries the original name
    ('series_name_unchanged'), the possibly-new name, and a 'changed' flag
    for the metadata fields.
    """
    global SHOWS
    parse_args(args)
    data = load_json(environ[CONF_FILE])
    save_json(data, 'data/update_save.json')
    SHOWS = load_shows()

    if SHOWS is None:
        # Another process holds the shows lock; report and bail out.
        save_json({'error': 'Shows locked'}, environ[OUT_FILE])
        print('shows locked')
        return
    for s in data:
        show = SHOWS[s['series_name_unchanged']]
        # (Removed a leftover debug print of the hard-coded show
        # '13 Reasons Why'.)
        if s['changed']:
            # Copy every edited field onto the Series object.
            show.name_needed = s[NAME_NEEDED]
            show.status = s[STATUS]
            show.premiere = s[PREMIERE]
            show.final = s[FINAL]
            show.tvdb_id = s[TVDB_ID]
            show.genre1 = s['genre1']
            show.genre2 = s['genre2']

        if s['series_name_unchanged'] != s[SERIES_NAME]:
            # The show was renamed: move its files and re-key the registry.
            update_location(show, s[SERIES_NAME])
            SHOWS.pop(s['series_name_unchanged'], None)
            show.series_name = s[SERIES_NAME]
            SHOWS[show.series_name] = show

    update_prep.SHOWS = SHOWS
    save_json(update_prep.prep_data(), environ[OUT_FILE])
    save_shows(SHOWS)
Beispiel #4
0
def main(args):
    """Write all episodes of the configured show, grouped at the requested
    level, to OUT_FILE."""
    parse_args(args)
    conf = load_json(environ[CONF_FILE])
    # conf = {'series_name': 'Doctor Who', 'level': 'title'}
    show: Series = load_shows(read_only=True)[conf['series_name']]
    episodes, total = get_all_episodes(show, conf['level'])
    save_json({'episodes': episodes, 'total': total}, environ[OUT_FILE])
Beispiel #5
0
def main(args):
    """Match the selected files/folders to season & episode numbers using the
    user-supplied regexes and write the resulting table to OUT_FILE.

    Returns the output dict as well (the caller may reuse it).
    """
    parse_args(args)
    data = load_json(os.environ[CONF_FILE])
    save_json(data, 'data/batch_match.json')
    file_list = []
    # Collect candidates: selected single files directly, folders recursively.
    for u in data['units']:
        if not u['select']:
            continue
        location = os.path.join(FILE_DIR, u['text'])
        if os.path.isfile(location):
            file = prep_file(location, '')
            if file:
                file_list.append(file)
        else:
            for root, dirs, files in os.walk(location):
                for name in files:
                    file = prep_file(name, root)
                    if file:
                        file_list.append(file)

    # Apply every non-empty regex; later matches overwrite earlier ones.
    for reg in data['regex']:
        if not reg['regex']:
            continue
        pattern = re.compile(reg['regex'])
        for file in file_list:
            match = re.findall(pattern, file.old_location)
            if match:
                try:
                    file.s_nr = int(match[0][reg['s_start']:reg['s_end']])
                    file.e_nr = int(match[0][reg['e_start']:reg['e_end']])
                except (IndexError, ValueError):
                    # An out-of-range slice yields '' and int('') raises
                    # ValueError, which the original IndexError-only guard
                    # let escape and crash the batch.
                    continue
    output = {'files': []}
    for f in file_list:
        output['files'].append({
            'location': f.old_location.split(os.sep, 2 + MAC_OFFSET)[2 + MAC_OFFSET],
            'title': '',
            'title2': '',
            'title3': '',
            's_nr': f.s_nr,
            'e_nr': f.e_nr,
            'episode_option': 'Single',
            'sub': f.subs
        })

    # Blank show-level fields for the UI to fill in.
    output.update({SERIES_NAME: '',
                   TVDB_ID: '',
                   PREMIERE: '',
                   FINAL: '',
                   STATUS: '',
                   'anime': False,
                   NAME_NEEDED: True,
                   })

    save_json(output, os.environ[OUT_FILE])
    return output
def main(args):
    """Apply deletions requested in the info-files UI to the exceptions file.

    Exception buckets are either flat lists or dicts of origin -> list;
    empty origin lists are dropped after removal.
    """
    global SHOWS
    io_utlis.parse_args(args)
    data = io_utlis.load_json(environ[CONF_FILE])
    io_utlis.save_json(data, 'data/save_infofiles.json')

    exceptions = io_utlis.load_json(EXCEPTIONS_FILE)
    # Iterate the values directly; materializing a list was unnecessary.
    for d in data.values():
        for i in d['items']:
            if not i['delete']:
                continue
            if isinstance(exceptions[d['key']], dict):
                # Nested bucket: remove the entry and drop the origin key
                # once its list becomes empty.
                exceptions[d['key']][i['origin']].remove(i['text'])
                if not exceptions[d['key']][i['origin']]:
                    del exceptions[d['key']][i['origin']]
            else:
                exceptions[d['key']].remove(i['text'])
    io_utlis.save_json({'done': True}, environ[OUT_FILE])
    io_utlis.save_json(exceptions, EXCEPTIONS_FILE)
Beispiel #7
0
def load_json_files(folder):
    """Load every JSON file in *folder* (skipping names containing 'test'),
    truncating each to its first 10 entries.

    Returns a list of {'key': filename, 'data': truncated content,
    'opened': True} dicts.
    """
    j_list = []
    for f in os.listdir(folder):
        p = os.path.join(folder, f)
        if 'json' in p and 'test' not in p:
            j = load_json(p)
            # isinstance() is the idiomatic type check (was: type(j) is dict).
            if isinstance(j, dict):
                # Keep only the first 10 items, sorted by key.
                j = dict(sorted(j.items())[:10])
            else:
                j = j[:10]
            j_list.append({'key': f, 'data': j, 'opened': True})
    return j_list
Beispiel #8
0
def main(args):
    """Apply word edits from the UI: extend the dictionary with new words and
    rename episode files whose words changed."""
    global SHOWS
    parse_args(args)
    data = load_json(environ[CONF_FILE])
    save_json(data, 'data/save_words.json')
    SHOWS = load_shows()
    dictionary = load_json(DICT_FILE)

    if SHOWS is None:
        # Another process holds the shows lock; report and bail out.
        save_json({'shows_locked': True}, environ[OUT_FILE])
        print('shows locked')
        return

    for file in data['words']:
        changes = []
        for w in file['words']:
            if w['add'] and w['word'] not in dictionary:
                dictionary.append(w['word'])
            if w['changed']:
                changes.append([w['index'], w['word']])

        if changes:
            e = Episode(location=file['location'])
            old = file['file'].rsplit('.', 1)
            words = old[0].split(' ')
            for index, word in changes:
                words[index] = word
            # Drop empty tokens produced by deletions.
            words = list(filter(None, words))
            file['file'] = ' '.join(words)
            new_location = file['location'].replace(old[0], file['file'])
            try:
                move(file['location'], new_location)
                SHOWS[e.series_name].seasons[e.s_nr].episodes[
                    e.e_nr].set_location(new_location)
            except Exception as err:
                # Bound as 'err' (not 'e'): 'except ... as e' shadowed the
                # Episode above and deleted the name when the clause exited.
                print('rename', err)
    if dictionary:
        save_json(dictionary, DICT_FILE)
    save_json({'done': True}, environ[OUT_FILE])
    save_shows(SHOWS)
Beispiel #9
0
def main(args, out_file='data/load_logs.json'):
    """Collect every JSON log in LOG_DIR into one list and save it.

    When *args* is given it is parsed and the output path is taken from the
    environment; otherwise the default *out_file* is used.  Each entry is
    {'file': name without extension, 'data': parsed content, 'opened': True}.
    """
    if args:
        parse_args(args)
        out_file = environ[OUT_FILE]
    logs = [{
        'file': f[:-5],
        'data': load_json(path.join(LOG_DIR, f)),
        'opened': True
    } for f in listdir(LOG_DIR) if f.endswith('json')]

    # (Removed a commented-out transformation of 'dsyncerlog' entries.)
    print(logs)

    save_json(logs, out_file)
def main(args):
    """Build the ordered exception-overview structure and save it to OUT_FILE."""
    # Display title and ordering position for each exception category.
    order = {
        'double': {'title': 'DoubleTitle', 'order': 5},
        'lower': {'title': 'lowerCase', 'order': 1},
        'upper': {'title': 'UpperCase', 'order': 2},
        'lower_general': {'title': 'lowerGeneral', 'order': 3},
        'part': {'title': 'Part', 'order': 4},
        'title_match': {'title': 'TitleMatch', 'order': 6},
    }
    parse_args(args)
    exceptions = load_json(EXCEPTIONS_FILE)

    d = {}
    for entry in (structure(k, v) for k, v in exceptions.items()):
        # Each structured entry is a single-key dict; pull out that key.
        key = next(iter(entry))
        meta = order[key]
        d[meta['order']] = {
            'items': entry[key],
            'open': False,
            'title': meta['title'],
            'key': key
        }

    save_json(d, environ[OUT_FILE])
Beispiel #11
0
def main(args):
    """Rebuild the word dictionary from all shows and report word statistics."""
    global SHOWS, DICTIONARY
    SHOWS = load_shows(read_only=True)
    parse_args(args)

    DICTIONARY = load_json(DICT_FILE)
    load_all()
    # sorted() accepts the set directly; the intermediate list() was redundant.
    new_dict = sorted(set(NEW_DICT))
    if new_dict:
        save_json(new_dict, DICT_FILE)

    unique = sorted(UNIQUE.items(), key=sort_unique, reverse=True)
    # (Removed two sorted() results -- UNIQUE_TITLE and UNIQUE_WORDS_IN_TITLE
    # -- that were computed but never used.)
    print(unique)
    save_json({
        'words': WORDS,
        'info': 'Dictionary is up to date'
    }, environ[OUT_FILE])
Beispiel #12
0
def main(args):
    """Prepare and sync one show from the batch config, then save the report.

    Returns the module-level REPORT collected during the sync.
    """
    global SHOWS

    parse_args(args)

    conf = load_json(os.environ[CONF_FILE])
    save_json(conf, 'data/batch_sync.json')
    SHOWS = load_shows()
    if SHOWS is None:
        # Shows registry is locked by another process -- give up early.
        save_json({'error': 'Shows locked'}, os.environ[OUT_FILE])
        print('shows locked')
        return
    show = prep(conf)

    if show:
        sync_queue(show)
        update_summary()
        clean_up()
        SHOWS[show.series_name] = show
    save_json(REPORT, os.environ[OUT_FILE])
    save_shows(SHOWS)
    return REPORT
Beispiel #13
0
def main(args):
    """Full reload: back up, rescan the series/anime folders, refresh metadata.

    Aborts the process with exit code -2 when the backup fails.  Returns the
    rebuilt shows mapping.
    """
    parse_args(args)
    unlock()
    config = load_json(environ[CONF_FILE])
    start = time()
    print('running', SERIES_DIR)
    if not backup.main():
        print('backup failed')
        # exit() is a site/interactive helper; raise SystemExit directly.
        raise SystemExit(-2)
    print('backup successful')
    load_shows(reload=True)
    shows = load_files(SERIES_DIR)
    shows.update(load_files(ANIME_DIR))
    if config['reload_metadata']:
        shows = reload_metadata(shows)
    else:
        add_metadata(shows)
    save_shows(shows)

    # Report elapsed wall-clock time.
    print(time() - start)
    return shows
Beispiel #14
0
def main(args):
    """Build File objects from the sync config, queue them by type, sync, and
    append the run's report to the persistent synclog."""
    global SHOWS
    parse_args(args)
    data = load_json(os.environ[CONF_FILE])
    save_json(data, 'data/syncer.json')
    SHOWS = load_shows()

    if SHOWS is None:
        # Another process holds the shows lock; report and bail out.
        save_json({'shows_locked': True}, os.environ[OUT_FILE])
        print('shows locked')
        return
    save_json(data, 'data/sync')
    files = []
    # 'entry' is the raw UI dict; the original reused the loop variable 'f'
    # for the File object, shadowing the dict mid-loop.
    for entry in data:
        file = File(old_location=os.path.join(FILE_DIR, entry['location']),
                    sync=entry['sync'],
                    s_nr=entry['s_nr'],
                    e_nr=entry['e_nr'],
                    series_name=entry[SERIES_NAME],
                    title=entry['title'],
                    title2=entry['title2'],
                    title3=entry['title3'],
                    episode_option=entry['e_o']['s'],
                    override=entry['override'],
                    delete=entry['delete'],
                    subs=entry['subs'],
                    type_option=entry['t_o']['s'],
                    status=entry['status_o']['s'],
                    new_series=entry['new_series'],
                    # 'True if X else False' collapsed to the comparison.
                    name_needed=entry['name_o']['s'] == 'Name required',
                    # The original ternary returned entry['tvdb_id'] in both
                    # branches ("x if x != 0 else 0"), so it was a no-op.
                    tvdb_id=entry['tvdb_id'],
                    anime=bool(entry['new_series']
                               and entry['anime_o']['s'] == 'Anime: Yes'))

        if file.new_series:
            create_new_series(file)
        files.append(file)

    # Dispatch each file by its flags/type; order of checks matters.
    for file in files:
        if file.delete:
            QUEUE.append(file)
            continue
        if file.type_option == '[ignore]':
            ignore_file(file)
            continue
        if not file.sync:
            continue
        if file.type_option in ['HD', 'SD']:
            queue_movie(file)
            continue
        if file.type_option == 'Series':
            # Inherit the anime flag from the registered show.
            file.anime = SHOWS[file.series_name].anime
            queue_episode(file)
            continue

    sync_queue()
    clean_up()
    report = [file.get_report() for file in QUEUE]
    # Append this run's report to the cumulative synclog next to OUT_FILE.
    synclog_path = os.path.join(os.path.dirname(os.environ[OUT_FILE]),
                                'synclog')
    log = load_json(synclog_path)
    if not log:
        log = []
    log.extend(report)
    save_json(report, os.environ[OUT_FILE])
    save_json(log, synclog_path)
    print(json.dumps(report, indent=4, sort_keys=True))
    save_shows(SHOWS)
Beispiel #15
0
from tvdb_client import ApiV2Client
from operator import itemgetter

from episode import Episode
from utils.constants import EXCEPTIONS_FILE, SERIES_NAME, ENDED, NUMERALS, WRONG_SYMBOLS, EXTENSIONS, TVDB_LOGIN
from utils.io_utlis import load_json

# Exception lists loaded once at import time, keyed by exception type.
EXCEPTIONS = load_json(EXCEPTIONS_FILE)
# NOTE(review): appears to cache resolved names -- populated elsewhere; confirm.
NAMES = {}

# Module-level TVDB API client, authenticated at import time.
api_client = ApiV2Client(TVDB_LOGIN['username'], TVDB_LOGIN['api_key'],
                         TVDB_LOGIN['account_identifier'])
api_client.login()


def _generate_error(message,
                    e,
                    show,
                    title='',
                    e_nr=None,
                    s_nr='',
                    update=False,
                    delete=False,
                    word='',
                    exception_type='',
                    exception=''):
    if exception_type:
        e_id = e.id()
        if exception_type == 'double':
            e_id = show.series_name
        if e_id in EXCEPTIONS[exception_type]:
Beispiel #16
0
from utils import io_utlis
import random

# Pre-computed per-show statistics; the show list is shuffled so the
# guessing order varies between runs.
stats = io_utlis.load_json('data/stats_file.json')

shows = stats['shows']
random.shuffle(shows)


def find(string, ch):
    """Return every index at which character *ch* occurs in *string*."""
    return [idx for idx, char in enumerate(string) if char == ch]


while shows:
    show = shows.pop()
    s = f"Episodes: {show['episodes']}\nSeasons: {show['seasons']}\n" \
        f"Status: {list(show['status'].keys())[0]}\n" \
        f"Premiere: {show['premiere']} Final: {show['final']}\n" \
        f"Extension: {show['extension']}\n" \
        f"Quality: {show['quality']}\n" \
        f"Duration: {show['avg_duration']} min avg / {show['duration']} h total"

    print(s)
    h = 'h'
    series_name = show['series_name']
    indexes = list(range(0, len(series_name)))
    spaces = find(series_name, ' ')
    for space in spaces:
        indexes.remove(space)
    random.shuffle(indexes)
    hint = ''.ljust(200)
Beispiel #17
0
from utils.io_utlis import load_json, load_shows, parse_args, save_json
from utils.constants import EXCEPTIONS_FILE, CONF_FILE, OUT_FILE, NUMERALS, SINGLE, TRIPLE
from os import environ
from re import sub
from series import Series
from episode import Episode
from sys import argv
# Words kept lowercase in titles, loaded once from the exceptions file.
LOWER_GENERAL = load_json(EXCEPTIONS_FILE)['lower_general']


def generate_episode(e: Episode, title_nr, level):
    title = e.title
    if title_nr == 1 and e.title2:
        title = e.title2
    if title_nr == 2 and e.title3:
        title = e.title3
    title = remove_part(title)
    if title_nr > 0 and not title and e.title:
        title = e.title
        title = remove_part(title)
    title_low = title.lower()
    title_text = sub('[^A-Za-z0-9 ]+', '', title_low)
    if level == 'word':
        title_list = title_low.split(' ')
        title_text_list = title_text.split(' ')
    elif level == 'no_lows':
        title_list = title_low.split(' ')
        title_text_list = title_text.split(' ')
        for exception in LOWER_GENERAL:
            while exception in title_list:
                title_list.remove(exception)
Beispiel #18
0
import multiprocessing
from tvdb_client import ApiV2Client

import backup
from episode import Episode
from series import Series
from utils.constants import META_FILE, STATUS, NAME_NEEDED, PREMIERE, FINAL, TVDB_ID, SERIES_DIR, ANIME_DIR, CONF_FILE
from utils.io_utlis import load_json, load_shows, save_shows, parse_args
from unlock_shows import unlock

# Authenticated TVDB API client created at import time.
# NOTE(review): TVDB_LOGIN is not among the visible imports -- confirm it is
# imported earlier in this file.
api_client = ApiV2Client(TVDB_LOGIN['username'], TVDB_LOGIN['api_key'],
                         TVDB_LOGIN['account_identifier'])
api_client.login()

# Deeply nested show structures can exceed the default recursion limit.
# NOTE(review): setrecursionlimit is not among the visible imports -- confirm.
setrecursionlimit(10000)
META_DATA = load_json(META_FILE)


def load_files(top):
    shows = {}
    len_top = len(top.split(sep))
    for root, dirs, _ in walk(top):

        for name in dirs:
            if root == top:
                shows[name] = Series(location=path.join(root, name),
                                     series_name=name)
                continue

            show = path.basename(root)
Beispiel #19
0
def main(args):
    """Scan FILE_DIR, guess season/episode/series for each video file, and
    write the sync-preparation payload to OUT_FILE."""
    parse_args(args)
    conf = load_json(environ[CONF_FILE])
    save_json(conf, 'data/sync_prep.json')
    shows = load_shows(read_only=True)
    file_list = []
    subs = []
    # Collect video files (skipping samples and [ignore]) and subtitle files.
    for root, dirs, files in walk(FILE_DIR):
        for name in files:
            if '[ignore]' in root or '[ignore]' in name:
                continue
            extension = name.split('.')[-1].lower()
            if extension in EXTENSIONS:
                if 'sample' in name.lower():
                    continue
                file_list.append(File(old_location=path.join(root, name)))
            if extension in SUBS:
                subs.append({'text': name, 'value': path.join(root, name)})

    # For every running show build a cleaned word list; a second variant with
    # punctuation stripped is added when it differs from the first.
    series_names_words = []
    series_names = []
    for n in sorted(shows.keys()):
        if shows[n].status == ENDED:
            continue
        n1 = clean_up(n)
        series_names_words.append(n1)
        series_names.append(n)
        n2 = n.replace('\'', '')
        n2 = n2.replace('.', '')
        n2 = n2.replace(',', '')
        n2 = clean_up(n2)

        if not set(n1) == set(n2):
            series_names.append(n)
            series_names_words.append(n2)
    print(series_names)

    if not conf['all']:
        remove_folders_with_multiple_files(file_list)

    # Assign sequential ids for the UI.
    for i, file in enumerate(file_list):
        file.file_id = i

    # Fallback patterns: s01e02 and 01x02 style numbering.
    regex = [
        {
            "e_end": 6,
            "e_start": 4,
            "regex": "s[0-9]{2}e[0-9]{2}",
            "s_end": 3,
            "s_start": 1,
        },
        {
            "e_end": 5,
            "e_start": 3,
            "regex": "[0-9]{2}x[0-9]{2}",
            "s_end": 2,
            "s_start": 0,
        },
    ]
    for file in file_list:
        location = file.old_location.lower()
        for reg in regex:
            pattern = re.compile(reg['regex'])
            match = re.findall(pattern, location)
            if match:
                try:
                    file.s_nr = int(match[0][reg['s_start']:reg['s_end']])
                    file.e_nr = int(match[0][reg['e_start']:reg['e_end']])
                    break
                except (IndexError, ValueError):
                    # int('') from an out-of-range slice raises ValueError,
                    # which the original IndexError-only guard missed.
                    continue
        # enumerate avoids list.index(), which returns the FIRST duplicate's
        # position rather than the current entry's.
        for idx, words in enumerate(series_names_words):
            if all(word in location for word in words):
                file.series_name = series_names[idx]

    n_series = {}
    for n in shows:
        n_series[n] = {
            'tvdb_id': shows[n].tvdb_id,
            'name_needed': shows[n].name_needed
        }

    # Placeholder entry for the UI's default selection.
    n_series['Series Name'] = 0

    # Named 'payload' so the local no longer shadows the json module name.
    payload = {'shows': n_series, 'files': [], 'subs': subs}
    for file in file_list:
        payload['files'].append(str(file))

    save_json(payload, environ[OUT_FILE])
def main():
    """Watch LOCAL_DIR forever, copying finished downloads to FILE_DIR.

    State is persisted in a JSON log with two maps:
      * 'files': per-file transfer state ('copying', 'copied', 'delete')
      * 'sizes': last observed size, used to detect still-growing downloads
    Local copies of files the remote side no longer has are deleted by a
    daily 04:00 cleanup job.
    """
    log_path = path.join(LOG_DIR, 'download_transfer.json')
    log = load_json(log_path)
    if not log:
        log = {'files': {}, 'sizes': {}}

    def clean_up():
        # Delete local copies previously marked 'delete'.
        for f in log['files'].keys():
            if log['files'][f] == 'delete':
                if recursive_delete(path.join(LOCAL_DIR, f)):
                    print('deleted', f)

    schedule.every().day.at('04:00').do(clean_up)

    def save_log():
        # Persist the state after every transition so a crash loses nothing.
        save_json(log, log_path)

    while True:
        local_files = listdir(LOCAL_DIR)
        schedule.run_pending()

        for file in local_files:
            if file not in log['files']:
                if file not in log['sizes'] or log['sizes'][file] != 'done':
                    # Wait until the file stops growing before copying it.
                    current_size = get_size(path.join(LOCAL_DIR, file))
                    if file not in log[
                            'sizes'] or not current_size == log['sizes'][file]:
                        log['sizes'][file] = current_size
                        save_log()
                        continue
                    elif file.startswith('_UNPACK'):
                        # Archive still being unpacked -- skip for now.
                        continue
                    elif file.startswith('.'):
                        # Hidden files are never transferred.
                        continue
                    else:
                        del log['sizes'][file]
                        save_log()
                print('copying', file)
                log['files'][file] = 'copying'
                save_log()
                # (Removed a dead 'path.join(LOCAL_DIR, file)' statement
                # whose result was discarded.)
                if copytree(path.join(LOCAL_DIR, file), FILE_DIR, file, True):
                    log['files'][file] = 'copied'
                    print('copied', file)
                else:
                    del log['files'][file]
                save_log()

        remote_files = listdir(FILE_DIR)
        delete = []
        for file in log['files'].keys():
            if file not in remote_files:
                # Remote copy gone: mark the local copy for daily cleanup.
                log['files'][file] = 'delete'
                save_log()

            if file not in local_files:
                # Local copy gone: forget the entry (deferred to avoid
                # mutating the dict while iterating it).
                delete.append(file)
                save_log()

        for key in delete:
            del log['files'][key]
            save_log()

        sleep(1)