Example #1
def json_edit_func():
    from mylib.ex.fstk import write_json_file
    from mylib.ex.fstk import read_json_file
    args = rtd.args
    file = args.file or list_files(clipboard)[0]
    indent = args.indent
    delete = args.delete
    item_l = args.item
    d = read_json_file(file)

    if delete:

        def handle(key, value):
            # delete `key`; if a value is given, delete only on an exact match
            if key in d:
                if value:
                    if d[key] == value:
                        del d[key]
                else:
                    del d[key]
    else:

        def handle(key, value):
            # set (or overwrite) the key
            d[key] = value

    for item in item_l:
        k, v = map(eval_or_str, item.split('=', maxsplit=1))
        handle(k, v)
    write_json_file(file, d, indent=indent)
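
The eval_or_str helper used above is not part of this excerpt. A minimal sketch of what such a helper presumably does (parse a Python literal when possible, otherwise keep the raw string), built on ast.literal_eval; the exact behavior is an assumption:

import ast

def eval_or_str(s: str):
    # hypothetical stand-in: return a parsed Python literal, or the string unchanged
    try:
        return ast.literal_eval(s)
    except (ValueError, SyntaxError):
        return s

With such a helper, an item like count=42 stores the integer 42, while name=bar keeps the value as the string 'bar'.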
Example #2
def update_json_file():
    from mylib.ex.fstk import write_json_file
    from mylib.ex.fstk import read_json_file
    args = rtd.args
    old, new = args.old, args.new
    d = read_json_file(old)
    d.update(read_json_file(new))
    write_json_file(old, d, indent=args.indent)
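
read_json_file and write_json_file from mylib.ex.fstk appear in most of these examples but are not defined here. A minimal sketch of what such wrappers are assumed to do, using only the standard json module (the default= fallback mirrors the call in Example #8):

import json

def read_json_file(path, default=None, encoding='utf-8'):
    # assumed behavior: return the parsed JSON, or `default` if the file is missing
    try:
        with open(path, 'r', encoding=encoding) as f:
            return json.load(f)
    except FileNotFoundError:
        return default

def write_json_file(path, data, indent=None, encoding='utf-8'):
    # assumed behavior: serialize `data` as JSON with the given indentation
    with open(path, 'w', encoding=encoding) as f:
        json.dump(data, f, indent=indent, ensure_ascii=False)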
Example #3
def download_pixiv_fanbox_creator(creator_id,
                                  root_dir='.',
                                  fanbox_api: PixivFanboxAPI = None,
                                  download_pool: DownloadPool = None,
                                  retry=-1,
                                  **kwargs_for_requests):
    download_params = {'retry': retry, **kwargs_for_requests}
    fanbox_api = fanbox_api or PixivFanboxAPI(**kwargs_for_requests)
    download_pool = download_pool or DownloadPool()
    creator = fanbox_api.get_creator_info(creator_id)
    creator_id = creator['creatorId']
    prefix = '{}.'.format(creator_id)
    creator['plans'] = fanbox_api.list_sponsor_plan_of_creator(creator_id)
    profile_images = [
        i for i in creator['profileItems'] if i['type'] == 'image'
    ]
    creator_folder = sanitize_xu(pixiv_fanbox_creator_folder(creator))
    os.makedirs(os.path.join(root_dir, creator_folder), exist_ok=True)

    write_json_file(os.path.join(root_dir, creator_folder,
                                 creator_id + '.json'),
                    creator,
                    indent=4)

    url = creator['user']['iconUrl']
    file = prefix + 'icon.' + os.path.split(url)[-1]
    filepath = os.path.join(root_dir, creator_folder, file)
    download_pool.put_download_in_queue(url, filepath, **download_params)

    url = creator['coverImageUrl']
    file = prefix + 'cover.' + os.path.split(url)[-1]
    filepath = os.path.join(root_dir, creator_folder, file)
    download_pool.put_download_in_queue(url, filepath, **download_params)

    n_width = width_of_int(len(profile_images))
    for n, i in enumerate(profile_images, start=1):
        url = i['imageUrl']
        file = prefix + 'profile{}.{}'.format(
            str(n).zfill(n_width),
            os.path.split(url)[-1])
        filepath = os.path.join(root_dir, creator_folder, file)
        download_pool.put_download_in_queue(url, filepath, **download_params)

    for i in creator['plans']:
        url = i['coverImageUrl']
        file = prefix + 'plan{}.{}.{}'.format(i['fee'], i['title'],
                                              os.path.split(url)[-1])
        filepath = os.path.join(root_dir, creator_folder, file)
        download_pool.put_download_in_queue(url, filepath, **download_params)

    download_pool.start_queue_loop()
    download_pool.put_end_of_queue()
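
DownloadPool is part of the library and not shown in this listing. A rough sketch of the queue-based pattern the calls above imply (put_download_in_queue enqueues a job, put_end_of_queue posts sentinels, start_queue_loop starts worker threads that drain the queue), assuming requests handles the actual transfer; this is an illustration, not the real implementation:

import queue
import threading
import requests

class DownloadPool:
    _END = object()  # sentinel marking the end of the queue

    def __init__(self, workers=4):
        self._q = queue.Queue()
        self._workers = workers

    def put_download_in_queue(self, url, filepath, retry=-1, **kwargs_for_requests):
        self._q.put((url, filepath, retry, kwargs_for_requests))

    def put_end_of_queue(self):
        # one sentinel per worker so every thread can leave its loop
        for _ in range(self._workers):
            self._q.put(self._END)

    def _work(self):
        while True:
            job = self._q.get()
            if job is self._END:
                break
            url, filepath, retry, kwargs = job
            while True:
                try:
                    resp = requests.get(url, **kwargs)
                    resp.raise_for_status()
                    with open(filepath, 'wb') as f:
                        f.write(resp.content)
                    break
                except requests.RequestException:
                    if retry == 0:
                        break
                    retry -= 1  # a negative value is treated as "retry indefinitely"

    def start_queue_loop(self):
        # non-blocking: the caller posts the end-of-queue sentinels afterwards
        for _ in range(self._workers):
            threading.Thread(target=self._work).start()

This matches the call order in the example: jobs are enqueued first, start_queue_loop spawns the workers, and put_end_of_queue lets them finish.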
Example #4
def download_pixiv_fanbox_post(post_or_id: 'PixivFanboxPost | dict | str | int',
                               root_dir='.',
                               fanbox_api: PixivFanboxAPI = None,
                               download_pool: DownloadPool = None,
                               retry=-1,
                               **kwargs_for_requests):
    download_pool = download_pool or DownloadPool()
    if isinstance(post_or_id, PixivFanboxPost):
        post = post_or_id
    elif isinstance(post_or_id, dict):
        post = PixivFanboxPost(post_or_id)
    elif isinstance(post_or_id, (str, int)):
        fanbox_api = fanbox_api or PixivFanboxAPI(**kwargs_for_requests)
        post = fanbox_api.get_post_info(post_or_id)
    else:
        raise TypeError(
            "`post_or_id` must be PixivFanboxPost, dict, str, or int, not {}".
            format(type(post_or_id)))
    creator_folder = sanitize_xu(pixiv_fanbox_creator_folder(post.__data__))
    creator_id = post.creator_id
    prefix = '{}.'.format(creator_id)
    post_folder = sanitize_xu(
        '[{user[name]} ({creatorId})] {title} (pixiv fanbox {id})'.format(
            **post.__data__))
    os.makedirs(os.path.join(root_dir, creator_folder, post_folder),
                exist_ok=True)

    file = prefix + '{}.json'.format(post.id)
    filepath = os.path.join(root_dir, creator_folder, post_folder, file)
    write_json_file(filepath, post.__data__, indent=4)

    url = post.cover_image_url
    if url:
        file = prefix + '{}.cover.{}'.format(post.id, os.path.split(url)[-1])
        filepath = os.path.join(root_dir, creator_folder, post_folder, file)
        download_pool.put_download_in_queue(url, filepath, retry,
                                            **kwargs_for_requests)

    n_width = width_of_int(len(post.images))
    for n, image in enumerate(post.images, start=1):
        file = prefix + '{}.{}.{}.{}'.format(post.id,
                                             str(n).zfill(n_width), image.id,
                                             image.extension)
        filepath = os.path.join(root_dir, creator_folder, post_folder, file)
        download_pool.put_download_in_queue(image.original_url, filepath,
                                            retry, **kwargs_for_requests)

    download_pool.start_queue_loop()
    download_pool.put_end_of_queue()
Example #5
def list_several_cloudflare_ipaddr(file, hostname, as_list, isp):
    from mylib.sites.misc import get_cloudflare_ipaddr_hostmonit
    from mylib.ex.fstk import write_json_file
    from pprint import pformat
    data = get_cloudflare_ipaddr_hostmonit()
    info: dict = data['info']
    if isp:
        info = {isp: info[isp]}
    if as_list:
        lines = []
        for ip_isp, ip_l in info.items():
            lines.append(f'# {ip_isp}')
            for ip_d in ip_l:
                li = ip_d['ip']
                if hostname:
                    li = f'{li}  {hostname}'
                lines.append(li)
        output = '\r\n'.join(lines)
    else:
        output = pformat(info)
    if file:
        write_json_file(file, data, indent=4)
    clipboard.set(output)
    print(output)
Example #6
def move_into_dir(src, dst, pattern, alias, dry_run, sub_dir):
    from mylib.ex.ostk import fs_move_cli
    from mylib.easy.text import find_words
    from mylib.ex.tui import prompt_choose_number, prompt_confirm
    conf_file = fstk.make_path('~',
                               '.config',
                               'fs.put_in_dir.json',
                               user_home=True)
    conf = fstk.read_json_file(conf_file) or {'dst_map': {}}
    dst_map = conf['dst_map']
    if pattern:

        def filename_words(fn: str):
            return find_words(' '.join(re.findall(pattern, fn)))
    else:
        filename_words = find_words
    if alias is None:
        pass
    elif not alias:
        for k, v in dst_map.items():
            print(f'{k}={v}')
    else:
        for a in alias:
            try:
                k, v = a.split('=', maxsplit=1)
            except ValueError:
                k, v = None, None
            if v:
                dst_map[k] = v
                print(f'{k}={v}')
            elif k and k in dst_map:
                del dst_map[k]
                print(f'{k}=')
            else:
                print(f'{a}={dst_map.get(a, "")}')
    fstk.write_json_file(conf_file, conf, indent=4)
    if not dst:
        return
    dst = dst_map.get(dst, dst)
    if os.path.isfile(dst):
        print(f'! {dst} is file (should be directory)', file=sys.stderr)
        sys.exit(1)
    os.makedirs(dst, exist_ok=True)
    sub_dirs_l = next(os.walk(dst))[1]
    __ = []
    # expand one level: folders named like '#xxx=' are grouping folders,
    # so list their sub-folders instead of the group folder itself
    for sub_dir_basename in sub_dirs_l:
        if re.fullmatch(r'#\w+=', sub_dir_basename):
            for sub_sub_dir_basename in next(
                    os.walk(path_join(dst, sub_dir_basename)))[1]:
                __.append(path_join(sub_dir_basename, sub_sub_dir_basename))
        else:
            __.append(sub_dir_basename)
    sub_dirs_l = __
    db_path = fstk.make_path(dst, '__folder_name_words__.db')
    db = mylib.ex.fstk.read_sqlite_dict_file(db_path)
    db = {k: v for k, v in db.items() if k in sub_dirs_l}
    sub_dirs_d = {
        sd_bn: set(find_words(sd_bn.lower()))
        for sd_bn in sub_dirs_l if sd_bn not in db
    }
    # sd_bn: sub-dir basename
    sub_dirs_d.update(db)
    sub_dirs_d = {k: sub_dirs_d[k] for k in sorted(sub_dirs_d)}
    for ss in src:
        for s in fstk.path_or_glob(ss):
            tui_lp.d()
            print(s)
            tui_lp.l()
            if sub_dir:
                similar_d = {
                    basename: words_set
                    & set(filename_words(os.path.basename(s).lower()))
                    for basename, words_set in sub_dirs_d.items()
                }
                similar_d = {k: v for k, v in similar_d.items() if v}
                # rank folders by how many words they share with the file name
                similar_l = sorted(similar_d,
                                   key=lambda x: len(similar_d[x]),
                                   reverse=True)
                if similar_l:
                    target_dir_name = prompt_choose_number(
                        'Select probable folder:', similar_l)
                    tui_lp.l()
                else:
                    target_dir_name = None
                if not target_dir_name:
                    keywords = input(
                        'Input custom keywords or leave it empty: ')
                    if keywords:
                        similar_d = {
                            basename:
                            words_set & set(filename_words(keywords.lower()))
                            for basename, words_set in sub_dirs_d.items()
                        }
                        similar_d = {k: v for k, v in similar_d.items() if v}
                        similar_l = sorted(similar_d,
                                           key=lambda x: len(similar_d[x]),
                                           reverse=True)
                        if similar_l:
                            target_dir_name = prompt_choose_number(
                                f'Select probable folder for\n{keywords}:',
                                similar_l)
                            tui_lp.l()
                target_dir_name = target_dir_name or input('Create folder: ')
                if target_dir_name:
                    sub_dirs_d[target_dir_name] = set(
                        find_words(target_dir_name.lower()))
                    dir_path = fstk.make_path(dst, target_dir_name)
                    if not dry_run:
                        os.makedirs(dir_path, exist_ok=True)
                else:
                    dir_path = dst
            else:
                dir_path = dst
            d = fstk.make_path(dir_path, os.path.basename(s))
            if os.path.exists(d):
                if not prompt_confirm(f'Overwrite {d}?', default=False):
                    continue
            if not dry_run:
                fs_move_cli(s, d)
            print(f'{s} -> {d}')
    mylib.ex.fstk.write_sqlite_dict_file(db_path, sub_dirs_d, update_only=True)
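
find_words from mylib.easy.text is not shown in this listing. The matching above only needs a token set per folder name, so an assumed minimal version, together with the word-overlap scoring the suggestions are ranked by, could look like this (names and behavior are guesses):

import re

def find_words(s: str):
    # assumed: split a name into alphanumeric word tokens
    return re.findall(r'\w+', s)

def overlap_score(folder_words: set, filename_words: set) -> int:
    # the folder suggestions above are ordered by the number of shared words
    return len(folder_words & filename_words)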
Example #7
    def __set_conf__(self, **kwargs):
        conf = self.__get_conf__()
        conf.update(kwargs)
        write_json_file(self._conf_file, conf, indent=4)
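
The matching __get_conf__ getter is not included in this excerpt. An assumed minimal counterpart, reusing the read_json_file wrapper sketched after Example #2 (the default= fallback is a guess):

    def __get_conf__(self) -> dict:
        # hypothetical counterpart: load the JSON conf file, or start empty
        return read_json_file(self._conf_file, default={}) or {}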
Example #8
def rename_dialog(src: str):
    import PySimpleGUIQt as G
    ske = PySimpleGUISpecialKeyEvent()
    conf_file = real_join_path('~', '.config/rename_dialog.json')
    root = 'root'
    fname = 'fname'
    ext = 'ext'
    key_new_root = 'key_new_root'
    key_new_base = 'key_new_base'
    ok = 'OK'
    cancel = 'Cancel'
    pattern = 'pattern'
    replace = 'replace'
    substitute = 'substitute'
    save_replace = 'save_replace'
    save_pattern = 'save_pattern'
    add_root = 'add_root'
    rename_info_file = 'rename_info_file'
    bytes_count = 'bytes_count'
    title = 'Rename - {}'.format(src)
    h = None

    conf = read_json_file(conf_file, default={pattern: [''], replace: ['']})
    tmp_pl = conf[pattern] or ['']
    tmp_rl = conf[replace] or ['']
    old_root, old_base = os.path.split(src)
    old_fn, old_ext = os.path.splitext(old_base)
    info_file_base = [
        f for f in os.listdir(old_root) if f.endswith('.info') and (
            f.startswith(old_fn) or old_fn.startswith(f[:-len('.info')]))
    ]
    has_info = bool(info_file_base)

    @deco_factory_retry(Exception, 0, enable_default=True, default=None)
    def re_sub():
        return re.sub(data[pattern], data[replace], data[fname] + data[ext])

    def count_name_bytes(name: str):
        d = {}
        try:
            c, b = encode_default_locale(name)
            d[c] = len(b)
        except UnicodeEncodeError:
            pass
        u8 = 'utf-8'
        if u8 not in d:
            try:
                c, b = encode_default_locale(name, u8)
                d[c] = len(b)
            except UnicodeEncodeError:
                pass
        return f'Basename Length: {len(name)}, {", ".join([f"{k.upper()} {v} bytes" for k, v in d.items()])}'

    # sg.theme('SystemDefaultForReal')
    layout = [[G.T(src, key='src')], [G.HorizontalSeparator()],
              [
                  G.I(old_fn, key=fname, focus=True),
                  G.I(old_ext, key=ext, size=(42, h))
              ],
              [
                  G.I(old_root, key=root),
                  G.B('+', key=add_root, size=(20, h)),
                  G.FolderBrowse('...',
                                 target=root,
                                 initial_folder=old_root,
                                 size=(20, h))
              ], [G.HorizontalSeparator()],
              [G.T('Regular Expression Pattern & Replacement')],
              [
                  G.T(size=(0, h)),
                  G.Drop(tmp_pl,
                         key=pattern,
                         enable_events=True,
                         text_color='blue'),
                  G.CB('',
                       default=True,
                       key=save_pattern,
                       enable_events=True,
                       size=(15, h)),
                  G.Drop(tmp_rl,
                         key=replace,
                         enable_events=True,
                         text_color='blue'),
                  G.CB('',
                       default=True,
                       key=save_replace,
                       enable_events=True,
                       size=(15, h)),
                  G.B('Go', key=substitute, size=(25, h))
              ], [G.HorizontalSeparator()], [G.I(old_root, key=key_new_root)],
              [G.I(old_base, key=key_new_base)],
              [
                  G.Submit(ok, size=(10, 1)),
                  G.Stretch(),
                  G.T(count_name_bytes(old_base), key=bytes_count),
                  G.Stretch(),
                  G.Cancel(cancel, size=(10, 1))
              ]]
    if has_info:
        info_file_base = info_file_base[0]
        info_filepath = os.path.join(old_root, info_file_base)
        with open(info_filepath, encoding='utf8') as f:
            info = f.read()
        layout.insert(2, [
            G.CB(info_file_base,
                 default=True,
                 key=rename_info_file,
                 enable_events=True)
        ])
        layout.insert(2, [G.ML(info, key='ML')])
        layout.insert(4, [G.HorizontalSeparator()])

    ensure_sigint_signal()
    window = G.Window(title,
                      layout,
                      return_keyboard_events=True,
                      finalize=True,
                      font='arial 10',
                      element_padding=(1, 1))
    window.bring_to_front()
    ml = window.find_element('ML', silent_on_error=True)
    if ml:
        ml.update(readonly=True)

    loop = True
    data = {
        fname: old_fn,
        ext: old_ext,
        pattern: tmp_pl[0],
        replace: tmp_rl[0],
        root: old_root,
        key_new_root: '',
        key_new_base: ''
    }

    while loop:
        dst_from_data = os.path.join(data[key_new_root], data[key_new_base])
        try:
            tmp_fname = re_sub() or data[fname] + data[ext]
            dst = os.path.realpath(os.path.join(data[root], tmp_fname))
        except TypeError:
            dst = src
        if dst != dst_from_data:
            nr, nb = os.path.split(dst)
            window[key_new_root].update(nr)
            window[key_new_base].update(nb)
            window[bytes_count].update(count_name_bytes(nb))

        event, data = window.read()
        for k in (root, fname, ext, key_new_root, key_new_base):
            window[k].update(text_color=None)
        cur_p = data[pattern]
        cur_r = data[replace]

        if event == ske.esc:
            loop = False
        elif event == add_root:
            os.makedirs(data[root], exist_ok=True)
        elif event == substitute:
            data[fname], data[ext] = os.path.splitext(
                re_sub() or data[fname] + data[ext])
            window[fname].update(data[fname])
            window[ext].update(data[ext])
        elif event == save_pattern:
            if data[save_pattern]:
                conf[pattern].insert(0, cur_p)
                conf[pattern] = dedup_list(conf[pattern])
            else:
                conf[pattern] = remove_from_list(conf[pattern], [cur_p])
        elif event == save_replace:
            if data[save_replace]:
                conf[replace].insert(0, cur_r)
                conf[replace] = dedup_list(conf[replace])
            else:
                conf[replace] = remove_from_list(conf[replace], [cur_r])
        elif event == pattern:
            window[save_pattern].update(value=cur_p in conf[pattern])
        elif event == replace:
            window[save_replace].update(value=cur_r in conf[replace])
        elif event == ok:
            try:
                shutil.move(src, dst)
                if has_info:
                    if data[rename_info_file]:
                        shutil.move(info_filepath,
                                    os.path.splitext(dst)[0] + '.info')
                loop = False
            except FileNotFoundError:
                for k in (root, fname, ext):
                    window[k].update(text_color='red')
            except FileExistsError:
                for k in (key_new_root, key_new_base):
                    window[k].update(text_color='red')
            except OSError as e:
                G.PopupError(str(e))
        elif event in (None, cancel):
            loop = False
        else:
            ...
    else:
        # the `while ... else` branch runs once the loop exits without `break`;
        # persist the pattern/replace history back to the conf file
        write_json_file(conf_file, conf, indent=0)

    window.close()
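
deco_factory_retry, used on re_sub in this example, is another library helper that is not shown. A self-contained sketch of the semantics the call @deco_factory_retry(Exception, 0, enable_default=True, default=None) suggests (retry a given number of times, then fall back to a default instead of raising); parameter names beyond those visible in the call are assumptions:

import functools

def deco_factory_retry(exceptions, retries, enable_default=False, default=None):
    def deco(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # try once, then up to `retries` more times on the given exceptions
            attempts = retries + 1
            while True:
                try:
                    return func(*args, **kwargs)
                except exceptions:
                    attempts -= 1
                    if attempts > 0:
                        continue
                    if enable_default:
                        return default
                    raise
        return wrapper
    return deco

With these arguments, re_sub() simply returns None whenever the pattern or replacement is invalid, and the caller falls back to the unmodified file name.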
Example #9
def ehviewer_images_catalog(root_dir, *, dry_run: bool = False, db_json_path: str = 'ehdb.json'):
    logger = logging.get_logger('ehvimg', fmt=logging.LOG_FMT_MESSAGE_ONLY)
    logmsg_move = '* move {} -> {}'
    logmsg_skip = '# skip {}'
    logmsg_data = '+ /g/{}/{}'
    logmsg_err = '! {}'

    if os.path.isfile(db_json_path):
        logger.info('@ using DB file: {}'.format(db_json_path))
        db = fstk.read_json_file(db_json_path)
        db = {int(k): v for k, v in db.items()}
    else:
        db = {}

    with fstk.ctx_pushd(root_dir):
        not_found_gid_token = []
        files = []
        for f in next(os.walk('.'))[-1]:
            try:
                g = EHentaiGallery(f, logger=logger)
            except ValueError:
                logger.info(logmsg_skip.format(f))
                continue
            if g.gid not in db and (g.gid, g.token) not in not_found_gid_token:
                not_found_gid_token.append((g.gid, g.token))
                print(logmsg_data.format(g.gid, g.token))
            files.append(f)

        if not_found_gid_token:
            print('... RETRIEVE GALLERY DATA FROM E-HENTAI API ...')
            print('... IT WILL TAKE A LONG TIME ...')
            eh_api = EHentaiAPI()
            for d in eh_api.get_gallery_data(not_found_gid_token):
                db[d['gid']] = d
            fstk.write_json_file(db_json_path, db, indent=4)

        for f in files:
            g = EHentaiGallery(f, logger=logger)
            d = db[g.gid]
            creators = []
            title = d['title'].strip()
            try:
                core_title = find_core_title(title) or '__INVALID_CORE_TITLE__'
                core_title_l = re.findall(r'[\w]+[\-+\']?[\w]?', core_title)
                if title[:1] + title[-1:] == '[]':
                    creators.append(title[1:-1].strip())
            except AttributeError:
                print(logmsg_err.format(title))
                raise
            comic_magazine_title = None
            if core_title_l and core_title_l[0].lower() == 'comic':
                comic_magazine_title_l = []
                for s in core_title_l[1:]:
                    if re.match(r'^\d+', s):
                        break
                    elif re.match(r'^(?:vol|no\.|#)(.*)$', s.lower()):
                        break
                    else:
                        comic_magazine_title_l.append(s)
                if comic_magazine_title_l:
                    comic_magazine_title = 'COMIC ' + ' '.join(comic_magazine_title_l)

            tags = d['tags']

            if 'artist' in tags:
                creators = tags['artist']
            elif 'group' in tags:
                creators = tags['group']
            else:
                creators = guess_creators_from_ehentai_title(title)
                if creators:
                    core_title = title
                # todo: clean below code block if guess_creators_from_ehentai_title work well
                # for m in (
                #         re.match(r'^(?:\([^)]+\))\s*\[([^]]+)]', title),
                #         re.match(r'^\[(?:pixiv|fanbox|tumblr|twitter)]\s*(.+)\s*[(\[]', title, flags=re.I),
                #         re.match(r'^\W*artist\W*(\w.*)', title, flags=re.I),
                # ):
                #     if m:
                #         m1 = m.group(1).strip()
                #         if m1:
                #             if '|' in m1:
                #                 creators = [e.strip() for e in m1.split('|')]
                #             else:
                #                 creators = [m1]
                #             core_title = title
                #         break
            if comic_magazine_title:
                folder = comic_magazine_title.replace('COMIC X-E ROS', 'COMIC X-EROS')
            elif creators:
                if len(creators) > 3:
                    folder = VARIOUS
                else:
                    folder = ', '.join(creators)
            else:
                folder = UNKNOWN
            # print(f': {title}')  # DEBUG
            # print(f': {core_title}')  # DEBUG

            sub_folder = fstk.make_path(fstk.sanitize_xu200(folder),
                                        f'{fstk.sanitize_xu200(core_title)} {g.gid}-{g.token}')
            parent, basename = os.path.split(f)
            no_ext, ext = os.path.splitext(basename)
            no_ext = fstk.sanitize_xu240(no_ext.split()[-1])
            new_path = fstk.make_path(sub_folder, no_ext + ext)
            logger.info(logmsg_move.format(f, new_path))
            if not dry_run:
                os.makedirs(sub_folder, exist_ok=True)
                shutil.move(f, new_path)