def inited():
    """Plugin init hook: ensure a default request delay is configured for the site.

    Reads the network delay table, seeds the site URL with DEFAULT_DELAY when
    it has no entry yet, and writes the table back.
    """
    # set default delay values if not set.  Iterating a (single-element)
    # tuple keeps the shape uniform and makes adding endpoints trivial.
    delays = hpx.get_setting("network", "delays", {})
    for u in (URLS['ch'],):
        if u not in delays:
            log.info(f"Setting delay on {u} requests to {DEFAULT_DELAY}")
            delays[u] = DEFAULT_DELAY
    # written back unconditionally, matching the original behavior
    hpx.update_setting("network", "delays", delays)
def inited():
    """Plugin init hook: seed default request delays for every known endpoint."""
    delays = hpx.get_setting("network", "delays", {})
    endpoints = (URLS['ex'], URLS['eh'], "https://api.e-hentai.org", URLS['ex_api'])
    for endpoint in endpoints:
        if endpoint in delays:
            continue  # already configured; leave the user's value alone
        log.info(f"Setting delay on {endpoint} requests to {DEFAULT_DELAY}")
        delays[endpoint] = DEFAULT_DELAY
    hpx.update_setting("network", "delays", delays)
def inited():
    """Plugin init hook: refresh the plugin config and seed the request delay."""
    PLUGIN_CONFIG.update(hpx.get_plugin_config())
    # set default delay values if not set
    delays = hpx.get_setting("network", "delays", {})
    for site_url in (URLS['nh'],):
        if site_url in delays:
            continue  # an explicit delay already exists; keep it
        log.info(f"Setting delay on {site_url} requests to {DEFAULT_DELAY}")
        delays[site_url] = DEFAULT_DELAY
    hpx.update_setting("network", "delays", delays)
def inited():
    """Plugin init hook: refresh config and ensure default delays on each endpoint."""
    plugin_config.update(hpx.get_plugin_config())
    current = hpx.get_setting("network", "delays", {})
    endpoints = (urls['ex'], urls['eh'], "https://api.e-hentai.org")
    for endpoint in endpoints:
        if endpoint not in current:
            log.info(f"Setting delay on {endpoint} requests to {default_delay}")
            current[endpoint] = default_delay
    hpx.update_setting("network", "delays", current)
def inited():
    """Plugin init hook: seed default request delays, then restore a saved login.

    Side effects: updates the "network"/"delays" setting, and re-logs-in
    using credentials pickled at ``save_file`` when that file exists.
    """
    # set default delay values if not set
    delays = hpx.get_setting("network", "delays", {})
    for u in (MAIN_URLS['ex'], MAIN_URLS['eh']):
        if u not in delays:
            log.info(f"Setting delay on {u} requests to {default_delay}")
            delays[u] = default_delay
    hpx.update_setting("network", "delays", delays)
    # retrieve saved user info
    if os.path.exists(save_file):
        with open(save_file, "rb") as f:
            # NOTE: pickle.load is only safe because save_file is a local
            # file this plugin wrote itself; never point it at untrusted data.
            user_dict = pickle.load(f)
            if user_dict:
                login(user_dict, {})
                # NOTE(review): `response` is never assigned in this function —
                # presumably a module-level global that login() sets; confirm.
                if response is not None:
                    log.info("Successfully re-logged in")
def filter_metadata(gdata, item, urls_to_apply=None):
    """Build a metadata dict for *item* from raw gallery data *gdata*.

    Honors the 'metadata'/'replace_metadata' setting: when true, the item's
    existing titles/artists/tags are cleared and every field is taken from
    gdata; otherwise only fields the item is missing are filled in.

    Args:
        gdata: raw gallery metadata mapping (uses keys 'title', 'title_jpn',
            'category', 'posted' and 'tags', where tags are "ns:tag" strings).
        item: the gallery object being updated (mutated when replacing).
        urls_to_apply: optional URLs to attach under mdata['urls'].

    Returns:
        dict of metadata fields ready to be applied to the item.
    """
    mdata = {}
    replace_metadata = hpx.get_setting('metadata', 'replace_metadata')
    if replace_metadata:
        item.titles = []
        item.artists = []
        item.tags = []
        # flush is required so items are removed from the db
        hpx.command.GetSession().flush()

    if replace_metadata:
        mdata['titles'] = [(gdata['title'], 'english'),
                           (gdata['title_jpn'], 'japanese')]
    else:
        # only fill in titles for languages the item doesn't already have
        missing_titles = []
        if not item.title_by_language("english"):
            missing_titles.append((gdata['title'], 'english'))
        if not item.title_by_language("japanese"):
            missing_titles.append((gdata['title_jpn'], 'japanese'))
        if missing_titles:
            mdata['titles'] = missing_titles

    # replace-or-fill pattern: overwrite when replacing, else only when empty
    if replace_metadata or not item.category:
        mdata['category'] = gdata['category']
    if replace_metadata or not item.pub_date:
        mdata['pub_date'] = arrow.Arrow.fromtimestamp(gdata['posted'])

    lang = "japanese"  # default language
    if not replace_metadata and item.language:
        lang = item.language.name

    artists = []
    circles = []
    parodies = []
    extra_namespaces = ("artist", "parody", "group", "language")
    # hoisted out of the loop: these settings don't change per tag
    blacklist_tags = plugin_config.get("blacklist_tags")
    remove_namespaces = plugin_config.get("remove_namespaces")
    mdata['tags'] = {}
    for nstag in gdata['tags']:
        if blacklist_tags and nstag in blacklist_tags:
            continue
        ns = None
        if ':' in nstag:
            ns, t = nstag.split(':', 1)
        else:
            t = nstag
        # route special namespaces into dedicated fields
        if ns == 'language' and t != 'translated':
            lang = t
        elif ns == "artist":
            artists.append(t)
        elif ns == "group":
            circles.append(t)
        elif ns == "parody":
            parodies.append(t)
        if not (remove_namespaces and ns in extra_namespaces):
            mdata['tags'].setdefault(ns, []).append(t)
        else:
            log.debug(f"removing namespace {ns}")
    log.debug(f"tags: {mdata['tags']}")

    if replace_metadata or not item.language:
        mdata['language'] = lang

    if parodies:
        if replace_metadata:
            item.parodies = []
            mdata['parodies'] = parodies
        elif not item.parodies:
            mdata['parodies'] = parodies

    if artists:
        # pair every artist with the full circle tuple
        a_circles = [(a, tuple(circles)) for a in artists]
        if replace_metadata:
            item.artists = []
            mdata['artists'] = a_circles
        elif not item.artists:
            mdata['artists'] = a_circles

    if urls_to_apply:
        mdata['urls'] = urls_to_apply
    return mdata