from functions import rip_queue, manager

deviant = input('Enter username: ')
mode = input('Enter mode: ')

manager.set_mode('rip_queue')
manager.init_logging()

with manager.get_dagr():
    manager.get_browser().do_login()
    rip_queue(mode, deviant)
from functions import rip_nolink, manager

deviant = input('Enter username: ')
mode = input('Enter mode: ')

manager.set_mode('rip_nolink')
manager.init_logging()

with manager.get_dagr():
    manager.get_browser().do_login()
    rip_nolink(mode, deviant)
import logging

from functions import manager, rip_gallery

deviant = input('Enter username: ')
full_crawl = input('Full crawl?: ').lower().startswith('y')

manager.set_mode('rip_gallery')
manager.init_logging()

logging.getLogger(__name__).info(f"Full crawl: {full_crawl}")

with manager.get_dagr():
    manager.get_browser().do_login()
    rip_gallery(deviant, full_crawl)

logging.shutdown()
import logging
from os import environ

from functions import config, manager, rip_trash

full_crawl = input('Full crawl?: ').lower().startswith('y')
resort = input('Resort?: ').lower().startswith('y')

env_level = environ.get('dagr.riptrash.logging.level', None)
level_mapped = config.map_log_level(
    int(env_level)) if env_level is not None else None

manager.set_mode('rip_trash')
manager.init_logging(level_mapped)

with manager.get_dagr():
    rip_trash(full_crawl=full_crawl, resort=resort)

logging.shutdown()
import logging
from os import environ

from functions import config, manager, monitor_watchlist

env_level = environ.get('dagr.watchlist.logging.level', None)
level_mapped = config.map_log_level(
    int(env_level)) if env_level is not None else None

manager.set_mode('watchlist')
manager.init_logging(level_mapped)

with manager.get_dagr():
    monitor_watchlist()

logging.shutdown()
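The rip_trash and watchlist scripts above read a per-script environment variable and pass it through config.map_log_level before calling manager.init_logging. A minimal sketch of driving that override from a launcher, assuming config.map_log_level accepts standard numeric logging levels and that the monitor lives in a file named watchlist.py (both assumptions, not confirmed by the source):

import logging
import subprocess
from os import environ

# Hypothetical launcher: export the override before starting the watchlist
# script so that manager.init_logging(level_mapped) receives a DEBUG-level value.
env = dict(environ)
env['dagr.watchlist.logging.level'] = str(logging.DEBUG)  # numeric level is an assumption
subprocess.run(['python', 'watchlist.py'], env=env)        # file name is an assumption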
import asyncio
import logging
import re
from asyncio import Lock  # source of Lock assumed; the original import was cut off
from os import environ

from aiohttp.web_response import json_response
from dagr_revamped.lib import DagrException
from dagr_revamped.utils import artist_from_url, convert_queue

from functions import config, load_bulk, manager, update_bulk_galleries
from QueueItem import QueueItem

queue = asyncio.PriorityQueue()
queue_lock = Lock()

env_level = environ.get('dagr.queueman.logging.level', None)
level_mapped = config.map_log_level(
    int(env_level)) if env_level is not None else None

manager.set_mode('queueman')
manager.init_logging(level_mapped)

logger = logging.getLogger(__name__)

regexes = {
    k: re.compile(v) for k, v in config.get('deviantart.regexes').items()
}

queue_slug = 'queue'

cache = manager.get_cache()


class waitingCount():
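The worker script below fetches work from http://192.168.20.50:3002/item, which suggests queueman exposes this priority queue over HTTP. A minimal sketch of such a handler with aiohttp, assuming items sit in the queue as (priority, QueueItem) tuples and that a hypothetical params() accessor returns the item's constructor kwargs; neither detail is confirmed by the truncated source above:

from aiohttp import web

async def get_item(request):
    # Hand the highest-priority queued item to a worker; the 404-on-empty
    # behaviour and the (priority, item) tuple layout are assumptions.
    if queue.empty():
        return json_response({}, status=404)
    _priority, item = queue.get_nowait()
    # The worker rebuilds items with QueueItem(**resp.json()), so the payload
    # must be the constructor kwargs; params() is a hypothetical accessor.
    return json_response(item.params())

app = web.Application()
app.add_routes([web.get('/item', get_item)])
# web.run_app(app, port=3002)  # port taken from the worker's fetch URL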
import asyncio
import logging
from copy import copy
from pprint import pformat
from time import sleep

import requests

# from dagr_revamped.DAGRDeviationProcessorFNS import DAGRDeviationProcessorFNS
from functions import check_stop_file, flush_errors_to_queue, manager
from QueueItem import QueueItem

manager.set_mode('worker')
manager.init_logging()

logger = logging.getLogger(__name__)


async def fetch_item():
    try:
        resp = requests.get('http://192.168.20.50:3002/item')
        resp.raise_for_status()
        return QueueItem(**(resp.json()))
    except Exception:
        logger.exception('Error while fetching work item')


async def process_item(item):
    try:
        item.process()
        http_errors = manager.get_dagr().report_http_errors()
        if http_errors.get(400, 0) > 1:
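The worker file is truncated above, so the loop tying fetch_item and process_item together is missing. A minimal sketch of such a polling loop; the stop-file check taking no arguments, the 30-second back-off, and the placement of flush_errors_to_queue are all assumptions rather than the project's actual wiring:

async def main():
    # Poll for work until a stop file appears.
    while not check_stop_file():
        item = await fetch_item()
        if item is None:
            sleep(30)  # back-off interval is a guess
            continue
        await process_item(item)
        flush_errors_to_queue()  # running this after each item is assumed


if __name__ == '__main__':
    asyncio.run(main())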
import logging

from functions import manager, rip_favs

deviant = input('Enter username: ')
full_crawl = input('Full crawl?: ').lower().startswith('y')

manager.set_mode('rip_favs')
manager.init_logging()

logging.getLogger(__name__).info(f"Full crawl: {full_crawl}")

with manager.get_dagr():
    manager.get_browser().do_login()
    rip_favs(deviant, full_crawl)

logging.shutdown()