from enum import Enum, unique

from loguru import logger

from flexget import db_schema, plugin
from flexget.components.sites.utils import torrent_availability
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.requests import RequestException
from flexget.utils.requests import Session as RequestSession
from flexget.utils.soup import get_soup

logger = logger.bind(name='hebits')

# Versioned declarative base for this plugin's tables (schema version 0).
Base = db_schema.versioned_base('hebits', 0)

# Shared session so all HTTP traffic from this plugin reuses one connection pool.
requests = RequestSession()


@unique
class HeBitsCategory(Enum):
    # Numeric torrent-category ids as used by the HeBits site.
    # @unique guarantees no two names share an id.
    movies = 1
    tv = 2
    theater = 3
    software = 4
    games = 5
    music = 6
    books = 7
    movies_packs = 8
    porno = 9
    other = 10
from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.utils.pathscrub import pathscrub from flexget.utils.template import RenderError from flexget.utils.tools import parse_timedelta try: import requests.exceptions import transmission_rpc as transmissionrpc from transmission_rpc import TransmissionError except ImportError: # If transmissionrpc is not found, errors will be shown later pass logger = logger.bind(name='transmission') class TransmissionBase: def __init__(self): self.client = None self.opener = None def prepare_config(self, config): if isinstance(config, bool): config = {'enabled': config} config.setdefault('enabled', True) config.setdefault('host', 'localhost') config.setdefault('port', 9091) config.setdefault('main_file_ratio', 0.9) if 'netrc' in config:
_PLUGIN_NAME = 'wecom' _CORP_ID = 'corp_id' _CORP_SECRET = 'corp_secret' _AGENT_ID = 'agent_id' _TO_USER = '******' _GET_ACCESS_TOKEN_URL = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid={corp_id}&corpsecret={corp_secret}' _POST_MESSAGE_URL = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token={access_token}' _UPLOAD_IMAGE = 'https://qyapi.weixin.qq.com/cgi-bin/media/upload?access_token={access_token}&type=image' _TEXT_LIMIT = 1024 AccessTokenBase = db_schema.versioned_base('wecom_access_token', 0) MessageBase = db_schema.versioned_base('message', 0) logger = logger.bind(name=_PLUGIN_NAME) class AccessTokenEntry(AccessTokenBase): __tablename__ = 'wecom_access_token' id = Column(String, primary_key=True) corp_id = Column(String, index=True, nullable=True) corp_secret = Column(String, index=True, nullable=True) access_token = Column(String, primary_key=True) expires_in = Column(Integer, index=True, nullable=True) gmt_modify = Column(DateTime, index=True, nullable=True) def __str__(self): x = ['id={0}'.format(self.id)] if self.corp_id:
import re from loguru import logger from flexget import plugin from flexget.event import event logger = logger.bind(name='manipulate') class Manipulate: r""" Usage: manipulate: - <destination field>: [find_all]: <boolean> [phase]: <phase> [from]: <source field> [extract]: <regexp> [separator]: <text> [replace]: regexp: <regexp> format: <regexp> [remove]: <boolean> Example: manipulate: - title: extract: \[\d\d\d\d\](.*)
import base64 import datetime from loguru import logger from requests.exceptions import RequestException from flexget import plugin from flexget.config_schema import one_or_more from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession from flexget.utils.requests import TimedLimiter plugin_name = 'pushbullet' logger = logger.bind(name=plugin_name) PUSHBULLET_URL = 'https://api.pushbullet.com/v2/pushes' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('pushbullet.com', '5 seconds')) class PushbulletNotifier: """ Example:: notify: entries: via: pushbullet: apikey: <API_KEY>
from loguru import logger

from flexget import plugin
from flexget.event import event

logger = logger.bind(name='accept_all')


class FilterAcceptAll:
    """
    Accepts every entry in the task when enabled.

    Example::

      accept_all: true
    """

    schema = {'type': 'boolean'}

    def on_task_filter(self, task, config):
        # Disabled (``accept_all: false``) -> leave all entries untouched.
        if not config:
            return
        for item in task.entries:
            item.accept()


@event('plugin.register')
def register_plugin():
    plugin.register(FilterAcceptAll, 'accept_all', api_ver=2)
import os import sys from datetime import datetime from os import path d = path.dirname(__file__) sys.path.append(d) from flexget.entry import Entry from loguru import logger from flexget import plugin from flexget.event import event from qbittorrent_client import QBittorrentClient logger = logger.bind(name='qbittorrent_mod') class QBittorrentModBase: def __init__(self): self.client = None def prepare_config(self, config): if isinstance(config, bool): config = {'enabled': config} config.setdefault('enabled', True) config.setdefault('host', 'localhost') config.setdefault('port', 8080) config.setdefault('use_ssl', True) config.setdefault('verify_cert', True) return config def create_client(self, config):
from loguru import logger from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Unicode, select from sqlalchemy.orm import relation from sqlalchemy.orm import Session as DBSession from flexget import db_schema from flexget.entry import Entry from flexget.event import event from flexget.manager import Session from flexget.plugin import PluginError from flexget.utils import json, serialization from flexget.utils.database import entry_synonym from flexget.utils.sqlalchemy_utils import table_add_column, table_schema from flexget.utils.tools import TimedDict, get_config_hash, parse_timedelta logger = logger.bind(name='input_cache') if TYPE_CHECKING: class Base: def __init__(self, *args, **kwargs) -> None: ... else: Base = db_schema.versioned_base('input_cache', 2) @db_schema.upgrade('input_cache') def upgrade(ver: int, session: DBSession) -> int: if ver == 0: table = table_schema('input_cache_entry', session)
import mimetypes from loguru import logger from flexget import plugin from flexget.event import event logger = logger.bind(name='nzb_size') # a bit hacky, add nzb as a known mimetype mimetypes.add_type('application/x-nzb', '.nzb') class NzbSize: """ Provides entry size information when dealing with nzb files """ @plugin.priority(200) def on_task_modify(self, task, config): """ The downloaded file is accessible in modify phase """ try: from pynzb import nzb_parser except ImportError: # TODO: remove builtin status so this won't get repeated on every task execution # TODO: this will get loaded even without any need for nzb raise plugin.DependencyError(issued_by='nzb_size', missing='lib pynzb') for entry in task.accepted:
def main():
    """Entry point for PromED, the Prometheus ECS Discoverer.

    - Performs all the setup for PromED.
    - Sets up a big chunk of instrumentation.
    - Inits all parts of PromED.
    - Orchestrates discovery in an endless loop.
    """
    interval = s.INTERVAL
    output_dir = s.OUTPUT_DIRECTORY
    should_throttle = s.WARMUP_THROTTLE

    configure_logging()
    expose_info()

    logger.info("Welcome to PromED, the Prometheus ECS Discoverer.")
    logger.bind(settings=s.as_dict()).info("Here is the used configuration.")

    # Histogram tracking how long each discovery round takes.
    DURATION = get_interval_histogram(interval)

    if s.PROMETHEUS_START_HTTP_SERVER:
        port = s.PROMETHEUS_SERVER_PORT
        logger.bind(port=port).info("Start Prometheus HTTP server to expose metrics.")
        start_http_server(port=port)

    logger.info("Create Boto3 session.")
    session = boto3.Session()
    config = Config(retries={"max_attempts": s.MAX_RETRY_ATTEMPTS, "mode": "standard"})

    logger.info("Create Boto3 clients and CachedFetcher.")
    fetcher = fetching.CachedFetcher(
        session.client("ecs", config=config),
        session.client("ec2", config=config),
        should_throttle=should_throttle,
        throttle_interval_seconds=s.THROTTLE_INTERVAL_SECONDS,
    )

    logger.info("Create PrometheusEcsDiscoverer.")
    discoverer = discovery.PrometheusEcsDiscoverer(fetcher)

    if should_throttle:
        logger.info("First discovery round will be throttled down.")

    logger.info("Ready for discovery. The discoverer will run until interrupted.")

    first_round = True
    while True:
        logger.info("Start new discovery round.")
        start_time = default_timer()

        # Warm-up throttling only applies to round one; make sure it is off
        # on every later round.
        if not first_round:
            discoverer.fetcher.should_throttle = False

        targets = discoverer.discover()
        marshalling.write_targets_to_file(targets, output_dir)

        # NOTE(review): this also clears the flag immediately after round one;
        # together with the branch above it looks redundant — confirm intent.
        if first_round and should_throttle:
            fetcher.should_throttle = False
        first_round = False

        duration = max(default_timer() - start_time, 0)
        logger.bind(duration=duration).info("Finished discovery round.")
        DURATION.observe(duration)

        if duration > interval:
            logger.bind(duration=duration).error(
                "Discovery round took longer than the configured interval. Please investigate."
            )
            INTERVAL_BREACHED.inc()

        # Sleep away the remainder of the interval (clamped to non-negative).
        time_left = max(interval - duration, 0)
        time.sleep(time_left)
from django.db import models from django.utils import timezone from loguru import logger from game.models import MembersGroup from django.conf import settings log = logger.bind(task="app") class Subscriber(models.Model): """ Модель подписчика бота """ tg_chat_id = models.IntegerField(verbose_name="Идентификатор подписчика") is_active = models.BooleanField( default=False, verbose_name="Подписан ли польователь на бота") comment = models.TextField(blank=True, null=True, verbose_name="Комментарй к подписчику") members_group = models.ForeignKey( MembersGroup, on_delete=models.CASCADE, related_name='subscribers', verbose_name="Группа в которой состоит пользователь") day = models.IntegerField(default=1, verbose_name="День") step = models.CharField(max_length=1000, verbose_name="Шаг пользователя", blank=True, null=True) registry_date = models.DateField(default=timezone.now, editable=True) points_body = models.IntegerField(default=0, verbose_name="Уровень физ. развития")
def __init__(self,
             input_dim_encoder: int,
             hidden_dim_encoder: int,
             output_dim_encoder: int,
             dropout_p_encoder: float,
             sub_sampling_factor_encoder: int,
             sub_sampling_mode: int,
             output_dim_h_decoder: int,
             nb_classes: int,
             dropout_p_decoder: float,
             max_out_t_steps: int,
             mode: int) \
        -> None:
    """Recurrent Neural Network with bi-directional GRU and attention for audio captioning on Clotho Dataset

    :param input_dim_encoder: Input dimensionality of the encoder.
    :type input_dim_encoder: int
    :param hidden_dim_encoder: Hidden dimensionality of the encoder.
    :type hidden_dim_encoder: int
    :param output_dim_encoder: Output dimensionality of the encoder.
    :type output_dim_encoder: int
    :param dropout_p_encoder: Encoder RNN dropout.
    :type dropout_p_encoder: float
    :param sub_sampling_factor_encoder: Sub-sampling rate for the encoder
    :type sub_sampling_factor_encoder: int
    :param sub_sampling_mode: Sub-sampling mode, passed through to the encoder.
    :type sub_sampling_mode: int
    :param output_dim_h_decoder: Hidden output dimensionality of the decoder.
    :type output_dim_h_decoder: int
    :param nb_classes: Amount of output classes.
    :type nb_classes: int
    :param dropout_p_decoder: Decoder RNN dropout.
    :type dropout_p_decoder: float
    :param max_out_t_steps: Maximum output time-steps of the decoder.
    :type max_out_t_steps: int
    :param mode: if mode is 0, use decoder without attention,
                 if mode is 1, use decoder with attention
    :type mode: int
    """
    super().__init__()

    # Bound logger so construction messages carry the captioning context flags.
    logger_inner = logger.bind(is_caption=False, indent=1)
    if mode == 0:
        logger_inner.info(
            f'Sub sampling attention model {mode} - no attention')
    elif mode == 1:
        logger_inner.info(
            f'Sub sampling attention model {mode} - use attention')

    self.mode = mode
    self.max_out_t_steps: int = max_out_t_steps
    self.nb_classes: int = nb_classes

    # Encoder with temporal sub-sampling of the input sequence.
    self.encoder: Module = SubSamplingEncoder(
        input_dim=input_dim_encoder,
        hidden_dim=hidden_dim_encoder,
        output_dim=output_dim_encoder,
        dropout_p=dropout_p_encoder,
        sub_sampling_factor=sub_sampling_factor_encoder,
        sub_sampling_mode=sub_sampling_mode)

    # Decoder input is output_dim_encoder * 2 — presumably the concatenated
    # forward/backward states of the bi-directional encoder (TODO confirm).
    # Note only one of the two decoder attributes is created, depending on mode.
    if self.mode == 0:
        self.decoder_alzheimer: Module = DecoderNoAttention(
            input_dim=output_dim_encoder * 2,
            output_dim=output_dim_h_decoder,
            nb_classes=nb_classes,
            dropout_p=dropout_p_decoder)
    elif self.mode == 1:
        self.decoder_attention: Module = AttentionDecoder(
            input_dim=output_dim_encoder * 2,
            output_dim=output_dim_h_decoder,
            nb_classes=nb_classes,
            dropout_p=dropout_p_decoder)
from loguru import logger from flexget import plugin from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.utils.cached_input import cached from flexget.utils.requests import RequestException logger = logger.bind(name='anilist') LIST_STATUS = [ 'current', 'planning', 'completed', 'dropped', 'paused', 'repeating' ] RELEASE_STATUS = [ 'finished', 'releasing', 'not_yet_released', 'cancelled', 'all' ] ANIME_FORMAT = ['tv', 'tv_short', 'movie', 'special', 'ova', 'ona', 'all'] TRAILER_SOURCE = { 'youtube': 'https://www.youtube.com/embed/', 'dailymotion': 'https://www.dailymotion.com/embed/video/', } class AniList(object): """" Creates entries for series and movies from your AniList list Syntax:
import socket import threading from typing import Optional, Dict, Tuple import cherrypy import zxcvbn from flask import Flask, abort, redirect from flask_login import UserMixin from loguru import logger from sqlalchemy import Column, Integer, Unicode from werkzeug.security import generate_password_hash from flexget.manager import Base from flexget.utils.database import with_session logger = logger.bind(name='web_server') _home: Optional[str] = None _app_register: Dict[str, Tuple[Flask, str]] = {} _default_app = Flask(__name__) rand = random.SystemRandom() def generate_key(): """ Generate key for use to authentication """ return str( hashlib.sha224(str(rand.getrandbits(128)).encode('utf-8')).hexdigest()) def get_random_string(
from loguru import logger from sqlalchemy import Column, DateTime, ForeignKey, Integer, Unicode, func from sqlalchemy.orm import relationship from sqlalchemy.sql.elements import and_ from flexget import plugin from flexget.db_schema import versioned_base, with_session from flexget.entry import Entry try: # NOTE: Importing other plugins is discouraged! from flexget.components.parsing.parsers import parser_common as plugin_parser_common except ImportError: raise plugin.DependencyError(issued_by=__name__, missing='parser_common') logger = logger.bind(name='movie_list') Base = versioned_base('movie_list', 0) class MovieListBase: """ Class that contains helper methods for movie list as well as plugins that use it, such as API and CLI. """ @property def supported_ids(self): # Return a list of supported series identifier as registered via their plugins return [ p.instance.movie_identifier for p in plugin.get_plugins(interface='movie_metainfo') ]
from datetime import datetime, timedelta from loguru import logger from sqlalchemy import Boolean, Column, DateTime, Integer, String, Unicode, select from flexget import db_schema from flexget.entry import Entry from flexget.event import event from flexget.utils import json, serialization from flexget.utils.database import entry_synonym from flexget.utils.sqlalchemy_utils import table_schema logger = logger.bind(name='pending_approval') Base = db_schema.versioned_base('pending_approval', 1) @db_schema.upgrade('pending_approval') def upgrade(ver, session): if ver == 0: table = table_schema('pending_entries', session) for row in session.execute(select([table.c.id, table.c.json])): if not row['json']: # Seems there could be invalid data somehow. See #2590 continue data = json.loads(row['json'], decode_datetime=True) # If title looked like a date, make sure it's a string title = str(data.pop('title')) e = Entry(title=title, **data) session.execute(table.update().where( table.c.id == row['id']).values(json=serialization.dumps(e)))
from loguru import logger from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.utils.cached_input import cached try: from filmweb.exceptions import RequestFailed from filmweb.filmweb import Filmweb as FilmwebAPI from filmweb.items import LoggedUser except ImportError: # Errors are handled later pass logger = logger.bind(name='filmweb_watchlist') def translate_type(type): return {'shows': 'serial', 'movies': 'film'}[type] class FilmwebWatchlist: """ "Creates an entry for each movie in your Filmweb list.""" schema = { 'type': 'object', 'properties': { 'login': { 'type': 'string', 'description': 'Can be username or email address'
import re from urllib.parse import quote from loguru import logger from flexget import plugin from flexget.components.sites.urlrewriting import UrlRewritingError from flexget.components.sites.utils import normalize_unicode, torrent_availability from flexget.entry import Entry from flexget.event import event from flexget.utils import requests from flexget.utils.soup import get_soup logger = logger.bind(name='newtorrents') class NewTorrents: """NewTorrents urlrewriter and search plugin.""" def __init__(self): self.resolved = [] # UrlRewriter plugin API def url_rewritable(self, task, entry): # Return true only for urls that can and should be resolved if entry['url'].startswith('http://www.newtorrents.info/down.php?'): return False return ( entry['url'].startswith('http://www.newtorrents.info') and not entry['url'] in self.resolved )
from loguru import logger from flexget import plugin from flexget.event import event from flexget.plugin import PluginError logger = logger.bind(name='list_clear') class ListClear: schema = { 'type': 'object', 'properties': { 'what': { 'type': 'array', 'items': { 'allOf': [ { '$ref': '/schema/plugins?interface=list' }, { 'maxProperties': 1, 'error_maxProperties': 'Plugin options within list_clear plugin must be indented ' '2 more spaces than the first letter of the plugin name.', 'minProperties': 1, }, ] },
import yaml from _pytest.logging import caplog as _caplog from loguru import logger from vcr import VCR from vcr.stubs import VCRHTTPConnection, VCRHTTPSConnection import flexget.log from flexget import plugin from flexget.api import api_app from flexget.event import event from flexget.manager import Manager, Session from flexget.plugin import load_plugins from flexget.task import Task, TaskAbort from flexget.webserver import User logger = logger.bind(name='tests') VCR_CASSETTE_DIR = os.path.join(os.path.dirname(__file__), 'cassettes') VCR_RECORD_MODE = os.environ.get('VCR_RECORD_MODE', 'once') vcr = VCR( cassette_library_dir=VCR_CASSETTE_DIR, record_mode=VCR_RECORD_MODE, custom_patches=( (client, 'HTTPSConnection', VCRHTTPSConnection), (client, 'HTTPConnection', VCRHTTPConnection), ), ) # --- These are the public fixtures tests can ask for ---
from loguru import logger from flexget import plugin from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.utils.cached_input import cached from flexget.utils.requests import RequestException logger = logger.bind(name='my_anime_list') STATUS = { 'watching': 1, 'completed': 2, 'on_hold': 3, 'dropped': 4, 'plan_to_watch': 6, 'all': 7 } AIRING_STATUS = {'airing': 1, 'finished': 2, 'planned': 3, 'all': 6} ANIME_TYPE = [ 'all', 'tv', 'ova', 'movie', 'special', 'ona', 'music', 'unknown' ] class MyAnimeList: """" Creates entries for series and movies from MyAnimeList list Syntax:
from flexget.entry import Entry # retry configuration constants RETRY_INTERVAL_SEC: int = 15 RETRY_STEP_SEC: int = 5 try: import pysftp logging.getLogger("paramiko").setLevel(logging.ERROR) except ImportError: pysftp = None NodeHandler = Callable[[str], None] logger = logger.bind(name='sftp_client') class SftpClient: def __init__( self, host: str, port: int, username: str, password: Optional[str] = None, private_key: Optional[str] = None, private_key_pass: Optional[str] = None, connection_tries: int = 3, ): if not pysftp:
import re import zlib from pathlib import Path from urllib import parse from jinja2 import Template from loguru import logger from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.utils.cached_input import cached from flexget.utils.soup import get_soup logger = logger.bind(name='html') class InputHtml: """ Parses urls from html page. Usefull on sites which have direct download links of any type (mp3, jpg, torrent, ...). Many anime-fansubbers do not provide RSS-feed, this works well in many cases. Configuration expects url parameter. Note: This returns ALL links on url so you need to configure filters to match only to desired content. """ schema = {
# Allow some request objects to be imported from here instead of requests import warnings from datetime import datetime, timedelta from typing import Dict, Optional, Union from urllib.parse import urlparse from urllib.request import urlopen import requests from loguru import logger from requests import RequestException from flexget import __version__ as version from flexget.utils.tools import TimedDict, parse_timedelta # If we use just 'requests' here, we'll get the logger created by requests, rather than our own logger = logger.bind(name='utils.requests') # Don't emit info level urllib3 log messages or below logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING) # same as above, but for systems where urllib3 isn't part of the requests pacakge (i.e., Ubuntu) logging.getLogger('urllib3').setLevel(logging.WARNING) # Time to wait before trying an unresponsive site again WAIT_TIME = timedelta(seconds=60) # Remembers sites that have timed out unresponsive_hosts = TimedDict(WAIT_TIME) def is_unresponsive(url: str) -> bool: """ Checks if host of given url has timed out within WAIT_TIME
from loguru import logger from flexget.api import api_app from flexget.config_schema import register_config_key from flexget.event import event from flexget.ui.v1 import register_web_ui as register_web_ui_v1 from flexget.ui.v2 import register_web_ui as register_web_ui_v2 from flexget.utils.tools import get_config_hash from flexget.webserver import get_secret, register_app, setup_server logger = logger.bind(name="web_server_daemon") config_hash = '' web_server = None web_config_schema = { 'oneOf': [ { 'type': 'boolean' }, { 'type': 'integer', 'minimum': 0, 'maximum': 65536 }, { 'type': 'object', 'properties': { 'bind': { 'type': 'string', 'format': 'ipv4' },
def build_metric_logger(self, name):
    """Lazily set up a file-backed metric logger for *name*.

    On first use, adds a loguru sink writing to
    ``<results_folder>/<name>.log`` (filtered by ``make_filter(name)``)
    and stores a bound logger in ``self.loggers[name]``.
    """
    log_path = osp.join(self.results_folder, name + '.log')
    # Already initialised — nothing to do.
    # NOTE(review): assumes self.loggers already holds a (possibly None)
    # slot for this name; a missing key would raise KeyError — confirm.
    if self.loggers[name] is not None:
        return
    logger.add(log_path, filter=make_filter(name))
    self.loggers[name] = logger.bind(key_name=name)
from collections.abc import MutableSet from loguru import logger from requests import RequestException from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.utils import requests logger = logger.bind(name='sonarr_list') SERIES_ENDPOINT = 'series' LOOKUP_ENDPOINT = 'series/lookup' PROFILE_ENDPOINT = 'profile' ROOTFOLDER_ENDPOINT = 'Rootfolder' DELETE_ENDPOINT = 'series/{}' # Sonarr qualities that do no exist in Flexget QUALITY_MAP = {'Raw-HD': 'remux', 'DVD': 'dvdrip'} class SonarrSet(MutableSet): supported_ids = ['tvdb_id', 'tvrage_id', 'tvmaze_id', 'imdb_id', 'slug', 'sonarr_id'] schema = { 'type': 'object', 'properties': { 'base_url': {'type': 'string', 'default': 'http://localhost'}, 'base_path': {'type': 'string', 'default': ''}, 'port': {'type': 'number', 'default': 80}, 'api_key': {'type': 'string'},
from time import sleep
from urllib.parse import urljoin, urlparse, urlsplit
from xmlrpc import client as xmlrpc_client

from loguru import logger
from requests.auth import HTTPBasicAuth, HTTPDigestAuth

from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.bittorrent import Torrent, is_torrent_file
from flexget.utils.pathscrub import pathscrub
from flexget.utils.template import RenderError

logger = logger.bind(name='rtorrent')


class _Method:
    # some magic to bind an XML-RPC method to an RPC server.
    # supports "nested" methods (e.g. examples.getStateName):
    # attribute access extends the dotted method name; calling the proxy
    # forwards the accumulated name and the args tuple to the send callable.

    def __init__(self, send, name):
        # send: callable(method_name, args) performing the actual RPC.
        self.__send = send
        self.__name = name

    def __getattr__(self, name):
        # Build a new proxy with the dotted path extended by one segment.
        return _Method(self.__send, "%s.%s" % (self.__name, name))

    def __call__(self, *args):
        return self.__send(self.__name, args)
def test_colors_doesnt_strip_unrelated(writer):
    # Sink is added with colorize=False: color tags in the message logged via
    # opt(colors=True) are parsed and stripped, while markup that merely
    # appears in formatted *extra* data (``trap``) must pass through verbatim.
    logger.add(writer, format="{message} {extra[trap]}", colorize=False)
    logger.bind(trap="<red>B</red>").opt(colors=True).debug("<red>A</red>")
    assert writer.read() == parse("<red>A</red>", strip=True) + " <red>B</red>\n"
from loguru import logger from flexget.components.emby.api_emby import EmbyAuth, EmbyApiLibrary from flexget.config_schema import one_or_more from flexget import plugin from flexget.event import event from flexget.components.emby.emby_util import SCHEMA_SERVER_TAG logger = logger.bind(name='emby_reload') class EmbyRefreshLibrary: """ Refresh Emby Library Example: emby_refresh: server: host: http://localhost:8096 username: <username> apikey: <apikey> return_host: wan when: accepted """ auth = None schema = { 'type': 'object', 'properties': { **SCHEMA_SERVER_TAG,