from __future__ import unicode_literals, division, absolute_import from builtins import * # noqa pylint: disable=unused-import, redefined-builtin import logging from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.utils.cached_input import cached from flexget.utils.requests import RequestException, Session, TimedLimiter from flexget.utils.soup import get_soup log = logging.getLogger('letterboxd') requests = Session(max_retries=5) requests.add_domain_limiter(TimedLimiter('letterboxd.com', '1 seconds')) base_url = 'http://letterboxd.com' SLUGS = { 'default': { 'p_slug': '/%(user)s/list/%(list)s/', 'f_slug': 'data-film-slug' }, 'diary': { 'p_slug': '/%(user)s/films/diary/', 'f_slug': 'data-film-slug' }, 'likes': { 'p_slug': '/%(user)s/likes/films/', 'f_slug': 'data-film-link' },
from requests.exceptions import RequestException from flexget import plugin from flexget.config_schema import one_or_more from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession from flexget.utils.requests import TimedLimiter plugin_name = 'pushbullet' logger = logger.bind(name=plugin_name) PUSHBULLET_URL = 'https://api.pushbullet.com/v2/pushes' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('pushbullet.com', '5 seconds')) class PushbulletNotifier: """ Example:: notify: entries: via: pushbullet: apikey: <API_KEY> [device: <DEVICE_IDEN> (can also be a list of device ids, or don't specify any ids to send to all devices)] [email: <EMAIL_ADDRESS> (can also be a list of user email addresses)] [channel: <CHANNEL_TAG> (you can only specify device / email or channel tag, cannot use both)]
from flexget.entry import Entry from flexget.event import event from flexget.manager import Session from flexget.utils.database import json_synonym from flexget.utils.requests import RequestException from flexget.utils.requests import Session as RequestSession from flexget.utils.requests import TimedLimiter from flexget.utils.soup import get_soup from flexget.utils.tools import parse_filesize log = logging.getLogger('morethantv') Base = db_schema.versioned_base('morethantv', 0) requests = RequestSession() requests.add_domain_limiter( TimedLimiter('morethan.tv', '5 seconds') ) # TODO find out if they want a delay CATEGORIES = {'Movies': 'filter_cat[1]', 'TV': 'filter_cat[2]', 'Other': 'filter_cat[3]'} TAGS = [ 'action', 'adventure', 'animation', 'anime', 'art', 'asian', 'biography', 'celebrities', 'comedy', 'cooking',
from flexget.event import event from flexget.plugins.internal.urlrewriting import UrlRewritingError from flexget.utils.requests import Session, TimedLimiter from flexget.utils.soup import get_soup from flexget.entry import Entry from flexget.utils.search import normalize_unicode import unicodedata log = logging.getLogger('newpct') requests = Session() requests.headers.update( {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'}) requests.add_domain_limiter(TimedLimiter('newpct1.com', '2 seconds')) requests.add_domain_limiter(TimedLimiter('newpct.com', '2 seconds')) NEWPCT_TORRENT_FORMAT = 'http://www.newpct.com/torrents/{:0>6}.torrent' NEWPCT1_TORRENT_FORMAT = 'http://www.newpct1.com/torrents/{:0>6}.torrent' class UrlRewriteNewPCT(object): """NewPCT urlrewriter and search.""" schema = {'type': 'boolean', 'default': False} # urlrewriter API def url_rewritable(self, task, entry): url = entry['url'] rewritable_regex = '^http:\/\/(www.)?newpct1?.com\/.*'
import unicodedata from datetime import date, datetime, timedelta from requests.exceptions import HTTPError, RequestException from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.utils.requests import Session as RequestSession from flexget.utils.requests import TimedLimiter from flexget.utils.soup import get_soup log = logging.getLogger('search_npo') requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('npostart.nl', '8 seconds')) class NPOWatchlist: """ Produces entries for every episode on the user's npostart.nl watchlist (Dutch public television). Entries can be downloaded using http://arp242.net/code/download-npo If 'remove_accepted' is set to 'yes', the plugin will delete accepted entries from the watchlist after download is complete. If 'max_episode_age_days' is set (and not 0), entries will only be generated for episodes broadcast in the last x days. This only applies to episodes related to series the user is following. If 'download_premium' is set to 'yes', the plugin will also download entries that are marked as exclusive content for NPO Plus subscribers. For example:
from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.utils.requests import Session as RequestSession, TimedLimiter from flexget.utils.soup import get_soup from requests.exceptions import HTTPError, RequestException from datetime import datetime, date, timedelta import unicodedata import re log = logging.getLogger('search_npo') requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('npo.nl', '5 seconds')) class NPOWatchlist(object): """ Produces entries for every episode on the user's npo.nl watchlist (Dutch public television). Entries can be downloaded using http://arp242.net/code/download-npo If 'remove_accepted' is set to 'yes', the plugin will delete accepted entries from the watchlist after download is complete. If 'max_episode_age_days' is set (and not 0), entries will only be generated for episodes broadcast in the last x days. This only applies to episodes related to series the user is following. For example: npo_watchlist: email: [email protected]
from __future__ import unicode_literals, division, absolute_import import logging from flexget import plugin from flexget.entry import Entry from flexget.event import event from flexget.config_schema import one_or_more from flexget.utils.requests import Session, get, TimedLimiter, RequestException from flexget.utils.search import normalize_scene log = logging.getLogger('rarbg') requests = Session() requests.add_domain_limiter( TimedLimiter('torrentapi.org', '2.3 seconds')) # they only allow 1 request per 2 seconds CATEGORIES = { 'all': 0, # Movies 'x264 720p': 45, 'x264 1080p': 44, 'XviD': 14, 'Full BD': 42, # TV 'HDTV': 41, 'SDTV': 18 }
from dateutil.parser import parse as dateutil_parse from flexget import plugin, db_schema from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.manager import Session from flexget.utils.database import json_synonym from flexget.utils.requests import Session as RequestSession, TimedLimiter, RequestException from flexget.utils.tools import parse_filesize log = logging.getLogger('passthepopcorn') Base = db_schema.versioned_base('passthepopcorn', 0) requests = RequestSession() requests.add_domain_limiter(TimedLimiter('passthepopcorn.me', '5 seconds')) TAGS = [ 'action', 'adventure', 'animation', 'arthouse', 'asian', 'biography', 'camp', 'comedy', 'crime', 'cult', 'documentary', 'drama', 'experimental', 'exploitation', 'family', 'fantasy', 'film.noir', 'history', 'horror', 'martial.arts', 'musical', 'mystery', 'performance', 'philosophy', 'politics', 'romance', 'sci.fi', 'short', 'silent', 'sport', 'thriller', 'video.art', 'war', 'western' ] ORDERING = { 'Relevance': 'relevance', 'Time added': 'timeadded', 'Time w/o reseed': 'timenoreseed', 'First time added': 'creationtime',
from datetime import datetime from dateutil.parser import ParserError, isoparse from loguru import logger from flexget import plugin from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import RequestException, Session, TimedLimiter plugin_name = 'discord' logger = logger.bind(name=plugin_name) session = Session() session.add_domain_limiter(TimedLimiter('discord.com', '3 seconds')) class DiscordNotifier: """ Example:: notify: entries: via: - discord: web_hook_url: <string> [username: <string>] (override the default username of the webhook) [avatar_url: <string>] (override the default avatar of the webhook) [embeds: <arrays>[<object>]] (override embeds) """
install_aliases()
import logging

from urllib.parse import parse_qs, urlparse

from bs4 import BeautifulSoup

from flexget import plugin
from flexget.event import event
from flexget.utils.requests import TimedLimiter, RequestException
from flexget.utils.requests import Session as RequestSession

log = logging.getLogger('brokenstones_lookup')

requests = RequestSession()
requests.add_domain_limiter(TimedLimiter('brokenstones.club', '10 seconds'))


def login(username, password):
    """Log the module's shared session in to BrokenStones.

    Raises plugin.PluginError if the HTTP request fails, or if the site
    bounces us back to the login page (credentials rejected).
    """
    log.info('Logging in to BrokenStones...')
    login_url = 'https://brokenstones.club/login.php'
    credentials = {
        'username': username,
        'password': password,
        'keeplogged': '1',
    }
    try:
        response = requests.post(login_url, data=credentials)
    except RequestException as e:
        raise plugin.PluginError(
            'Error logging in to BrokenStones: {}'.format(e))
    # A successful login redirects elsewhere; landing back on the login
    # page means the credentials were not accepted.
    if response.url == login_url:
        raise plugin.PluginError('Failed to log in to BrokenStones')
def on_task_start(self, task, config):
    """Install a rate limiter for each configured domain on this task.

    `config` maps domain name -> minimum interval between requests
    (e.g. ``{'example.com': '5 seconds'}``).
    """
    for domain, delay in config.items():
        # Lazy %-args: the message is only formatted when debug logging
        # is actually enabled (the original eagerly formatted with `%`).
        log.debug('Adding minimum interval of %s between requests to %s', delay, domain)
        task.requests.add_domain_limiter(TimedLimiter(domain, delay))
def search(self, task, entry, config=None):
    """Search SceneAccess for the given entry.

    Logs in (once per session), queries every configured category with each
    of the entry's search strings, and returns a set of result Entry
    objects (title, url, seed/leech counts, search_sort and, when
    parseable, content_size in MiB).
    """
    session = task.requests
    # Rate-limit the tracker unless the user already configured a limiter.
    if 'sceneaccess.eu' not in session.domain_limiters:
        session.add_domain_limiter(TimedLimiter('sceneaccess.eu', '7 seconds'))

    # Log in lazily: an empty cookie jar means no active session yet.
    if not session.cookies:
        log.debug('Logging in to %s...', URL)
        params = {'username': config['username'],
                  'password': config['password'],
                  'submit': 'come on in'}
        session.post(URL + 'login', data=params)

    multip = config.get('gravity_multiplier', 1)

    # One base search URL per configured category.
    base_urls = [URL + '%(url_path)s?method=2%(category_url_string)s' % category
                 for category in self.processCategories(config)]

    entries = set()
    for search_string in entry.get('search_strings', [entry['title']]):
        search_string_normalized = normalize_unicode(clean_title(search_string))
        search_string_url_fragment = '&search=' + quote(search_string_normalized.encode('utf8'))

        for base_url in base_urls:
            url = base_url + search_string_url_fragment
            log.debug('Search URL for `%s`: %s', search_string, url)

            page = session.get(url).content
            soup = get_soup(page)

            for result in soup.findAll('tr', attrs={'class': 'tt_row'}):
                # Build a fresh result entry; the original rebound the
                # `entry` parameter here, shadowing the input.
                new_entry = Entry()
                new_entry['title'] = result.find('a', href=re.compile(r'details\?id=\d+'))['title']
                new_entry['url'] = URL + result.find('a', href=re.compile(r'.torrent$'))['href']
                new_entry['torrent_seeds'] = result.find('td', attrs={'class': 'ttr_seeders'}).text
                new_entry['torrent_leeches'] = result.find('td', attrs={'class': 'ttr_leechers'}).text
                new_entry['search_sort'] = torrent_availability(
                    new_entry['torrent_seeds'], new_entry['torrent_leeches']) * multip

                # The size is the text node immediately after the size cell.
                # Fix: `.__next__` was a bad py2->py3 conversion of bs4's
                # `.next` attribute and raised at runtime.
                size_text = result.find('td', attrs={'class': 'ttr_size'}).next_element
                content_size = self._parse_content_size(size_text)
                if content_size is not None:
                    new_entry['content_size'] = content_size

                entries.add(new_entry)

    return entries

def _parse_content_size(self, size_text):
    """Parse a human-readable size like '1.5 GB' into MiB, or None.

    The tracker reports decimal units (KB/MB/GB); FlexGet stores MiB.
    """
    # Raw string fixes the invalid escape sequences of the original literal.
    match = re.search(r'(\d+(?:[.,]\d+)*)\s?([KMG]B)', size_text)
    if not match:
        return None
    # Strip thousands separators so float() accepts e.g. '1,234.5'
    # (the original crashed on comma-grouped numbers).
    amount = float(match.group(1).replace(',', ''))
    unit = match.group(2)
    if unit == 'GB':
        return int(amount * 1000 ** 3 / 1024 ** 2)
    if unit == 'MB':
        return int(amount * 1000 ** 2 / 1024 ** 2)
    if unit == 'KB':
        return int(amount * 1000 / 1024 ** 2)
    return int(amount / 1024 ** 2)
from loguru import logger from flexget import plugin from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession, TimedLimiter from requests.exceptions import RequestException plugin_name = 'sms_free_fr' logger = logger.bind(name=plugin_name) SMS_SEND_URL = 'https://smsapi.free-mobile.fr/sendmsg' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('smsapi.free-mobile.fr', '5 seconds')) class SMSFreeFrNotifier(object): """ Sends SMS notification through smsapi.free-mobile.fr Informations: https://www.freenews.fr/freenews-edition-nationale-299/free-mobile-170/nouvelle-option-notifications-par-sms-chez-free-mobile-14817 Example: sms_free_fr: user: your login (accepted format example: '12345678') password: <PASSWORD>
from flexget.utils.soup import get_soup
from slugify import slugify

from .anidb_cache import cached_anidb, ANIDB_CACHE

# Identifier used both as the plugin id and the logger name.
PLUGIN_ID = 'fadbs.util.anidb'

# Client name/version strings for the AniDB HTTP API.
CLIENT_STR = 'fadbs'
CLIENT_VER = 1

log = logging.getLogger(PLUGIN_ID)

requests = Session()
# NOTE(review): presumably spoofs an old urllib user agent for AniDB
# compatibility -- confirm before changing.
requests.headers.update({'User-Agent': 'Python-urllib/2.6'})
# Keep at least 2 seconds between API requests.
requests.add_domain_limiter(TimedLimiter('api.anidb.net', '2 seconds'))


class AnidbSearch(object):
    """ Search for an anime's id """

    # AniDB HTTP API endpoint for anime data requests.
    anidb_xml_url = 'http://api.anidb.net:9001/httpapi?request=anime'
    # Third-party anime title lookup service queried before the official API.
    prelook_url = 'http://anisearch.outrance.pl?task=search'
    # Captures the payload inside a CDATA-wrapped string.
    cdata_regex = re.compile(r'.+CDATA\[(.+)\]\].+')
    # Particle words to ignore per language code when comparing titles
    # ('x-jat' = romanized Japanese).
    particle_words = {
        'x-jat': {'no', 'wo', 'o', 'na', 'ja', 'ni', 'to', 'ga', 'wa'}
    }

    def __init__(self):
        # Debug flag, off by default; consumers are outside this chunk.
        self.debug = False
import xml.etree.ElementTree as ET from flexget import plugin from flexget.config_schema import one_or_more from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession, TimedLimiter from requests.exceptions import RequestException plugin_name = 'prowl' log = logging.getLogger(plugin_name) PROWL_URL = 'https://api.prowlapp.com/publicapi/add' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('prowlapp.com', '5 seconds')) class ProwlNotifier(object): """ Send prowl notifications Example:: prowl: api_key: xxxxxxx [application: application name, default FlexGet] [event: event title, default New Release] [priority: -2 - 2 (2 = highest), default 0] [description: notification to send]
import logging from requests.exceptions import RequestException from flexget import plugin from flexget.config_schema import one_or_more from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession from flexget.utils.requests import TimedLimiter plugin_name = 'join' log = logging.getLogger(plugin_name) requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('appspot.com', '5 seconds')) JOIN_URL = 'https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush' class JoinNotifier: """ Example:: notify: entries: via: - join: [api_key: <API_KEY> (your join api key. Only required for 'group' notifications)] [group: <GROUP_NAME> (name of group of join devices to notify. 'all', 'android', etc.) [device: <DEVICE_ID> (can also be a list of device ids)]
from loguru import logger from flexget import plugin from flexget.components.sites.utils import normalize_scene from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.plugin import PluginError from flexget.utils.requests import RequestException, Session, TimedLimiter logger = logger.bind(name='rarbg') requests = Session() requests.add_domain_limiter(TimedLimiter( 'torrentapi.org', '3 seconds')) # they only allow 1 request per 2 seconds CATEGORIES = { 'all': 0, # Movies 'x264': 17, 'x264 720p': 45, 'x264 1080p': 44, 'x264 3D': 47, 'XviD': 14, 'XviD 720p': 48, 'Full BD': 42, # TV 'HDTV': 41, 'SDTV': 18, # Adult 'XXX': 4,
from flexget import db_schema, plugin from flexget.entry import Entry from flexget.event import event from flexget.manager import Session from flexget.utils.database import json_synonym from flexget.utils.requests import RequestException from flexget.utils.requests import Session as RequestSession from flexget.utils.requests import TimedLimiter from flexget.utils.soup import get_soup from flexget.utils.tools import parse_filesize logger = logger.bind(name='filelist') Base = db_schema.versioned_base('filelist', 0) requests = RequestSession() requests.add_domain_limiter(TimedLimiter('filelist.ro', '2 seconds')) BASE_URL = 'https://filelist.ro/' CATEGORIES = { 'all': 0, 'anime': 24, 'audio': 11, 'cartoons': 15, 'docs': 16, 'games console': 10, 'games pc': 9, 'linux': 17, 'misc': 18, 'mobile': 22, 'movies 3d': 25,
logger = logger.bind(name='imdb.utils') # IMDb delivers a version of the page which is unparsable to unknown (and some known) user agents, such as requests' # Spoof the old urllib user agent to keep results consistent requests = Session() requests.headers.update({'User-Agent': 'Python-urllib/2.6'}) # requests.headers.update({'User-Agent': random.choice(USERAGENTS)}) # this makes most of the titles to be returned in english translation, but not all of them requests.headers.update({'Accept-Language': 'en-US,en;q=0.8'}) requests.headers.update({ 'X-Forwarded-For': '24.110.%d.%d' % (random.randint(0, 254), random.randint(0, 254)) }) # give imdb a little break between requests (see: http://flexget.com/ticket/129#comment:1) requests.add_domain_limiter(TimedLimiter('imdb.com', '3 seconds')) def is_imdb_url(url): """Tests the url to see if it's for imdb.com.""" if not isinstance(url, str): return # Probably should use urlparse. return re.match(r'https?://[^/]*imdb\.com/', url) def is_valid_imdb_title_id(value): """ Return True if `value` is a valid IMDB ID for titles (movies, series, etc). """ if not isinstance(value, str):
from flexget import plugin, db_schema from flexget.entry import Entry from flexget.event import event from flexget.utils.requests import TimedLimiter, RequestException from flexget.manager import Session from flexget.utils.database import json_synonym from flexget.utils.requests import Session as RequestSession from flexget.utils.soup import get_soup from flexget.config_schema import one_or_more from flexget.utils.tools import parse_filesize log = logging.getLogger('morethantv') Base = db_schema.versioned_base('morethantv', 0) requests = RequestSession() requests.add_domain_limiter(TimedLimiter( 'morethan.tv', '5 seconds')) # TODO find out if they want a delay CATEGORIES = { 'Movies': 'filter_cat[1]', 'TV': 'filter_cat[2]', 'Other': 'filter_cat[3]' } TAGS = [ 'action', 'adventure', 'animation', 'anime', 'art', 'asian', 'biography', 'celebrities', 'comedy', 'cooking', 'crime', 'cult', 'documentary', 'drama', 'educational', 'elclasico', 'family', 'fantasy', 'film.noir', 'filmromanesc', 'food', 'football', 'formula.e', 'formula1', 'gameshow', 'highlights', 'history', 'horror', 'investigation', 'lifestyle', 'liga1', 'ligabbva', 'ligue1', 'martial.arts', 'morethan.tv', 'motogp', 'musical', 'mystery', 'nba', 'news', 'other', 'performance', 'philosophy', 'politics',
import logging from flexget import plugin from flexget.config_schema import one_or_more from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession, TimedLimiter from requests.exceptions import RequestException plugin_name = 'pushover' log = logging.getLogger(plugin_name) PUSHOVER_URL = 'https://api.pushover.net/1/messages.json' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('pushover.net', '5 seconds')) class PushoverNotifier(object): """ Example:: notify: entries: via: - pushover: user_key: <USER_KEY> (can also be a list of userkeys) token: <TOKEN> [device: <DEVICE_STRING>] [priority: <PRIORITY>] [url: <URL>]
from flexget.plugins.internal.urlrewriting import UrlRewritingError
from flexget.utils.requests import Session, TimedLimiter
from flexget.utils.soup import get_soup
from flexget.utils import requests
from flexget.entry import Entry
from flexget.utils.search import normalize_unicode

import unicodedata

log = logging.getLogger('descargas2020')

# NOTE: rebinds the `requests` name imported above to a dedicated session.
requests = Session()
requests.headers.update(
    {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'})
requests.add_domain_limiter(TimedLimiter('descargas2020.com', '2 seconds'))

# Download URL template; the torrent id is zero-padded to six digits.
DESCARGAS2020_TORRENT_FORMAT = 'http://descargas2020.com/download/{:0>6}.torrent'


class UrlRewriteDescargas2020(object):
    """Descargas2020 urlrewriter and search."""

    schema = {'type': 'boolean', 'default': False}

    # urlrewriter API
    def url_rewritable(self, task, entry):
        """Return truthy for supported site URLs that still need rewriting.

        Direct .torrent links are already final and are not rewritable.
        """
        url = entry['url']
        # Raw string: '\/' inside a plain literal is an invalid escape
        # sequence (DeprecationWarning; SyntaxWarning on newer Pythons).
        # The pattern's runtime value is unchanged.
        rewritable_regex = r'^http:\/\/(www.)?(descargas2020|tvsinpagar|tumejortorrent|torrentlocura|torrentrapid).com\/.*'
        return re.match(rewritable_regex, url) and not url.endswith('.torrent')
from requests.exceptions import RequestException from flexget import plugin from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession from flexget.utils.requests import TimedLimiter plugin_name = 'sms_ru' logger = logger.bind(name=plugin_name) SMS_SEND_URL = 'http://sms.ru/sms/send' SMS_TOKEN_URL = 'http://sms.ru/auth/get_token' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('sms.ru', '5 seconds')) class SMSRuNotifier: """ Sends SMS notification through sms.ru http api sms/send. Phone number is a login assigned to sms.ru account. Example: notify: entries: via: - sms_ru: phone_number: <PHONE_NUMBER> (accepted format example: '79997776655') password: <PASSWORD>
import logging from flexget import plugin from flexget.event import event from flexget.config_schema import one_or_more from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession, TimedLimiter from requests.exceptions import RequestException __name__ = 'rapidpush' log = logging.getLogger(__name__) RAPIDPUSH_URL = 'https://rapidpush.net/api' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('rapidpush.net', '5 seconds')) class RapidpushNotifier(object): """ Example:: rapidpush: apikey: xxxxxxx (can also be a list of api keys) [category: category, default FlexGet] [group: device group, default no group] [channel: the broadcast notification channel, if provided it will be send to the channel subscribers instead of your devices, default no channel] [priority: 0 - 6 (6 = highest), default 2 (normal)] """ schema = {
from flexget import plugin, db_schema from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.manager import Session from flexget.utils.database import json_synonym from flexget.utils.requests import Session as RequestSession, TimedLimiter, RequestException from flexget.utils.soup import get_soup from flexget.utils.tools import parse_filesize log = logging.getLogger('alpharatio') Base = db_schema.versioned_base('alpharatio', 0) requests = RequestSession() requests.add_domain_limiter(TimedLimiter('alpharatio.cc', '5 seconds')) # ElementZero confirmed with AlphaRato sysop 'jasonmaster' that they do want a 5 second limiter CATEGORIES = { 'tvsd': 'filter_cat[1]', 'tvhd': 'filter_cat[2]', 'tvdvdrip': 'filter_cat[3]', 'tvpacksd': 'filter_cat[4]', 'tvpackhd': 'filter_cat[5]', 'moviesd': 'filter_cat[6]', 'moviehd': 'filter_cat[7]', 'moviepacksd': 'filter_cat[8]', 'moviepackhd': 'filter_cat[9]', 'moviexxx': 'filter_cat[10]', 'mvid': 'filter_cat[11]', 'gamespc': 'filter_cat[12]',
def search(self, task, entry, config):
    """ Search for entries on AwesomeHD

    Queries the site's imdbsearch API for the entry's imdb_id and returns
    a set of result Entry objects. Returns an empty set when the entry has
    no imdb_id, the request fails, or the response cannot be parsed.
    """
    # need lxml to parse xml
    try:
        import lxml  # noqa
    except ImportError as e:
        log.debug('Error importing lxml: %s', e)
        raise plugin.DependencyError(
            'awesomehd', 'lxml', 'lxml module required. ImportError: %s' % e)

    config = self.prepare_config(config)

    # set a domain limit, but allow the user to overwrite it
    if 'awesome-hd.me' not in task.requests.domain_limiters:
        task.requests.add_domain_limiter(
            TimedLimiter('awesome-hd.me', '5 seconds'))

    entries = set()

    # Can only search for imdb
    if not entry.get('imdb_id'):
        log.debug('Skipping entry %s because of missing imdb id', entry['title'])
        return entries

    # Standard search params
    params = {
        'passkey': config['passkey'],
        'internal': int(config['only_internal']),
        'action': 'imdbsearch',
        'imdb': entry['imdb_id']
    }

    try:
        response = task.requests.get(self.base_url + 'searchapi.php', params=params).content
    except RequestException as e:
        log.error('Failed to search for imdb id %s: %s', entry['imdb_id'], e)
        return entries

    try:
        soup = get_soup(response, 'xml')
        # The API reports failures in-band as an <error> element.
        if soup.find('error'):
            log.error(soup.find('error').get_text())
            return entries
    except Exception as e:
        log.error('Failed to parse xml result for imdb id %s: %s', entry['imdb_id'], e)
        return entries

    # Per-response auth key, used below to build download URLs.
    authkey = soup.find('authkey').get_text()

    for result in soup.find_all('torrent'):
        # skip audio releases for now -- they carry no <resolution> value
        if not result.find('resolution').get_text():
            log.debug('Skipping audio release')
            continue

        e = Entry()
        e['imdb_id'] = result.find('imdb').get_text()
        e['torrent_id'] = int(result.find('id').get_text())
        e['uploaded_at'] = dateutil_parse(result.find('time').get_text())
        # The raw <size> value is suffixed with ' b' so parse_filesize
        # treats it as a byte count.
        e['content_size'] = parse_filesize('{} b'.format(
            result.find('size').get_text()))
        e['torrent_snatches'] = int(result.find('snatched').get_text())
        e['torrent_seeds'] = int(result.find('seeders').get_text())
        e['torrent_leeches'] = int(result.find('leechers').get_text())
        e['release_group'] = result.find('releasegroup').get_text()
        # NOTE(review): assumes <freeleech> is the fraction of cost paid,
        # so (1 - x) * 100 yields the free percentage -- confirm with API.
        e['freeleech_percent'] = int(
            (1 - float(result.find('freeleech').get_text())) * 100)
        e['encode_status'] = result.find('encodestatus').get_text()
        e['subtitles'] = result.find('subtitles').get_text().split(', ')
        e['url'] = self.base_url + 'torrents.php?action=download&id={}&authkey={}&torrent_pass={}'\
            .format(e['torrent_id'], authkey, config['passkey'])

        # Generate a somewhat sensible title
        audio = result.find('audioformat').get_text().replace(
            'AC-3', 'AC3')  # normalize a bit
        source = result.find('media').get_text()
        encoder = result.find('encoding').get_text()
        # calling a WEB-DL a remux is pretty redundant
        if 'WEB' in source.upper():
            encoder = re.sub('REMUX', '', encoder, flags=re.IGNORECASE).strip()

        e['title'] = '{movie_name} {year} {resolution} {source} {audio} {encoder}-{release_group}'\
            .format(movie_name=result.find('name').get_text(),
                    year=result.find('year').get_text(),
                    resolution=result.find('resolution').get_text(),
                    source=source,
                    audio=audio,
                    encoder=encoder,
                    release_group=e['release_group'])

        entries.add(e)

    return entries
import logging from flexget import plugin from flexget.config_schema import one_or_more from flexget.event import event from flexget.plugin import PluginWarning from flexget.utils.requests import Session as RequestSession, TimedLimiter from requests.exceptions import RequestException plugin_name = 'pushsafer' log = logging.getLogger(plugin_name) PUSHSAFER_URL = 'https://www.pushsafer.com/api' requests = RequestSession(max_retries=3) requests.add_domain_limiter(TimedLimiter('pushsafer.com', '5 seconds')) class PushsaferNotifier(object): """ Example:: pushsafer: private_key: <string> your private key (can also be a alias key) - Required title: <string> (default: task name) body: <string> (default: '{{series_name}} {{series_id}}' ) url: <string> (default: '{{imdb_url}}') url_title: <string> (default: (none)) device: <string> ypur device or device group id (default: (none)) icon: <integer> (default is 1) sound: <integer> (default is (none))
from loguru import logger from flexget import plugin from flexget.components.sites.utils import normalize_scene from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.plugin import PluginError from flexget.utils.requests import RequestException, Session, TimedLimiter logger = logger.bind(name='rarbg') requests = Session() requests.add_domain_limiter( TimedLimiter('torrentapi.org', '3 seconds') ) # they only allow 1 request per 2 seconds CATEGORIES = { 'all': 0, # Movies 'x264': 17, 'x264 720p': 45, 'x264 1080p': 44, 'x264 3D': 47, 'XviD': 14, 'XviD 720p': 48, 'Full BD': 42, # TV 'HDTV': 41, 'SDTV': 18, # Adult