def _make_provider(config):
    """
    Build a NewznabProvider from a '|'-separated config string.

    Two formats are accepted: the full 9-field form
    (name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog)
    and a short legacy form with at least the first 5 fields, where the
    search options fall back to the defaults below.

    :param config: provider config string, or a falsy value
    :return: a configured NewznabProvider, or None if config is empty/malformed
    """
    if not config:
        return None

    # Defaults used when the short (legacy) config format is parsed.
    enable_backlog = 0
    enable_daily = 0
    search_fallback = 0
    search_mode = 'eponly'

    try:
        values = config.split('|')
        if len(values) == 9:
            name, url, key, category_ids, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values
        else:
            name = values[0]
            url = values[1]
            key = values[2]
            category_ids = values[3]
            enabled = values[4]
    # IndexError covers short strings hitting values[0..4]; the original only
    # caught ValueError, which the indexing path never raises.
    except (ValueError, IndexError):
        logger.exception('Skipping Newznab provider string: \'{0}\', incorrect format'.format(config))
        return None

    new_provider = NewznabProvider(
        name,
        url,
        key=key,
        catIDs=category_ids,
        search_mode=search_mode,
        search_fallback=search_fallback,
        enable_daily=enable_daily,
        enable_backlog=enable_backlog,
    )
    # 'enabled' is stored as the string '1'/'0' in the config.
    new_provider.enabled = enabled == '1'
    return new_provider
def _sendBoxcar2(self, msg, title, accesstoken):
    '''
    Sends a boxcar2 notification to the address provided

    msg: The message to send
    title: The title of the message
    accesstoken: to send to this device

    returns: True if the message succeeded, False otherwise
    '''
    # http://blog.boxcar.io/post/93211745502/boxcar-api-update-boxcar-api-update-icon-and
    payload = {
        'user_credentials': accesstoken,
        'notification[title]': 'SickChill : {0}: {1}'.format(title, msg),
        'notification[long_message]': msg,
        'notification[sound]': 'notifier-2',
        'notification[source_name]': 'SickChill',
        'notification[icon_url]': settings.LOGO_URL,
    }

    reply = helpers.getURL(self.url, post_data=payload, session=self.session, timeout=60, returns='json')
    if reply:
        logger.debug('Boxcar2 notification successful.')
        return True

    logger.exception('Boxcar2 notification failed.')
    return False
def search(self, search_params, age=0, ep_obj=None):  # FIXME
    """
    Run a search against the provider's JSON API.

    :param search_params: parameters POSTed to the provider's search endpoint
    :param age: unused here (part of the common provider interface)
    :param ep_obj: unused here (part of the common provider interface)
    :return: list of raw result items from the provider's "data" field
    """
    results = []
    logger.debug("Search string: {0}".format(search_params))

    self._check_auth()
    parsed_json = self.get_url(self.urls["search"], post_data=search_params, returns="json")
    if not parsed_json:
        return []

    # Initialize up front so a failed auth check cannot leave 'items'
    # unbound before it is consumed below.
    items = []
    if self._check_auth_from_data(parsed_json):
        if parsed_json and "data" in parsed_json:
            items = parsed_json["data"]
        else:
            logger.exception("Resulting JSON from provider isn't correct, not parsing it")

    results.extend(items)

    # FIXME SORTING
    return results
def favoriteShows(self):
    """
    Fetches data from IMDB to show a list of popular shows.
    """
    t = PageTemplate(rh=self, filename="addShows_favoriteShows.mako")
    # Captured exception for the template. NOTE: we must not reuse the
    # 'except ... as e' target after the handler — Python 3 deletes that
    # name when the except block exits, which made the original
    # 'favorites_exception=e' raise NameError after a failure.
    error = None

    if self.get_body_argument("submit", None):
        tvdb_user = self.get_body_argument("tvdb_user")
        tvdb_user_key = filters.unhide(settings.TVDB_USER_KEY, self.get_body_argument("tvdb_user_key"))
        if tvdb_user and tvdb_user_key:
            # Only re-validate when the credentials actually changed.
            if tvdb_user != settings.TVDB_USER or tvdb_user_key != settings.TVDB_USER_KEY:
                favorites.test_user_key(tvdb_user, tvdb_user_key, 1)

    try:
        favorite_shows = favorites.fetch_indexer_favorites()
    except Exception as exc:
        error = exc
        logger.exception(traceback.format_exc())
        logger.warning(_("Could not get favorite shows: {0}").format(str(exc)))
        favorite_shows = None

    return t.render(
        title=_("Favorite Shows"),
        header=_("Favorite Shows"),
        favorite_shows=favorite_shows,
        favorites_exception=error,
        topmenu="home",
        controller="addShows",
        action="popularShows",
    )
def _send_matrix(message=None):
    """
    Send a text message to the configured Matrix room.

    :param message: text to send; used for both the plain and formatted body
    :return: True if the server accepted the message, False otherwise
    """
    url = 'https://{0}/_matrix/client/r0/rooms/{1}/send/m.room.message/{2}?access_token={3}'.format(
        settings.MATRIX_SERVER, settings.MATRIX_ROOM, time.time(), settings.MATRIX_API_TOKEN)

    logger.info("Sending matrix message: " + message)
    logger.info("Sending matrix message to url: " + url)

    jsonMessage = {
        "msgtype": "m.text",
        "format": "org.matrix.custom.html",
        "body": message,
        "formatted_body": message,
    }
    headers = {"Content-Type": "application/json"}
    try:
        # timeout so a hung homeserver cannot block the notifier thread
        # forever; a timeout is caught below and reported as a failure.
        r = requests.put(url, data=json.dumps(jsonMessage), headers=headers, timeout=60)
        r.raise_for_status()
    except Exception as e:
        logger.exception("Error Sending Matrix message: " + str(e))
        return False

    return True
def _set_torrent_label(self, result):
    """
    Apply the configured label to a torrent via the Deluge Label plugin.

    :param result: search result whose ``hash`` identifies the torrent;
                   ``result.show.is_anime`` selects the anime label instead
    :return: True when the label was applied (or no label is configured),
             False on any failure
    """
    # No option for this built into the rpc, because it is a plugin
    label = settings.TORRENT_LABEL.lower()
    if result.show.is_anime:
        label = settings.TORRENT_LABEL_ANIME.lower()
    # Deluge labels cannot contain spaces.
    if ' ' in label:
        logger.exception(f'{self.name}: Invalid label. Label must not contain a space')
        return False

    if label:
        try:
            # Bail out early if the Label plugin is not installed at all.
            if 'label' not in [x.decode().lower() for x in self.client.core.get_available_plugins()]:
                logger.debug(f'{self.name}: label plugin not detected')
                return False
            # Both casings are enabled — presumably the plugin name's case
            # differs between Deluge versions. TODO(review): confirm.
            self.client.core.enable_plugin('Label')
            self.client.core.enable_plugin('label')
            # Create the label first if Deluge does not know it yet.
            labels = [x.decode() for x in self.client.label.get_labels()]
            if label not in labels:
                logger.debug(f'{self.name}: {label} label does not exist in Deluge we must add it')
                self.client.label.add(label)
                logger.debug(f'{self.name}: [{label}] label added to deluge')
            self.client.label.set_torrent(result.hash, label)
        except Exception as error:
            logger.info(f'{self.name}: Could not add label to torrent')
            logger.debug(error)
            # logger.debug(self.client.daemon.get_method_list())
            return False

    logger.debug(f'{self.name}: [{label}] label added to torrent')
    return True
def async_call(self, function):
    """
    Invoke a web-route handler with this request's arguments, HTML-escaping
    every value before it is passed through as a keyword argument.

    Falls back to calling the handler with no arguments when the signature
    does not match, and renders a friendly page on disk-space errors.
    """
    try:
        # TODO: Make all routes use get_argument so we can take advantage of tornado's argument sanitization, separate post and get, and get rid of this
        # nonsense loop so we can just yield the method directly
        kwargs = self.request.arguments
        for arg, value in kwargs.items():
            # Tornado stores each argument as a list; unwrap single values.
            if len(value) == 1:
                kwargs[arg] = xhtml_escape(value[0])
            elif isinstance(value, str):
                kwargs[arg] = xhtml_escape(value)
            elif isinstance(value, list):
                kwargs[arg] = [xhtml_escape(v) for v in value]
            else:
                # Unknown argument shape — handled by the broad except below.
                raise Exception
        return function(**kwargs)
    except TypeError:
        # Handler does not accept these kwargs; retry with none.
        return function()
    except OSError as error:
        return Template("Looks like we do not have enough disk space to render the page! {error}").render_unicode(error=error)
    except Exception:
        logger.exception('Failed doing webui callback: {0}'.format((traceback.format_exc())))
        raise
def _set_torrent_label(self, result):
    """
    Apply the configured label to a torrent via the Deluge Label plugin.

    :param result: search result whose ``hash`` identifies the torrent;
                   ``result.show.is_anime`` selects the anime label instead
    :return: True when the label was applied (or no label is configured),
             False on any failure
    """
    # No option for this built into the rpc, because it is a plugin
    label = settings.TORRENT_LABEL.lower()
    if result.show.is_anime:
        label = settings.TORRENT_LABEL_ANIME.lower()
    # Deluge labels cannot contain spaces.
    if ' ' in label:
        logger.exception(self.name + ': Invalid label. Label must not contain a space')
        return False

    if label:
        try:
            labels = self.client.label.get_labels()
            if label not in labels:
                logger.debug(self.name + ': ' + label + " label does not exist in Deluge we must add it")
                # The plugin namespace is 'label' (see get_labels/set_torrent
                # above); the original called the nonexistent 'labels.add'.
                self.client.label.add(label)
                logger.debug(self.name + ': ' + label + " label added to Deluge")
            self.client.label.set_torrent(result.hash, label)
        except Exception:
            logger.debug(self.name + ': ' + "label plugin not detected")
            return False

    logger.debug(self.name + ': ' + label + " label added to torrent")
    return True
def _sendBoxcar2(self, msg, title, accesstoken):
    """
    Sends a boxcar2 notification to the address provided

    msg: The message to send
    title: The title of the message
    accesstoken: to send to this device

    returns: True if the message succeeded, False otherwise
    """
    # http://blog.boxcar.io/post/93211745502/boxcar-api-update-boxcar-api-update-icon-and
    payload = {
        "user_credentials": accesstoken,
        "notification[title]": "SickChill : {0}: {1}".format(title, msg),
        "notification[long_message]": msg,
        "notification[sound]": "notifier-2",
        "notification[source_name]": "SickChill",
        "notification[icon_url]": settings.LOGO_URL,
    }

    reply = helpers.getURL(self.url, post_data=payload, session=self.session, timeout=60, returns="json")
    if reply:
        logger.debug("Boxcar2 notification successful.")
        return True

    logger.exception("Boxcar2 notification failed.")
    return False
def _get_credentials(self, key):
    """
    Step 2 of authorization - poll server for access token.

    :param key: Authorization key received from twitter
    :return: True if succeeded, False otherwise
    """
    # NOTE: the original had a leftover "Type of key is ..." debug log
    # BEFORE the docstring, which demoted the docstring to a dead string
    # literal; the residue is removed and the docstring restored.
    logger.debug('Generating and signing request for an access token using key ' + key)
    oauth_session = OAuth1Session(client_key=self.consumer_key,
                                  client_secret=self.consumer_hash,
                                  resource_owner_key=settings.TWITTER_USERNAME,
                                  resource_owner_secret=settings.TWITTER_PASSWORD)

    try:
        access_token = oauth_session.fetch_access_token(self.ACCESS_TOKEN_URL, verifier=str(key))
    except Exception as err:
        logger.exception('The request for a token with did not succeed: {}'.format(err))
        return False

    logger.debug('Your Twitter Access Token key: {0}'.format(access_token['oauth_token']))
    logger.debug('Access Token secret: {0}'.format(access_token['oauth_token_secret']))

    # Persist the token pair; these settings double as credential storage.
    settings.TWITTER_USERNAME = access_token['oauth_token']
    settings.TWITTER_PASSWORD = access_token['oauth_token_secret']
    return True
def _run_backup(self):
    """Take a config backup before updating; return True only on success."""
    # Do a system backup before update
    logger.info("Config backup in progress...")
    ui.notifications.message(_('Backup'), _('Config backup in progress...'))
    try:
        backup_dir = os.path.join(settings.DATA_DIR, 'backup')
        if not os.path.isdir(backup_dir):
            os.mkdir(backup_dir)

        if not (self._keep_latest_backup(backup_dir) and self._backup(backup_dir)):
            logger.exception("Config backup failed, aborting update")
            ui.notifications.message(_('Backup'), _('Config backup failed, aborting update'))
            return False

        logger.info("Config backup successful, updating...")
        ui.notifications.message(_('Backup'), _('Config backup successful, updating...'))
        return True
    except Exception as error:
        logger.exception('Update: Config backup failed. Error: {}'.format(error))
        ui.notifications.message(_('Backup'), _('Config backup failed, aborting update'))
        return False
def searchIndexersForShowName(self, search_term, lang=None, indexer=None):
    """
    JSON endpoint: search the configured indexer(s) for shows matching
    ``search_term`` (plus derived variants) and return the combined results.
    """
    self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
    self.set_header('Content-Type', 'application/json')
    if not lang or lang == 'null':
        lang = settings.INDEXER_DEFAULT_LANGUAGE

    search_term = xhtml_unescape(search_term)
    searchTerms = [search_term]

    # If search term ends with what looks like a year, enclose it in ()
    matches = re.match(r'^(.+ |)([12][0-9]{3})$', search_term)
    if matches:
        searchTerms.append("{0}({1})".format(matches.group(1), matches.group(2)))

    # NOTE(review): searchTerms is appended to while it is being iterated —
    # the article-stripped variants are themselves re-checked (and cannot
    # match again), so the loop terminates. Looks intentional; confirm.
    for searchTerm in searchTerms:
        # If search term begins with an article, let's also search for it without
        matches = re.match(r'^(?:a|an|the) (.+)$', searchTerm, re.I)
        if matches:
            searchTerms.append(matches.group(1))

    results = {}
    final_results = []

    # Query Indexers for each search term and build the list of results
    # When a specific indexer id is given, only that indexer is queried.
    for i, j in sickchill.indexer if not int(indexer) else [(int(indexer), None)]:
        # NOTE(review): the log hardcodes 'theTVDB' regardless of indexer i.
        logger.debug("Searching for Show with searchterm(s): {0} on Indexer: {1}".format(searchTerms, 'theTVDB'))
        for searchTerm in searchTerms:
            # noinspection PyBroadException
            try:
                indexerResults = sickchill.indexer[i].search(searchTerm, language=lang)
            except Exception:
                logger.exception(traceback.format_exc())
                continue

            # add search results
            results.setdefault(i, []).extend(indexerResults)

    # Deduplicate (via the set) and flatten per-indexer results into tuples
    # consumed by the add-show UI; the last element flags already-added shows.
    for i, shows in results.items():
        # noinspection PyUnresolvedReferences
        final_results.extend({
            (sickchill.indexer.name(i), i, sickchill.indexer[i].show_url, show['id'], show['seriesName'], show['firstAired'],
             sickchill.tv.Show.find(settings.showList, show['id']) is not None)
            for show in shows
        })

    lang_id = sickchill.indexer.lang_dict()[lang]
    return json.dumps({'results': final_results, 'langid': lang_id, 'success': len(final_results) > 0})
def search(self, search_strings, age=0, ep_obj=None):
    """
    Query the provider's RSS feed for each search mode/string and parse the
    XML into result dicts (title, magnet link, size, seeders, leechers, hash).

    :param search_strings: dict mapping search mode (e.g. 'RSS', 'Episode')
                           to a list of search strings
    :param age: unused here (part of the common provider interface)
    :param ep_obj: unused here (part of the common provider interface)
    :return: flat list of result dicts, sorted by seeders within each mode
    """
    results = []
    for mode in search_strings:
        items = []
        logger.debug(_("Search Mode: {mode}".format(mode=mode)))
        # Deduplicate search strings per mode.
        for search_string in {*search_strings[mode]}:
            # Feed verified does not exist on this clone
            # search_url = self.urls['verified'] if self.confirmed else self.urls['feed']
            search_url = self.urls["feed"]
            if mode != "RSS":
                logger.debug(_("Search String: {search_string}".format(search_string=search_string)))

            data = self.get_url(search_url, params={"f": search_string}, returns="text")
            if not data:
                logger.debug("No data returned from provider")
                continue

            # Mirrors sometimes serve HTML error pages instead of the feed.
            if not data.startswith("<?xml"):
                logger.info("Expected xml but got something else, is your mirror failing?")
                continue

            try:
                with BS4Parser(data, "html5lib") as parser:
                    for item in parser("item"):
                        # Skip non-TV categories when a category is present.
                        if item.category and "tv" not in item.category.get_text(strip=True).lower():
                            continue

                        title = item.title.get_text(strip=True)
                        # The info hash is the last path segment of the guid URL.
                        t_hash = item.guid.get_text(strip=True).rsplit("/", 1)[-1]

                        if not all([title, t_hash]):
                            continue

                        # Build a magnet link from the hash plus our tracker list.
                        download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + self._custom_trackers
                        torrent_size, seeders, leechers = self._split_description(item.find("description").text)
                        size = convert_size(torrent_size) or -1

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != "RSS":
                                logger.debug(
                                    "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                        title, seeders, leechers
                                    )
                                )
                            continue

                        result = {"title": title, "link": download_url, "size": size, "seeders": seeders, "leechers": leechers, "hash": t_hash}
                        items.append(result)
            except Exception:
                logger.exception("Failed parsing provider. Traceback: {0!r}".format(traceback.format_exc()))

        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda d: try_int(d.get("seeders", 0)), reverse=True)
        results += items

    return results
def hash_from_magnet(magnet):
    """
    Extract the upper-cased info hash from a magnet URI.

    Accepts both the 40-char hex form and the 32-char base32 form of the
    ``urn:btih:`` hash; base32 hashes are converted to hex.

    :param magnet: magnet URI string
    :return: the hex info hash as a ``str``, or '' if extraction fails
    """
    try:
        torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', magnet)[0].upper()
        if len(torrent_hash) == 32:
            # b16encode returns bytes on Python 3 — decode so this branch
            # returns str like the 40-char hex branch does.
            torrent_hash = b16encode(b32decode(torrent_hash)).decode().upper()
        return torrent_hash
    except Exception:
        logger.exception('Unable to extract torrent hash or name from magnet: {0}'.format(magnet))
        return ''
def exception_generator():
    """Raise a dummy exception, log and submit it, then re-raise it."""
    fake = Exception('FAKE EXCEPTION')
    try:
        raise fake
    except Exception as error:
        logger.exception("FAKE ERROR: " + str(error))
        logger.submit_errors()
        raise
def makeObject(cmd_arg, cur_path):
    """Run the Synology synoindex command for cur_path, if the feature is enabled."""
    if not settings.USE_SYNOINDEX:
        return

    synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, os.path.abspath(cur_path)]
    logger.debug("Executing command " + str(synoindex_cmd))
    logger.debug("Absolute path to command: " + os.path.abspath(synoindex_cmd[0]))
    try:
        proc = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                cwd=settings.DATA_DIR, universal_newlines=True)
        stdout_data, stderr_data = proc.communicate()
        logger.debug(_("Script result: {0}").format(str(stdout_data or stderr_data).strip()))
    except OSError as e:
        logger.exception("Unable to run synoindex: " + str(e))
def _checkSabResponse(jdata): """ Check response from SAB :param jdata: Response from requests api call :return: a list of (Boolean, string) which is True if SAB is not reporting an error """ if "error" in jdata: logger.exception(jdata["error"]) return False, jdata["error"] else: return True, jdata
def _sendFreeMobileSMS(title, msg, cust_id=None, apiKey=None):
    """
    Sends a SMS notification

    msg: The message to send (str)
    title: The title of the message
    cust_id: the Free Mobile customer id (defaults to settings)
    apiKey: the Free Mobile API key (defaults to settings)

    returns: tuple (True, message) if the SMS succeeded,
             (False, error message) otherwise
    """
    if cust_id is None:
        cust_id = settings.FREEMOBILE_ID
    if apiKey is None:
        apiKey = settings.FREEMOBILE_APIKEY

    # NOTE(review): logging the API key is a credential leak into the log
    # file — consider masking it.
    logger.debug("Free Mobile in use with API KEY: " + apiKey)

    # build up the URL and parameters
    msg = msg.strip()
    msg_quoted = urllib.parse.quote(title + ": " + msg)
    # Reconstructed concatenation — the committed source had the credentials
    # masked out ("******"), which is not even valid Python.
    URL = "https://smsapi.free-mobile.fr/sendmsg?user=" + cust_id + "&pass=" + apiKey + "&msg=" + msg_quoted

    req = urllib.request.Request(URL)
    # send the request to Free Mobile
    try:
        urllib.request.urlopen(req)
    except IOError as e:
        # Map the documented Free Mobile HTTP status codes to messages.
        if hasattr(e, 'code'):
            if e.code == 400:
                message = "Missing parameter(s)."
                logger.exception(message)
                return False, message
            elif e.code == 402:
                message = "Too much SMS sent in a short time."
                logger.exception(message)
                return False, message
            elif e.code == 403:
                message = "API service isn't enabled in your account or ID / API key is incorrect."
                logger.exception(message)
                return False, message
            elif e.code == 500:
                message = "Server error. Please retry in few moment."
                logger.exception(message)
                return False, message
    except Exception as e:
        message = "Error while sending SMS: {0}".format(e)
        logger.exception(message)
        return False, message

    message = "Free Mobile SMS successful."
    logger.info(message)
    return True, message
def test_submitter(self):
    """
    Test that an exception is raised
    """
    try:
        # pytest.raises swallows the matching exception, so under normal
        # operation the except branch below should not run.
        with pytest.raises(Exception):
            raise Exception("FAKE EXCEPTION")
    except Exception as error:
        # Defensive path: log/submit and confirm the error re-raises.
        logger.exception("FAKE ERROR: " + str(error))
        logger.submit_errors()
        with pytest.raises(Exception):
            raise
def _run_git(git_path, args, log_errors=False):
    """
    Run a git command through the shell and classify its exit status.

    :param git_path: path to the git executable (falsy aborts immediately)
    :param args: argument string appended to the git command
    :param log_errors: when True, log non-zero exits as errors/warnings
    :return: tuple (output, err, exit_status); output/err may be None
    """
    output = err = exit_status = None

    if not git_path:
        logger.warning("No git specified, can't use git commands")
        exit_status = 1
        return output, err, exit_status

    cmd = git_path + ' ' + args

    try:
        logger.debug("Executing {0} with your shell in {1}".format(cmd, settings.PROG_DIR))
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                             shell=True, universal_newlines=True, cwd=settings.PROG_DIR)
        output, err = p.communicate()
        exit_status = p.returncode

        if output:
            output = output.strip()
    except OSError:
        logger.info("Command {} didn't work".format(cmd))
        exit_status = 1

    if exit_status == 0:
        logger.debug("{} : returned successful".format(cmd))
    elif exit_status == 1:
        # 'output' can be None here (e.g. the OSError path above) — guard
        # before substring checks, which previously raised TypeError.
        if output and 'stash' in output:
            logger.warning("Please enable 'git reset' in settings or stash your changes in local files")
        elif log_errors:
            logger.exception("{0} returned : {1}".format(cmd, output))
    elif log_errors:
        if exit_status in (127, 128) or (output and 'fatal:' in output):
            logger.warning("{0} returned : ({1}) {2}".format(cmd, exit_status, output or err))
        else:
            logger.exception("{0} returned code {1}, treating as error : {2}".format(cmd, exit_status, output or err))
            exit_status = 1

    return output, err, exit_status
def _set_torrent_label(self, result):
    """
    Apply the configured label to a torrent via the Deluge Web JSON-RPC API.

    :param result: search result whose ``hash`` identifies the torrent;
                   ``result.show.is_anime`` selects the anime label instead
    :return: True when the final RPC reported no error (or no label is
             configured), False on invalid label or missing plugin
    """
    label = settings.TORRENT_LABEL.lower()
    if result.show.is_anime:
        label = settings.TORRENT_LABEL_ANIME.lower()
    # Deluge labels cannot contain spaces.
    if " " in label:
        logger.exception(self.name + ": Invalid label. Label must not contain a space")
        return False

    if label:
        # check if label already exists and create it if not
        post_data = json.dumps({
            "method": "label.get_labels",
            "params": [],
            "id": 3
        })
        self._request(method="post", data=post_data)
        labels = self.response.json()["result"]

        # A None result means the Label plugin did not answer.
        if labels is not None:
            if label not in labels:
                logger.debug(self.name + ": " + label + " label does not exist in Deluge we must add it")
                post_data = json.dumps({
                    "method": "label.add",
                    "params": [label],
                    "id": 4
                })
                self._request(method="post", data=post_data)
                logger.debug(self.name + ": " + label + " label added to Deluge")

            # add label to torrent
            post_data = json.dumps({
                "method": "label.set_torrent",
                "params": [result.hash, label],
                "id": 5
            })
            self._request(method="post", data=post_data)
            logger.debug(self.name + ": " + label + " label added to torrent")
        else:
            logger.debug(self.name + ": " + "label plugin not detected")
            return False

    # Success is judged by the error field of the last RPC response.
    return not self.response.json()["error"]
def _send_discord(self, webhook: str = None, name: str = None, avatar: str = None, tts=None):
    """
    Post the prepared embed (``self.embed``) to a Discord webhook, retrying
    once after the advertised delay when rate limited.

    :param webhook: webhook URL override (defaults to settings)
    :param name: bot display-name override (defaults to settings)
    :param avatar: avatar URL override (defaults to settings)
    :param tts: text-to-speech flag override (defaults to settings)
    :return: True if the message was delivered, False otherwise
    """
    discord_webhook = webhook or settings.DISCORD_WEBHOOK
    discord_name = name or settings.DISCORD_NAME
    avatar_icon = avatar or settings.DISCORD_AVATAR_URL
    discord_tts = bool(settings.DISCORD_TTS if tts is None else tts)

    logger.info("Sending discord message: " + ", ".join(f["value"] for f in self.embed["fields"]))
    logger.info("Sending discord message to url: " + discord_webhook)

    headers = {"Content-Type": "application/json"}
    try:
        r = requests.post(discord_webhook,
                          data=json.dumps(dict(embeds=[self.embed], username=discord_name, avatar_url=avatar_icon, tts=discord_tts)),
                          headers=headers)
        r.raise_for_status()
    except requests.exceptions.ConnectionError as error:
        logger.info("Could not reach the webhook url")
        return False
    except requests.exceptions.RequestException as error:
        # Only a 429 with an exhausted rate-limit bucket is retried; anything
        # else propagates. NOTE(review): this assumes error.response is set
        # and carries the X-RateLimit-* headers — confirm for timeout errors.
        if error.response.status_code != 429 or int(error.response.headers.get("X-RateLimit-Remaining")) != 0:
            raise error

        logger.info("Discord rate limiting, retrying after {} seconds".format(error.response.headers.get("X-RateLimit-Reset-After")))
        time.sleep(int(error.response.headers.get("X-RateLimit-Reset-After")) + 1)

        # Single retry after the advertised backoff.
        r = requests.post(discord_webhook,
                          data=json.dumps(dict(embeds=[self.embed], username=discord_name, avatar_url=avatar_icon, tts=discord_tts)),
                          headers=headers)
        r.raise_for_status()
    except Exception as error:
        logger.exception("Error Sending Discord message: " + str(error))
        return False

    return True
def dumpHTML(data):
    """
    Dump raw provider HTML to ``custom_torrent.html`` in the cache dir.

    :param data: bytes to write
    :return: True on success, False if the file could not be written
    """
    dumpName = os.path.join(settings.CACHE_DIR, 'custom_torrent.html')

    try:
        # Context manager guarantees the handle is closed even when write()
        # raises — the original leaked it on a failed write.
        with open(dumpName, 'wb') as fileOut:
            fileOut.write(data)
        helpers.chmodAsParent(dumpName)
    except IOError as error:
        logger.exception('Unable to save the file: {0}'.format(str(error)))
        return False

    logger.info('Saved custom_torrent html dump {0} '.format(dumpName))
    return True
def test_user_key(user, key):
    """Validate a TVDB user/key pair; persist and save the config on success."""
    try:
        tvdbsimple.User(user, key).info()
    except Exception:
        logger.exception(traceback.format_exc())
        return False

    settings.TVDB_USER = user
    settings.TVDB_USER_KEY = key
    sickchill.start.save_config()
    return True
def get_subtitles_path(video_path):
    """Resolve the directory where subtitles for video_path should be stored."""
    # An absolute SUBTITLES_DIR wins outright.
    if os.path.isabs(settings.SUBTITLES_DIR):
        return settings.SUBTITLES_DIR

    # No configured dir: keep subtitles next to the video.
    if not settings.SUBTITLES_DIR:
        return os.path.dirname(video_path)

    # Relative dir: nest it under the video's folder, creating it if needed.
    target = os.path.join(os.path.dirname(video_path), settings.SUBTITLES_DIR)
    if sickchill.oldbeard.helpers.makeDir(target):
        sickchill.oldbeard.helpers.chmodAsParent(target)
    else:
        logger.exception("Unable to create subtitles folder {0}".format(target))
    return target
def search(self, name, language=None, exact=False, indexer_id=False):
    """
    :param name: Show name to search for
    :param language: Language of the show info we want
    :param exact: Exact when adding existing, processed when adding new shows
    :param indexer_id: Exact indexer id to get, either imdb or tvdb id.
    :return: list of series objects
    """
    language = language or self.language
    result = []
    if isinstance(name, bytes):
        name = name.decode()

    # Numeric input is treated as an id lookup: tt1234567(8) -> imdb,
    # 6 digits -> tvdb series id.
    if re.match(r"^t?t?\d{7,8}$", name) or re.match(r"^\d{6}$", name):
        try:
            if re.match(r"^t?t?\d{7,8}$", name):
                result = self._search(imdbId=f'tt{name.strip("t")}', language=language)
            elif re.match(r"^\d{6}$", name):
                series = self._series(name, language=language)
                if series:
                    result = [series.info(language)]
        except requests.exceptions.RequestException:
            logger.exception(traceback.format_exc())
    else:
        # Name as provided (usually from nfo)
        names = [name]
        if not exact:
            # Name without year and separator
            test = re.match(r"^(.+?)[. -]+\(\d{4}\)?$", name)
            if test:
                names.append(test.group(1).strip())

            # Name with spaces. The character class must keep '-' last:
            # the original r"[. -_]" contained the range ' '..'_', which also
            # matched digits and uppercase letters and mangled the name.
            # re.search (not match) because we want "contains a separator".
            if re.search(r"[. _-]", name):
                names.append(re.sub(r"[. _-]", " ", name).strip())
                if test:
                    # Name with spaces and without year
                    names.append(re.sub(r"[. _-]", " ", test.group(1)).strip())

        for attempt in set(n for n in names if n.strip()):
            try:
                result = self._search(attempt, language=language)
                if result:
                    break
            except requests.exceptions.RequestException:
                logger.exception(traceback.format_exc())

    return result
def shutdown(self, event):
    """
    Shut down SickChill

    :param event: Type of shutdown event, used to see if restart required
    """
    if settings.started:
        sickchill.start.halt()  # stop all tasks
        sickchill.start.saveAll()  # save all shows to DB

        # shutdown web server
        if self.web_server:
            logger.info('Shutting down Tornado')
            self.web_server.shutdown()

            try:
                self.web_server.join(10)
            except Exception:
                # best-effort join; proceed with shutdown regardless
                pass

        self.clear_cache()  # Clean cache

        # if run as daemon delete the pid file
        if self.run_as_daemon and self.create_pid:
            self.remove_pid_file(self.pid_file)

        if event == sickchill.oldbeard.event_queue.Events.SystemEvent.RESTART:
            install_type = settings.versionCheckScheduler.action.install_type

            popen_list = []

            if install_type in ('git', 'source'):
                popen_list = [sys.executable, settings.MY_FULLNAME]
            elif install_type == 'win':
                logger.exception('You are using a binary Windows build of SickChill. '
                                 'Please switch to using git.')

            if popen_list and not settings.NO_RESTART:
                popen_list += settings.MY_ARGS
                if '--nolaunch' not in popen_list:
                    popen_list += ['--nolaunch']

                logger.info('Restarting SickChill with {options}'.format(options=popen_list))

                # shutdown the logger to make sure it's released the logfile BEFORE it restarts SR.
                logger.shutdown()
                subprocess.Popen(popen_list, cwd=os.getcwd(), universal_newlines=True)

    # Make sure the logger has stopped, just in case
    logger.shutdown()
    os._exit(0)
def _migrate_v1(self):
    """
    Reads in the old naming settings from your config and generates a new
    config template from them.
    """
    settings.NAMING_PATTERN = self._name_to_pattern()
    logger.info("Based on your old settings I'm setting your new naming pattern to: " + settings.NAMING_PATTERN)

    settings.NAMING_CUSTOM_ABD = check_setting_bool(self.config_obj, "General", "naming_dates")

    if settings.NAMING_CUSTOM_ABD:
        settings.NAMING_ABD_PATTERN = self._name_to_pattern(True)
        logger.info("Adding a custom air-by-date naming pattern to your config: " + settings.NAMING_ABD_PATTERN)
    else:
        settings.NAMING_ABD_PATTERN = naming.name_abd_presets[0]

    settings.NAMING_MULTI_EP = int(check_setting_int(self.config_obj, "General", "naming_multi_ep_type", 1))

    # see if any of their shows used season folders
    main_db_con = db.DBConnection()
    season_folder_shows = main_db_con.select("SELECT indexer_id FROM tv_shows WHERE flatten_folders = 0 LIMIT 1")

    # if any shows had season folders on then prepend season folder to the pattern
    if season_folder_shows:
        old_season_format = check_setting_str(self.config_obj, "General", "season_folders_format", "Season %02d")

        if old_season_format:
            try:
                # Probe the old %-style format with a sample value (9) and
                # translate its digits into the new %S / %0S tokens.
                new_season_format = old_season_format % 9
                new_season_format = str(new_season_format).replace("09", "%0S")
                new_season_format = new_season_format.replace("9", "%S")

                logger.info("Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config")

                settings.NAMING_PATTERN = new_season_format + os.sep + settings.NAMING_PATTERN
            except (TypeError, ValueError):
                logger.exception("Can't change " + old_season_format + " to new season format")

    # if no shows had it on then don't flatten any shows and don't put season folders in the config
    else:
        logger.info("No shows were using season folders before so I'm disabling flattening on all shows")

        # don't flatten any shows at all
        main_db_con.action("UPDATE tv_shows SET flatten_folders = 0")

    settings.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
def _get_authorization(self):
    """
    Step 1 of authorization - get app authorization url.

    :return: the Twitter authorization URL on success, None otherwise
             (the old docstring incorrectly claimed True/False)
    """
    logger.debug('Requesting temp token from Twitter')
    oauth_session = OAuth1Session(client_key=self.consumer_key, client_secret=self.consumer_hash)

    try:
        request_token = oauth_session.fetch_request_token(self.REQUEST_TOKEN_URL)
    except requests.exceptions.RequestException as error:
        # Falls through to an implicit None return on failure.
        logger.exception(f'Invalid response from Twitter requesting temp token: {error}')
    else:
        # Temp token pair is stashed in settings until step 2 exchanges it.
        settings.TWITTER_USERNAME = request_token['oauth_token']
        settings.TWITTER_PASSWORD = request_token['oauth_token_secret']
        return oauth_session.authorization_url(self.AUTHORIZATION_URL)
def _process_upgrade(connection, upgrade_class):
    """Apply a database upgrade if its test says it is needed, then recurse into subclasses."""
    instance = upgrade_class(connection)
    # logger.debug("Checking " + pretty_name(upgrade_class.__name__) + " database upgrade")
    if not instance.test():
        logger.debug("Database upgrade required: " + pretty_name(upgrade_class.__name__))
        try:
            instance.execute()
        except Exception as e:
            logger.exception("Error in " + str(upgrade_class.__name__) + ": " + str(e))
            raise
        logger.debug(upgrade_class.__name__ + " upgrade completed")

    # Depth-first: each subclass upgrade runs after its parent.
    for subclass in upgrade_class.__subclasses__():
        _process_upgrade(connection, subclass)