def fetchFile(self):
    """Download self.fileUrl to self.filePath when a refresh is due.

    A refresh is due when the local file is missing, the interval setting is
    INTERVAL_ALWAYS, or the file is older than the configured interval.
    Returns FETCH_NOT_NEEDED, FETCH_OK or FETCH_ERROR.
    """
    retVal = self.FETCH_NOT_NEEDED
    fetch = False
    if not os.path.exists(self.filePath):
        # always fetch if file doesn't exist!
        fetch = True
    else:
        interval = int(self.addon.getSetting('xmltv.interval'))
        if interval != self.INTERVAL_ALWAYS:  # '!=' replaces the removed '<>' operator
            modTime = datetime.datetime.fromtimestamp(os.path.getmtime(self.filePath))
            td = datetime.datetime.now() - modTime
            # need to do it this way cause Android doesn't support .total_seconds() :(
            diff = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10 ** 6
            if ((interval == self.INTERVAL_12 and diff >= 43200) or
                    (interval == self.INTERVAL_24 and diff >= 86400) or
                    (interval == self.INTERVAL_48 and diff >= 172800)):
                fetch = True
        else:
            fetch = True
    if fetch:
        username = utils.get_setting(addon_id, 'username')
        password = utils.get_setting(addon_id, 'password')
        base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
        tmpFile = os.path.join(self.basePath, 'tmp')
        f = open(tmpFile, 'wb')
        try:
            request = urllib2.Request(self.fileUrl)
            request.add_header("Authorization", "Basic %s" % base64string)
            tmpData = urllib2.urlopen(request)
            data = tmpData.read()
            if tmpData.info().get('content-encoding') == 'gzip':
                data = zlib.decompress(data, zlib.MAX_WBITS + 16)
            f.write(data)
        except urllib2.HTTPError as e:  # 'as' form replaces the py2-only comma form
            if e.code == 401:
                utils.notify(addon_id, 'Authorization Error !!! Please Check Your Username and Password')
            else:
                utils.notify(addon_id, e)
        finally:
            # BUGFIX: the original only closed the file on success, leaking the
            # handle (and keeping the temp file locked on some platforms) when
            # an HTTPError was raised before f.close().
            f.close()
        # A real guide file is assumed to be larger than 256 bytes; anything
        # smaller is treated as a failed/empty download.
        if os.path.getsize(tmpFile) > 256:
            if os.path.exists(self.filePath):
                os.remove(self.filePath)
            os.rename(tmpFile, self.filePath)
            retVal = self.FETCH_OK
            xbmc.log('[script.ivueguide] file %s was downloaded' % self.filePath, xbmc.LOGDEBUG)
        else:
            retVal = self.FETCH_ERROR
    # BUGFIX: the original computed retVal on every path but never returned it.
    return retVal
def _thread_tracker(self):
    # Background thread: polls playback position and marks the item as
    # watched once the configured percentage threshold is reached.
    log("in tracker thread")
    total_time = self.getTotalTime()
    total_time_min = int(get_setting("min-length"))  # minimum length (minutes?) to scrobble -- TODO confirm unit
    perc_mark = int(get_setting("scr-pct"))  # percent watched at which to scrobble
    self._is_detected = True
    timeout = 1000  # poll interval in ms; raised to 30s after a failed scrobble
    # if total_time set and is lower than total_time_min then we do not start the loop at all and stop the thread,
    if total_time <= 0 or total_time > total_time_min:
        while self._playback_lock.isSet() and not xbmc.abortRequested:
            try:
                # The max() assures that the total time is over two minutes
                # preventing it from scrobbling while buffering and solving #31
                if min(99, 100 * self.getTime() / max(120, total_time)) >= perc_mark:
                    success = self._api.mark_as_watched(self._item)
                    if not success:
                        if timeout == 1000:
                            # First failure: warn the user and back off to 30s retries.
                            log("Failed to scrobble")
                            notify(get_str(32080))
                            timeout = 30000
                        elif (self.getTime() / total_time) > 0.95:
                            # Still failing near the end of playback: give up.
                            log("Stopped scrobbling")
                            notify(get_str(32081))
                            break
                        else:
                            log("Retrying")
                    elif success and bool(get_setting("bubble")):
                        # NOTE(review): reconstructed nesting -- the break is
                        # assumed to belong to this branch; verify against the
                        # original file's indentation.
                        self._show_bubble(self._item)
                        break
            except:
                # Deliberate best-effort: player calls can raise once playback
                # stops; swallow and let the loop condition exit.
                pass
            xbmc.sleep(timeout)
    log('track stop')
def __init__(self, fileName, addon):
    """Resolve the remote URL and local path for a guide data file.

    fileName may be the user's custom xmltv URL, the sub-xmltv URL, or a
    plain file name served from the addon's folder URL.
    """
    folderPath = utils.folder()
    self.addon = addon
    self.filePath = os.path.join(self.basePath, fileName)
    if fileName == utils.get_setting(addon_id, 'xmltv.url'):
        # BUGFIX: the original had a second elif with the same equality test
        # plus endswith(".zip") -- unreachable because this branch always
        # matched first. Both branches had identical bodies, so they are
        # merged here (covers plain and .zip custom URLs alike).
        self.fileUrl = fileName
        self.fileName = fileName.split('/')[-1]
        self.filePath = os.path.join(self.basePath, 'custom.xml')
    elif fileName == utils.get_setting(addon_id, 'sub.xmltv.url'):
        self.fileUrl = fileName
        self.fileName = fileName.split('/')[-1]
        self.filePath = os.path.join(self.basePath, utils.get_setting(addon_id, 'sub.xmltv') + '.xml')
    else:
        # Ordinary file: fetched as a .zip from the addon's folder URL.
        self.fileUrl = folderPath + fileName.replace('.xml', '.zip')
    # make sure the folder is actually there already!
    if not os.path.exists(self.basePath):
        os.makedirs(self.basePath)
def _check(self, view):
    """Run a PyFlakes pass over *view* when either linting feature is enabled."""
    autoimport_on = get_setting('use_autoimport_improvements', False)
    linting_on = get_setting('pyflakes_linting', False)
    if not autoimport_on and not linting_on:
        return
    source = view.substr(sublime.Region(0, view.size()))
    checker = PyFlakesChecker(view, source, view.file_name().encode('utf-8'))
    checker.start()
def filter_results(method, results):
    """Run *results* through each enabled filter stage, logging between stages."""
    log.debug("results before filtered: %s", repr(results))
    stages = (
        ('filter_keywords_enabled', lambda r: filter.keywords(r),
         "results after filtering keywords: %s"),
        ('filter_size_enabled', lambda r: filter.size(method, r),
         "results after filtering size: %s"),
        ('filter_include_resolution_enabled', lambda r: filter.resolution(r),
         "results after filtering resolution: %s"),
        ('filter_include_release', lambda r: filter.release_type(r),
         "results after filtering release type: %s"),
        ('filter_exclude_no_seed', lambda r: filter.seed(r),
         "results after filter no seeds: %s"),
    )
    for setting_name, apply_stage, message in stages:
        if get_setting(setting_name, bool):
            results = apply_stage(results)
            log.debug(message, repr(results))
    # todo remove dupes
    # todo maybe rating and codec
    log.debug("results after filtering: %s", repr(results))
    return results
def on_validation_finished(self):
    """Visualize lint errors and auto-import the first undefined name found."""
    if get_setting("pyflakes_linting"):
        self.visualize_errors()
    if not get_setting('use_autoimport_improvements'):
        return
    undefined = (e for e in self.errors
                 if isinstance(e, pyflakes.messages.UndefinedName))
    first = next(undefined, None)
    if first is not None:
        # Only the first undefined name is handled, as in a single-shot import.
        AutoImport(self.view, first.message_args[0]).start()
def __init__(self) -> None:
    """Load settings and derive certificate file locations for this server."""
    store = get_setting("CERT_STORE")
    name = get_setting("SERVER_NAME")
    self.cert_store = store
    self.server_name = name
    # AWS- and nginx-facing copies of the certificate live in parallel trees.
    self.cert_dir = Path(f"{store}/aws/{name}")
    self.nginx_cert_dir = Path(f"{store}/nginx/{name}")
    self.cert = Path(f"{self.cert_dir}/fullchain.pem")
    self.renew_before_expiry = int(get_setting("CERT_SELF_RENEWAL"))
def load():
    """Attach the PyCharm remote debugger when the 'enable_debugger' setting is on.

    Validates the configured host and port before connecting and logs (rather
    than raises) on any invalid configuration.
    """
    from elementum.provider import log
    from utils import get_setting

    if not get_setting("enable_debugger", bool):
        return

    import pkgutil
    import re
    from os import path
    import sys

    additional_libraries = get_setting("debugger_additional_libraries")
    if additional_libraries != "":
        if not path.exists(additional_libraries):
            # BUGFIX: the original message was garbled ("...libraries
            # directory, skipping...") -- completed to state the condition.
            log.error(
                "Debugger has been enabled but the additional libraries "
                "directory does not exist, skipping loading of debugger")
            return
        sys.path.append(additional_libraries)

    if pkgutil.find_loader("pydevd_pycharm") is None:
        # BUGFIX: the original message was truncated mid-sentence.
        log.error(
            "Debugger currently only supports IntelliJ IDEA and derivatives. "
            "If you need additional support, install pydevd_pycharm manually")
        return

    host = get_setting("debugger_host")
    # Accept either a dotted-quad IPv4 address or a DNS hostname.
    # BUGFIX: the hostname alternative contained a broken character class
    # '[A-Za-z|[A-Za-z]...' (missing ']'); restored the intended alternation.
    valid_host_regex = re.compile(
        r'''
        ^
        (?:
          (?:(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))
          |
          (?:(?:(?:[a-zA-Z]|[a-zA-Z][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)+(?:[A-Za-z]|[A-Za-z][A-Za-z0-9\-]*[A-Za-z0-9]))
        )
        $
        ''', re.VERBOSE)
    if not valid_host_regex.match(host):
        log.error("debugger: invalid host detected.. Skipping")
        return False
    try:
        port = get_setting("debugger_port", int)
    except ValueError:
        log.exception("debugger: invalid port detected")
        return
    if not (0 < int(port) <= 65535):
        log.exception("debugger: port must be between 0 and 65535")
        return
    import pydevd_pycharm
    pydevd_pycharm.settrace(host, port=port, stdoutToServer=True, stderrToServer=True)
    log.info("pycharm debugger successfully loaded")
def _check(self, view):
    """Start a background PyFlakes check if linting or auto-import is enabled."""
    wanted = (get_setting('use_autoimport_improvements', False)
              or get_setting('pyflakes_linting', False))
    if wanted:
        code = view.substr(sublime.Region(0, view.size()))
        PyFlakesChecker(view, code, view.file_name().encode('utf-8')).start()
def _get_aws_uri_(obj: str) -> Tuple[str, str]:
    """Return the (s3_uri, aws_profile) pair for the given S3 object key."""
    bucket = get_setting("AWS_S3_CERT_LOC")
    profile = get_setting("AWS_S3_PROFILE")
    uri = "s3://{}/{}".format(bucket, obj)
    return uri, profile
def search_shows(self, title, season=None, episode=None, imdb_id=None):
    # Search Jackett for a TV show. Falls back to a plain text query when the
    # configured indexers lack tvsearch capabilities.
    if "search_tags" not in self._caps:
        notify(translation(32701), image=get_icon_path())
        return []
    tv_search_caps = self._caps["search_tags"]['tv-search']
    if not tv_search_caps['enabled']:
        notify(translation(32702).format("show"), image=get_icon_path())
        log.warning(
            "Jackett has no tvsearch capabilities, please add a indexer that has tvsearch capabilities. "
            "Falling back to query search...")
        # Build "Title SxxEyy" style free-text query.
        title_ep = title
        if bool(season):
            title_ep = "{} S{:0>2}".format(title_ep, season)
            if bool(episode):
                title_ep = "{}E{:0>2}".format(title_ep, episode)
        results = self.search_query(title_ep)
        if get_setting("search_season_on_episode", bool) and bool(season) and bool(episode):
            # Also pull whole-season packs and keep only matching-season hits.
            season_query = re.escape("{:0>2}".format(season))
            results = results + self._filter_season(
                self.search_query("{} S{}".format(title, season_query)), season)
        return results
    # todo what values are possible for imdb_id?
    tv_params = tv_search_caps["params"]
    request_params = {"t": "tvsearch", "apikey": self._api_key}
    has_imdb_caps = 'imdbid' in tv_params
    # NOTE(review): message says "movie search" inside a show search -- likely
    # a copy/paste slip; left unchanged here.
    log.debug("movie search; imdb_id=%s, has_imdb_caps=%s", imdb_id, has_imdb_caps)
    if imdb_id and has_imdb_caps:
        request_params["imdbid"] = imdb_id
    else:
        log.debug("searching tv show with query=%s, season=%s, episode=%s",
                  title, season, episode)
        request_params["q"] = title
        if bool(season) and 'season' in tv_params:
            request_params["season"] = season
        if bool(episode) and 'ep' in tv_params:
            request_params["ep"] = episode
    results = self._do_search_request(request_params)
    if get_setting(
            "search_season_on_episode", bool
    ) and 'season' in request_params and 'ep' in request_params:
        # Re-query without the episode to catch season packs.
        del request_params['ep']
        results = results + self._filter_season(
            self._do_search_request(request_params), season)
    return results
def keywords(results):
    """Filter *results* by the user's keyword settings.

    Drops any result whose name contains a blocked keyword and keeps only
    results whose name contains every required keyword.

    BUGFIX: the original logic was inverted -- it kept results containing
    blocked words and dropped results containing required words. Empty
    entries (produced by splitting an empty setting on ',') are now skipped
    so an unset setting no longer matches every name.
    """
    block_keywords = get_setting('keywords_block').split(",")
    require_keywords = get_setting('keywords_require').split(",")
    for word in block_keywords:
        if word:
            results = [result for result in results if word not in result["name"]]
    for word in require_keywords:
        if word:
            results = [result for result in results if word in result["name"]]
    return results
def run(self, edit):
    """(Re)build the autoimport modules cache for the configured modules."""
    modules = get_setting("autoimport_modules", [])
    if not modules:
        sublime.error_message("Missing modules in configuration file")
        return
    sublime.status_message("Generating modules cache {0}...".format(" ".join(modules)))
    ctx = ropemate.context_for(self.view)
    ctx.building = True
    ctx.__enter__()
    ctx.importer.class_methods = get_setting("include_classmethods_in_globals", False)
    PythonGenerateModulesCache.GenerateModulesCache(ctx, modules).start()
async def on_message(self, msg):
    """Reply with a configured autoresponse when a keyword appears in *msg*."""
    # Ignore our own messages, guilds with the feature off, and bot commands.
    if msg.author.id == self.client.user.id:
        return
    if not utils.get_setting(msg.guild, "autoresponses"):
        return
    if msg.content.startswith("l."):
        return
    text = msg.content.lower()
    for pair in utils.get_setting(msg.guild, "autoresponse_file"):
        for keyword in pair["keywords"]:
            if keyword in text:
                response = random.choice(pair["responses"])
                if response != "":
                    return await msg.channel.send(response)
def fetchFile(self):
    # Download self.fileUrl to self.filePath when a refresh is due (file
    # missing, INTERVAL_ALWAYS, or older than the configured interval).
    # NOTE(review): this is Python 2 code ('<>', 'except X, e' syntax).
    retVal = self.FETCH_NOT_NEEDED
    fetch = False
    if not os.path.exists(self.filePath):
        # always fetch if file doesn't exist!
        fetch = True
    else:
        interval = int(self.addon.getSetting('xmltv.interval'))
        if interval <> self.INTERVAL_ALWAYS:
            modTime = datetime.datetime.fromtimestamp(os.path.getmtime(self.filePath))
            td = datetime.datetime.now() - modTime
            # need to do it this way cause Android doesn't support .total_seconds() :(
            diff = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10 ** 6
            if ((interval == self.INTERVAL_12 and diff >= 43200) or
                    (interval == self.INTERVAL_24 and diff >= 86400) or
                    (interval == self.INTERVAL_48 and diff >= 172800)):
                fetch = True
        else:
            fetch = True
    if fetch:
        # HTTP basic auth from the addon's stored credentials.
        username = utils.get_setting(addon_id,'username')
        password = utils.get_setting(addon_id,'password')
        base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
        tmpFile = os.path.join(self.basePath, 'tmp')
        f = open(tmpFile, 'wb')
        try:
            request = urllib2.Request(self.fileUrl)
            request.add_header("Authorization", "Basic %s" % base64string)
            tmpData = urllib2.urlopen(request)
            data = tmpData.read()
            if tmpData.info().get('content-encoding') == 'gzip':
                data = zlib.decompress(data, zlib.MAX_WBITS + 16)
            f.write(data)
            f.close()
        except urllib2.HTTPError, e:
            # NOTE(review): on this path f is never closed -- handle leak.
            if e.code == 401:
                utils.notify(addon_id, 'Authorization Error !!! Please Check Your Username and Password')
            else:
                utils.notify(addon_id, e)
        # Anything <= 256 bytes is treated as a failed/empty download.
        if os.path.getsize(tmpFile) > 256:
            if os.path.exists(self.filePath):
                os.remove(self.filePath)
            os.rename(tmpFile, self.filePath)
            retVal = self.FETCH_OK
            xbmc.log('[script.ivueguide] file %s was downloaded' % self.filePath, xbmc.LOGDEBUG)
        else:
            retVal = self.FETCH_ERROR
    # NOTE(review): retVal is computed but no return statement is visible in
    # this view -- confirm whether 'return retVal' was truncated.
def run(self, edit):
    """Generate the rope autoimport cache for the user-configured modules."""
    modules = get_setting('autoimport_modules', [])
    if modules:
        names = ' '.join(modules)
        sublime.status_message('Generating modules cache {0}...'.format(names))
        ctx = ropemate.context_for(self.view)
        ctx.building = True
        ctx.__enter__()
        include_cm = get_setting('include_classmethods_in_globals', False)
        ctx.importer.class_methods = include_cm
        worker = PythonGenerateModulesCache.GenerateModulesCache(ctx, modules)
        worker.start()
    else:
        sublime.error_message("Missing modules in configuration file")
def __init__(self, fileName, addon):
    """Resolve the local path and remote URL for *fileName*.

    BUGFIX: the original branched on fileName == 'addons.ini' but both
    branches assigned the identical URL; the dead branch was removed.
    """
    USER_ID = utils.get_setting(addon_id, 'userid')
    USER_NAME = USER_ID  # kept for parity with the original; unused here
    MAIN_URL = utils.get_setting(addon_id, 'mainurl')
    USER_URL = utils.get_setting(addon_id, 'userurl')  # read kept; unused here
    self.addon = addon
    self.filePath = os.path.join(self.basePath, fileName)
    self.fileUrl = MAIN_URL + fileName
    # make sure the folder is actually there already!
    if not os.path.exists(self.basePath):
        os.makedirs(self.basePath)
def __init__(self, fileName, addon):
    """Set up local path / remote URL bookkeeping for a guide data file."""
    USER_ID = utils.get_setting(addon_id, 'userid')
    USER_NAME = USER_ID
    MAIN_URL = utils.get_setting(addon_id, 'mainurl')
    USER_URL = utils.get_setting(addon_id, 'userurl')
    self.addon = addon
    self.filePath = os.path.join(self.basePath, fileName)
    # Both branches of the original 'addons.ini' condition produced the same
    # URL, so a single assignment is behaviorally identical.
    self.fileUrl = MAIN_URL + fileName
    # make sure the folder is actually there already!
    if not os.path.exists(self.basePath):
        os.makedirs(self.basePath)
def handle_syntax_error(self):
    # Highlight the syntax error reported by PyFlakes in the view and show
    # its message in the status bar. (Python 2: uses 'print >>'.)
    if not get_setting('pyflakes_linting', False):
        return
    e = self.syntax_error
    msg = e.args[0]
    (lineno, offset, text) = e.lineno, e.offset, e.text
    if text is None:
        # No source text available for the error -- can only log it.
        print >> sys.stderr, "SublimeRope problem decoding src file %s" % (
            self.filename,)
    else:
        line = text.splitlines()[-1]
        if offset is not None:
            # Re-base the offset onto the last physical line of the error text.
            offset = offset - (len(text) - len(line))
        self.view.erase_regions('sublimerope-errors')
        if offset is not None:
            # Mark the single character at the error position.
            text_point = self.view.text_point(lineno - 1, 0) + offset
            self.view.add_regions(
                'sublimerope-errors',
                [sublime.Region(text_point, text_point + 1)],
                'keyword', 'dot', PyFlakesChecker.drawType)
        else:
            # No column info: mark the whole line.
            self.view.add_regions(
                'sublimerope-errors',
                [self.view.line(self.view.text_point(lineno - 1, 0))],
                'keyword', 'dot', PyFlakesChecker.drawType
            )
        self.view.erase_status('sublimerope-errors')
        self.view.set_status('sublimerope-errors', msg)
def get_proxy_certs(self) -> None:
    """Obtain Let's Encrypt certificates for every configured proxy domain.

    For each domain in the CERT_PROXY_DOMAINS setting: request a certificate,
    set its "renew before expiry" window (shared by all proxy certs), and --
    if at least one certificate was created -- install the renew hook and
    push all certificate files to AWS S3.
    """
    domains = get_setting("CERT_PROXY_DOMAINS").split()
    any_created = False
    for domain in domains:
        if not self.get_cert([domain]):
            print(f"Could not get certificate for {domain}")
            continue
        any_created = True
        LetsEncryptCert.update_renew_before_expiry(domain, self.renew_before_expiry)
    if any_created:
        CertProxyServer.create_renew_hook()
        aws_push_certs()
async def minesweeper(self, ctx, *args):
    """Send a randomly generated minesweeper board, honoring size limits."""
    if not utils.get_setting(ctx.guild, "minesweeper"):
        await ctx.send(embed=utils.command_disabled)
        return
    # No args -> classic 9x9 with 10 mines; otherwise expect width height mines.
    if len(args) == 0:
        width, height, mine_count = 9, 9, 10
    elif len(args) == 3 and all(s.isnumeric() for s in args):
        dims = [int(s) for s in args]
        if min(dims) < 1:
            return await ctx.send(embed=utils.embeds["minesweeper error"])
        width, height, mine_count = dims
    else:
        return await ctx.send(embed=utils.embeds["minesweeper error"])
    if width > 40 or height > 40 or mine_count > width * height:
        return await ctx.send(embed=utils.embeds["minesweeper limits"])
    board = gen_minesweeper(width, height, mine_count)
    # Discord caps messages at 2000 characters.
    if len(board) > 2000:
        return await ctx.send(embed=utils.embeds["minesweeper char limit"])
    await ctx.send(board)
def handle_syntax_error(self):
    # Visualize the PyFlakes syntax error: region highlight plus status-bar
    # message. (Python 2 code -- note the 'print >>' statement.)
    if not get_setting('pyflakes_linting', False):
        return
    e = self.syntax_error
    msg = e.args[0]
    (lineno, offset, text) = e.lineno, e.offset, e.text
    if text is None:
        # Source text unavailable; report to stderr only.
        print >> sys.stderr, "SublimeRope problem decoding src file %s" % (
            self.filename, )
    else:
        line = text.splitlines()[-1]
        if offset is not None:
            # Translate the absolute offset into a column on the last line.
            offset = offset - (len(text) - len(line))
        self.view.erase_regions('sublimerope-errors')
        if offset is not None:
            # Column known: underline exactly one character.
            text_point = self.view.text_point(lineno - 1, 0) + offset
            self.view.add_regions(
                'sublimerope-errors',
                [sublime.Region(text_point, text_point + 1)],
                'keyword', 'dot', PyFlakesChecker.drawType)
        else:
            # Column unknown: underline the entire offending line.
            self.view.add_regions(
                'sublimerope-errors',
                [self.view.line(self.view.text_point(lineno - 1, 0))],
                'keyword', 'dot', PyFlakesChecker.drawType)
        self.view.erase_status('sublimerope-errors')
        self.view.set_status('sublimerope-errors', msg)
def __init__(self, view, single_file=False):
    # Build a rope project context for the given Sublime view, either for a
    # full project (a .ropeproject dir was found) or as a single-file project.
    self.view = view
    self.project = None
    self.resource = None
    self.tmpfile = None
    self.input = ""
    self.building = False
    self.file_path = self.view.file_name()
    if self.file_path is None:
        # unsaved buffer
        self.file_path = self._create_temp_file()
    self.project_dir = _find_ropeproject(self.file_path)
    class_methods_in_globals = get_setting('include_classmethods_in_globals', False)
    if not single_file and self.project_dir:
        self.project = project.Project(self.project_dir,
                                       fscommands=FileSystemCommands())
        self.importer = autoimport.AutoImport(
            project=self.project, observe=False,
            class_methods=class_methods_in_globals)
        # If the project root is itself a package, make it importable.
        if os.path.exists("%s/__init__.py" % self.project_dir):
            sys.path.append(self.project_dir)
    else:
        # create a single-file project(ignoring other files in the folder)
        folder = os.path.dirname(self.file_path)
        ignored_res = os.listdir(folder)
        ignored_res.remove(os.path.basename(self.file_path))
        self.project = project.Project(
            ropefolder=None, projectroot=folder,
            ignored_resources=ignored_res, fscommands=FileSystemCommands())
        self.importer = autoimport.AutoImport(
            project=self.project, observe=False,
            class_methods=class_methods_in_globals)
async def convert(self, ctx, *args):
    """Download an image from a URL and re-send it converted to another format.

    Usage: convert <extension> <url>. Temp files are always cleaned up.
    """
    if not utils.get_setting(ctx.guild, "convert"):
        await ctx.send(embed=utils.command_disabled)
        return
    if len(args) <= 1:
        await ctx.send(embed=utils.embeds["convert more args"])
        return
    extension = args[0]
    url = args[1]
    temp_file = "".join(
        random.choice(string.ascii_lowercase) for _ in range(10))
    try:
        image_file = requests.get(url, allow_redirects=True)
        # BUGFIX: the original used open(...).write(...), leaking the file
        # handle; a context manager guarantees it is flushed and closed.
        with open(temp_file, "wb") as out:
            out.write(image_file.content)
        try:
            im = Image.open(temp_file)
            im.save(f"{temp_file}.{extension}", quality=100)
            nfile = discord.File(
                f"{temp_file}.{extension}",
                filename=f"Converted File.{extension}")
            await ctx.send(file=nfile, embed=utils.embeds["convert success"])
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; PIL errors land here.
            await ctx.send(embed=utils.embeds["convert error converting"])
    except Exception:
        await ctx.send(embed=utils.embeds["convert error downloading"])
    finally:
        if os.path.isfile(f"{temp_file}.{extension}"):
            os.remove(f"{temp_file}.{extension}")
        if os.path.isfile(temp_file):
            os.remove(temp_file)
async def knockknock(self, ctx):
    """Play an interactive knock-knock joke with the invoking user."""
    if not utils.get_setting(ctx.guild, "knockknock"):
        await ctx.send(embed=utils.command_disabled)
        return
    channel = ctx.channel
    user = ctx.author
    await slow_send(channel, "Who's there?", 0.5)
    msg = await utils.wait_for_response(self.client, channel, user, 60)
    if msg is None:
        no_knock = [
            "Must've been the wind", "Guess there's nobody",
            "Eh, I'll just go back to bed"
        ]
        return await ctx.send(random.choice(no_knock))
    await slow_send(channel, f"{msg.content} who?", 0.5)
    msg = await utils.wait_for_response(self.client, channel, user, 60)
    if msg is None:
        no_punchline = [
            "Was there really no punchline?", "Huh, where'd you go?",
            "Oh well, I was hoping for a punchline."
        ]
        return await ctx.send(random.choice(no_punchline))
    await slow_send(channel, ":joy:", 0.5)
def parse_payload(method, payload):
    # Normalize a search payload: ensure 'title'/'titles' keys exist, lowercase
    # the title-language keys, and pick 'search_title' from Kodi's UI language
    # when available. (Python 2 code -- note .iteritems().)
    if method == 'general':
        if 'query' in payload:
            payload['title'] = payload['query']
            payload['titles'] = {'source': payload['query']}
        else:
            # Bare string payload: wrap it into the dict shape used elsewhere.
            payload = {
                'title': payload,
                'titles': {
                    'source': payload
                },
            }
    # Language codes are matched case-insensitively below.
    payload['titles'] = dict(
        (k.lower(), v) for k, v in payload['titles'].iteritems())
    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if not kodi_language:
            log.warning("Kodi returned empty language code...")
        elif kodi_language not in payload.get('titles', {}):
            log.info("No '%s' translation available..." % kodi_language)
        else:
            payload["search_title"] = payload["titles"][kodi_language]
    if "search_title" not in payload:
        log.info(
            "Could not determine search title, falling back to normal title: %s",
            repr(payload["title"]))
        payload["search_title"] = payload["title"]
    return payload
def show_popup_and_wait(self, episode, next_up_page, still_watching_page):
    # Show either the "next up" or "still watching" dialog (depending on how
    # many episodes played back-to-back) and poll until playback ends or the
    # user interacts. Returns (showing_next_up, showing_still_watching).
    try:
        play_time = self.player.getTime()
        total_time = self.player.getTotalTime()
    except RuntimeError:
        self.log('exit early because player is no longer running', 2)
        return False, False
    progress_step_size = calculate_progress_steps(total_time - play_time)
    next_up_page.set_item(episode)
    next_up_page.set_progress_step_size(progress_step_size)
    still_watching_page.set_item(episode)
    still_watching_page.set_progress_step_size(progress_step_size)
    played_in_a_row_number = get_setting('playedInARow')
    self.log('played in a row settings %s' % played_in_a_row_number, 2)
    self.log('played in a row %s' % self.state.played_in_a_row, 2)
    showing_next_up_page = False
    showing_still_watching_page = False
    if int(self.state.played_in_a_row) <= int(played_in_a_row_number):
        self.log(
            'showing next up page as played in a row is %s' %
            self.state.played_in_a_row, 2)
        next_up_page.show()
        set_property('service.upnext.dialog', 'true')
        showing_next_up_page = True
    else:
        # Threshold exceeded: ask the user whether they are still watching.
        self.log(
            'showing still watching page as played in a row %s' %
            self.state.played_in_a_row, 2)
        still_watching_page.show()
        set_property('service.upnext.dialog', 'true')
        showing_still_watching_page = True
    # Poll until <1s remains or any dialog button was pressed.
    while (self.player.isPlaying() and (total_time - play_time > 1)
           and not next_up_page.is_cancel()
           and not next_up_page.is_watch_now()
           and not still_watching_page.is_still_watching()
           and not still_watching_page.is_cancel()):
        try:
            play_time = self.player.getTime()
            total_time = self.player.getTotalTime()
        except RuntimeError:
            # Player went away mid-loop: close whichever dialog is open.
            if showing_next_up_page:
                next_up_page.close()
                showing_next_up_page = False
            if showing_still_watching_page:
                still_watching_page.close()
                showing_still_watching_page = False
            break
        remaining = total_time - play_time
        runtime = episode.get('runtime')
        if not self.state.pause:
            # Only advance the countdown bar while playback is not paused.
            if showing_next_up_page:
                next_up_page.update_progress_control(remaining=remaining,
                                                     runtime=runtime)
            elif showing_still_watching_page:
                still_watching_page.update_progress_control(
                    remaining=remaining, runtime=runtime)
        sleep(100)
    return showing_next_up_page, showing_still_watching_page
async def cmd_8ball(self, ctx, *_):
    """Reply with a random magic 8-ball response after a dramatic pause."""
    if not utils.get_setting(ctx.guild, "cmd_8ball"):
        await ctx.send(embed=utils.command_disabled)
        return
    async with ctx.typing():
        await ctx.send(":8ball: **Divining your fate...**")
        await asyncio.sleep(2)
        await ctx.send(embed=random.choice(responses))
def onInit(self):  # pylint: disable=invalid-name
    """Populate the dialog and pick the label for button 3013."""
    self.set_info()
    self.prepare_progress_control()
    stop_on_close = get_setting('stopAfterClose') == 'true'
    # 30033 = "Stop", 30034 = "Close"
    label_id = 30033 if stop_on_close else 30034
    self.getControl(3013).setLabel(localize(label_id))
def run(self, edit):
    """Rebuild the rope cache on a worker thread."""
    ctx = ropemate.context_for(self.view)
    ctx.building = True
    # we have to enter on main, but build on worker thread
    ctx.__enter__()
    include_cm = get_setting('include_classmethods_in_globals', False)
    ctx.importer.class_methods = include_cm
    PythonRegenerateCache.RegenerateCacheThread(ctx).start()
def aws_push_certs() -> None:
    """Push all proxy certificates to AWS S3."""
    cert_files = ("cert.pem", "chain.pem", "fullchain.pem", "privkey.pem")
    for domain in get_setting("CERT_PROXY_DOMAINS").split():
        live_dir = Path(f"{LetsEncryptCert.LETSENCRYPT_DIR}/live/{domain}")
        for name in cert_files:
            # S3 key mirrors the local layout: <domain>/<file>.
            aws_s3_put(Path(f"{live_dir}/{name}"), f"{domain}/{name}")
def resolution(results):
    """Keep only results whose resolution is enabled in the settings.

    BUGFIX: the first two logging arguments were swapped relative to the
    format string's labels ("res %s: name=%s"); they are now passed in label
    order. NOTE(review): the third value reads result['resolution'] while the
    filter uses result['_resolution'] -- confirm both keys exist upstream.
    """
    filtered = []
    for result in results:
        log.info("res %s: name=%s; id=%d",
                 result['_resolution'], result['name'], result['resolution'])
        if get_setting('include_resolution_' + result["_resolution"], bool):
            filtered.append(result)
    return filtered
def size(method, results):
    """Filter results to the configured size window (settings are in GB)."""
    include_unknown = get_setting('size_include_' + UNKNOWN, bool)
    # Per-content-type bounds when available, otherwise the generic bounds.
    if method in ("movie", "season", "episode"):
        prefix = 'size_' + method
    else:
        prefix = 'size'
    min_size = get_setting(prefix + '_min', float)
    max_size = get_setting(prefix + '_max', float)
    bytes_per_gb = 1024 * 1024 * 1024  # GB -> bytes
    lower = min_size * bytes_per_gb
    upper = max_size * bytes_per_gb
    return [
        result for result in results
        if _should_include_size_result(result["_size_bytes"], lower, upper,
                                       include_unknown)
    ]
def onClick(self, controlId):  # pylint: disable=invalid-name
    """Handle button presses: 3012 = Watch now, 3013 = Close / Stop."""
    if controlId == 3012:
        self.set_watch_now(True)
        self.close()
    elif controlId == 3013:
        self.set_cancel(True)
        # Optionally stop playback entirely instead of just closing the dialog.
        if get_setting('stopAfterClose') == 'true':
            Player().stop()
        self.close()
def get_protocol_factory(self):
    """Creates the instance of the protocol factory for a given COMaster."""
    factory_path = get_setting(
        'POLL_PROTOCOL_FACTORY',
        'protocols.mara.client.MaraClientProtocolFactory')
    factory_class = import_class(factory_path)
    return factory_class(self)
def run(self, edit):
    """Kick off rope cache regeneration in a background thread."""
    ctx = ropemate.context_for(self.view)
    ctx.building = True
    # we have to enter on main, but build on worker thread
    ctx.__enter__()
    ctx.importer.class_methods = get_setting(
        'include_classmethods_in_globals', False)
    worker = PythonRegenerateCache.RegenerateCacheThread(ctx)
    worker.start()
async def dino(self, ctx, *args):
    """Show a dino embed for the best-matching name, or the dino menu."""
    if not utils.get_setting(ctx.guild, "dino"):
        await ctx.send(embed=utils.command_disabled)
        return
    if args:
        # Fuzzy-match the first argument against the known dino names.
        best_match = process.extractOne(args[0], dino_names)[0]
        await ctx.send(embed=dino_to_embed(best_match))
    else:
        await utils.menus.reload(self.client, ctx, dino_menu)
def run(self):
    """Poll the Kodi log for new errors and pop up the log viewer on each batch."""
    if utils.get_setting("error_popup") != "true":
        return
    # Start error monitor
    reader = logviewer.LogReader(logviewer.log_location(False))
    # Ignore initial errors
    reader.tail()
    while not xbmc.abortRequested and self.running:
        content = reader.tail()
        parsed_errors = logviewer.parse_errors(content, set_style=True)
        if parsed_errors:
            logviewer.window(utils.ADDON_NAME, parsed_errors,
                             default=utils.is_default_window())
        xbmc.sleep(500)
def on_selection_modified(self, view):
    """Show the PyFlakes messages for the caret's line in the status bar."""
    if "Python" not in view.settings().get("syntax") \
            or not get_setting("pyflakes_linting", False):
        return
    errors_by_line = ERRORS_BY_LINE.get(view.id(), None)
    if not errors_by_line:
        view.erase_status("sublimerope-errors")
        return
    lineno = view.rowcol(view.sel()[0].end())[0] + 1
    if lineno in errors_by_line:
        joined = "; ".join(m.message % m.message_args
                           for m in errors_by_line[lineno])
        view.set_status("sublimerope-errors", joined)
    else:
        view.erase_status("sublimerope-errors")
def on_selection_modified(self, view):
    """Update the status bar with lint messages for the current line."""
    is_python = 'Python' in view.settings().get('syntax')
    if not is_python or not get_setting('pyflakes_linting', False):
        return
    vid = view.id()
    errors_by_line = ERRORS_BY_LINE.get(vid, None)
    if not errors_by_line:
        view.erase_status('sublimerope-errors')
        return
    caret = view.sel()[0].end()
    lineno = view.rowcol(caret)[0] + 1
    if lineno in errors_by_line:
        messages = [m.message % m.message_args for m in errors_by_line[lineno]]
        view.set_status('sublimerope-errors', '; '.join(messages))
    else:
        view.erase_status('sublimerope-errors')
def checkSettings():
    """Prompt for any missing Ivue Guide credentials and persist them.

    The three prompts shared identical logic in the original; factored into a
    single private helper. The redundant 'retval and len(retval) > 0' test is
    simplified to the equivalent truthiness check.
    """

    def _prompt(setting, heading, hidden):
        # Ask once via the keyboard dialog; store a non-empty answer, then
        # return the (possibly updated) stored value.
        if hidden:
            entered = dlg.input(heading, type=xbmcgui.INPUT_ALPHANUM,
                                option=xbmcgui.ALPHANUM_HIDE_INPUT)
        else:
            entered = dlg.input(heading, type=xbmcgui.INPUT_ALPHANUM)
        if entered:
            utils.set_setting(addon_id, setting, str(entered))
        return utils.get_setting(addon_id, setting)

    username = utils.get_setting(addon_id, 'username')
    password = utils.get_setting(addon_id, 'password')
    userid = utils.get_setting(addon_id, 'userid')
    if not username:
        username = _prompt('username', 'Enter Ivue Guide Username', hidden=False)
    if not password:
        password = _prompt('password', 'Enter Ivue Guide Password', hidden=True)
    if not userid:
        userid = _prompt('userid', 'Enter Ivue Guide UserID', hidden=True)
from django.core.files.base import File, ContentFile from django.core.files.storage import get_storage_class, default_storage, \ Storage from django.db.models.fields.files import ImageFieldFile, FieldFile from django.utils.html import escape from django.utils.safestring import mark_safe import engine, models, utils import datetime import os from django.utils.http import urlquote import logging log = logging.getLogger('genel') DEFAULT_THUMBNAIL_STORAGE = get_storage_class( utils.get_setting('DEFAULT_STORAGE'))() def get_thumbnailer(object, relative_name=None): """ Get a :class:`Thumbnailer` for a source file. The ``object`` argument is usually either one of the following: * ``FieldFile`` instance (i.e. a model instance file/image field property). * ``File`` or ``Storage`` instance, and for both of these cases the ``relative_name`` argument must also be provided * A string, which will be used as the relative name (the source will be
from PIL import Image except ImportError: import Image import utils import os try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from PIL import ImageFile ImageFile.MAXBLOCK = 1024*1024 DEFAULT_PROCESSORS = [utils.dynamic_import(p) for p in utils.get_setting('PROCESSORS')] SOURCE_GENERATORS = [utils.dynamic_import(p) for p in utils.get_setting('SOURCE_GENERATORS')] def process_image(source, processor_options, processors=None): """ Process a source PIL image through a series of image processors, returning the (potentially) altered image. """ if processors is None: processors = DEFAULT_PROCESSORS image = source for processor in processors: image = processor(image, **processor_options)
def apply_config(self):
    # Merge the addon's overclock/other settings into the RPi config.txt,
    # warn about dangerous combinations, and reboot if anything changed.
    # (Python 2 code -- note .iteritems().)
    utils.log("Applying settings to {}".format(utils.CONFIG_PATH))
    config = OrderedDict()
    overclock_preset = utils.get_setting('overclock_preset')
    utils.log("Using {} overclock settings".format(overclock_preset))
    if overclock_preset == 'Custom':
        # User-tuned values, one setting per overclock property.
        for prop in utils.OVERCLOCK_PRESET_PROPERTIES:
            config[prop] = utils.get_property_setting(prop)
    elif overclock_preset in utils.OVERCLOCK_PRESETS:
        # Named preset: fixed tuple of values in property order.
        config = OrderedDict(zip(utils.OVERCLOCK_PRESET_PROPERTIES,
                                 utils.OVERCLOCK_PRESETS[overclock_preset]))
    for prop in utils.OTHER_PROPERTIES:
        value = utils.get_property_setting(prop)
        if value is not None:
            config[prop] = value
    # Overvolting with force_turbo=1 voids the warranty -- offer to fix.
    if ('force_turbo' in config and config['force_turbo'] == 1
            and 'over_voltage' in config and config['over_voltage'] > 0):
        if not xbmcgui.Dialog().yesno(
                "OpenELEC RPi Config WARNING!!",
                "Overvolting with dynamic overclock disabled",
                "will void your warranty!!",
                "Continue, or fix by enabling dynamic overclock?",
                "Fix", "Continue"):
            utils.log("Enabling dynamic overclock")
            config['force_turbo'] = 0
        else:
            utils.log("Warranty warning was ignored")
    if 'max_usb_current' in config and config['max_usb_current'] == 1:
        if not xbmcgui.Dialog().yesno(
                "OpenELEC RPi Config WARNING!",
                "To output 1.2A from the USB ports",
                "you will need to use a good 2A power supply.",
                "Are you sure you want to set max_usb_current?"):
            config['max_usb_current'] = 0
    updated = False
    if os.path.isfile(utils.CONFIG_PATH):
        with open(utils.CONFIG_PATH, 'r') as f:
            config_txt = f.read()
        config_txt_new = config_txt
        # Edit each property in place: comment out (None), replace, or append.
        for prop, value in config.iteritems():
            utils.log("==== {} ====".format(prop))
            config_property_re = re.compile(
                utils.CONFIG_SUB_RE_STR.format(prop), re.MULTILINE)
            match = config_property_re.search(config_txt)
            if match:
                # group(1): leading comment marker; group(3): current value.
                comment = bool(match.group(1))
                old_value = match.group(3)
                if value is None:
                    utils.log(" Commenting out")
                    config_txt_new = config_property_re.sub(
                        utils.comment_out, config_txt_new)
                    updated = True
                elif comment or str(value) != old_value:
                    utils.log(" Setting to {}".format(value))
                    config_txt_new = config_property_re.sub(
                        partial(utils.replace_value, value), config_txt_new)
                    updated = True
                else:
                    utils.log(" Unchanged ({})".format(value))
            elif value is not None:
                utils.log(" Appending {}={}".format(prop, value))
                config_txt_new += utils.property_value_str(prop, value) + '\n'
                updated = True
    else:
        utils.log("A new {} will be created".format(utils.CONFIG_PATH))
        config_txt_new = utils.add_property_values(config)
        updated = True
    reboot_needed = False
    if updated:
        reboot_needed = True
        # config.txt lives on a read-only mount; remount rw while writing.
        with utils.remount():
            try:
                utils.write_config(config_txt_new)
            except (OSError, IOError) as e:
                reboot_needed = False
                utils.write_error(utils.CONFIG_PATH, str(e))
    if reboot_needed:
        if utils.restart_countdown(
                "Ready to reboot to apply changes in config.txt"):
            xbmc.restart()
        else:
            utils.log("Cancelled reboot")
    else:
        utils.log("No changes made")
# Django settings fragment: extend sys.path with sibling app directories and
# build the DATABASES dict from externally stored settings.
PATH_LIST = ['../', '../packages', '../shared', '../djangoapps']
syspath_append(PATH_LIST)
utils.set_root(ROOT)

ADMINS = (
    # ('Your Name', '*****@*****.**'),
)

MANAGERS = ADMINS

# All connection parameters come from utils.get_setting so credentials stay
# out of version control.
DATABASES = {
    'default': {
        'ENGINE': utils.get_setting('DATABASE_ENGINE'),  # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': utils.get_setting('DATABASE_NAME'),  # Or path to database file if using sqlite3.
        'USER': utils.get_setting('DATABASE_USER'),  # Not used with sqlite3.
        'PASSWORD': utils.get_setting('DATABASE_PASSWORD'),  # Not used with sqlite3.
        'HOST': utils.get_setting('DATABASE_HOST'),  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': utils.get_setting('DATABASE_PORT'),  # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
def showTOS(): tos = utils.get_setting(addon_id,'tos')== "true"