def home():
    """Homepage of the app GUI.

    Shows the group name as a title along with an input field for emails.
    Shows the number of users in the group, if enabled via the SHOW_USERS flag.

    View: sends a POST request to Slack to get the number of users.
    Entering a valid email results in a POST request to /invite.
    """
    # get the number of users if the flag is True
    if show_users:
        url = 'https://' + slack_group + '.slack.com/api/users.list'
        data = {
            'token': token,
        }
        r = request_post(url, data)
        data = json_load(r.text)
        users = len(data['members'])
    else:
        users = ''
    # render the template 'homepage.tpl'
    return template(
        'homepage',
        show_users=show_users,
        users=users,
        bg_color=bg_color,
        group_name=group_name,
    )
def pro_init():
    """Function to handle first-run on Screenly Pro."""
    is_pro_init = path.isfile(path.join(settings.get_configdir(), "not_initialized"))
    if is_pro_init:
        logging.debug("Detected Pro initiation cycle.")
        load_browser(url=HOME + INTRO)
    else:
        return False

    status_path = path.join(settings.get_configdir(), "setup_status.json")
    while is_pro_init:
        with open(status_path, "rb") as status_file:
            status = json_load(status_file)

        browser_send('js showIpMac("%s", "%s")' % (status.get("ip", ""), status.get("mac", "")))

        if status.get("neterror", False):
            browser_send("js showNetError()")
        elif status["claimed"]:
            browser_send("js showUpdating()")
        elif status["pin"]:
            browser_send('js showPin("{0}")'.format(status["pin"]))

        logging.debug("Waiting for node to be initialized.")
        sleep(5)
    return True
def get_name(cls, address):
    with open(cls.FILE) as f:
        json_obj = json_load(f)
    for name, registered_address in json_obj.items():
        if registered_address[0] == address:
            return name
    raise KeyError("Can't find address %s" % address)
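# A minimal sketch (not from the source) of the registry file the method above
# expects: each name maps to a list whose first element is the address, so
# get_name() performs a reverse lookup. The file name and values here are
# hypothetical.
#
# cls.FILE contents:
# {
#     "alice": ["10.0.0.1", "extra metadata"],
#     "bob": ["10.0.0.2"]
# }
#
# SomeRegistry.get_name("10.0.0.2")  ->  "bob"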
def __init__(self, callable, *args, **kwargs):
    super(CustomTask, self).__init__(callable, *args, **kwargs)
    if env.ssh_config_path and os.path.isfile(os.path.expanduser(env.ssh_config_path)):
        env.use_ssh_config = True
    if env.host_string == 'localhost' or not env.hosts:
        env.pyexecutable = sys_executable
        env.cd = partial(custom_cd, lcd, 0o002)
        env.run = local
        conffile = 'devenv.json'
    else:
        env.cd = partial(custom_cd, cd, 0o002)
        env.run = run
        if 'production' in env and env.production:
            error('TBD')
        else:
            conffile = 'testenv.json'
    if 'conffile' in env:
        conffile = env.conffile
    with open(conffile) as f:
        d = json_load(f)
    env.update(d)
    env.activate = ''.join(['. ', env.venvpath, '/bin/activate'])
def reload_mapcycle_json():
    if not MAPCYCLE_JSON_FILE.isfile():
        raise FileNotFoundError("Missing mapcycle.json")
    global mapcycle_json
    with open(str(MAPCYCLE_JSON_FILE)) as f:  # TODO: Do we need str() here?
        mapcycle_json = json_load(f)
def parseConfigfile(configpath):
    """ Parse a configuration file and return a Configuration object.

        The configuration file is formatted as JSON with two sections,
        "cache" and "layers":

          {
            "cache": { ... },
            "layers": {
              "layer-1": { ... },
              "layer-2": { ... },
              ...
            }
          }

        The full path to the file is significant; it is used to resolve
        any relative paths found in the configuration.

        See the Caches module for more information on the "cache" section,
        and the Core and Providers modules for more information on the
        "layers" section.
    """
    config_dict = json_load(urlopen(configpath))

    scheme, host, path, p, q, f = urlparse(configpath)

    if scheme == '':
        scheme = 'file'
        path = realpath(path)

    dirpath = '%s://%s%s' % (scheme, host, dirname(path).rstrip('/') + '/')

    return Config.buildConfiguration(config_dict, dirpath)
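# For illustration only: a minimal configuration document matching the shape
# described in the docstring above. The cache name and layer/provider values
# are placeholders, not taken from the source.
EXAMPLE_CONFIG = """
{
    "cache": {"name": "Test"},
    "layers": {
        "layer-1": {"provider": {"name": "mapnik", "mapfile": "style.xml"}}
    }
}
"""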
def generate_all_files(output=OUTPUT, client_secrets=CLIENT_SECRETS, credentials=CREDENTIALS):
    response = None
    if os.path.exists(output):
        try:
            with open(output, "r") as f:
                response = json_load(f)
        except ValueError:
            response = None
            os.remove(output)
    if not response:
        storage = Storage(credentials)
        credentials = storage.get()
        if not credentials:
            flow = flow_from_clientsecrets(client_secrets, scope="https://www.googleapis.com/auth/drive")
            credentials = run_flow(flow, storage)
        http = Http()
        http = credentials.authorize(http)
        service = build("drive", "v2", http=http)
        try:
            response = {"all_files": retrieve_all_files(service), "about": service.about().get().execute()}
        except errors.HttpError, e:
            error("An error occurred: %s" % e)
            response = None
        else:
            with open(output, "w") as f:
                json_dump(response, f, indent=2)
def parseConfigfile(configpath):
    """ Parse a configuration file and return a Configuration object.

        The configuration file is formatted as JSON with two sections,
        "cache" and "layers":

          {
            "cache": { ... },
            "layers": {
              "layer-1": { ... },
              "layer-2": { ... },
              ...
            }
          }

        The full filesystem path to the file is significant; it is used to
        resolve any relative paths found in the configuration.

        See the Caches module for more information on the "cache" section,
        and the Core and Providers modules for more information on the
        "layers" section.
    """
    with open(configpath, 'r') as configfile:
        config_dict = json_load(configfile)

    dirpath = dirname(configpath)

    return Config.buildConfiguration(config_dict, dirpath)
def Playback(this_program, from_json, logfile=None, timeout=None, python=DEFAULT_PYTHON):
    """Read I/O `from_json` and play it back to `this_program`."""
    if from_json is None:
        raise Exception("-j, --json needed!")
    with open(from_json, 'r') as fp:
        captured = json_load(fp)
    with Capsule(this_program, logfile=logfile, python=python) as capsule:
        capsule.run(with_check=False)
        for captured_stdin, _ in captured:
            _TerminalCapsuleUtils.endpoints(
                _TerminalCapsuleUtils.stream(
                    capsule,
                    captured_stdin,
                    timeout=timeout,
                ),
            )
        while not capsule.is_dead():
            _TerminalCapsuleUtils.endpoints(
                _TerminalCapsuleUtils.stream(
                    capsule,
                    None,
                    timeout=timeout,
                )
            )
def imgur_export(image, drawable):
    # Temp file name.
    temp_file = gettempdir() + '/gimp-imgur.png'

    # Duplicate the image so the original is left untouched.
    duplicate_image = image.duplicate()

    # Flatten all visible layers.
    compressed_layer = pdb.gimp_image_merge_visible_layers(duplicate_image, CLIP_TO_IMAGE)

    # Save the file to the temp directory for processing.
    pdb.gimp_file_save(duplicate_image, compressed_layer, temp_file, '?')

    # Free the duplicate image.
    pdb.gimp_image_delete(duplicate_image)

    # POST values for the API.
    values = {
        'type': 'base64',
        'image': b64encode(open(temp_file, 'rb').read()),
    }

    # Authentication headers.
    headers = {
        'Authorization': 'Client-ID ' + IMGUR_CLIENT_ID,
        'Accept': 'application/json'
    }

    # Send it out (GIMP's Python-Fu runs Python 2, hence urllib2).
    data = urllib.urlencode(values)
    req = urllib2.Request('https://api.imgur.com/3/image', data, headers)
    try:
        response = urllib2.urlopen(req)
        response_data = json_load(str(response.read()))
        if response_data['status'] == 200:
            webbrowser_open_new(response_data['data']['link'])
        else:
            pdb.gimp_message(str(response_data))
    except Exception, e:
        pdb.gimp_message(str(e) + "\nPlease check Client-ID")
def load_configuration(cfgfile_path):
    """
    Loads the configuration from a JSON file and returns it as a dictionary.

    This function takes care of resolving the service object class name and
    puts the resolved instance in the returned configuration dictionary in
    place of the qualified name.

    :param str cfgfile_path: configuration file path
    :returns: the configuration data
    :rtype: dict
    :raises ValueError: if the path is not provided or the configuration data is invalid
    :raises IOError: if the file is not found or is not a regular file
    :raises ConfigurationError: if the configuration is not valid
    """
    if not cfgfile_path:
        raise ValueError('missing parameter: cfgfile_path')

    with open(cfgfile_path, 'rt') as fp:
        cfg = json_load(fp)

    # resolve the service object class name
    try:
        cfg[CFG_SERVICE_OBJECT_CLASS] = symbol_for_name(cfg[CFG_SERVICE_OBJECT_CLASS])
    except (ImportError, NameError) as e:
        raise ConfigurationError(e)
    else:
        return cfg
def read_problem_data(input_name='', output_name=''):
    """
    Reads the problem data from the user-provided file name.

    The file can be either an .inp file or a .json file.
    """
    if not input_name:
        input_name = raw_input('Enter input file name: ')

    file_name, file_ext = os.path.splitext(input_name)
    if file_ext == "":
        if os.path.exists(file_name + ".json"):
            file_ext = ".json"
        else:
            print("Cannot find valid input file. Expecting a .json file.")
            exit(127)

    with open(file_name + file_ext, "r") as input_file:
        problem_data = json_load(input_file)

    if output_name:
        problem_data["output"] = output_name
    if ("output" not in problem_data) or (not problem_data["output"]):
        problem_data["output"] = file_name + "_output.json"
    return problem_data
def get_keywords(image):
    with open(cascades_json, encoding='utf-8') as json_file:
        cascades = json_load(json_file)
    keywords = []
    for cascade in sorted(cascades):
        if not cascades[cascade]['use']:
            continue
        trace.debug('Check with cascade {0}'.format(cascade))
        img = cv2.imread(image)
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        faces = cv2_get_objects(gray,
                                cascades[cascade]['cascade_file'],
                                cascades[cascade]['scale_factor'],
                                cascades[cascade]['min_neighbors'],
                                cascades[cascade]['flags'],
                                cascades[cascade]['min_x'],
                                cascades[cascade]['min_y'],
                                cascades[cascade]['max_x'],
                                cascades[cascade]['max_y'])
        if len(faces):
            keywords.extend(cascades[cascade]['keywords'])
            if 'nested_object' in cascades[cascade]:
                for (x1, y1, x2, y2) in faces:
                    # show face
                    # img = cv2.rectangle(img, (x1, y1), (x1 + x2, y1 + y2), (255, 0, 0), 2)
                    # roi_color = img[y1:y1 + y2, x1:x1 + x2]
                    roi_gray = gray[y1:y1 + y2, x1:x1 + x2]
                    smiles = cv2_get_objects(roi_gray,
                                             cascades[cascade]['nested_object']['cascade_file'],
                                             cascades[cascade]['nested_object']['scale_factor'],
                                             cascades[cascade]['nested_object']['min_neighbors'],
                                             cascades[cascade]['nested_object']['flags'],
                                             cascades[cascade]['nested_object']['min_x'],
                                             cascades[cascade]['nested_object']['min_y'],
                                             cascades[cascade]['nested_object']['max_x'],
                                             cascades[cascade]['nested_object']['max_y'])
                    if len(smiles):
                        # show smile
                        # for (ex, ey, ew, eh) in smiles:
                        #     cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)
                        keywords.extend(cascades[cascade]['nested_object']['keywords'])
    # resize
    # height, width = img.shape[:2]
    # img = cv2.resize(img, (int(height / 1.5), int(width / 1.2)))
    # show image
    # cv2.imshow('img', img)
    # cv2.waitKey(0)
    # cv2.destroyAllWindows()
    return list(set(keywords))
def __get__(self, url):
    """HTTP GET request method handler."""
    self.__response = urlopen(url)
    if self.__ok__():
        return json_load(self.__response.read())
    else:
        raise Exception("Request error :(")
def async_render_PUT(self, request):
    submission = json_load(request.content.read().decode())
    response_body = yield self.run_query_PUT(submission, request.args)
    if response_body is None:
        request.setResponseCode(404)
        returnValue(b'')
    request.setHeader('Content-Type', 'application/json')
    returnValue(json_dump(response_body, cls=ComplexEncoder).encode())
def readAvailableDataFile():
    # A context manager closes the file even on error; the original
    # try/finally could raise NameError if open() itself failed.
    with open(EM_util.webJSONAvailableDataFile, 'rb') as reader:
        data = reader.read()
    return json_load(data)
def get_worldcat_data_from_json(json_data):
    try:
        data = json_load(json_data)
        return WorldcatData(title=data["title"],
                            libraries=[l["oclcSymbol"] for l in data["library"]])
    except:
        return WorldcatData()
def get_packages(cls):
    """
    Package.get_packages()

    Returns a Package object for all known packages (as specified in the
    packages.json file).
    """
    with open(dirname(__file__) + "/packages.json", "r") as fd:
        return [cls(**pkgdata) for pkgdata in json_load(fd)]
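# Sketch of the packages.json layout implied by `cls(**pkgdata)` above: a JSON
# array of objects whose keys match the Package constructor's parameters. The
# field names shown are hypothetical.
#
# [
#     {"name": "foo", "version": "1.0.0"},
#     {"name": "bar", "version": "2.3.1"}
# ]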
def _get_resp(self, url, data=None):
    """ Send a query to bit.ly and return the received data. """
    resp = self.opener.open(url, data)
    json_data = json_load(resp)
    status = RespStatus(json_data['status_code'], json_data['status_txt'])
    data = json_data['data']
    return status, data
def import_public_posts(self):
    """
    Load the JSON of public posts for this user and create local posts
    from them.
    """
    url = self.server + 'people/{0}'.format(self.guid)
    req = Request(url)
    req.add_header('User-Agent', USER_AGENT)
    req.add_header('Accept', 'application/json')
    entries = json_load(urlopen(req, timeout=10))

    if isinstance(entries, dict):
        return  # Faulty node?

    for entry in entries:
        user_guid = entry['author']['guid']
        username = entry['author']['diaspora_id']
        user = self if self.username == username \
            else DiasporaContact.get_by_username(username, commit=False)
        if not user or user.guid != user_guid:
            continue

        post_guid = entry['guid']
        existing_post = DiasporaPost.get_by_guid(post_guid)
        if existing_post or not entry['public']:
            continue  # Already imported

        if entry.get('root'):
            root_guid = entry['root']['guid']
            root_post = DiasporaPost.get_by_guid(root_guid)
            if root_post:
                parent = root_post.post
            else:
                continue  # Cannot find parent
        else:
            parent = None

        post = Post(author=user.contact, parent=parent)
        db.session.add(post)

        post.created_at = datetime.strptime(
            entry['created_at'], '%Y-%m-%dT%H:%M:%SZ'
        )
        post.thread_modified(when=datetime.strptime(
            entry['interacted_at'], '%Y-%m-%dT%H:%M:%SZ'
        ))

        post.add_part(
            MimePart(
                type='text/x-markdown',
                body=entry['text'].encode('utf-8'),
                text_preview=entry['text']
            ),
            inline=True,
            order=0
        )
        post.share_with([user.contact], show_on_wall=True)
        post.diasp = DiasporaPost(guid=post_guid, type='public')
def reload_scheme(filename):
    if not filename.endswith('.json'):
        filename += '.json'
    filepath = KS_DIR / filename
    if not filepath.isfile():
        raise FileNotFoundError("Cannot find {}".format(filename))
    with open(str(filepath)) as f:
        ks_database.load_from_json(json_load(f))
def remove(cls, name):
    with open(cls.FILE, "r+") as f:
        json_obj = json_load(f)
    if name not in json_obj:
        logger.debug("No such name %s" % (name,))
        return False
    del json_obj[name]
    with open(cls.FILE, "w") as f:
        json_dump(json_obj, f, indent=2, sort_keys=True)
    return True
def __call__(self, file):
    try:
        with open(file, mode='rt', encoding='UTF-8', errors='strict') as fp:
            return json_load(fp)
    except (OSError, IOError) as error:
        msg = 'Can not open {!r}. {}'.format
        raise ArgumentTypeError(msg(file, error))
    except ValueError as error:
        msg = 'Error when parsing config file {!r}. {}'.format
        raise ArgumentTypeError(msg(file, error))
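# Usage sketch (assumed wiring, not from the source): because __call__ takes a
# path and returns the parsed JSON, raising ArgumentTypeError on failure, an
# instance of this class can serve as an argparse `type=` converter. The class
# name JsonFileType is hypothetical.
#
# from argparse import ArgumentParser
# parser = ArgumentParser()
# parser.add_argument('--config', type=JsonFileType())
# args = parser.parse_args(['--config', 'settings.json'])  # args.config is a dict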
def make_request(sentence):
    url = "http://demo.ark.cs.cmu.edu/parse/api/v1/parse?sentence="
    request_url = url + sentence.replace(' ', '+')
    json = None  # avoid NameError on the error path below
    try:
        socket = urlopen(request_url)
        json = json_load(socket.read())
        socket.close()
    except URLError:
        logging.error("You are not connected to the Internet!")
    return json
def __post__(self, data):
    """HTTP POST request method handler."""
    data['api_key'] = self.API_KEY
    data['format'] = self.REQUEST_FORMAT
    self.__response = urlopen(self.API_URL, urlencode(data))
    if self.__ok__():
        return json_load(self.__response.read())
    else:
        raise Exception("Request error :(")
def get_advanced_search(self, term, rows=1):
    requester = self.get_opener()
    encoding = urllib.parse.urlencode(
        {'q': term,
         'mediatype': None,
         'rows': rows,
         'page': 1,
         'output': 'json',
         'save': 'no#raw'})
    url = "{0}advancedsearch.php?{1}".format(self.host, encoding)
    if self.debug:
        formatter.debug_message("Call to Advanced search from Archive.org API URL: {0}".format(url))
    with requester.open(url, timeout=self.timeout) as f:
        data = f.read().decode("utf-8")
        data_json = json_load(data)
    return data_json
def get_details(self, title):
    requester = self.get_opener()
    encoding = urllib.parse.urlencode({'output': 'json', 'callback': 'IAE.favorite'})
    url = "{0}details/{1}&{2}".format(self.host, title, encoding)
    if self.debug:
        formatter.debug_message("Call to details from Archive.org API URI: {0}".format(url))
    with requester.open(url, timeout=self.timeout) as f:
        data = f.read().decode("utf-8")
        # strip the JSONP wrapper 'IAE.favorite(...)' (13 chars + trailing ')')
        data_json = json_load(data[13:-1])
    return data_json
def parseConfig(configpath):
    """ Parse a configuration file and return a raw dictionary and dirpath.

        The return value can be passed to TileStache.Config.buildConfiguration().
    """
    if urlparse(configpath).scheme in ('', 'file'):
        with open(urlparse(configpath).path) as file:
            config_dict = json_load(file)
    else:
        config_dict = json_load(urlopen(configpath))

    scheme, host, path, p, q, f = urlparse(configpath)

    if scheme == '':
        scheme = 'file'
        path = realpath(path)

    dirpath = '%s://%s%s' % (scheme, host, dirname(path).rstrip('/') + '/')

    return config_dict, dirpath
def __init__(self, url_root, cache_responses=True, debug_level="DEBUG"):
    logging.basicConfig(level=debug_level)

    # Call the API server at url_root to grab the cache dictionary.
    cache_dict = json_load(urlopen(url_root + "/cache"))
    dirpath = '/tmp/stache'

    config = ExternalConfiguration(url_root, cache_dict, cache_responses, dirpath)

    TileStache.WSGITileServer.__init__(self, config, False)
# ============================================================================
# >> ALL DECLARATION
# ============================================================================
__all__ = (
    'Player',
)


# ============================================================================
# >> GLOBAL VARIABLES
# ============================================================================
if (CFG_PATH / 'privileges.json').isfile():
    with open(CFG_PATH / 'privileges.json') as inputfile:
        try:
            privileges = json_load(inputfile)
        except:
            warn('Unable to load the privileges.json file.')
            except_hooks.print_exception()
            privileges = {}

    for x in ('players', ):
        if x not in privileges:
            privileges[x] = {}
else:
    privileges = {
        'players': {
            'dummy': {
                'wcsadmin': 1,
                'wcsadmin_githubaccess': 1,
def get_dict_from_json(file_path):
    with open(file_path) as json_data:
        return json_load(json_data)
def load_manifest(path: Path):
    """Loads a JSON manifest."""
    with open(path, "rt") as fin:
        data = json_load(fin)
    return Manifest.schema().load(data, many=True)
# -*- coding: utf-8 -*-
from flask import Flask, render_template, request, render_template_string, abort, Response
from wtforms import Form, StringField, TextAreaField, SubmitField, ValidationError
from wtforms.validators import input_required, optional
from functools import wraps
from json import load as json_load

with open("content.json", encoding='utf-8') as f:
    POSTS = json_load(f)["posts"]

app = Flask(__name__)


def dangerous(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if request.method == "POST":
            debug_msg = ""
            # FIXME: tplmap still bypasses my test
            try:
                if "tplmap" in request.headers["User-Agent"].lower():
                    return render_template("error.html", error_code=403,
                                           error_message="Requête bloquée par le WAF"), 403
            except:
                pass
            for field, data in request.form.items():
                try:
                    tmp = render_template_string(data)
                except:
                    abort(500)  # TODO
def __init__(self, user_class, user_id, json_settings):
    self._user_class = user_class
    self._user_id = user_id
    # self._values = jsonloads(json_settings)
    self._values = json_load(json_settings)
    self._read_only = True
def get_cache(storage, cache, nodes):  # DONE
    """
    Acquire and store asset ids and asset precisions.
    This is called once prior to spawning additional processes.
    """
    storage["bw_depth"] = 10
    storage["updates"] = []

    def wwc():
        """ Winnowing Websocket Connections... """
        print("\033c")
        # cache = logo(cache)
        print(
            it(
                "cyan",
                """
    +===============================+
     ╦ ╦ ╔═╗ ╔╗╔ ╔═╗ ╔═╗ ╔╦╗
     ╠═╣ ║ ║ ║║║ ║╣  ╚═╗  ║
     ╩ ╩ ╚═╝ ╝╚╝ ╚═╝ ╚═╝  ╩
       MARKET - PEGGED - ASSETS
    +===============================+

    The right of nullification is a natural right,
    which all admit to be a remedy against insupportable oppression.

        $$$ James Madison $$$

    If it had not been for the justice of our cause, and the consequent
    interposition of Providence, in which we had faith, we must have
    been ruined.

        $$$ Ben Franklin $$$

    Resistance and disobedience in economic activity is the most moral
    human action possible.

        $$$ Samuel E. Konkin III $$$
                """,
            ))
        print("")
        print(ctime(), "\n")
        print(wwc.__doc__, "\n")

    asset_ids, currency_ids = [], []
    asset_precisions, currency_precisions = [], []
    # trustless of multiple nodes
    while True:
        try:
            wwc()
            black = race_read(doc="blacklist.txt")
            white = race_read(doc="whitelist.txt")
            # switch nodes
            nodes = get_nodes()
            shuffle(nodes)
            node = nodes[0]
            print(node)
            if node in black:
                raise ValueError("blacklisted")
            if node in white:
                raise ValueError("whitelisted")
            # reconnect and make calls
            rpc, _, _ = wss_handshake(storage, node)
            (
                asset_id,
                asset_precision,
                currency_id,
                currency_precision,
            ) = rpc_lookup_asset_symbols(rpc)
            # prepare for statistical mode of cache items
            asset_ids.append(asset_id)
            asset_precisions.append(asset_precision)
            currency_ids.append(json_dump(currency_id))
            currency_precisions.append(json_dump(currency_precision))
            # mode of cache
            if len(asset_ids) > 4:
                print(cache)
                try:
                    cache["begin"] = int(time())
                    cache["asset_id"] = mode(asset_ids)
                    cache["asset_precision"] = mode(asset_precisions)
                    cache["currency_id"] = json_load(mode(currency_ids))
                    cache["currency_precision"] = json_load(
                        mode(currency_precisions))
                    enableTrace(False)
                    print_market(storage, cache)
                    winnow(storage, "whitelist", node)
                    break
                except BaseException:
                    winnow(storage, "blacklist", node)
                    continue
        except Exception as error:
            print(trace(error))
            continue
    return storage, cache
def __init__(self, user_class: Type[Model], user_id, json_settings: TextField):
    self._user_class = user_class
    self._user_id = user_id
    self._values = json_load(json_settings)
    self._read_only = True
from sys import argv
from json import load as json_load
from jsonschema import validate as jsonschema_validate
from yaml import safe_load as yaml_safe_load

spec_path = argv[1]
sample_path = argv[2]

with open(sample_path) as sample_data, open(spec_path) as spec_data:
    sample_dict = json_load(sample_data)
    spec_dict = yaml_safe_load(spec_data)

schema_dict = {**spec_dict, "$ref": "#/$defs/SystemProfile"}
jsonschema_validate(instance=sample_dict, schema=schema_dict)
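# Invocation sketch for the script above (the file names are placeholders):
#
#     python validate_profile.py system_profile.spec.yaml host_sample.json
#
# argv[1] is the YAML spec whose $defs contain SystemProfile; argv[2] is the
# JSON sample validated against that definition. jsonschema's validate()
# raises ValidationError on mismatch and returns None on success.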
    if character not in PUNCTUATION:
        return False
    return True


if __name__ == '__main__':
    time_start = time()
    logger = getLogger(__name__)
    basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=INFO)
    logger.info('started.')

    with open('./pdf_folder_to_tfidf.json', 'r') as settings_fp:
        settings = json_load(settings_fp, cls=None, object_hook=None,
                             parse_float=None, parse_int=None,
                             parse_constant=None, object_pairs_hook=None)

    filter_threshold = settings['filter_threshold'] if 'filter_threshold' in settings.keys() else 1
    if 'filter_threshold' in settings.keys():
        logger.info('filter threshold: {}'.format(filter_threshold))
    else:
        logger.warning(
            'filter threshold not in settings; using default {}'.format(
                filter_threshold))

    input_folder = settings['input_folder'] if 'input_folder' in settings.keys() else None
    if input_folder:
def json(path):
    logger.debug(f"Opening JSON file at {path}.")
    with open(path, 'r') as ifile:
        res = json_load(ifile)
    return res
def create_profile(
    extension: Optional[Union[List[str], str]] = None,
    prefs_js: Optional[str] = None,
    template: Optional[str] = None,
    working_path: Optional[str] = None,
) -> str:
    """Create a profile to be used with Firefox.

    Args:
        extension: Path to an extension to be installed.
        prefs_js: Path to the prefs.js file to install in the profile.
        template: Path to an existing profile directory to use.
        working_path: Used as base directory for temporary files.

    Returns:
        Path to directory to be used as a profile.
    """
    profile = mkdtemp(dir=working_path, prefix="ffprofile_")
    try:
        if template is not None:
            LOG.debug("using profile template: %r", template)
            rmtree(profile)
            copytree(template, profile)
            invalid_prefs = pathjoin(profile, "Invalidprefs.js")
            # if Invalidprefs.js was copied from the template profile remove it
            if isfile(invalid_prefs):
                remove(invalid_prefs)
        if prefs_js is not None:
            LOG.debug("using prefs.js: %r", prefs_js)
            copyfile(prefs_js, pathjoin(profile, "prefs.js"))
            # times.json only needs to be created when using a custom prefs.js
            times_json = pathjoin(profile, "times.json")
            if not isfile(times_json):
                with open(times_json, "w") as times_fp:
                    times_fp.write(f'{{"created":{int(time()) * 1000}}}')
    except OSError:
        rmtree(profile)
        raise

    # extension support
    try:
        if extension is None:
            extensions = []
        elif isinstance(extension, (list, tuple)):
            extensions = extension
        else:
            extensions = [extension]
        if extensions and not isdir(pathjoin(profile, "extensions")):
            mkdir(pathjoin(profile, "extensions"))
        for ext in extensions:
            if isfile(ext) and ext.endswith(".xpi"):
                copyfile(ext, pathjoin(profile, "extensions", basename(ext)))
            elif isdir(ext):
                # read manifest to see what the folder should be named
                ext_name = None
                if isfile(pathjoin(ext, "manifest.json")):
                    try:
                        with open(pathjoin(ext, "manifest.json")) as manifest:
                            manifest_loaded_json = json_load(manifest)
                        ext_name = manifest_loaded_json["applications"]["gecko"]["id"]
                    except (OSError, KeyError, ValueError) as exc:
                        LOG.debug("Failed to parse manifest.json: %s", exc)
                elif isfile(pathjoin(ext, "install.rdf")):
                    try:
                        xmlns = {
                            "x": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
                            "em": "http://www.mozilla.org/2004/em-rdf#",
                        }
                        tree = ElementTree.parse(pathjoin(ext, "install.rdf"))
                        assert tree.getroot().tag == f"{{{xmlns['x']}}}RDF"
                        ids = tree.findall("./x:Description/em:id", namespaces=xmlns)
                        assert len(ids) == 1
                        ext_name = ids[0].text
                    except (AssertionError, OSError, ElementTree.ParseError) as exc:
                        LOG.debug("Failed to parse install.rdf: %s", exc)
                if ext_name is None:
                    raise RuntimeError(
                        f"Failed to find extension id in manifest: {ext!r}")
                copytree(abspath(ext), pathjoin(profile, "extensions", ext_name))
            else:
                raise RuntimeError(f"Unknown extension: {ext!r}")
    except Exception:
        # cleanup on failure
        rmtree(profile, True)
        raise
    return profile
def load_data() -> dict:
    # use a context manager so the file handle is closed deterministically
    with open("data.json", "r") as fin:
        return json_load(fin)
def load_json_file(path: str) -> object:
    """Load json from file."""
    with open(path, "r") as fin:
        return json_load(fin)
def _read_attrs(self):
    value = json_load(self.value)
    self._entity_repr = value.pop(0)
    self._related_line_id = value.pop(0) if self.line_type.has_related_line else 0
    self._modifications = value
        [int(255 * arg_colormap(arg_float)[index]) for index in range(3)])
    return '#{:02x}{:02x}{:02x}'.format(color_value[0], color_value[1], color_value[2])


if __name__ == '__main__':
    time_start = time()
    logger = getLogger(__name__)
    basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=INFO)
    logger.info('started.')

    with open('./demo_wordcloud.json', 'r') as settings_fp:
        settings = json_load(settings_fp, cls=None, object_hook=None,
                             parse_float=None, parse_int=None,
                             parse_constant=None, object_pairs_hook=None)

    imshow_interpolation = settings['imshow_interpolation'] if 'imshow_interpolation' in settings.keys() else 20
    if 'imshow_interpolation' in settings.keys():
        logger.info('imshow interpolation: {}'.format(imshow_interpolation))
    else:
        logger.warning(
            'imshow interpolation not in settings; default value is {}'.format(
                imshow_interpolation))

    input_folder = settings['input_folder'] if 'input_folder' in settings.keys() else None
def __init__(self, filename: str = None):
    if filename is None:
        filename = os_path.join(here, 'table.json')
    with open(filename) as fin:  # context manager closes the handle
        self.rot_table = json_load(fin)
def bifurcation(storage, cache):  # DONE
    """
    Given 7 dictionaries of data (mavens), find the most common.
    Send good (statistical mode) data to pricefeed_dex.
    """
    while True:
        try:
            sleep(BIFURCATION_PAUSE)  # take a deep breath
            # initialize the dex_data dictionary
            dex_data = {}
            # initialize lists to sort data from each maven by key
            last = []
            whitelist = []
            blacklist = []
            blocktime = []
            pings = []
            # gather list of maven opinions from the nascent_trend()
            mavens = race_read(doc="mavens.txt")
            # sort maven data for statistical mode analysis by key
            len_m = len(mavens)
            for i in range(len_m):
                maven = mavens[i]
                last.append(json_dump(maven["last"]))
                blocktime.append(maven["blocktime"])
                whitelist.append(maven["whitelist"])
                blacklist.append(maven["blacklist"])
                pings.append(maven["ping"])
            # stringify lists for statistical mode of json text
            # the mean ping of the mavens is passed to the dex_data
            ping = int(1000 * sum(pings) / (len(pings) + 0.00000001)) / 1000.0
            ping = min(1, ping)
            # find the youngest bitshares blocktime in our dataset
            try:
                blocktime = max(blocktime)
            except BaseException:
                print("validating the nascent trend...")
                continue
            # get the mode of the mavens for each metric
            # allow 1 or 2 less than total & most recent for mode
            # accept "no mode" statistics error as possibility
            try:
                last = json_load(mode(last))
            except BaseException:
                try:
                    last = mode(last[-(len_m - 1):])
                except BaseException:
                    last = mode(last[-(len_m - 2):])
            # attempt a full whitelist and blacklist
            white_l = []
            for i in whitelist:
                white_l += i
            whitelist = list(set(white_l))[-storage["bw_depth"]:]
            black_l = []
            for i in blacklist:
                black_l += i
            blacklist = list(set(black_l))[-storage["bw_depth"]:]
            # if you made it this far without statistics error
            # truncate and rewrite the dex_data with curated data
            # Must be JSON type
            # 'STRING', 'INT', 'FLOAT', '{DICT}', or '[LIST]'
            dex_data["ping"] = ping  # FLOAT about 0.500
            dex_data["last"] = last  # DICT
            dex_data["whitelist"] = whitelist  # LIST
            dex_data["blacklist"] = blacklist  # LIST
            dex_data["blocktime"] = int(blocktime)  # INT
            dex_data["asset"] = cache["asset"]  # STRING SYMBOL
            dex_data["asset_id"] = cache["asset_id"]  # STRING A.B.C
            dex_data["asset_precision"] = int(cache["asset_precision"])  # INT
            dex_data["currency"] = cache["currency"]  # STRING SYMBOL
            dex_data["currency_id"] = cache["currency_id"]  # STRING A.B.C
            dex_data["pair"] = cache["pair"]  # STRING A:B
            # add index to dex_data
            dex_data["keys"] = list(dex_data.keys())
            # solitary process with write storage['access'] to pricefeed_dex.txt
            dex_data = json_dump(dex_data)
            race_write(doc="pricefeed_dex.txt", text=dex_data)
            print("pricefeed_dex.txt updated")
            storage["updates"].append(time())
            storage["updates"] = [
                t for t in storage["updates"] if (time() - t) < 60
            ]
            # clear namespace
            del dex_data
            del mavens
            del maven
            del last
            del whitelist
            del blacklist
            del blocktime
            del len_m
            del black_l
            del white_l
        except Exception as error:
            # wait a second and try again
            # common msg is "no mode statistics error"
            if DEV:
                msg = trace(error)
                print(msg)
                race_append(doc="pricefeed_dexlog.txt", text=msg)
            continue  # from top of while loop NOT pass through error
def file_mgmt(operation, file_path, content=None, cfg_parser=None):
    """A generic function to manage files (read/write).

    :param operation: File operation type to perform
    :type operation: str
    :param file_path: File name including path
    :type file_path: str
    :param content: Data to write to a file
    :type content: object
    :param cfg_parser: Config parser object (only needed if the file being
        processed is in a configuration-parser format)
    :type cfg_parser: object
    :return: Data that was read from a file
    :rtype: object
    """
    # Determine file extension
    file_ext = splitext(file_path)[-1]

    if operation in ['r', 'read']:
        # Read
        if exists(file_path):
            if file_ext == ".json":
                # json
                with open(file_path) as f_raw:
                    return json_load(f_raw)
            elif file_ext in ['.yaml', '.yml', '.paws']:
                # yaml
                with open(file_path) as f_raw:
                    return yaml_load(f_raw)
            else:
                # text
                content = ''
                with open(file_path) as f_raw:
                    if cfg_parser is not None:
                        # Config parser file
                        content = cfg_parser.readfp(f_raw)
                    else:
                        content = f_raw.read()
                return content
        else:
            raise IOError("%s not found!" % file_path)
    elif operation in ['w', 'write']:
        # Write
        mode = 'w+' if exists(file_path) else 'w'
        if file_ext == ".json":
            # json
            with open(file_path, mode) as f_raw:
                json_dump(content, f_raw, indent=4, sort_keys=True)
        elif file_ext in ['.yaml', '.yml', '.paws']:
            # yaml
            with open(file_path, mode) as f_raw:
                yaml_dump(content, f_raw, default_flow_style=False)
        else:
            # text
            with open(file_path, mode) as f_raw:
                if cfg_parser is not None:
                    # Config parser file
                    cfg_parser.write(f_raw)
                else:
                    f_raw.write(content)
    else:
        raise Exception("Unknown file operation: %s." % operation)
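# Round-trip sketch for the helper above; the path and payload are
# illustrative. 'write' serialises via json_dump, 'read' returns the
# json_load result, and the extension selects the format.
#
# file_mgmt('write', 'settings.json', content={'retries': 3})
# assert file_mgmt('read', 'settings.json') == {'retries': 3}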
def _load(self, filename):
    with open(filename, 'r') as f:
        obj = json_load(f)
    self.update(obj)
    self._check(os.path.dirname(filename))
def get_bricks_contents(self) -> List[Tuple[str, str]]:
    """Build a list of tuples (brick_ID, brick_HTML) which can be serialised to JSON.

    @return A JSON-friendly list of tuples.
    """
    request = self.request
    brick_renders = []
    bricks = self.get_bricks()
    context = self.get_bricks_context().flatten()
    bricks_manager = BricksManager.get(context)

    if self.check_bricks_permission:
        # has_perm = request.user.has_perm
        user = request.user

        for brick in bricks:
            # permission = brick.permission
            #
            # if permission and not has_perm(permission):
            #     raise PermissionDenied(
            #         f'Error: you are not allowed to view this brick: {brick.id_}'
            #     )
            permissions = brick.permissions

            # TODO: factorise ? (see creme_core.views.generic.base.PermissionsMixin)
            if permissions and not (
                user.has_perm(permissions)
                if isinstance(permissions, str) else
                user.has_perms(permissions)
            ):
                raise PermissionDenied(
                    f'Error: you are not allowed to view this brick: {brick.id_}'
                )

    all_reloading_info = {}
    all_reloading_info_json = request.GET.get('extra_data')
    if all_reloading_info_json is not None:
        try:
            decoded_reloading_info = json_load(all_reloading_info_json)
        except ValueError as e:
            logger.warning('Invalid "extra_data" parameter: %s.', e)
        else:
            if not isinstance(decoded_reloading_info, dict):
                logger.warning('Invalid "extra_data" parameter (not a dict).')
            else:
                all_reloading_info = decoded_reloading_info

    # TODO: only one group (add_group should not take *bricks, because the length is limited)
    for brick in bricks:
        bricks_manager.add_group(brick.id_, brick)

    render_method = self.brick_render_method

    for brick in bricks:
        reloading_info = all_reloading_info.get(brick.id_)
        if reloading_info is not None:
            brick.reloading_info = reloading_info

        render_func = getattr(brick, render_method, None)

        if render_func is None:
            logger.warning(
                'Brick without %s(): %s (id=%s)',
                render_method, brick.__class__, brick.id_,
            )
        else:
            # NB: the context is copied in order to get a 'fresh' one for each
            # brick, & so avoid annoying side-effects.
            # Notice that build_context() creates a shared dictionary with
            # the "shared" key in order to explicitly share data between 2+ bricks.
            brick_renders.append((brick.id_, render_func({**context})))

    return brick_renders
iptc_address_tags = {
    "Country-PrimaryLocationName",
    "Province-State",
    "City",
    "Sub-location"
}

object_detector = path.abspath('bin/face_detect.exe')

# Geocoders
yandex_geocoder = 'http://geocode-maps.yandex.ru/1.x/'
osm_geocoder = 'http://nominatim.openstreetmap.org/search/'

# Paths to JSON configs
cascades_json = 'json/cascades.json'
osm_types_and_classes_json = 'json/osm_types_and_classes.json'
settings_json = 'json/settings.json'

with open('json/name_dictionary.json', encoding='utf-8') as _f:
    name_dictionary = json_load(_f)
with open(settings_json, encoding='utf-8') as _f:
    settings = json_load(_f)

language = settings['language']

project_file = 'photo_project.json'
readme = "readme.txt"

dir_source = 'Source'
dir_fullsize = 'Fullsize'
dir_monitor = 'Monitor'
dir_web = 'Web'
dir_panorama = 'Panorama'
dir_layered = 'Layered'
dir_unsorted = '_unsorted'
def json(response):
    '''Get dict from json response.'''
    return json_load(response.read().decode())
def _get_data(view_index):
    with open(view_index) as f:
        return json_load(f)
def parse(self):
    with open(expanduser(self._config_path), 'r') as config_file:
        config = json_load(config_file)
    return config
def parse_layer(self, layer_json):
    layer_dict = json_load(layer_json)
    return TileStache.Config._parseConfigLayer(layer_dict, self.config, self.dirpath)
def _cache_load(self, cache_file):
    with open(join(self._cache_path, cache_file), 'rb') as read_stream:
        return json_load(read_stream)
from discord.ext import commands
import discord
from codecs import open
from json import load as json_load
import locale
from requests import get
from datetime import datetime
from re import sub

from cogs.utils import Defaults

with open('config.json', 'r', encoding='utf8') as f:
    config = json_load(f)
prefix = config['prefix']
ksoft_auth = config['ksoft_authentication']

locale.setlocale(locale.LC_ALL, '')


class Konverter(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.bot_has_permissions(embed_links=True)
    @commands.cooldown(1, 2, commands.BucketType.guild)
    @commands.command(aliases=['fahrenheittocelcius'])
    async def ftc(self, ctx, tall):
        """Converts a temperature from Fahrenheit to Celsius"""
from distutils.core import setup
from json import load as json_load

LONG_DESCRIPTION = None
try:
    LONG_DESCRIPTION = open('README.md').read()
except:
    pass

DETAILS = None
with open('details.json') as file:
    DETAILS = json_load(file)

setup(name=DETAILS['name'],
      packages=DETAILS['packages'],
      version=DETAILS['version'],
      description=DETAILS['description'],
      long_description=LONG_DESCRIPTION,
      author=DETAILS['author'],
      author_email=DETAILS['author_email'],
      url=DETAILS['url'],
      license=DETAILS['license'],
      platforms=DETAILS['platforms'])
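# A details.json consistent with the keys consumed by the setup() call above;
# every value is a placeholder, not taken from the source.
#
# {
#     "name": "examplepkg",
#     "packages": ["examplepkg"],
#     "version": "0.1.0",
#     "description": "Example package",
#     "author": "Jane Doe",
#     "author_email": "jane@example.com",
#     "url": "https://example.com",
#     "license": "MIT",
#     "platforms": ["any"]
# }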
    cmd_args = parse_commandline_arguments()
    if not cmd_args:
        instruction = "\n\nFunctions:\n" + "\n".join(
            f"'{name}' - {func.__doc__}" for name, func in FUNCS.items())
        print(instruction)
        return
    FUNCS.get(cmd_args.function)(cmd_args)


# Load Configuration
CURRENT_DIR_PATH = getcwd()
CONFIG_FILE_PATH = os_path.join(CURRENT_DIR_PATH, 'qsb-config.json')
if os_path.exists(CONFIG_FILE_PATH):
    with open(CONFIG_FILE_PATH, mode='r') as config:
        SETTINGS = json_load(config)
else:
    logger.warning(f'Configuration file "{CONFIG_FILE_PATH}" not found!\n\n')
    SETTINGS = configure()

FUNCS = {
    'github': github_push,
    'local': start_local_server,
    'backup': make_backup
}

if __name__ == "__main__":
    main()
"""This file provides Python access to the local data files""" from json import load as json_load from os.path import dirname, join __location__ = dirname(__file__) DATA_DIRECTORY = join(dirname(__location__), 'data') KEYWORDS_FILE = join(DATA_DIRECTORY, 'keywords.json') KEYWORDS = [] with open(KEYWORDS_FILE, 'r') as keywords_file: KEYWORDS = json_load(keywords_file)