def _save_coupon(self, json):
    """Persist a coupon parsed from *json* into the coupon DB and log it.

    Missing fields fall back to benign defaults ('' or 0).
    Returns True unconditionally.
    """
    # Coupon code
    code = json.get('Code', '')
    # Coupon expiration lives under Tags.ExpiresOn when present
    expires = json.get('Tags', {}).get('ExpiresOn', 0)
    # Coupon name
    name = json.get('Name', '')
    # Coupon price
    price = json.get('Price', '')
    # Add the coupon to the DB
    self.coupon_db.insert({
        'coupon_id': code,
        'expires': expires,
        'name': name,
        'price': price
    })
    log_string = "Coupon #%s - $%s: %s" % (code, price, name)
    self.logger.warning(log_string, extra={'action': 'Coupon'})
    return True
def load_from_json(json):
    """Build a Person (or age-specific subclass) from a JSON dict.

    Dispatches on json["type"] (Baby/Teenager/Adult/Senior) when present;
    an unknown type falls back to a plain Person.

    Raises:
        Exception: when *json* is not a dict or a required field is missing.
    """
    if not isinstance(json, dict):
        raise Exception("json is not valid")
    required = ["id", "eyes_color", "genre", "date_of_birth",
                "first_name", "last_name", "is_married_to", "children"]
    if not all(field in json for field in required):
        raise Exception("JSON missing fields")
    # Constructor arguments shared by Person and all subclasses
    args = (json["id"], str(json["first_name"]), json["date_of_birth"],
            json["genre"], json["eyes_color"], json["is_married_to"],
            str(json["last_name"]), json["children"])
    if "type" in json:
        try:
            cls = {"Baby": Baby,
                   "Teenager": Teenager,
                   "Adult": Adult,
                   "Senior": Senior}[json["type"]]
            return cls(*args)
        except KeyError:
            # Unknown type string: fall through to the generic Person
            pass
    # default: return a plain Person
    return Person(*args)
def connect_fpl_api():
    """Fetch FPL bootstrap-static data and return a per-player DataFrame.

    Joins the element/position/team lookup tables, casts numeric columns
    to float, and adds a value_minutes metric; the result is sorted by
    value_season descending.
    """
    api_url = 'https://fantasy.premierleague.com/api/bootstrap-static/'
    # 'data' rather than 'json' to avoid shadowing the stdlib json module
    data = requests.get(api_url).json()
    elements_df = pd.DataFrame(data['elements'])
    elements_types_df = pd.DataFrame(data['element_types'])
    teams_df = pd.DataFrame(data['teams'])
    # Map lookup-table ids onto human-readable names
    elements_df['position'] = elements_df.element_type.map(
        elements_types_df.set_index('id').singular_name)
    elements_df['team'] = elements_df.team.map(teams_df.set_index('id').name)
    elements_df['name'] = elements_df['first_name'] + ' ' + elements_df['second_name']
    # .copy() so the column assignments below do not hit a view
    # (SettingWithCopyWarning)
    final_df = elements_df[['name', 'team', 'position', 'total_points',
                            'selected_by_percent', 'now_cost', 'minutes',
                            'transfers_in', 'value_season', 'goals_scored',
                            'assists', 'clean_sheets', 'creativity',
                            'creativity_rank', 'threat', 'threat_rank',
                            'influence', 'influence_rank', 'ict_index',
                            'ict_index_rank', 'element_type',
                            'penalties_missed', 'points_per_game',
                            'bonus', 'bps']].copy()
    final_df['name'] = final_df['name'].astype(str)
    final_df['team'] = final_df['team'].astype(str)
    for col in ('creativity', 'threat', 'ict_index', 'points_per_game',
                'influence', 'value_season'):
        final_df[col] = final_df[col].astype(float)
    final_df = final_df.sort_values(by=['value_season'],
                                    ascending=False).reset_index(drop=True)
    final_df['value_minutes'] = (final_df['value_season'] / final_df['minutes']) * 100
    return final_df
def loadValuesFromJSON(self, values):
    """Load values for this Param (and its nested items) from *values*.

    *values* must be a dict keyed by this param's id; every matched item
    id is consumed from the inner dict, and any leftover key raises.

    Raises:
        ParamExceptions.WrongValue: '401' on non-dict input, '407' when
            this param's id is absent, '403' on an unrecognized key.
    """
    if not isinstance(values, dict):
        raise ParamExceptions.WrongValue(
            '401', str(values) + ' not correct for ' + str(self.id))
    if str(self.id) not in [str(key) for key in values.keys()]:
        raise ParamExceptions.WrongValue('407', str(self.id) + ' not in input')
    json = values[self.id]
    if not isinstance(json, dict):
        raise ParamExceptions.WrongValue(
            '401', str(json) + ' not correct for ' + str(self.id))
    # Iterate over a snapshot of the keys: entries are deleted while
    # matching, and deleting during live dict iteration raises in Py3.
    for key in list(json.keys()):
        for it in self.items:
            if str(it.id) == str(key):
                if isinstance(it, Param):
                    # Nested params recurse with their own sub-dict
                    it.loadValuesFromJSON({str(key): json[key]})
                else:
                    it.setValue(json[key])
                del json[key]
                break
    if len(json) > 0:
        # dict_keys is not subscriptable in Py3; take the first leftover key
        raise ParamExceptions.WrongValue(
            '403', str(next(iter(json))) + ' not correct for ' + str(self.id))
def connect_fpl_api():
    """Fetch FPL bootstrap-static data and return a compact player DataFrame.

    Joins the element/position/team lookup tables and adds a value_minutes
    metric; the result is sorted by value_season descending.
    """
    api_url = 'https://fantasy.premierleague.com/api/bootstrap-static/'
    # 'data' rather than 'json' to avoid shadowing the stdlib json module;
    # the original also had a dead bare `json.keys()` statement here.
    data = requests.get(api_url).json()
    elements_df = pd.DataFrame(data['elements'])
    elements_types_df = pd.DataFrame(data['element_types'])
    teams_df = pd.DataFrame(data['teams'])
    # Map lookup-table ids onto human-readable names
    elements_df['position'] = elements_df.element_type.map(
        elements_types_df.set_index('id').singular_name)
    elements_df['team'] = elements_df.team.map(teams_df.set_index('id').name)
    elements_df['name'] = elements_df['first_name'] + ' ' + elements_df['second_name']
    # .copy() so the assignments below do not target a view
    final_df = elements_df[[
        'name', 'team', 'position', 'total_points', 'selected_by_percent',
        'now_cost', 'minutes', 'transfers_in', 'value_season'
    ]].copy()
    final_df['value_season'] = final_df['value_season'].astype(float)
    final_df = final_df.sort_values(by=['value_season'],
                                    ascending=False).reset_index(drop=True)
    final_df['value_minutes'] = (final_df['value_season'] /
                                 final_df['minutes']) * 100
    return final_df
def __init__(self, title="No Title", author="No Author",
             release_year="No Release Year", url="No URL", json=None):
    """Initialise from an iTunes-style *json* dict when given, otherwise
    from the keyword arguments. Missing JSON fields fall back to the same
    placeholder defaults as the keyword arguments."""
    self.json = json
    if self.json is not None:
        self.title = json.get("collectionName", "No Title")
        self.author = json.get("artistName", "No Author")
        # Keep only the year (first four characters) of the release date
        if "releaseDate" in json:
            self.release_year = json["releaseDate"][0:4]
        else:
            self.release_year = "No Release Year"
        self.url = json.get("collectionViewUrl", "No URL")
    else:
        self.title = title
        self.author = author
        self.release_year = release_year
        self.url = url
def update(self):
    """Handle a form/file/JSON update request and store the outcome dict
    in self.result."""
    try:
        json = self.request.json
    except ValueError:
        json = None
    file_ = self.request.params.get('file')
    if self.form_submit() and file_ is None:
        # BUG FIX: assign the local `result` (not self.result) so the
        # final `self.result = result` below does not raise NameError
        # on this branch.
        result = {
            'status': 'error',
            'msg': 'Ein unerwarteter Fehler ist aufgetreten'
        }
    elif file_ is not None:
        created, route_name = self.handle_upload(file_)
        transaction.savepoint()
        result = self.resource_data_item(created, route_name)
        log_with_user(user_data_log.info, self.request.user,
                      'Datei in %s %s hochgeladen.',
                      self.title.split(' ')[0], self.context.id)
    elif json and next(iter(json)) != 'attachments':
        # The payload carries exactly one field: (name, value)
        key, value = next(iter(json.items()))
        error = self.save(key, value)
        if error:
            result = {'status': 'error', 'msg': error}
        else:
            log_with_user(user_data_log.info, self.request.user,
                          '%s %s bearbeitet.',
                          self.title.split(' ')[0], self.context.id)
            result = {'status': 'success'}
    else:
        if not self.context.id:
            transaction.savepoint()
        result = self.get_result()
    self.result = result
def __init__(self, title="No Title", author="No Author",
             release_year="No Release Year", url="No Url", json=None):
    """Initialise from an iTunes lookup *json* dict when given, otherwise
    from the keyword arguments.

    NOTE(review): when *json* is given, artistName/releaseDate are
    accessed directly and a record without any known name field leaves
    self.title unset — presumably upstream guarantees these keys; verify
    against callers.
    """
    if json is None:
        self.title = title
        self.author = author
        self.release_year = release_year
        self.url = url
    else:
        # Prefer the most specific name field available
        if 'trackName' in json:
            self.title = json['trackName']
        elif 'collectionName' in json:
            self.title = json['collectionName']
        elif 'trackCensoredName' in json:
            self.title = json['trackCensoredName']
        elif 'collectionCensoredName' in json:
            self.title = json['collectionCensoredName']
        self.author = json['artistName']
        # Year only, e.g. "2019-05-01" -> "2019"
        self.release_year = json['releaseDate'].split("-")[0]
        try:
            self.url = json['trackViewUrl']
        except KeyError:
            # Fall back to the collection-level URL
            self.url = json['collectionViewUrl']
def __compare_projects_map(self, db, json):
    """Compare the projects coming from db and from a json file in eclipse.

    Logs mismatches in both directions; raises when a db project is
    entirely missing from the JSON file.
    """
    ds_map_db = {}
    ds_map_json = {
        "git": "scm",
        "pipermail": "mls",
        "gerrit": "scr",
        "bugzilla": "its"
    }
    for ds in ds_map_json:
        ds_map_db[ds_map_json[ds]] = ds
    db_projects = []
    dss = db.keys()
    # Check that all db data is in the JSON file
    for ds in dss:
        for repository in db[ds]:
            # A repository could be in more than one project. But we get only one.
            project = db[ds][repository]
            if project not in db_projects:
                db_projects.append(project)
            if project not in json:
                # BUG FIX: the message had no %s placeholder for `project`,
                # and the bare `raise` outside an except block would have
                # raised RuntimeError instead of a meaningful error.
                logging.error("Project not found in JSON %s", project)
                raise RuntimeError("Project not found in JSON " + project)
            else:
                if ds == 'mls':
                    # mbox paths are published as mailman listinfo URLs
                    repo_mls = repository.split("/")[-1]
                    repo_mls = repo_mls.replace(".mbox", "")
                    repository = 'https://dev.eclipse.org/mailman/listinfo/' + repo_mls
                if ds_map_db[ds] not in json[project]:
                    logging.error("db repository not found in json %s", repository)
                elif repository not in json[project][ds_map_db[ds]]:
                    logging.error("db repository not found in json %s", repository)
    for project in json.keys():
        if project not in db_projects:
            logging.debug("JSON project %s not found in db", project)
    # Check that all JSON data is in the database
    for project in json:
        for ds in json[project]:
            if ds not in ds_map_json:
                continue  # meta
            for repo in json[project][ds]:
                if ds == 'pipermail':
                    repo_mls = repo.split("/")[-1]
                    repo = "/mnt/mailman_archives/%s.mbox/%s.mbox" % (repo_mls, repo_mls)
                if repo not in db[ds_map_json[ds]]:
                    logging.debug("Not found repository in db %s %s", repo, ds)
    logging.debug("Number of db projects: %i", len(db_projects))
    logging.debug("Number of json projects: %i (>=%i)", len(json), len(db_projects))
def __compare_projects_map(self, db, json):
    """Compare the projects coming from db and from a json file in eclipse.

    Logs mismatches in both directions; raises NotFoundError when a db
    project is entirely missing from the JSON file.
    """
    ds_map_db = {}
    ds_map_json = {
        "git": "scm",
        "pipermail": "mls",
        "gerrit": "scr",
        "bugzilla": "its"
    }
    for ds in ds_map_json:
        ds_map_db[ds_map_json[ds]] = ds
    db_projects = []
    dss = db.keys()
    # Check that all db data is in the JSON file
    for ds in dss:
        for repository in db[ds]:
            # A repository could be in more than one project. But we get only one.
            project = db[ds][repository]
            if project not in db_projects:
                db_projects.append(project)
            if project not in json:
                # BUG FIX: the message had no %s placeholder, so `project`
                # was silently dropped by the logging call.
                logger.error("Project not found in JSON %s", project)
                raise NotFoundError("Project not found in JSON " + project)
            else:
                if ds == 'mls':
                    # mbox paths are published as mailman listinfo URLs
                    repo_mls = repository.split("/")[-1]
                    repo_mls = repo_mls.replace(".mbox", "")
                    repository = 'https://dev.eclipse.org/mailman/listinfo/' + repo_mls
                if ds_map_db[ds] not in json[project]:
                    logger.error("db repository not found in json %s", repository)
                elif repository not in json[project][ds_map_db[ds]]:
                    logger.error("db repository not found in json %s", repository)
    for project in json.keys():
        if project not in db_projects:
            logger.debug("JSON project %s not found in db", project)
    # Check that all JSON data is in the database
    for project in json:
        for ds in json[project]:
            if ds not in ds_map_json:
                continue  # meta
            for repo in json[project][ds]:
                if ds == 'pipermail':
                    repo_mls = repo.split("/")[-1]
                    repo = "/mnt/mailman_archives/%s.mbox/%s.mbox" % (repo_mls, repo_mls)
                if repo not in db[ds_map_json[ds]]:
                    logger.debug("Not found repository in db %s %s", repo, ds)
    logger.debug("Number of db projects: %i", len(db_projects))
    logger.debug("Number of json projects: %i (>=%i)", len(json), len(db_projects))
def cachemaps():
    """Flask endpoint: cache a named map location from a JSON POST body.

    Requires name/lat/lng in the payload; zoom is optional (default 17).
    Returns the cached-location JSON, or an HTTP 400 on a bad payload.
    """
    logger.info("Location add request received")
    # 'payload' rather than 'json' to avoid shadowing the stdlib module
    payload = request.get_json()
    # All three mandatory keys must be present
    if not {'name', 'lat', 'lng'} <= payload.keys():
        return 'Error: Invalid POST format', 400
    newJSON = mapcache.cacheLocation(payload['name'], payload['lat'],
                                     payload['lng'],
                                     payload.get('zoom', 17))
    logger.info("Location Added")
    return newJSON
def validate_json_keys(json):
    """Return True when *json* contains the Name, Price and Address keys.

    Prints which required key is missing and returns False otherwise.
    """
    # Check the keys in order so the message names the first missing one.
    # BUG FIX: the Address branch used to print "Price" (copy-paste error).
    for required in ('Name', 'Price', 'Address'):
        if required not in json:
            print("key : %s is not present" % required)
            return False
    return True
def set_srss(group, json):
    """Sets SRS values in Dat HDF from either full sweeplogs or minimally json which contains SRS_{#} keys"""
    # The character at index 4 of an 'SRS_<n>' key is the instrument number
    srs_numbers = [k[4] for k in json.keys() if k.startswith('SRS')]
    for srs_num in srs_numbers:
        if f'SRS_{srs_num}' not in json.keys():
            logger.error(f'No "SRS_{srs_num}" found in json')  # Should not get to here
            continue
        srs_data = srs_from_json(json, srs_num)  # Converts to my standard
        ntuple = data_to_NamedTuple(srs_data, SRStuple)  # Puts data into named tuple
        srs_group = group.require_group(f'srss')  # Make sure there is an srss group
        HDU.set_attr(srs_group, f'srs{srs_num}', ntuple)  # Save in srss group
def get_absolute_path_to_key(json, key):
    """Return a dotted path (e.g. "a.b.c") locating *key* inside nested
    dict *json*, or None when it cannot be found.

    Non-dict values terminate the search; when several subtrees contain
    the key, the path through the last matching sibling wins (matching
    the original behaviour).
    """
    if not isinstance(json, dict):
        return None
    if key in json:
        return key
    found = None
    for child_key, child_value in json.items():
        suffix = get_absolute_path_to_key(child_value, key)
        if suffix is not None:
            found = "{}.{}".format(child_key, suffix)
    return found
def ParamMultiFromJSON(json):
    """Deserialize a ParamMulti (with its nested items) from a JSON dict."""
    id = json['id']
    label = json.get('label')
    trigger = json.get('trigger', {})
    items = []
    # Each child item is dispatched on its declared 'type'
    for item in json.get('items', []):
        kind = item['type']
        if kind == 'Param':
            items.append(ParamFromJSON(item))
        elif kind == 'ParamMulti':
            items.append(ParamMultiFromJSON(item))
        else:
            items.append(ConfigElementFromJSON(item))
    return ParamMulti(id, label=label, items=items, trigger=trigger)
def ParamMultiFromJSON(json):
    """Build a ParamMulti instance from its JSON representation."""
    # Known item types map to their builders; anything else is a
    # plain config element.
    builders = {'Param': ParamFromJSON, 'ParamMulti': ParamMultiFromJSON}
    items = [builders.get(item['type'], ConfigElementFromJSON)(item)
             for item in json.get('items', [])]
    return ParamMulti(json['id'],
                      label=json.get('label'),
                      items=items,
                      trigger=json.get('trigger', {}))
def batch_patch(id):
    """PATCH endpoint for a batch.

    Validates the payload against the batch schema, requires sku_id to
    reference an existing sku, guards sku_id *changes* behind force=true,
    then applies all updatable fields and redirects to the updated batch.
    """
    try:
        # must be batch patch, where json["id"] is prefixed and equals id
        json = batch_patch_schema.extend({"id": All(prefixed_id("BAT"), id)})(request.json)
        forced = forced_schema(request.args).get("force")
    except MultipleInvalid as e:
        return problem.invalid_params_response(e)
    existing_batch = Batch.from_mongodb_doc(db.batch.find_one({"_id": id}))
    if not existing_batch:
        return problem.missing_batch_response(id)
    if json.get("sku_id"):
        existing_sku = db.sku.find_one({"_id": json['sku_id']})
        if not existing_sku:
            return problem.invalid_params_response(
                problem.missing_resource_param_error(
                    "sku_id", "must be an existing sku id"))
    if (existing_batch.sku_id and "sku_id" in json
            and existing_batch.sku_id != json["sku_id"] and not forced):
        return problem.dangerous_operation_unforced_response(
            "sku_id",
            "The sku of this batch has already been set. Can not change without force=true."
        )
    # Collect all updatable fields into a single atomic $set instead of
    # five separate round-trips to the database.
    updates = {field: json[field]
               for field in ("props", "name", "sku_id",
                             "owned_codes", "associated_codes")
               if field in json}
    if updates:
        db.batch.update_one({"_id": id}, {"$set": updates})
    updated_batch = Batch.from_mongodb_doc(db.batch.find_one({"_id": id}))
    return BatchEndpoint.from_batch(updated_batch).redirect_response(False)
def is_valid_json(json):
    """ Checks if a cleaner or ignore file is valid """
    # The presence of the ignore marker key decides which validator
    # applies; plain `in` avoids the redundant list(json.keys()) copy.
    if _JSON_IGNORE in json:
        return is_valid_ignore(json)
    return is_valid_cleaner(json)
def checksecjson(json, requirefield=["SecurityGroup"], requirekeys=["AWS", "GIT", "SSH"]):
    """
    Check if security JSON file has all required fields and if the key file
    has the correct permissions

    :param requirefield: which is the required field
    :param requirekeys: ssh keys to be verified
    """
    missing = [k for k in requirefield if k not in json]
    if len(missing):
        raise IOError("Your json file is missing the following keys " + str(missing))
    if not len(requirekeys):
        return
    if "AccessKeys" not in json:
        raise KeyError("You must specify access keys " + str(requirekeys))
    missing = [k for k in requirekeys if k not in json["AccessKeys"]]
    if len(missing):
        raise IOError("Your json file keys are missing the following Access Keys " + str(missing))
    # items() replaces the Python2-only iteritems()
    for key, val in json["AccessKeys"].items():
        if key not in requirekeys:
            continue
        if not os.path.exists(os.path.expanduser(val["KeyFile"])):
            raise IOError("Keyfiles must exist " + val["KeyFile"])
        # Private keys must be owner-only (permission mask X00)
        if "------" not in lD.run_quiet("ls -l " + val["KeyFile"]):
            raise IOError(
                "Your private keyfile " + val["KeyFile"] + " " + key +
                " needs to have X00 permissions (400 or 600).")
    return True
def __init__(self, json):
    """Build the node/edge sets from a JSON graph description and
    normalise the source/target ids of edges pointing at record
    structures."""
    self.nodes = set()
    self.edges = set()
    for section in json.keys():
        if section == "jgEdges":
            self.edges.update(Edge(e) for e in json[section])
        elif section == "jgNodes":
            self.nodes.update(Node(n) for n in json[section])
    # Index nodes by id for the rewrites below
    nodemap = {n.ID: n for n in self.nodes}
    # rename source and target of lessAtom edges with src or dst being a record structure
    for e in self.edges:
        if e.rel == "LessAtoms":
            if nodemap[e.src].Type == "isProtocolRule":
                e.src += ":act"
            if nodemap[e.dst].Type == "isProtocolRule":
                e.dst += ":act"
    # rename source and target of edges where the node is no record structure (e.g. missingNodePrem)
    nodetypes = ["missingNodePrem", "missingNodeConc"]
    for e in self.edges:
        src_root = e.src.split(':')[0]
        if nodemap[src_root].Type in nodetypes:
            e.src = src_root
        dst_root = e.dst.split(':')[0]
        if nodemap[dst_root].Type in nodetypes:
            e.dst = dst_root
def checksecjson(json, requirefield=["SecurityGroup"], requirekeys=["AWS", "GIT", "SSH"]):
    """Check that a security JSON dict has all required fields and that
    each referenced key file exists with owner-only (X00) permissions.

    :param requirefield: required top-level fields
    :param requirekeys: access-key entries to be verified
    """
    missing = [k for k in requirefield if k not in json]
    if len(missing):
        raise IOError("Your json file is missing the following keys " + str(missing))
    if not len(requirekeys):
        return
    if "AccessKeys" not in json:
        raise KeyError("You must specify access keys " + str(requirekeys))
    missing = [k for k in requirekeys if k not in json["AccessKeys"]]
    if len(missing):
        raise IOError(
            "Your json file keys are missing the following Access Keys " +
            str(missing))
    # items() replaces the Python2-only iteritems()
    for key, val in json["AccessKeys"].items():
        if key not in requirekeys:
            continue
        if not os.path.exists(os.path.expanduser(val["KeyFile"])):
            raise IOError("Keyfiles must exist " + val["KeyFile"])
        # Private keys must be owner-only (permission mask X00)
        if "------" not in lD.run_quiet("ls -l " + val["KeyFile"]):
            raise IOError("Your private keyfile " + val["KeyFile"] + " " +
                          key +
                          " needs to have X00 permissions (400 or 600).")
    return True
def json_print_value(json, key, number):
    """Recursively search *json* (nested dicts/lists) for *key*.

    Returns a (value, depth) tuple for the first match, where depth is
    the 1-based nesting level, or the string 'not found' otherwise.
    """
    number = number + 1  # number is the current nesting depth
    if isinstance(json, dict):
        for k in json.keys():
            if k == key:
                return json.get(key), number
            s = json_print_value(json.get(k), key, number)
            if s != 'not found':
                return s
    elif isinstance(json, list):
        # Modernised from the Python2-only `print "is a list"` statement
        print("is a list")
        for element in json:
            s = json_print_value(element, key, number)
            if s != 'not found':
                return s
    return 'not found'
def write_xml(json, directory):
    """Write the key/value pairs of *json* as <string> entries of an
    Android strings resource file inside *directory*."""
    with open(os.path.join(directory, resfile), mode='w', encoding="utf-8") as f:
        write_opening(f)
        # items() iterates key and value together instead of key + get()
        for key, val in json.items():
            # NOTE(review): values are written unescaped; XML-special
            # characters (&, <, >) in val would corrupt the file —
            # confirm inputs are pre-sanitised.
            f.write(' <string name="%s">%s</string>\n' % (key, val))
        write_closing(f)
def fromJsonFragment(json, nameFromParent):
    """Reconstruct a Branch aggregator from its JSON fragment."""
    if not (isinstance(json, dict) and hasKeys(json.keys(), ["entries", "data"])):
        raise JsonFormatException(json, "Branch")
    if not (json["entries"] in ("nan", "inf", "-inf")
            or isinstance(json["entries"], numbers.Real)):
        raise JsonFormatException(json, "Branch.entries")
    entries = float(json["entries"])
    if not isinstance(json["data"], list):
        raise JsonFormatException(json, "Branch")
    values = []
    # Each entry carries its own type tag and is built by that factory
    for i, x in enumerate(json["data"]):
        if not (isinstance(x, dict) and hasKeys(x.keys(), ["type", "data"])):
            raise JsonFormatException(json, "Branch.data")
        if not isinstance(x["type"], basestring):
            raise JsonFormatException(x, "Branch.data {0} type".format(i))
        factory = Factory.registered[x["type"]]
        values.append(factory.fromJsonFragment(x["data"], None))
    return Branch.ed(entries, *values)
def parseData(self, json, audio_features, video_features_file=None,
              context_video_features_file=None, text_bert_embeddings=None,
              context_bert_embeddings=None):
    '''
    Prepares json data into lists

    data_input = [
        (utterance:string, speaker:string, context:list_of_strings,
         context_speakers:list_of_strings, utterance_audio:features, ...)
    ]
    data_output = [ sarcasm_tag:int ]
    '''
    self.data_input, self.data_output = [], []
    for idx, ID in enumerate(json.keys()):
        sample = json[ID]
        # Optional modality inputs contribute None when absent
        row = (
            sample["utterance"],
            sample["speaker"],
            sample["context"],
            sample["context_speakers"],
            audio_features[ID] if audio_features else None,
            video_features_file[ID][()] if video_features_file else None,
            context_video_features_file[ID][()] if context_video_features_file else None,
            text_bert_embeddings[idx] if text_bert_embeddings else None,
            context_bert_embeddings[idx] if context_bert_embeddings else None,
            sample["show"],
            str(ID),
        )
        self.data_input.append(row)
        self.data_output.append(int(sample["sarcasm"]))
def character(update, context):
    """Telegram handler: look up an AniList character by name and reply
    with its image and description."""
    message = update.effective_message
    search = message.text.split(' ', 1)
    if len(search) == 1:
        update.effective_message.reply_animation(
            CHARACTER_IMG,
            caption="""Format : /character < character name >""",
            parse_mode="markdown")
        return
    search = search[1]
    variables = {'query': search}
    json = requests.post(url,
                         json={'query': character_query,
                               'variables': variables}).json()
    if 'errors' in json:
        update.effective_message.reply_text('Character not found')
        return
    if json:
        json = json['data']['Character']
        msg = f"* {json.get('name').get('full')}*(`{json.get('name').get('native')}`) \n"
        description = f"{json['description']}"
        site_url = json.get('siteUrl')
        char_name = f"{json.get('name').get('full')}"
        msg += shorten(description, site_url)
        # BUG FIX: buttons must exist on both reply paths below; it was
        # previously defined only when an image was present, raising
        # NameError for characters without one.
        buttons = [[InlineKeyboardButton(
            "Save as Waifu ❣️",
            callback_data=f"xanime_fvrtchar={char_name}")]]
        image = json.get('image', None)
        if image:
            image = image.get('large')
            update.effective_message.reply_photo(
                photo=image,
                caption=msg.replace('<b>', '</b>'),
                reply_markup=InlineKeyboardMarkup(buttons),
                parse_mode=ParseMode.MARKDOWN)
        else:
            update.effective_message.reply_text(
                msg.replace('<b>', '</b>'),
                reply_markup=InlineKeyboardMarkup(buttons),
                parse_mode=ParseMode.MARKDOWN)
def fromJsonFragment(json, nameFromParent):
    """Reconstruct an Index aggregator from its JSON fragment."""
    if not (isinstance(json, dict)
            and hasKeys(json.keys(), ["entries", "sub:type", "data"])):
        raise JsonFormatException(json, "Index")
    if not (json["entries"] in ("nan", "inf", "-inf")
            or isinstance(json["entries"], numbers.Real)):
        raise JsonFormatException(json, "Index.entries")
    entries = float(json["entries"])
    if not isinstance(json["sub:type"], basestring):
        raise JsonFormatException(json, "Index.sub:type")
    # One shared factory rebuilds every sub-aggregator
    factory = Factory.registered[json["sub:type"]]
    if not isinstance(json["data"], list):
        raise JsonFormatException(json, "Index.data")
    values = [factory.fromJsonFragment(x, None) for x in json["data"]]
    return Index.ed(entries, *values).specialize()
def fromJsonFragment(json, nameFromParent):
    """Reconstruct an UntypedLabel aggregator from its JSON fragment."""
    if not (isinstance(json, dict) and hasKeys(json.keys(), ["entries", "data"])):
        raise JsonFormatException(json, "UntypedLabel")
    if not (json["entries"] in ("nan", "inf", "-inf")
            or isinstance(json["entries"], numbers.Real)):
        raise JsonFormatException(json, "UntypedLabel.entries")
    entries = float(json["entries"])
    if not isinstance(json["data"], dict):
        raise JsonFormatException(json, "UntypedLabel.data")
    pairs = {}
    # Each labelled sub-aggregator carries its own type tag
    for k, v in json["data"].items():
        if not (isinstance(v, dict) and hasKeys(v.keys(), ["type", "data"])):
            raise JsonFormatException(k, "UntypedLabel.data {0}".format(v))
        factory = Factory.registered[v["type"]]
        pairs[k] = factory.fromJsonFragment(v["data"], None)
    return UntypedLabel.ed(entries, **pairs).specialize()
def fromJsonFragment(json, nameFromParent):
    """Reconstruct a Deviate aggregator from its JSON fragment."""
    if not (isinstance(json, dict)
            and hasKeys(json.keys(), ["entries", "mean", "variance"], ["name"])):
        raise JsonFormatException(json, "Deviate")

    def numeric(raw, label):
        # JSON numbers plus the special string encodings of nan/inf
        if raw in ("nan", "inf", "-inf") or isinstance(raw, numbers.Real):
            return float(raw)
        raise JsonFormatException(raw, label)

    entries = numeric(json["entries"], "Deviate.entries")
    name = json.get("name", None)
    if name is not None and not isinstance(name, basestring):
        raise JsonFormatException(json["name"], "Deviate.name")
    mean = numeric(json["mean"], "Deviate.mean")
    variance = numeric(json["variance"], "Deviate.variance")
    out = Deviate.ed(entries, mean, variance)
    out.quantity.name = nameFromParent if name is None else name
    return out.specialize()
def execute(self, context):
    """Blender operator: fetch the Poly asset listing, cache the JSON and
    thumbnails in the temp dir, and rebuild previews."""
    if __package__ not in context.user_preferences.addons.keys():
        return {'FINISHED'}
    tmp_path = get_temp_path(context)
    if tmp_path.exists():
        # Clear any previously cached files; materialise the glob first
        # so deletions don't interleave with directory iteration.
        filepath_list = list(tmp_path.glob('**/*'))
        for path in filepath_list:
            os.remove(str(path))
    else:
        tmp_path.mkdir(parents=True)
    props = context.window_manager.poly
    preferences = context.user_preferences.addons[__package__].preferences
    payload = self.getPayload(preferences, props)
    self.recreatePreviews(props)
    r = requests.get("https://poly.googleapis.com/v1/assets", params=payload)
    json = r.json()
    if 'assets' not in json:
        return {'INTERFACE'}
    # BUG FIX: nextPageToken is absent on the last page of results;
    # .get avoids a KeyError there.
    props.nextPageToken = json.get('nextPageToken', '')
    # Save JSON
    json_path = tmp_path.joinpath(props.category_type + ".json")
    with json_path.open("w", encoding='utf-8') as f:
        f.write(r.text)
    self.writeThumbnails(json, tmp_path)
    return {'FINISHED'}
def _check_and_update(config, json):
    """Merge *json* into *config*, rejecting any key that *config* does
    not already define."""
    # Collect unknown keys first; report the first offender (same key
    # the original's in-loop raise would have named).
    unknown = [k for k in json.keys() if k not in config]
    if unknown:
        raise Exception(
            "Unknown model config `{}`, all model configs: {}".format(
                unknown[0], config.keys()))
    config.update(json)
def fromJsonFragment(json, nameFromParent):
    """Reconstruct a Fraction aggregator from its JSON fragment."""
    if not (isinstance(json, dict)
            and hasKeys(json.keys(),
                        ["entries", "sub:type", "numerator", "denominator"],
                        ["name", "sub:name"])):
        raise JsonFormatException(json, "Fraction")
    if not (json["entries"] in ("nan", "inf", "-inf")
            or isinstance(json["entries"], numbers.Real)):
        raise JsonFormatException(json, "Fraction.entries")
    entries = float(json["entries"])
    name = json.get("name", None)
    if name is not None and not isinstance(name, basestring):
        raise JsonFormatException(json["name"], "Fraction.name")
    if not isinstance(json["sub:type"], basestring):
        raise JsonFormatException(json, "Fraction.type")
    factory = Factory.registered[json["sub:type"]]
    subName = json.get("sub:name", None)
    if subName is not None and not isinstance(subName, basestring):
        raise JsonFormatException(json["sub:name"], "Fraction.sub:name")
    # Numerator and denominator share the same sub-aggregator type
    numerator = factory.fromJsonFragment(json["numerator"], subName)
    denominator = factory.fromJsonFragment(json["denominator"], subName)
    out = Fraction.ed(entries, numerator, denominator)
    out.quantity.name = nameFromParent if name is None else name
    return out.specialize()
def main():
    """CLI entry point: parse arguments, load the result JSON and plot
    the trainings."""
    parser = argparse.ArgumentParser(description='plot results from annSim')
    parser.add_argument('--resultdir', metavar='resultdir', type=str,
                        help='resultdir for loading data files including figures')
    parser.add_argument('--savedir', metavar='savedir', type=str,
                        help='Directory to write the pdf figure to')
    parser.add_argument('--filename', metavar='filename', type=str,
                        help='Filename for pdf figure')
    parser.add_argument('--addgabelresults', metavar='filename',
                        type=str2bool, help='Filename for pdf figure',
                        default=False)
    args = parser.parse_args()
    # Bail out when invoked without any arguments at all
    if not len(sys.argv) > 1:
        print("not enough arguments")
        parser.print_help()
        sys.exit(1)
    json = readJson(args.resultdir)
    plotTrainings(json.keys(), json, args.savedir, 5)
def fromJsonFragment(json, nameFromParent):
    """Reconstruct a Bag aggregator from its JSON fragment.

    Fixes the original's duplicated/unreachable `json["values"] is None`
    branches (the same test appeared three times) while keeping behaviour
    identical.
    """
    if not (isinstance(json, dict)
            and hasKeys(json.keys(), ["entries", "values", "range"], ["name"])):
        raise JsonFormatException(json, "Bag")
    if json["entries"] in ("nan", "inf", "-inf") or isinstance(json["entries"], numbers.Real):
        # NOTE(review): unlike the sibling deserializers, entries is kept
        # raw (no float()) — preserved as-is; confirm intent.
        entries = json["entries"]
    else:
        raise JsonFormatException(json["entries"], "Bag.entries")
    if isinstance(json.get("name", None), basestring):
        name = json["name"]
    elif json.get("name", None) is None:
        name = None
    else:
        raise JsonFormatException(json["name"], "Bag.name")
    if json["values"] is None:
        values = None
    elif isinstance(json["values"], list):
        values = {}
        for i, nv in enumerate(json["values"]):
            if not (isinstance(nv, dict) and hasKeys(nv.keys(), ["w", "v"])):
                raise JsonFormatException(nv, "Bag.values {0}".format(i))
            # Weight must be numeric (or the special nan/inf strings)
            if nv["w"] in ("nan", "inf", "-inf") or isinstance(nv["w"], numbers.Real):
                n = float(nv["w"])
            else:
                raise JsonFormatException(nv["w"], "Bag.values {0} n".format(i))
            # Value may be a number, a string, or a tuple of numbers
            if nv["v"] in ("nan", "inf", "-inf") or isinstance(nv["v"], numbers.Real):
                v = floatOrNan(nv["v"])
            elif isinstance(nv["v"], basestring):
                v = nv["v"]
            elif isinstance(nv["v"], (list, tuple)):
                for j, d in enumerate(nv["v"]):
                    if d not in ("nan", "inf", "-inf") and not isinstance(d, numbers.Real):
                        raise JsonFormatException(d, "Bag.values {0} v {1}".format(i, j))
                v = tuple(map(floatOrNan, nv["v"]))
            else:
                raise JsonFormatException(nv["v"], "Bag.values {0} v".format(i))
            values[v] = n
    else:
        raise JsonFormatException(json["values"], "Bag.values")
    if isinstance(json["range"], basestring):
        range = json["range"]
    else:
        raise JsonFormatException(json["range"], "Bag.range")
    out = Bag.ed(entries, values, range)
    out.quantity.name = nameFromParent if name is None else name
    return out.specialize()
def fromJsonFragment(json, nameFromParent):
    """Reconstruct a Sum aggregator from its JSON fragment."""
    if not (isinstance(json, dict)
            and hasKeys(json.keys(), ["entries", "sum"], ["name"])):
        raise JsonFormatException(json, "Sum")
    raw_entries = json["entries"]
    if raw_entries in ("nan", "inf", "-inf") or isinstance(raw_entries, numbers.Real):
        entries = float(raw_entries)
    else:
        raise JsonFormatException(raw_entries, "Sum.entries")
    # name is optional; when present it must be a string
    name = json.get("name", None)
    if name is not None and not isinstance(name, basestring):
        raise JsonFormatException(json["name"], "Sum.name")
    raw_sum = json["sum"]
    if raw_sum in ("nan", "inf", "-inf") or isinstance(raw_sum, numbers.Real):
        sum = float(raw_sum)
    else:
        raise JsonFormatException(raw_sum, "Sum.sum")
    out = Sum.ed(entries, sum)
    out.quantity.name = nameFromParent if name is None else name
    return out.specialize()
def fromJsonFragment(json, nameFromParent):
    """Reconstruct a Deviate aggregator from its JSON fragment."""
    if not (isinstance(json, dict)
            and hasKeys(json.keys(), ["entries", "mean", "variance"], ["name"])):
        raise JsonFormatException(json, "Deviate")

    def as_float(raw, label):
        # Accept real numbers and the string encodings of nan/inf
        if raw in ("nan", "inf", "-inf") or isinstance(raw, numbers.Real):
            return float(raw)
        raise JsonFormatException(raw, label)

    # Field checks happen in the original order: entries, name, mean, variance
    entries = as_float(json["entries"], "Deviate.entries")
    name = json.get("name", None)
    if name is not None and not isinstance(name, basestring):
        raise JsonFormatException(json["name"], "Deviate.name")
    mean = as_float(json["mean"], "Deviate.mean")
    variance = as_float(json["variance"], "Deviate.variance")
    out = Deviate.ed(entries, mean, variance)
    out.quantity.name = nameFromParent if name is None else name
    return out.specialize()
def pixel_CSV_stat_header(name_scene, x, y, nb_samples=-1):
    """ create a csv file from a rawls repertory by indicating the pixel to study """
    if name_scene not in scene_list:
        return {"error": errors[0]}
    json = search_png(name_scene)
    # search_png signals failure via an "error" key in its result
    if "error" in json:
        return json
    # Context manager closes the image handle (the original leaked it)
    with Image.open(os.path.join(images_path, name_scene + ".png")) as im:
        original_image_width, original_image_height = im.size
    if (original_image_width < x) or (original_image_height < y):
        return {"error": errors[1]}
    if (x < 0) or (y < 0):
        return {"error": errors[2]}
    create_CSV(folder_rawls_path + "/" + name_scene, x, y, "/tmp", nb_samples)
    if nb_samples == -1:
        # -1 means "all samples": count the .rawls files in the scene dir
        nb_samples = sum(
            1 for name in os.listdir(folder_rawls_path + "/" + name_scene)
            if name.endswith(".rawls"))
    CSV_file = "/tmp/" + name_scene + "_" + str(x) + "_" + str(y) + ".csv"
    return [CSV_file, nb_samples]
def fromJsonFragment(json, nameFromParent):
    """Reconstruct a Sum aggregator from its JSON fragment."""
    if not (isinstance(json, dict)
            and hasKeys(json.keys(), ["entries", "sum"], ["name"])):
        raise JsonFormatException(json, "Sum")

    def to_float(raw, label):
        # Real numbers and the string encodings of nan/inf are accepted
        if raw in ("nan", "inf", "-inf") or isinstance(raw, numbers.Real):
            return float(raw)
        raise JsonFormatException(raw, label)

    entries = to_float(json["entries"], "Sum.entries")
    name = json.get("name", None)
    if name is not None and not isinstance(name, basestring):
        raise JsonFormatException(json["name"], "Sum.name")
    sum = to_float(json["sum"], "Sum.sum")
    out = Sum.ed(entries, sum)
    out.quantity.name = nameFromParent if name is None else name
    return out.specialize()
def formatRecord(json):
    """Recursively rename every key of *json* in place using formatName(key, "field").

    Nested dicts are processed recursively; the (mutated) dict is returned.
    """
    # BUG FIX: snapshot the keys first. Popping entries while iterating a live
    # keys() view raises "dictionary changed size during iteration" on
    # Python 3, and re-inserting renamed keys mid-iteration is fragile anywhere.
    for key in list(json.keys()):
        new_key = formatName(key, "field")
        json[new_key] = json.pop(key)
        if isinstance(json[new_key], dict):
            json[new_key] = formatRecord(json[new_key])
    return json
def assign_groups(request):
    """POST endpoint. Payload shape: {username: [entry, ...]} where each entry
    has 'date', 'time', 'group' and 'location' ({'lat', 'long'}).

    Records one GroupLocalization per entry and increments (or creates) the
    LocationDensity bucket for the entry's geofence.
    """
    json = request.data
    if request.method == 'POST':
        # The payload has a single top-level key: the username.
        # next(iter(...)) works on both Python 2 and 3; keys()[0] is Py2-only.
        username = next(iter(json))
        data = json[username]
        # PERF FIX: the user is the same for every entry — query once,
        # not once per entry inside the loop.
        user = User.objects.get(username=username)
        profile = UserProfile.objects.get(user=user)
        for entry in data:
            # Keep only HH:MM from the incoming time string.
            hours, minutes = entry['time'].split(':')[:2]
            hhmm = hours + ':' + minutes
            timestamp = datetime.combine(
                datetime.strptime(entry['date'], '%d/%m/%Y').date(),
                datetime.strptime(hhmm, '%H:%M').time()
            )
            GroupLocalization.objects.create(
                user=profile,
                timestamp=timestamp,
                group=str(entry['group'])
            )
            location = assign_geofence(entry['location']['lat'], entry['location']['long'])
            # Bump the density counter for this (timestamp, location) bucket.
            if LocationDensity.objects.filter(timestamp=timestamp, location=location).exists():
                loc_obj = LocationDensity.objects.get(timestamp=timestamp, location=location)
                loc_obj.density += 1
                loc_obj.save()
            else:
                LocationDensity.objects.create(timestamp=timestamp, location=location, density=1)
        return Response({"message": "Got some data!", "data": request.data})
    return Response({"message": "Data format inaccurate !!!!!"})
def fromJsonFragment(json, nameFromParent):
    # Rebuild a Select aggregator (including its nested sub-aggregator) from JSON.
    # NOTE(review): `basestring` implies this file targets Python 2.
    if isinstance(json, dict) and hasKeys(
            json.keys(), ["entries", "sub:type", "data"], ["name"]):
        # "nan"/"inf"/"-inf" are the JSON-safe spellings of non-finite floats.
        if json["entries"] in ("nan", "inf", "-inf") or isinstance(
                json["entries"], numbers.Real):
            entries = float(json["entries"])
        else:
            raise JsonFormatException(json, "Select.entries")
        # "name" is optional: a string, or absent/None (inherits nameFromParent below).
        if isinstance(json.get("name", None), basestring):
            name = json["name"]
        elif json.get("name", None) is None:
            name = None
        else:
            raise JsonFormatException(json["name"], "Select.name")
        # "sub:type" names the registered factory used to decode "data".
        if isinstance(json["sub:type"], basestring):
            factory = Factory.registered[json["sub:type"]]
        else:
            raise JsonFormatException(json, "Select.type")
        cut = factory.fromJsonFragment(json["data"], None)
        out = Select.ed(entries, cut)
        # An explicit name in the fragment wins over the parent-supplied one.
        out.quantity.name = nameFromParent if name is None else name
        return out.specialize()
    else:
        raise JsonFormatException(json, "Select")
def add_cloudcast(index, json):
    """Build an infolabels dict for one cloudcast entry.

    index: position of the track in the listing (used for count/tracknumber).
    json:  decoded cloudcast object.
    Returns {} when the entry has no usable name.
    """
    logger.debug('add_cloudcast %s', ','.join(json.keys()))
    if STR_NAME not in json or not json[STR_NAME]:
        logger.warn('name not found in track %s', index)
        return {}
    json_name = json[STR_NAME]
    logger.debug('parsing track %s', json_name)
    # Defaults for every optional field.
    json_key = ''
    json_year = 0
    json_date = ''
    json_length = 0
    json_userkey = ''    # NOTE(review): extracted but not used in infolabels
    json_username = ''
    json_image = ''      # NOTE(review): extracted but not used in infolabels
    json_comment = ''
    json_genre = ''
    if STR_KEY in json and json[STR_KEY]:
        json_key = json[STR_KEY]
        logger.debug('Key is %s', json_key)
    if STR_CREATEDTIME in json and json[STR_CREATEDTIME]:
        json_created = json[STR_CREATEDTIME]
        # Only the date part (YYYY-MM-DD) of the timestamp is parsed.
        json_structtime = time.strptime(json_created[0:10], '%Y-%m-%d')
        json_year = int(time.strftime('%Y', json_structtime))
        # BUG FIX: format was '%d/%m/Y', which printed a literal 'Y'
        # instead of the four-digit year.
        json_date = time.strftime('%d/%m/%Y', json_structtime)
    if STR_AUDIOLENGTH in json and json[STR_AUDIOLENGTH]:
        json_length = json[STR_AUDIOLENGTH]
    if STR_USER in json and json[STR_USER]:
        json_user = json[STR_USER]
        if STR_KEY in json_user and json_user[STR_KEY]:
            json_userkey = json_user[STR_KEY]
        if STR_NAME in json_user and json_user[STR_NAME]:
            json_username = json_user[STR_NAME]
    if STR_PICTURES in json and json[STR_PICTURES]:
        json_pictures = json[STR_PICTURES]
        if thumb_size in json_pictures and json_pictures[thumb_size]:
            json_image = json_pictures[thumb_size]
    if STR_DESCRIPTION in json and json[STR_DESCRIPTION]:
        json_comment = json[STR_DESCRIPTION].encode('ascii', 'ignore')
    if STR_TAGS in json and json[STR_TAGS]:
        # Join tag names into a comma-separated genre string.
        for json_tag in json[STR_TAGS]:
            if STR_NAME in json_tag and json_tag[STR_NAME]:
                if json_genre != '':  # was '<>' (Py2-only spelling of '!=')
                    json_genre += ', '
                json_genre = json_genre + json_tag[STR_NAME]
    infolabels = {
        STR_COUNT: index,
        STR_TRACKNUMBER: index,
        STR_TITLE: json_name,
        STR_ARTIST: json_username,
        STR_DURATION: json_length,
        STR_YEAR: json_year,
        STR_DATE: json_date,
        STR_COMMENT: json_comment,
        STR_GENRE: json_genre,
        STR_KEY: json_key
    }
    return infolabels
def get_suggested_meaning(self, traumae_word):
    """Return the meaning recorded for *traumae_word*, or "N/A" if no entry matches."""
    table = self.get_traumae_json()
    # Each row looks like ["pixi","research","Expression","sure","head"]:
    # the word is at index 0, its meaning at index 1.
    for row in table.values():
        if row[0] == traumae_word:
            return row[1]
    return "N/A"
def get_suggested_definition(self, english_word):
    """Return the traumae word(s) for *english_word*, comma-joined, or "?" if none."""
    matches = self.get_traumae_json_for_word(english_word)
    if not matches:
        return "?"
    return ", ".join(matches.keys())
def _bind_json(self, json):
    """Bind this Target from its JSON representation.

    Exactly one known store-type key must be present in *json*; its value is
    the nested representation handed to the parent class binder.
    """
    self.href = json.get('href')
    matching = [key for key in Target._store_types if key in json]
    if len(matching) != 1:
        self.binding_failed('invalid store entry: %s', json.keys())
    self.store_type = matching[0]
    super(Target, self)._bind_json(json[self.store_type])
def add_control_from_json(self, json): """Creates a Control object from a given json object.""" if 'type' not in json.keys(): print "no type. skipping object: %s" return else: obj = self.type_map[json['type']].from_json(json) self.add_control(obj)
def get_suggested_meaning(self, traumae_word):
    """Return the English meaning for *traumae_word*, or "N/A" if unknown."""
    table = self.get_traumae_json()
    # Each table row holds a record dict at index 0 with "adult"/"eng" keys.
    for row in table.values():
        record = row[0]
        if record["adult"] == traumae_word:
            return record["eng"]
    return "N/A"
def validateJSON(json):
    """Validates that the fields are all present, not empty, and do not only
    contain whitespace; delegates the final check to validateEmailAddresses."""
    # Reject payloads with extra or missing fields up front.
    if len(json.keys()) != len(VALID_POST_PARAMETERS):
        return False
    for field in VALID_POST_PARAMETERS:
        if field not in json or len(json[field]) < 1 or json[field].isspace():
            return False
    return validateEmailAddresses(json)
def key_dict_clean(json):
    """Flatten {name: attrs-dict} into a list of attrs dicts carrying a "name" key.

    None passes through unchanged. Note: the inner dicts are mutated in place.
    """
    if json is None:
        return None
    result = []
    for name, attrs in json.items():
        attrs["name"] = name
        result.append(attrs)
    return result
def load_node_rec(self, node, json):
    """Recursively populate *node*'s children from *json*, keys in sorted order:
    dict values become Node subtrees, anything else a PropertyNode leaf."""
    for key in sorted(json.keys()):
        value = json[key]
        if isinstance(value, dict):
            subtree = Node(key)
            node.children.append(subtree)
            self.load_node_rec(subtree, value)
        else:
            node.children.append(PropertyNode(key, value))
def get_list_information(self, listids=None, forcereload=False):
    """Return ActiveCampaignList objects for the requested list ids.

    listids: iterable of list ids to keep; None or empty means "all lists".
    forcereload: passed through to the caching helper.
    """
    # BUG FIX: the ids were previously joined into one comma-separated string
    # and tested with substring membership, so listid '1' wrongly matched
    # '11,21'. A set gives exact-id matching; falsy listids keeps the old
    # "return everything" behaviour (','.join([]) was falsy too).
    wanted = set(listids) if listids else None
    result = []
    json = self._get_list_information_helper(forcereload)
    for k in json.keys():
        json[k]['listid'] = json[k]['id']
        if wanted is None or json[k]['listid'] in wanted:
            result.append(ActiveCampaignList(**json[k]))
    return result
def loadValuesFromJSON(self,values):
    """Load this parameter group's values from a {id: {child_id: value}} mapping.

    Raises ParamExceptions.WrongValue:
      '401' when *values* (or the nested entry) is not a dict,
      '407' when this parameter's id is missing from the input,
      '403' when the input contains keys no child item claims.
    """
    if not isinstance(values,dict):
        raise ParamExceptions.WrongValue('401',str(values) + ' not correct for ' + str(self.id))
    # Presence check compares ids as strings so int/str ids match.
    if str(self.id) not in [str(key) for key in values.keys()]:
        raise ParamExceptions.WrongValue('407',str(self.id) + ' not in input')
    # NOTE(review): the lookup below uses the raw id, while the check above
    # stringified keys — mixed int/str ids would KeyError here. Confirm ids
    # are consistently strings.
    json = values[self.id]
    if not isinstance(json,dict):
        raise ParamExceptions.WrongValue('401',str(json) + ' not correct for ' + str(self.id))
    # Dispatch each entry to the matching child item; consumed entries are
    # deleted so leftovers can be reported afterwards.
    # NOTE(review): deleting while iterating json.keys() is only safe on
    # Python 2 (keys() returns a snapshot list); json.keys()[0] further down
    # is also Python-2-only.
    for key in json.keys():
        for it in self.items:
            if str(it.id) == str(key):
                if isinstance(it,Param):
                    # Nested Param group: recurse with a single-entry dict.
                    it.loadValuesFromJSON({str(key):json[key]})
                else:
                    it.setValue(json[key])
                del json[key]
                break
    # Anything left over was not recognized by any child item.
    if len(json)>0:
        raise ParamExceptions.WrongValue('403',str(json.keys()[0]) + ' not correct for ' + str(self.id))
def record_phase():
    """Append a game-phase entry to the stored record named by the payload's key."""
    try:
        payload = retrieve_data(request.params)
        record_id = list(payload)[0]  # single-key payload: {record_id: phase}
        record = database.DRECORD[record_id]
        record[PHA] += [payload[record_id]]
        database.DRECORD[record_id] = record
        return record
    except Exception:
        return "Fase dd jogo não foi gravado %s" % str(request.params.values())
def record_end():
    """Store the end-of-game value on the record named by the payload's key."""
    try:
        payload = retrieve_data(request.params)
        record_id = list(payload)[0]  # single-key payload: {record_id: end state}
        record = database.DRECORD[record_id]
        record[END] = payload[record_id]
        database.DRECORD[record_id] = record
        return record
    except Exception:
        return "Fim de jogo não foi gravado %s" % str(request.params.values())
def read():
    """Append a piece-movement entry to the record named by the payload's key."""
    try:
        payload = retrieve_data(request.params)
        record_id = list(payload)[0]  # single-key payload: {record_id: move}
        record = database.DRECORD[record_id]
        record[PEC] += [payload[record_id]]
        database.DRECORD[record_id] = record
        return record
    except Exception:
        return "Movimento de peça não foi gravado %s" % str(request.params.values())
def key_list_clean(json):
    """Turn {name: values} into [{"name": ..., "values": ...}]; None passes through."""
    if json is None:
        return None
    return [{"name": name, "values": values} for name, values in json.items()]
def _parse_sentence(json):
    """Build a Sentence from a dict: the ID entry plus one Language per locale key."""
    sentence = Sentence()
    sentence.set_id(json[ID])
    for locale in json.keys():
        if locale == ID:
            continue
        language = Language()
        language.set_locale(locale)
        language.set_value(json[locale])
        sentence.add_language(language)
    return sentence
def impl(context, async_operation):
    """Assert the async-operation response body carries exactly the expected keys."""
    body = context.request.json()
    if async_operation == 'upgrade':
        expected_keys = {'status', 'upgrade_to', 'upgraded', 'in_process',
                         'started_at', 'finished_at'}
    elif async_operation == 'restart':
        expected_keys = {'status', 'restarted', 'in_process',
                         'started_at', 'finished_at'}
    actual_keys = set(body.keys())
    assert actual_keys == expected_keys, \
        'Expected keys {0}, got {1}'.format(expected_keys, actual_keys)
def load_template():
    """Re-parse the bundled header files and replace the stored templates."""
    header_files = ['transheader.h', 'transproduct.h', 'supplier.h',
                    'customer.h', 'account.h', 'terminal.h']
    resources_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'resources')
    with DbConnectionManager() as connection:
        template_store = HeaderfileParsedTemplate(connection=connection)
        # Wipe the previously stored templates before reloading.
        template_store.remove()
        for header_file in header_files:
            parsed = HeaderParser.generate_tempate(os.path.join(resources_dir, header_file))
            # The parser returns a single-entry dict keyed by the template name.
            data_name = list(parsed)[0]
            template_store.save(data_name=data_name, data=parsed[data_name], version=1)
def __init__(self, name, port_type, json, resource_key):
    """Build a port: validate its type, derive the qualified name, and wrap
    every entry of *json* in a PortProperty."""
    self.name = name
    assert port_type in [Port.INPUT, Port.OUTPUT, Port.CONFIG]
    self.port_type = port_type
    if self.port_type == Port.CONFIG:
        # Config ports share one fixed suffix rather than embedding the name.
        self.qualified_name = resource_key + ".config_port"
    else:
        self.qualified_name = resource_key + "." + self.port_type + "." + self.name
    self.properties = {}
    for prop_name, prop_value in json.items():
        self.properties[prop_name] = PortProperty(prop_name, prop_value,
                                                  self.qualified_name, port_type)