def convert_dict_to_playjson(json, indentation): if isinstance(json, dict): print "Json.obj()", indentation += 1 for k, v in json.items(): print "" print "\t" * indentation, if v is None: sys.stdout.write('.addNull("' + k + '")') else: sys.stdout.write('.add("' + k + '", ') convert_dict_to_playjson(v, indentation) sys.stdout.write(")") sys.stdout.write(".build()") indentation -= 1 elif isinstance(json, list): print 'Json.arr()', indentation += 1 for v in json: print "" print "\t" * indentation, sys.stdout.write('.add(') convert_dict_to_playjson(v, indentation) sys.stdout.write(")") sys.stdout.write(".build()") indentation -= 1 elif isinstance(json, basestring): sys.stdout.write('"' + json.replace("\\", "\\\\") + '"') else: sys.stdout.write(str(json)) return
def _filter_out_links(json): if isinstance(json, dict): return {key: _filter_out_links(value) for key, value in json.items() if key != 'links'} elif isinstance(json, list): return [_filter_out_links(value) for value in json] else: return json
def parse(cls, api, json):
    """Build a stream-metadata model from a decoded JSON payload.

    Embedded interpolationType / observedProperty / unitOfMeasure entries are
    reduced to their self-link href; unknown embedded keys and all top-level
    keys are copied onto the instance verbatim.
    """
    def _self_href(entries):
        # first entry's _links.self.href, or None when any level is missing
        return entries[0].get('_links', {}).get('self', {}).get('href')

    stream_meta_data = cls(api)
    cls.fix_parse_misspellings(json)
    setattr(stream_meta_data, '_json', json)
    for k, v in json.items():
        if k != "_embedded":
            setattr(stream_meta_data, k, v)
            continue
        for ek, ev in v.items():
            if ek == "interpolationType":
                setattr(stream_meta_data, "interpolation_type", InterpolationType(_self_href(ev)))
            elif ek == "observedProperty":
                # Remove local vocab checks for now
                setattr(stream_meta_data, "observed_property", _self_href(ev))
            elif ek == "unitOfMeasure":
                # Remove local vocab checks for now
                setattr(stream_meta_data, "unit_of_measure", _self_href(ev))
            else:
                setattr(stream_meta_data, ek, ev)
                print("parse: %s, %s" % (ek, ev))
    return stream_meta_data
def parse_row_and_update_global_unique_keys(json, global_unique_keys):
    """Turn a row dict into (key, value) tuples, recording unseen keys.

    *global_unique_keys* is mutated in place (unseen keys appended in row
    order) and also returned for convenience.
    """
    for column in json:
        if column not in global_unique_keys:
            global_unique_keys.append(column)
    return list(json.items()), global_unique_keys
def remove_url_keys_from_json(json):
    """Return a deep copy of *json* where every dict key ending in "url" is dropped."""
    if isinstance(json, dict):
        cleaned = {}
        for name, entry in json.items():
            if name.endswith("url"):
                continue
            cleaned[name] = remove_url_keys_from_json(entry)
        return cleaned
    if isinstance(json, list):
        return [remove_url_keys_from_json(entry) for entry in json]
    return json
def gl_limits(self):
    """Return the GL_* limit values from this report's JSON data.

    GL_VERSION is truncated to its numeric prefix; free-text, removed-in-r8953
    and pixel-depth keys are hidden. Returns None when no JSON is available.
    """
    json = self.data_json()
    if json is None:
        return None
    hidden = {
        # free-text values, never treated as limits
        'GL_RENDERER', 'GL_EXTENSIONS',
        # deleted from the report in r8953, hidden for consistency
        'GL_MAX_COLOR_MATRIX_STACK_DEPTH',
        'GL_FRAGMENT_PROGRAM_ARB.GL_MAX_PROGRAM_ADDRESS_REGISTERS_ARB',
        'GL_FRAGMENT_PROGRAM_ARB.GL_MAX_PROGRAM_NATIVE_ADDRESS_REGISTERS_ARB',
        # pixel depths, not really correlated with device
        'GL_RED_BITS', 'GL_GREEN_BITS', 'GL_BLUE_BITS', 'GL_ALPHA_BITS',
        'GL_INDEX_BITS', 'GL_DEPTH_BITS', 'GL_STENCIL_BITS',
        'GL_ACCUM_RED_BITS', 'GL_ACCUM_GREEN_BITS', 'GL_ACCUM_BLUE_BITS',
        'GL_ACCUM_ALPHA_BITS',
    }
    limits = {}
    for name, value in json.items():
        if not name.startswith('GL_') or name in hidden:
            continue
        if name == 'GL_VERSION':
            m = re.match(r'^(\d+\.\d+).*', value)
            if m:
                limits[name] = '%s [...]' % m.group(1)
                continue
            # unparseable GL_VERSION falls through and is kept verbatim
        limits[name] = value
    return limits
def json_to_object(self, json, classname, inherit_from=None):
    """Convert decoded JSON into a dynamically created type (or list thereof).

    Dict keys are sanitized into valid Python names; nested dicts/lists become
    'QueryObjectSubElement' types. Non-container input is returned unchanged.
    ``inherit_from`` must be a tuple of bases to take effect.
    """
    cls = 'QueryObjectSubElement'
    if isinstance(json, list):
        return [self.json_to_object(entry, classname) for entry in json]
    if not isinstance(json, dict):
        return json
    attributes = {}
    for raw_key, value in json.items():
        attr = self.make_valid_python_variable_name(raw_key)
        if isinstance(value, list):
            attributes[attr] = [self.json_to_object(entry, cls) for entry in value]
        elif isinstance(value, dict):
            attributes[attr] = self.json_to_object(value, cls)
        else:
            attributes[attr] = value
    bases = inherit_from if isinstance(inherit_from, tuple) else ()
    return type(classname, bases, attributes)
def parse(cls, api, json):
    """Build a model instance from *json*; 'created_at' is decoded to a datetime."""
    ss = cls(api)
    for key, value in json.items():
        setattr(ss, key, parse_datetime(value) if key == 'created_at' else value)
    return ss
def build_json(self, params):
    """Normalize the 'json' payload of *params* for JSON requests.

    When a payload exists and the Content-Type header is application/json,
    empty-string values are replaced with None (in place). Returns the
    (possibly None) payload.
    """
    json = params.get("json")
    if json and params["headers"]["Content-Type"] == "application/json":
        for key in json:
            if json[key] == "":
                json[key] = None
        params["json"] = json
    return json
def parse(cls, api, json):
    """Generic model parse: stash the raw payload on _json and mirror every key."""
    result = cls(api)
    result._json = json
    for key in json:
        setattr(result, key, json[key])
    return result
def parse(cls, api, json):
    """Parse a friendship payload; 'connections' expands into the
    is_following / is_followed_by booleans, everything else is copied."""
    result = cls(api)
    for key, value in json.items():
        if key == 'connections':
            result.is_following = 'following' in value
            result.is_followed_by = 'followed_by' in value
        else:
            setattr(result, key, value)
    return result
def build_csv_from_json(json, data):
    """Render chore assignments as CSV rows "name,umich,chore,due-date".

    *json* maps chore -> assignee name; *data* is a list of person records
    with 'name' and 'umich' fields (exactly one match per assignee is
    required). Chores containing 'wednesday' are due next Wednesday, all
    others next Sunday. Rows are emitted sorted by chore.
    """
    rows = []
    for chore, name in sorted(json.items()):
        matches = [rec for rec in data if rec['name'] == name]
        assert len(matches) == 1
        due = get_next_wednesday() if 'wednesday' in chore else get_next_sunday()
        rows.append("%s,%s,%s,%s\n" % (name, matches[0]['umich'], chore, due))
    return ''.join(rows)
def flatten(json, parent_key='', sep='_'):
    """Flatten nested mappings into one OrderedDict with *sep*-joined keys.

    Fixed for Python 3.10+: ``collections.MutableMapping`` was removed from
    the top-level package; the ABC now comes from ``collections.abc``.

    :param json: mapping to flatten (nested mappings are descended into)
    :param parent_key: prefix accumulated from enclosing keys
    :param sep: separator joining nested key names
    :return: OrderedDict of flattened (key, value) pairs in traversal order
    """
    from collections.abc import MutableMapping  # collections.MutableMapping is gone in 3.10+
    items = []
    for k, v in json.items():
        new_key = parent_key + sep + str(k) if parent_key else k
        if isinstance(v, MutableMapping):
            items.extend(flatten(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return OrderedDict(items)
def findAllReferences(json):
    """Recursively register every dict carrying a "reference" key.

    Entries are stored in the module-level ``references`` map as
    reference -> node. Ported from Python 2: ``dict.has_key`` no longer
    exists and was replaced with the ``in`` operator. The exact-type
    ``type(...) is`` checks are preserved (subclasses are not matched).

    :param json: decoded JSON value (dicts/lists are walked, rest ignored)
    :return: None (mutates the global ``references`` mapping)
    """
    if type(json) is dict:
        if "reference" in json:
            references[json["reference"]] = json
        for key, value in json.items():
            findAllReferences(value)
    elif type(json) is list:
        for value in json:
            findAllReferences(value)
def parse(cls, api, json):
    """Parse a list payload into a List model.

    'user' is decoded via User.parse and 'created_at' into a datetime.
    NOTE(review): instantiates List(api) rather than cls(api), so subclasses
    still receive a plain List -- preserved as-is.
    """
    lst = List(api)
    for key, value in json.items():
        if key == 'user':
            value = User.parse(api, value)
        elif key == 'created_at':
            value = parse_datetime(value)
        setattr(lst, key, value)
    return lst
def parse(cls, api, json):
    """Parse a search result; 'created_at' and 'source' get special decoding."""
    result = cls()
    for key, value in json.items():
        if key == 'created_at':
            value = parse_search_datetime(value)
        elif key == 'source':
            value = parse_html_value(unescape_html(value))
        setattr(result, key, value)
    return result
def parse(cls, api, json):
    """Parse a direct message; sender/recipient become Users and
    created_at becomes a datetime."""
    dm = cls(api)
    for key, value in json.items():
        if key in ('sender', 'recipient'):
            value = User.parse(api, value)
        elif key == 'created_at':
            value = parse_datetime(value)
        setattr(dm, key, value)
    return dm
def load(self, json=None):
    """Load per-cloud parameter blocks, appending one CloudParam per entry.

    :param json: mapping cloud_name -> params; required
    :raises Exception: when *json* is None
    :return: the accumulated self.cloud_params list
    """
    if json is None:
        raise Exception("Incorrect JSON data!")
    for cloud_name, params in json.items():
        cloud = CloudParam(cloud_name)
        cloud.load(params)
        self.cloud_params.append(cloud)
    return self.cloud_params
def _gen(json):
    """Yield Fact records from a decoded facts.json mapping.

    Three historical layouts are supported:
    - new-style: {lang: {name: {"fact": ..., "author": ...}}}
    - newer-style: {lang: {name: message}} (no attribution; a falsy message
      marks an explicit deletion)
    - old-style: {name: message} in a single language

    NOTE(review): in the old-style branch ``lang`` is a free variable -- it
    must exist at module scope or this branch raises NameError; confirm
    against the original module before relying on it.
    """
    for k, v in json.items():
        if isinstance(v, dict):
            for name, message in v.items():
                if isinstance(message, dict):
                    # New-style facts.json with attribution
                    yield Fact(name=name, lang=k, message=message['fact'], author=message.get('author'))
                else:
                    # Newer-style facts.json with language but not attribution -- or explicit deletion of fact.
                    yield Fact(name=name, lang=k, message=message)
        else:
            # Old-style facts.json, single language
            yield Fact(name=k, lang=lang, message=v)
def sanitize_json(json):
    """Recursively make *json* JSON-serializable by stringifying Decimals.

    Fixes two defects in the original:
    - tuples: the code assigned ``json[i] = ...`` on a tuple, raising
      TypeError; tuples are now rebuilt immutably.
    - Decimal values nested inside lists/tuples were silently left as
      Decimal; they are now converted like dict values.

    Dicts and lists are sanitized in place (and returned); tuples and
    scalars are returned as new/unchanged values.
    """
    if isinstance(json, Decimal):
        return str(json)
    if isinstance(json, dict):
        for key, value in json.items():
            json[key] = sanitize_json(value)
        return json
    if isinstance(json, list):
        for i, item in enumerate(json):
            json[i] = sanitize_json(item)
        return json
    if isinstance(json, tuple):
        # tuples are immutable -- rebuild instead of assigning by index
        return tuple(sanitize_json(item) for item in json)
    return json
def replace_variable(self, json, variable, text): if isinstance(json, dict): return {k: self.replace_variable(v, variable, text) for k, v in json.items()} elif isinstance(json, list): return [self.replace_variable(i, variable, text) for i in json] elif isinstance(json, str): return json.replace('${' + variable + '}', text) else: return json
def build_html_from_json(json):
    """Render the chore table as a Bootstrap HTML snippet (rows sorted by chore)."""
    header = (
        '<div class="container"><div class="panel panel-default"><div class="panel-heading"> Chores must be done by midnight on <b>'
        + get_next_sunday()
        + '</b></div><table class="table table-striped"><tr><td><b>Chore</b></td><td><b>Name</b></td><td><b>Done?</b></td></tr>'
    )
    rows = [
        "<tr> <td>" + chore + "</td> <td>" + name + "</td> <td> </td> </tr>"
        for chore, name in sorted(json.items())
    ]
    return header + "".join(rows) + "</table></div></div>"
def parse(cls, api, json):
    """Parse a lookup/relation payload.

    Tweet-like 'value' entries (per the payload's 'kind') become Status
    models and 'results' becomes a Relation list; other keys copy verbatim.
    """
    result = cls(api)
    for key, value in json.items():
        if key == 'value' and json['kind'] in ['Tweet', 'LookedupStatus']:
            value = Status.parse(api, value)
        elif key == 'results':
            value = Relation.parse_list(api, value)
        setattr(result, key, value)
    return result
def findValues(self, json, dstMap):
    """Recursively harvest name/value pairs from *json* into *dstMap*.

    Any dict containing both 'name' and 'value' keys contributes one
    dstMap entry, with key and value ASCII-encoded.

    NOTE(review): the name/value branch fires once per non-container item
    of a qualifying dict, so the same pair is rewritten repeatedly --
    harmless but wasteful. ``.encode('ascii', 'ignore')`` produces *bytes*
    keys/values on Python 3 (this reads like ported Python 2 code);
    confirm downstream expectations before changing it.
    """
    for k,v in json.items():
        if isinstance(v, dict):
            self.findValues(v, dstMap)
        elif isinstance(v, list):
            for subV in v:
                self.findValues(subV, dstMap)
        elif 'name' in json and 'value' in json:
            key = json['name'].encode('ascii', 'ignore')
            value = json['value'].encode('ascii', 'ignore')
            dstMap[key] = value
def JSONUnicode2Str(self, json):
    """Coerce a (formerly unicode-keyed) mapping to str keys.

    Dict values get their inner keys str()-ed (inner values untouched);
    any other value is passed through ``map(str, ...)`` -- on Python 3
    that yields a lazy map object, preserved from the original.
    """
    converted = {}
    for key, value in json.items():
        if isinstance(value, dict):
            converted[str(key)] = {str(inner_key): inner_value for inner_key, inner_value in value.items()}
        else:
            converted[str(key)] = map(str, value)
    return converted
def recursively_find_uuids(json, uuids):
    """Collect every value stored under a 'uuid' key anywhere in *json*.

    Dict values recurse directly; list values recurse into their dict
    elements only. *uuids* (a set) is grown in place and returned.
    """
    for key, value in json.items():
        if key == 'uuid':
            uuids.add(value)
        elif isinstance(value, list):
            for entry in value:
                if isinstance(entry, dict):
                    uuids = recursively_find_uuids(entry, uuids)
        elif isinstance(value, dict):
            uuids = recursively_find_uuids(value, uuids)
    return uuids
def parse(cls, api, json):
    """Build a stream-results model from a decoded JSON payload.

    Fixes two defects in the original:
    - the branches were ``if`` / ``if``-``else`` instead of
      ``if``/``elif``/``else``, so a parsed 'results' list was immediately
      overwritten by the raw value via the dangling else;
    - the 'stream' value was stored under the misspelled attribute
      ``steam``; it is now stored as ``stream``.
    """
    stream = cls(api)
    setattr(stream, '_json', json)
    for k, v in json.items():
        if k == "results":
            setattr(stream, "results", UnivariateResult.parse_list(api, v))
        elif k == "stream":
            setattr(stream, "stream", Stream.parse(api, v))
        else:
            setattr(stream, k, v)
    return stream
def parse(cls, api, json):
    """Parse a user payload keeping only the whitelisted attributes
    (id, _links, _embedded); the raw payload is stashed on _json."""
    user = cls(api)
    user._json = json
    for key in ('id', '_links', '_embedded'):
        if key in json:
            setattr(user, key, json[key])
    return user
def _eval_json(json): if isinstance(json, dict): return {k: LspCommandProcessor._eval_json(v) for k, v in json.items()} elif isinstance(json, list): return [LspCommandProcessor._eval_json(i) for i in json] elif isinstance(json, str): match = re.match(r'>>>(.*)', json) if match is None: return json return eval(match.group(1)) # noqa: P204 else: return json
def controller(work,pnode,svica,NX_g,yql,iterate,query):
    """Run the currency-graph pipeline for 20 currencies, then re-run it
    for the currency with the lowest net value.

    For N = 1..20: build a URL, query it, map the JSON into node points,
    build a graph (NX_g), score it (svica/work) and record the float net
    value and the secondary result under the currency code from ``r``.

    :return: (mydic, netdic) keyed by currency code.

    NOTE(review): the ``for``/``else`` is a plain for-else (the else always
    runs -- no break exists); ``pos`` and the final ``A`` are computed but
    unused; ``r[N]`` indexes from 1 so r[0] ("AED") is never used by the
    main loop.
    """
    mydic={}
    mylst=[]
    netdic={}
    N=0
    # currency codes indexed by iteration number (1-based via N += 1)
    r=["AED", "EUR", "JPY", "CNY", "GBP", "CAD", "KRW", "AUD", "NZD", "CHF", "NOK", "SEK", "SGD", "HKD", "CNY", "INR", "RUB", "MXN", "TRY", "AED","BRL"]
    for x in range(0,20):
        N+=1
        url=iterate(N,url="")
        fir=yql(url)
        json = query(fir)
        # lazy map object; consumed by NX_g
        node_pnts = map(pnode, json.items())
        nxdg = NX_g(node_pnts)
        P = svica(nxdg)
        A=work(N,P, nxdg)
        net_p=float(A[0])
        V=r[N]
        mylst.append(net_p)
        # record under N, then immediately re-key by currency code
        netdic[N]=net_p
        netdic[V] = netdic.pop(N)
        L=A[1]
        mydic[N]=L
        mydic[V] = mydic.pop(N)
    else:
        # always executed (no break above): re-run for the minimum net value
        minval=min(mylst)
        whr=mylst.index(minval)
        whr+=1
        pos=r[whr]
        N=(whr)
        url=iterate(N,url="")
        fir=yql(url)
        json = query(fir)
        node_pnts = map(pnode, json.items())
        nxdg = NX_g(node_pnts)
        P = svica(nxdg)
        A=work(N,P, nxdg)
    return mydic, netdic
def parse_json(json):
    """Parse a type-registry JSON (dawn/webgpu-style) into typed wrappers.

    Each entry is dispatched to a parser class by its 'category'; names
    starting with '_' are skipped. Objects, structures and callbacks are
    then cross-linked, every category is sorted by canonical name, and
    structures are topologically sorted before metadata update.

    :return: {'types': name -> parsed, 'by_category': category -> [parsed]}
    """
    category_to_parser = {
        'bitmask': BitmaskType,
        'enum': EnumType,
        'native': NativeType,
        'callback': CallbackType,
        'object': ObjectType,
        'structure': StructureType,
    }
    types = {}
    by_category = {}
    for name in category_to_parser.keys():
        by_category[name] = []
    for (name, json_data) in json.items():
        if name[0] == '_':
            # leading underscore marks registry-internal entries
            continue
        category = json_data['category']
        parsed = category_to_parser[category](name, json_data)
        by_category[category].append(parsed)
        types[name] = parsed
    for obj in by_category['object']:
        link_object(obj, types)
    for struct in by_category['structure']:
        link_structure(struct, types)
    for callback in by_category['callback']:
        link_callback(callback, types)
    for category in by_category.keys():
        by_category[category] = sorted(by_category[category], key=lambda typ: typ.name.canonical_case())
    # structures must be emitted in dependency order
    by_category['structure'] = topo_sort_structure(by_category['structure'])
    for struct in by_category['structure']:
        struct.update_metadata()
    return { 'types': types, 'by_category': by_category }
def _name_to_scale(json, select_names, scale_key, scale_factor): return { name: ( scale_key is None and 1.0 or scale_key_to_scale[scale_key] ) * scale_factor for name, scale_key_to_scale in json.items() if ( select_names is None or name in select_names ) and ( scale_key is None or scale_key in scale_key_to_scale ) }
def dump_values(json):
    """Flatten a nested JSON dict into a flat list of leaf values.

    A ``None`` marker is emitted before descending into each nested dict
    or list; dicts inside lists expand recursively, other list items are
    appended as-is.
    """
    out = []
    for value in json.values():
        if isinstance(value, dict):
            out.append(None)
            out.extend(dump_values(value))
        elif isinstance(value, list):
            out.append(None)
            for element in value:
                if isinstance(element, dict):
                    out.extend(dump_values(element))
                else:
                    out.append(element)
        else:
            out.append(value)
    return out
def data_from_json(json, id_values=None, form=0, ignore=("name", "path")):
    """Recursively collect key/value pairs from *json*, skipping *ignore* keys.

    :param json: nested dict to walk
    :param id_values: accumulator; created per-call when None
    :param form: 0 -> dict key->value, 1 -> list of ``code_id`` objects,
                 2 -> list of ``[str(key), str(value)]`` pairs
    :param ignore: keys to skip at every level
    :return: the accumulator

    Fixed: the *ignore* default was a mutable list (the classic shared
    mutable-default pitfall); a tuple preserves the membership semantics
    without the hazard and remains backward-compatible.
    """
    if id_values is None:
        id_values = {} if form == 0 else []
    for key, val in json.items():
        if key in ignore:
            continue
        if isinstance(val, dict):
            data_from_json(val, id_values, form, ignore)
        elif form == 0:
            id_values[key] = val
        elif form == 1:
            id_values.append(code_id(str(key), str(val)))
        elif form == 2:
            id_values.append([str(key), str(val)])
    return id_values
def from_json(json: typing.Any):
    """Rebuild objects from decoded JSON.

    Dicts tagged with a registered '__class__' are reconstructed through the
    construction factory; lists and tuples map element-wise (always yielding
    a list); plain dicts map value-wise; scalars pass through unchanged.
    """
    is_dict = type(json) == dict
    if is_dict and '__class__' in json and construction.factories.has_class(json['__class__']):
        reader = Reader(construction.factories.get_builder(json['__class__']), json, {})
        reader.read()
        return reader.obj
    if type(json) in (list, tuple):
        return [from_json(element) for element in json]
    if is_dict:
        return {name: from_json(value) for name, value in json.items()}
    return json
def __init__(self, client, clone=None, json=None):
    """Initialize a session-like object, optionally seeding it from an
    existing allocation (*clone*, looked up via the client) or from a raw
    *json* payload.

    :param client: backend client used for the clone lookup
    :param clone: Id of an existing entry to copy manifest/requests from
    :param json: mapping whose values carry a 'request' list

    NOTE(review): both seeding loops update the *whole* source mapping into
    _manifest once per key (redundant after the first pass) and the loop
    variable ``k`` is unused -- presumably intentional shortcuts; confirm
    before refactoring.
    """
    self._id = uuid.uuid4()
    self._client = client
    self._allocated = False
    self._requests = list()
    self._manifest = dict()
    self._state = State.CREATED.name
    if clone:
        # fetch the existing allocation's entries from the backend
        ret = self._client.active(Id=clone).json()
        for s in ret:
            for k, v in s.items():
                self._manifest.update(s)
                self._requests.extend(v['request'])
    if json:
        for k, v in json.items():
            self._manifest.update(json)
            self._requests.extend(v['request'])
def parse(cls, api, json):
    """Build a stream model from a decoded JSON payload.

    'resulttype' is decoded into ``result_type``; the '_embedded' section's
    organisation/groups/metadata entries get model-specific parsing (other
    embedded keys are dropped); remaining top-level keys copy verbatim.

    Fixed: the second branch was an ``if`` instead of ``elif``, so the
    dangling ``else`` also stored the raw 'resulttype' value on the
    instance; the chain is now ``if``/``elif``/``else``.
    """
    stream = cls(api)
    setattr(stream, '_json', json)
    for k, v in json.items():
        if k == "resulttype":
            setattr(stream, "result_type", StreamResultType(v))
        elif k == "_embedded":
            for ek, ev in v.items():
                if ek == "organisation":
                    setattr(stream, "organisations", Organisation.parse_list(api, ev))
                elif ek == "groups":
                    setattr(stream, "groups", Group.parse_list(api, ev))
                elif ek == "metadata":
                    # metadata is also a list; only the first entry is used
                    setattr(stream, "metadata", StreamMetaData.parse(api, ev[0]))
        else:
            setattr(stream, k, v)
    return stream
def reader(self, json):
    '''reads the json file, adds any new keys and files to the list

    NOTES: some keys may be defined outside of target files

    For each (key, value): unseen keys whose value is a dict are skipped
    entirely (after peeking at the first inner value); other unseen keys
    are printed and initialized with a one-element set and a zero counter;
    already-known keys get the value added to their set and their counter
    incremented.

    NOTE(review): this block's statement nesting was reconstructed from a
    collapsed source line -- in particular the placement of the first
    ``continue`` relative to ``print(value)`` should be verified against
    the original file.
    '''
    for key, value in json.items():
        if key not in self.key_list:
            if type(
                    value
            ) == dict:  # messy code to deal with a single item stored in a dict
                value = next(itertools.islice(value.values(), 0,
                                              None))  # get the first value
                continue
            print(value)
            self.key_list[key] = set([value])
            self.key_counter[key] = 0
            continue
        self.key_list[key].add(value)
        self.key_counter[key] += 1
def json2yaml(json, level=0):
    """Render a dict as simple YAML-ish text.

    Nested dicts recurse one indent level; list items become '- item'
    lines. Values are concatenated directly, so values and list items must
    already be strings. Exact ``type`` checks are preserved from the
    original (subclasses of dict/list are treated as scalars).
    """
    indent = "  " * level
    pieces = []
    for key, value in json.items():
        pieces.append(indent + key + ":")
        if type(value) is dict:
            pieces.append("\n" + json2yaml(value, level + 1))
        elif type(value) is list:
            for item in value:
                pieces.append("\n" + indent + "  " + "- " + item)
            pieces.append("\n")
        else:
            pieces.append(" " + value + "\n")
    return "".join(pieces)
def print_json(metric_prefix, ts, json, tags):
    """Recursively emit the numeric leaves of *json* as metrics.

    Ported from Python 2: ``long`` and ``basestring`` no longer exist, so
    numbers match (int, float) and strings match str -- same accepted
    values as before on Py2 ints/longs/str/unicode.

    :param metric_prefix: slash-joined path accumulated so far
    :param ts: timestamp forwarded to print_metric
    :param json: decoded JSON value (None is silently skipped)
    :param tags: opaque tags forwarded to print_metric
    :return: None (unknown types are reported on stderr)
    """
    if json is None:
        pass
    elif isinstance(json, (int, float)):
        print_metric(metric_prefix, ts, json, tags)
    elif isinstance(json, str):
        # not collecting strings (yet)
        pass
    elif isinstance(json, list):
        for i, item in enumerate(json):
            print_json("%s/%d" % (metric_prefix, i), ts, item, tags)
    elif isinstance(json, dict):
        for k, v in json.items():
            print_json("%s/%s" % (metric_prefix, k), ts, v, tags)
    else:
        sys.stderr.write("unknown type %s of json object to print\n" % type(json))
def save_json(json):
    """Download every resource referenced by *json* into ./2264.

    :param json: mapping filename-fragment -> path-segment; each pair is
                 joined onto ``base_res_url`` (module-level) to form the
                 resource URL, and the URL's last path component becomes
                 the local filename.

    Files that already exist locally are skipped. Only connection errors
    are handled; other request failures propagate.

    NOTE(review): only checks status_code == 200 -- redirects/other 2xx
    are silently dropped; confirm that is intended.
    """
    dir = "./2264"
    if not os.path.exists(dir):
        os.mkdir(dir)
    for (k, v) in json.items():
        res_url = base_res_url + v + "/" + k
        # local name = last URL path component
        filename = res_url.split('/')[-1]
        try:
            response = requests.get(res_url)
            if response.status_code == 200:
                file_path = '{0}/{1}'.format(dir, filename)
                if not os.path.exists(file_path):
                    with open(file_path, 'wb') as f:
                        f.write(response.content)
                else:
                    print('Already Downloaded', file_path)
        except requests.ConnectionError:
            print('failed to save json')
def _replace_anon_tag(self, json, tag, new_tag): """Replaces source paths and resets anon_ tags to increment from 1""" if isinstance(json, list): for item in json: self._replace_anon_tag(item, tag, new_tag) return if isinstance(json, dict): for key, value in json.items(): if key == "name" and isinstance(value, str): if value == tag: json[key] = new_tag elif key == "tag" and isinstance(value, str): if value == tag: json[key] = new_tag elif key == "src" and isinstance(value, list): if value and "temp.h" in value[0]: value[0] = "/some-path/temp.h" else: self._replace_anon_tag(value, tag, new_tag)
def _object_hook(json): filename = json.pop('__rfm_file__', None) typename = json.pop('__rfm_class__', None) if filename is None or typename is None: return json mod = util.import_module_from_file(filename) cls = getattr(mod, typename) obj = cls.__new__(cls) if hasattr(obj, '__dict__'): obj.__dict__.update(json) else: for attr, value in json.items(): setattr(obj, attr, value) if hasattr(obj, '__rfm_json_decode__'): obj.__rfm_json_decode__(json) return obj
def from_json(beyond, json):
    """Construct an instance from a decoded JSON dict.

    NOTE(review): ``fields``, ``self`` and ``self_type`` are free variables
    -- this function appears to be generated inside a class factory where
    ``fields`` maps field name -> default (None marking mandatory fields)
    and ``Optional`` wraps optional defaults; confirm against the enclosing
    factory before modifying.

    :param beyond: opaque first constructor argument, forwarded verbatim
    :param json: mapping of field values
    :raises Exception: when a mandatory field (default None) is absent/None
    """
    # a mandatory field is one whose declared default is None
    missing = next((f for f, d in fields.items() if json.get(f) is None and fields[f] is None), None)
    if missing is not None:
        raise Exception(
            'missing mandatory JSON key for '
            '%s: %s' % (self.__name__, missing))
    body = deepcopy(fields)
    # Replace optionals by 'None'.
    body.update({
        k: None for k, v in fields.items() if isinstance(v, Optional)
    })
    # overlay the supplied values, ignoring keys not declared in fields
    body.update({
        k: v for k, v in json.items() if k in fields
    })
    return self_type(beyond, **body)
def predict_tweet():
    """
    Called when API receives a POST request. It expects a json with
    dictionary of tweets, then it calls the model module to generate
    predictions and returns it

    Parameters:
        json (dict): request dict, should contain ids as keys and tweets as values

    Returns:
        dict (json): key is tweet id and value the prediction
    """
    # NOTE(review): the local name ``json`` shadows the stdlib module name,
    # and the broad ``except Exception`` converts *any* failure (including
    # the deliberate RuntimeError for empty payloads) into a 400 response.
    try:
        # get request json
        json = request.get_json()
        # check if payload is empty
        if json != {}:
            # get each id and tweet from request
            ids = []
            tweets = []
            for id, tweet in json.items():
                ids.append(id)
                tweets.append(tweet)
            # get predictions
            result = predict(tweets)
            # generate response json
            if result == []:
                result = empty_response(ids)
            else:
                result = prepare_response(ids, result)
        else:
            raise RuntimeError("Error while processing response")
        return jsonify(result)
    except Exception as e:
        # e is intentionally unused; details are not logged
        app.logger.info("Application Error while processing request")
        abort(400)
def parse(cls, api, json):
    """Build a stream-metadata model from a decoded JSON payload.

    Embedded interpolationType / observedProperty / unitOfMeasure entries
    are reduced to their self-link href and decoded through the vocab
    helpers; non-embedded keys copy verbatim.

    Fixed: the three embedded-key branches were independent ``if``
    statements; they are now an ``elif`` chain, matching the sibling
    parser and removing the dangling-else ambiguity.
    """
    stream_meta_data = cls(api)
    cls.fix_parse_misspellings(json)
    setattr(stream_meta_data, '_json', json)
    for k, v in json.items():
        if k == "_embedded":
            for ek, ev in v.items():
                if ek == "interpolationType":
                    ev = ev[0].get('_links', {}).get('self', {}).get('href', )
                    setattr(stream_meta_data, "interpolation_type", InterpolationType(ev))
                elif ek == "observedProperty":
                    ev = ev[0].get('_links', {}).get('self', {}).get('href', )
                    setattr(stream_meta_data, "observed_property", find_observed_property(ev))
                elif ek == "unitOfMeasure":
                    ev = ev[0].get('_links', {}).get('self', {}).get('href', )
                    setattr(stream_meta_data, "unit_of_measure", find_unit_of_measurement(ev))
        else:
            setattr(stream_meta_data, k, v)
    return stream_meta_data
def get_type_str(json):
    """
    Recreates json as str
    :param json: input json
    :return: json string.
    """
    parts = []
    if json.get('type') is not None:
        parts.append(json['type'])
    for key, val in json.items():
        if key == 'type':
            continue
        if key == 'typedef':
            parts.append(get_type_str(val))
        elif isinstance(val, (list, dict)):
            parts.append(' {} {} {}'.format('{', ','.join([str(i) for i in val]), '}'))
        else:
            parts.append(' {} {} {}'.format('{', val, '}'))
    return ''.join(parts)
def eval_json(json, env):
    """Should be called 2nd, after preprocessing. Simply meant to allow more
    complicated structures (e.g. creating of dict with int keys) from JSON.

    Dicts are evaluated value-wise; a "_call" key triggers invocation: the
    callable is looked up in *env* when given as a string, "_args" supplies
    positional arguments and the remaining keys become keyword arguments.
    A "!" prefix escapes the call, keeping a literal "_call" entry.
    """
    if isinstance(json, list):
        return [eval_json(entry, env) for entry in json]
    if not isinstance(json, dict):
        return json
    evaluated = {key: eval_json(value, env) for key, value in json.items()}
    call = evaluated.pop("_call", None)
    if call is None:
        return evaluated
    if call.startswith("!"):
        # escaped: restore a literal "_call" key without invoking anything
        evaluated["_call"] = call[1:]
        return evaluated
    if isinstance(call, str):
        call = env[call]
    args = evaluated.pop("_args", [])
    return call(*args, **evaluated)
def children(json):
    """Extract visible text from a parsed HTML-ish node tree.

    Strings are returned verbatim; lists concatenate their elements' text;
    dicts contribute their "children" text plus a newline for <br> tags;
    anything else yields "".

    Fixed: the list branch called ``children(i)`` twice per element (once
    to test, once to append), doubling the recursive work at every level
    of nesting -- the result is now computed once and reused.
    """
    if type(json) is str:
        return json
    if type(json) is list:
        s = ""
        for item in json:
            text = children(item)
            if text:
                s += text
        return s
    if type(json) is dict:
        s = ""
        for key, value in json.items():
            if key == "children":
                s += children(value)
            elif key == "tag" and value == "br":
                s += "\n"
        return s
    return ""
def sql_update_by_json(self, table, json):
    """INSERT OR REPLACE one row built from *json* (column -> value) into *table*.

    Values are bound as parameters; column names and *table* are
    interpolated into the SQL, so they must come from trusted input.
    Prints "no connection" and does nothing when no connection is open.
    """
    json_columns = [str(column) for column in json.keys()]
    json_values = [str(value) for value in json.values()]
    sql_values = ",".join("?" for _ in json_values)
    if not self.connection:
        print("no connection")
        return
    sql_query = f"""INSERT OR REPLACE INTO {table}(
        {",".join(json_columns)}
        ) VALUES({sql_values})"""
    cur = self.connection.cursor()
    cur.execute(sql_query, json_values)
    self.connection.commit()
    cur.close()
def Install(self, json, tablename, dbname):
    """Apply the ``initDatas`` section of *json* to dbname.tablename:
    truncate the table when rows exist, then insert each row.

    Ported from Python 2: ``except Exception, e`` is a syntax error on
    Python 3 and became ``except Exception as e``; ``print e`` became a
    function call. Per-row insert failures are printed and skipped, as
    before.

    NOTE(review): field values are spliced into the SQL with string
    formatting -- vulnerable to SQL injection if *json* is untrusted;
    parameterize upstream if possible.
    """
    try:
        for k, v in json.items():
            if k == "initDatas":
                if len(json["initDatas"]) != 0:
                    self.__helper.installsql(
                        "TRUNCATE table %s.%s" % (dbname, tablename), dbname)
                for temp in json["initDatas"]:
                    fields = ','.join(
                        ['`{0}`'.format(x['field']) for x in temp])
                    values = ','.join(
                        ['"{0}"'.format(x['value']) for x in temp])
                    sql = 'INSERT INTO `{0}`.`{1}`({2})VALUES({3})'.format(
                        dbname, tablename, fields, values)
                    try:
                        self.__helper.installsql(sql, dbname)
                    except Exception as e:
                        print(e)
    except Exception as e:
        print(e)
def obj_props(json):
    """Collect '@label' property names for all interactive UI widgets.

    Walks the 'ui' tree of a Faust-style JSON descriptor: sliders, nentry,
    checkboxes and buttons contribute '@<label>'; container elements are
    recursed through their 'items'.
    """
    def widget_props(element):
        # one widget -> its property name; containers -> recurse into items
        if element['type'] in ('vslider', 'hslider', 'nentry', 'checkbox', 'button'):
            return ['@' + element['label']]
        if 'items' in element:
            collected = []
            for child in element['items']:
                collected += widget_props(child)
            return collected
        return []

    found = []
    for key, value in json.items():
        if key == "ui":
            for ui_element in value:
                found += widget_props(ui_element)
    return found
def parse_json(json):
    """Parse a type-registry JSON into typed wrappers (reduced variant).

    Each entry is dispatched to a parser class by its 'category'
    (enum/native/object only) and objects are cross-linked.

    NOTE(review): unlike the fuller sibling parser, this variant neither
    skips '_'-prefixed names nor returns anything -- the 'types' and
    'by_category' dicts are built and then discarded; confirm whether a
    return statement was lost.
    """
    category_to_parser = {
        'enum': EnumType,
        'native': NativeType,
        'object': ObjectType,
    }
    types = {}
    by_category = {}
    for name in category_to_parser.keys():
        by_category[name] = []
    for (name, record) in json.items():
        category = record['category']
        parsed = category_to_parser[category](name, record)
        by_category[category].append(parsed)
        types[name] = parsed
    for obj in by_category['object']:
        link_object(obj, types)
def log(self, text=None, json=None, severity='DEFAULT', **kwargs):
    """Emit a structured or text log entry via self.logger.

    :param text: optional plain-text message
    :param json: optional structured payload (copied before mutation when
                 kwargs are merged in)
    :param severity: Stackdriver-style name or numeric level; names are
                     mapped to their numeric values
    :param kwargs: extra fields merged into the structured payload
    :raises ValueError: when neither text nor json is provided
    :raises KeyError: for an unknown severity name

    Any 'token' field is masked before logging. When both text and json
    are given they are wrapped together; payloads rejected by
    ``trunate_payload`` (sic -- misspelled upstream, kept for
    compatibility) are dropped silently.
    """
    if isinstance(severity, str):
        # map severity names onto the numeric Stackdriver levels
        severity = {
            'DEFAULT': 0, 'DEBUG': 100, 'INFO': 200, 'NOTICE': 300,
            'WARNING': 400, 'WARN': 400, 'ERROR': 500, 'ERR': 500,
            'CRITICAL': 600, 'ALERT': 700, 'EMERGENCY': 800
        }[severity]
    if len(kwargs):
        if json is not None:
            # shallow copy so the caller's dict is not mutated
            json = {k: v for k, v in json.items()}
            json.update(kwargs)
        else:
            json = kwargs
    # Mask user tokens
    if isinstance(json, dict) and 'token' in json:
        json['token'] = '****************'
    if text is None:
        if json is None:
            raise ValueError("No input provided")
        if not self.trunate_payload(
                json, labels=self.labels, severity=severity):
            self.logger.log_struct(json, labels=self.labels, severity=severity)
    elif json is not None:
        # both text and json: wrap them into one structured record
        json = {'message': text, 'json': json}
        if not self.trunate_payload(json, self.labels, severity):
            self.logger.log_struct(json, labels=self.labels, severity=severity)
    elif not self.trunate_payload(text, self.labels, severity):
        self.logger.log_text(text, labels=self.labels, severity=severity)
def request(self, *, session, params=None, json=None, fast=False, **kwargs):
    """Issue this endpoint's HTTP request through *session*.

    Unless *fast*, query params and JSON payloads are whitelisted against
    self.params / self.json (element-wise when self.array). Session-global
    headers and URL format values are always merged in before dispatch.
    """
    if not fast:
        if params is not None:
            params = {key: value for key, value in params.items() if key in self.params}
        if json is not None:
            if self.array:
                json = [{key: value for key, value in element.items() if key in self.json}
                        for element in json]
            else:
                json = {key: value for key, value in json.items() if key in self.json}
    headers = kwargs.setdefault('headers', {})
    headers.update(session.global_headers)
    fmt = kwargs.pop('fmt', {})
    fmt.update(session.global_fmt)
    url = self.url % fmt
    return session.request(self.method, url, params=params, json=json, **kwargs)
def first_parse(cls, api, json):
    """Parse a status payload into a model.

    'user' is decoded through the api's model factory (also stored as the
    deprecated 'user' alias of 'author'); 'created_at' becomes a datetime;
    'source' is split into display text and source_url when it contains
    markup; 'retweeted_status' recurses via Status.parse. Everything else
    is copied verbatim.
    """
    status = cls(api)
    for key, value in json.items():
        if key == 'user':
            user = getattr(api.parser.model_factory, 'user').parse(api, value)
            status.author = user
            status.user = user  # DEPRECIATED alias kept for old callers
        elif key == 'created_at':
            status.created_at = parse_datetime(value)
        elif key == 'source':
            has_markup = '<' in value
            status.source = parse_html_value(value) if has_markup else value
            status.source_url = parse_a_href(value) if has_markup else None
        elif key == 'retweeted_status':
            status.retweeted_status = Status.parse(api, value)
        else:
            setattr(status, key, value)
    return status
def prepare_path_from_json(key, json):
    """
    take all path keys from the merklehashtree
    :param key: str 'path'
    :param json: tree node (dict); dict and list values are recursed
    :return: list of values stored under *key* in nodes that also carry
             a 'size' entry (empty lists print 'none', preserved quirk)
    """
    results = []
    for k, v in json.items():
        if k == key and 'size' in json:
            results.append(v)
        if isinstance(v, dict):
            results.extend(prepare_path_from_json(key, v))
        elif isinstance(v, list):
            if v:
                for child in v:
                    results.extend(prepare_path_from_json(key, child))
            else:
                print('none')
    return results
def from_json(cls, json):
    """Build an instance of *cls*, mapping JSON keys through the class
    schema.

    For every key known to the schema (directly or via
    ``get_field_by_json_key``) the value is deserialized -- fields
    exposing ``from_json`` recurse, others pass through -- and stored
    under the schema-mapped attribute name. Unknown keys are ignored.

    NOTE(review): ``isinstance(item, object)`` is always True in Python,
    so every list element goes through deserialize; the condition is
    vacuous. The inner ``deserialize`` is redefined per key -- harmless
    but could be hoisted.
    """
    result = cls()
    for key, value in json.items():
        field_by_json, schema_key = cls.get_field_by_json_key(key)
        if key in cls.schema or field_by_json:
            field = field_by_json if field_by_json else cls.schema[key]
            # attribute name may differ from the JSON key
            object_key = schema_key if schema_key else key
            def deserialize(field, value):
                if hasattr(field, 'from_json'):
                    return field.from_json(value)
                else:
                    return value
            if isinstance(value, list):
                setattr(result, object_key, [
                    deserialize(field, item) if isinstance(item, object) else item
                    for item in value
                ])
            else:
                setattr(result, object_key, deserialize(field, value))
    return result
def evaluate_skills(candidate,json,param,skill_percent):
    """Score a candidate's skill tree against a required skill tree.

    :param candidate: nested mapping taxonomy -> sub-taxonomy -> skill -> child skills
    :param json: required tree; each sub-taxonomy carries a 'percent' weight
    :param param: scoring weights ('matchSkillScr', 'extraSkillScr')
    :param skill_percent: candidate's claimed percent per sub-taxonomy
    :return: (score, incomplete) accumulated floats

    NOTE(review): the guards of the form ``x in y != False`` are *chained*
    comparisons -- they evaluate as ``(x in y) and (y != False)``, not
    ``(x in y) != False``; they behave like plain membership tests here
    but the intent should be confirmed. Statement nesting was
    reconstructed from a collapsed source line; verify against the
    original before refactoring. ``tanh`` is a free name (math import at
    module level presumed).
    """
    score = 0
    incomplete=0
    for taxonomy_name,taxonomy_value in json.items():
        if taxonomy_name in candidate != False:
            for subTaxonomy_name,subTaxonomy_value in taxonomy_value.items():
                # required weight of this sub-taxonomy
                val=subTaxonomy_value["percent"]
                if subTaxonomy_name in candidate[taxonomy_name]!= False:
                    skill_match = 0
                    cand_val=skill_percent[taxonomy_name][subTaxonomy_name]
                    # smaller penalty when the candidate meets/exceeds the requirement
                    skill_penalize=5 if (cand_val-val)>=0 else 10
                    score += skill_penalize/100 *val*tanh((cand_val-val)/27)
                    candidate_skills=candidate[taxonomy_name][subTaxonomy_name]
                    for skill_name,skill_value in subTaxonomy_value.items():
                        if skill_name in candidate[taxonomy_name][subTaxonomy_name] != False:
                            skill_match += 1
                            childSkill_match = 0
                            candidate_childSkill_list = candidate[taxonomy_name][subTaxonomy_name][skill_name]
                            for childSkill in skill_value:
                                if childSkill in candidate_childSkill_list != False:
                                    childSkill_match +=1
                                    # reward per matched child skill, weight split evenly
                                    score += (param["matchSkillScr"]-5)/100 * (val / (len(subTaxonomy_value)-1)) / len(skill_value)
                            if len(skill_value) == 0 :
                                score += (param["matchSkillScr"]-5)/100 * (val / (len(subTaxonomy_value)-1))
                            elif len(candidate_childSkill_list) == 0 :
                                # candidate claims the skill but lists no children: half credit
                                score += (param["matchSkillScr"]-5)/100 * (val / (len(subTaxonomy_value)-1)) / 2
                                incomplete +=(val / (len(subTaxonomy_value)-1)) / 2
                            elif len(candidate_childSkill_list) != 0 :
                                len_nonMatch = len(candidate_childSkill_list)-childSkill_match
                                score += param["extraSkillScr"]/100*(val / (len(subTaxonomy_value)-1))*tanh(len_nonMatch/5)
                                # (len_nonMatch)*(val/(len(subTaxonomy_value)-1))/(len(skill_value) + 2*len_nonMatch)
                    if len(subTaxonomy_value) == 1 :
                        # only the 'percent' entry exists: full credit
                        score += (param["matchSkillScr"]-5)/100 * val
                    elif len(candidate_skills) == 0 :
                        score += (param["matchSkillScr"]-5)/100 * val / 2
                        incomplete += val/2
                    elif len(candidate_skills) != 0 :
                        nonMatch_skills=len(candidate_skills)-skill_match
                        score += param["extraSkillScr"]/100 * val*tanh(nonMatch_skills/10)
                        # (len(subTaxonomy_value)-1+ constant*nonMatch_skills)
    return score,incomplete
def transform_json_segments(transformer, json, segments, filepath):
    """Apply *transformer* to the parts of *json* addressed by *segments*.

    *segments* is a path of selector objects (each with ``selector`` and
    ``match_attr``); when exhausted, ``transformer.transform`` is applied
    to the current value. Lists distribute over their elements; the "*"
    selector maps over all matching dict values (building a new dict),
    while a named selector descends in place -- dicts reached via a named
    selector are MUTATED and returned.

    :return: the transformed value (or the original when nothing matches)
    """
    if type(json) == list:
        # lists are transparent: apply the same segment path to each element
        return [
            transform_json_segments(transformer, el, segments, filepath)
            for el in json
        ]
    if not segments:
        # path exhausted: this is the value to transform
        return transformer.transform(json, filepath)
    else:
        if type(json) == dict:
            (head, tail) = (segments[0], segments[1:])
            if head.selector == "*":
                # wildcard: rebuild the dict from all attribute-matching values
                return {
                    k: transform_json_segments(transformer, v, tail, filepath)
                    for (k, v) in json.items() if head.match_attr(v)
                }
            elif head.selector in json:
                v = json[head.selector]
                if type(v) == list:
                    # transform matching children in place, keep the rest
                    res = []
                    for child in v:
                        if head.match_attr(child):
                            res.append(
                                transform_json_segments(
                                    transformer, child, tail, filepath))
                        else:
                            res.append(child)
                    json[head.selector] = res
                    return json
                else:
                    if head.match_attr(v):
                        json[head.selector] = transform_json_segments(
                            transformer, v, tail, filepath)
                        return json
                    else:
                        return json
            # selector not present: nothing to do
            return json
        return json