def jstree(json):
    """Convert a parsed-JSON structure into a sorted list of jsTree node dicts.

    Each node gets a pseudo-unique "id" (key/value plus a random suffix) and a
    "text" label; nested dicts/lists become "children". Long URL-like string
    values are abbreviated to scheme + "..." + last 57 chars.

    Fixes vs. original: Py2-only iteritems(); TypeError from len(v) on
    non-string scalars (ints, None); ids now str()-coerce non-string keys.
    """
    result = []
    if isinstance(json, dict):
        for k, v in json.items():
            a = random.uniform(0, 10)  # random suffix to keep node ids unique
            if not isinstance(v, (dict, list)):
                text = str(v)
                # Abbreviate very long URL-ish strings (guarded: only for str).
                if isinstance(v, str) and len(v) > 60 and r'://' in v:
                    pos = v.index(r'://')
                    text = v[0:pos + 3] + "..." + v[-57:]
                result.append({"id": str(k) + str(a), "text": str(k) + ":" + text})
            else:
                result.append({"id": str(k) + str(a), "text": k, "children": jstree(v)})
    elif isinstance(json, list):
        for v in json:
            a = random.uniform(0, 10)
            if not isinstance(v, (dict, list)):
                result.append({"id": str(v) + str(a), "text": v})
            else:
                # Anonymous container inside a list: placeholder label.
                result.append({"id": str(a), "text": "--", "children": jstree(v)})
    return sorted(result, key=operator.itemgetter('text'))
def json_to_table_data(format_id, json, angle_to_gl):
    """Merge JSON overrides with table defaults and derived fields for one
    format entry, then render it through format_entry_template."""
    params = {"space": " ", "id": format_id}
    # Values supplied by the JSON data file take precedence.
    params.update(json)
    # Lazily fill anything the JSON omitted.
    if "glInternalFormat" not in params:
        params["glInternalFormat"] = angle_to_gl[format_id]
    if "fboImplementationInternalFormat" not in params:
        params["fboImplementationInternalFormat"] = params["glInternalFormat"]
    if "componentType" not in params:
        params["componentType"] = get_component_type(format_id)
    if "channels" not in params:
        params["channels"] = get_channels(format_id)
    if "bits" not in params:
        params["bits"] = get_bits(format_id)
    # Derived values.
    params["mipGenerationFunction"] = get_mip_generation_function(params)
    params["colorReadFunction"] = get_color_read_function(params)
    return format_entry_template.format(**params)
def _store(json): 'return the head domain object update with all sub-domain (or derivate)' #1 - first top-level object creation D=eval("%(_type)s('%(name)s',path='%(domain)s')"%json) #2 - sub-domain creation &ref dir=json['dir'] if dir : #this is a not a leaf then. for name in dir : dir[name]=domain._store(dir[name]) #3 - Parent domain reference D.domain=domain.find(json['domain']) #4 - stuff attributes General for attr in ('dir','domain') : del json[attr] D.__dict__.update(json) print D #4 - stuff **custom** attributes conversion f=D.__class__._attr_conv for k,attr in json.iteritems(): if k in f : if isinstance(attr,str): D.__dict__[k]=eval("%s('%s')" % (f[k][0],attr)) else: D.__dict__[k]=eval('%s(%s)' % (f[k][0],attr)) return D
def json_to_table_data(format_name, prefix, json):
    """Build the template parameters for one D3D11 format row and render it.

    Exits the script if a real format fails to declare its GL internal format.
    """
    params = {
        "space": " ",
        "formatName": format_name,
        "texFormat": "DXGI_FORMAT_UNKNOWN",
        "srvFormat": "DXGI_FORMAT_UNKNOWN",
        "rtvFormat": "DXGI_FORMAT_UNKNOWN",
        "dsvFormat": "DXGI_FORMAT_UNKNOWN",
        "glInternalFormat": "GL_NONE",
        "condition": prefix,
    }
    params.update(json)
    # Every format other than the NONE placeholder must name its GL format.
    if format_name != "ANGLE_FORMAT_NONE" and params["glInternalFormat"] == "GL_NONE":
        print("Missing 'glInternalFormat' from " + format_name)
        sys.exit(1)
    if "fboImplementationInternalFormat" not in params:
        params["fboImplementationInternalFormat"] = params["glInternalFormat"]
    # Derived values.
    params["blitSRVFormat"] = get_blit_srv_format(params)
    params["swizzleFormat"] = get_swizzle_format_id(format_name, params)
    params["mipGenerationFunction"] = get_mip_generation_function(params)
    params["colorReadFunction"] = get_color_read_function(params)
    template = split_format_entry_template if prefix else format_entry_template
    return template.format(**params)
def json_to_table_data(internal_format, format_name, prefix, json):
    """Build template parameters for one format row (DXGI texture/SRV/UAV/RTV/
    DSV bindings plus derived blit/swizzle/initializer fields) and render it."""
    params = {
        "space": " ",
        "internalFormat": internal_format,
        "formatName": format_name,
        "texFormat": "DXGI_FORMAT_UNKNOWN",
        "srvFormat": "DXGI_FORMAT_UNKNOWN",
        "uavFormat": "DXGI_FORMAT_UNKNOWN",
        "rtvFormat": "DXGI_FORMAT_UNKNOWN",
        "dsvFormat": "DXGI_FORMAT_UNKNOWN",
        "condition": prefix,
    }
    # JSON overrides win over the defaults above.
    params.update(json)
    # Derived values.
    params["blitSRVFormat"] = get_blit_srv_format(params)
    params["swizzleFormat"] = get_swizzle_format_id(internal_format, params)
    params["initializer"] = angle_format.get_internal_format_initializer(
        internal_format, params["formatName"])
    template = split_format_entry_template if prefix else format_entry_template
    return template.format(**params)
def addClassFromJson(json, objColl, className, conf):
    """Recursively derive class definitions from a JSON object.

    json: parsed JSON dict describing one object.
    objColl: accumulator mapping class name -> SupaDupaClassDef; returned.
    className: name for this level's class, or None for an anonymous root.
    conf: language/config adapter (name mapping, overrides, writer).

    Fixes vs. original: `dict.has_key` (removed in Py3) replaced with `in`;
    `classOverrideForProp` no longer called twice per key; Py2-only
    iteritems() replaced with items().
    """
    classDef = None
    if className is not None:
        className = conf.mapClassName(className)
        classDef = SupaDupaClassDef.SupaDupaClassDef(className)
        objColl[className] = classDef
    for k, v in json.items():
        proccessedKey = conf.mapKey(k)
        override = conf.classOverrideForProp(proccessedKey)  # cache: was queried twice
        if override is not None:
            classDef.addProperty(proccessedKey,
                                 conf.getWriteClass().formatClassName(override))
        elif isinstance(v, dict):
            formattedClassName = conf.getWriteClass().formatClassName(k)
            if classDef is not None:
                containedClassName = conf.mapClassName(formattedClassName)
                classDef.addProperty(proccessedKey, containedClassName)
                classDef.addDependency(containedClassName)
            addClassFromJson(v, objColl, formattedClassName, conf)
        elif isinstance(v, list):
            classDef.addProperty(proccessedKey, getClassNameFromWriter(list, conf))
            # Infer the element class from the first item only.
            if v:
                addClassFromJson(v[0], objColl,
                                 convertNameFromArray(proccessedKey), conf)
        elif str(type(v)) in conf.getWriteClass().defaultClassTypes:
            classDef.addProperty(proccessedKey, getClassNameFromWriter(type(v), conf))
        else:
            raise Exception("Class type '%s' not supported by language '%s'" %
                            (str(type(v)), conf.lang))
    return objColl
def _strip_comments(self, json):
    """ Recursively strip comments out of loaded JSON files. The delimiter used to define a comment is defined in a global variable (JSON_COMMENT). Args: json: JSON content to strip comments from Returns: JSON content with comments removed """
    # NOTE(review): Python 2 only — `unicode` does not exist on Py3.
    # Scalars (and None) cannot contain comments; return them unchanged.
    if type(json) in [str, unicode, int, float, bool, type(None)]:
        return json
    if isinstance(json, list):
        # Drop string elements that begin with the comment delimiter,
        # recursing into everything else.
        return [
            self._strip_comments(j) for j in json
            if not ((type(j) in [str, unicode]) and j.startswith(DelegateTools.JSON_COMMENT))
        ]
    if isinstance(json, dict):
        # Drop comment keys; recurse into remaining values.
        # NOTE: any other type (e.g. tuple) falls through and returns None implicitly.
        return dict((k, self._strip_comments(v)) for (k, v) in json.iteritems()
                    if k != DelegateTools.JSON_COMMENT
                    )  # there would be a dict comprehension from 2.7
def load_node_rec(self, node, json):
    """Mirror a JSON mapping as a tree: dict values become Node subtrees,
    everything else becomes a PropertyNode leaf."""
    for key, value in json.items():
        if isinstance(value, dict):
            subtree = Node(key)
            node.children.append(subtree)
            self.load_node_rec(subtree, value)
        else:
            node.children.append(PropertyNode(key, value))
def to_object(json):
    """Recursively convert parsed JSON into attribute-style objects
    (dicts via _dict_to_object, lists element-wise, scalars unchanged)."""
    if isinstance(json, dict):
        converted = {key: to_object(val) for key, val in json.items()}
        return _dict_to_object(converted)
    if isinstance(json, list):
        return [to_object(element) for element in json]
    return json
def to_object(json):
    """Recursively turn a JSON structure into objects: list elements are
    converted one by one, dicts go through _dict_to_object, scalars pass."""
    if isinstance(json, list):
        return [to_object(item) for item in json]
    if not isinstance(json, dict):
        return json
    return _dict_to_object({key: to_object(value) for key, value in json.items()})
def create_song_map():
    """Index every track title across soundboard recordings and write the
    resulting {title: [version, ...]} map to SONG_MAP."""
    sbd = read_json(SBD_ITEMS)
    tracks = {show: get_tracks(item) for show, item in sbd.items()}
    metadata = {show: item['metadata'] for show, item in sbd.items()}
    song_map = {}
    for recording, track_list in tracks.items():
        for track in track_list:
            if 'title' not in track:
                continue
            year = metadata[recording]['year']
            if isinstance(year, list):
                year = year[0]  # some entries store the year as a list
            version = {
                'recording': recording,
                'track': track['name'],
                'year': year,
            }
            song_map.setdefault(track['title'], []).append(version)
    write_json(song_map, SONG_MAP)
def decode_entry(self, json):
    """Deserialize one table entry: 'match' and 'actions' are decoded through
    their dedicated helpers, all other keys are set verbatim as attributes."""
    entry = TableEntry()
    for key, value in json.items():
        if key == "match":
            entry.match = self.decode_match(value)
        elif key == "actions":
            entry.actions = [self.decode_action(action) for action in value]
        else:
            setattr(entry, key, value)
    return entry
def dirStructureToList(jsonFile):
    """Flatten a {topdir: [ {subdir: [files]}, ... ]} JSON structure into a
    list of concatenated topdir+subdir+file path strings."""
    paths = []
    tree = File2Json(jsonFile)
    for topdir, entries in tree.items():
        for entry in entries:
            if type(entry) is dict:
                for subdir, names in entry.items():
                    for name in names:
                        paths.append(str(topdir) + str(subdir) + str(name))
    return paths
def guessTypenameForDict(json):
    """Map each key of a JSON dict to its guessed type info: nested dicts and
    lists recurse, scalars become a one-element set of the guessed name."""
    guessed = {}
    for key, value in json.items():
        value_type = type(value)
        if value_type == dict:
            guessed[key] = guessTypenameForDict(value)
        elif value_type == list:
            guessed[key] = guessTypenameForArray(value)
        else:
            guessed[key] = {guessTypename(value)}
    return guessed
def decode_entry(self, json):
    """Build a TableEntry from its JSON form; 'actions'/'match' fields are
    decoded, every other field is attached as-is."""
    e = TableEntry()
    for field, raw in json.items():
        if field == "actions":
            e.actions = list(map(self.decode_action, raw))
        elif field == "match":
            e.match = self.decode_match(raw)
        else:
            setattr(e, field, raw)
    return e
def json_to_table_data(format_id, json, angle_to_gl):
    """Build the template-parameter dict for one ANGLE format and render it.

    format_id: ANGLE format identifier (e.g. "B8G8R8A8_UNORM").
    json: per-format overrides from the data file (win over all defaults).
    angle_to_gl: maps format_id -> default GL internal format.
    """
    table_data = ""  # NOTE(review): unused local, kept as-is
    parsed = {
        "id": format_id,
        "fastCopyFunctions": "NoCopyFunctions",
    }
    # JSON-supplied values override the defaults above.
    for k, v in json.iteritems():  # Py2-only API; .items() on Py3
        parsed[k] = v
    # Fill anything the JSON omitted from the lookup table / angle_format helpers.
    if "glInternalFormat" not in parsed:
        parsed["glInternalFormat"] = angle_to_gl[format_id]
    if "fboImplementationInternalFormat" not in parsed:
        parsed["fboImplementationInternalFormat"] = parsed["glInternalFormat"]
    if "componentType" not in parsed:
        parsed["componentType"] = angle_format.get_component_type(format_id)
    if "channels" not in parsed:
        parsed["channels"] = angle_format.get_channels(format_id)
    if "bits" not in parsed:
        parsed["bits"] = angle_format.get_bits(format_id)
    # Derived values.
    parsed["mipGenerationFunction"] = get_mip_generation_function(parsed)
    parsed["colorReadFunction"] = get_color_read_function(parsed)
    parsed["colorWriteFunction"] = get_color_write_function(parsed)
    # Per-channel bit counts; "0" for channels this format lacks.
    for channel in angle_format.kChannels:
        if parsed["bits"] != None and channel in parsed["bits"]:
            parsed[channel] = parsed["bits"][channel]
        else:
            parsed[channel] = "0"
    parsed["namedComponentType"] = get_named_component_type(
        parsed["componentType"])
    if format_id == "B8G8R8A8_UNORM":
        parsed["fastCopyFunctions"] = "BGRACopyFunctions"
    # Bytes per pixel from total channel bits.
    # NOTE(review): Py2 integer division; under Py3 `/ 8` would yield a float.
    sum_of_bits = 0
    for channel in angle_format.kChannels:
        sum_of_bits += int(parsed[channel])
    parsed["pixelBytes"] = sum_of_bits / 8
    parsed["componentAlignmentMask"] = get_component_alignment_mask(
        parsed["channels"], parsed["bits"])
    parsed["isBlock"] = "true" if format_id.endswith("_BLOCK") else "false"
    parsed["isFixed"] = "true" if "FIXED" in format_id else "false"
    return format_entry_template.format(**parsed)
def to_xml(json):
    """Convert a JSON structure to an lxml element via xmltodict.unparse.

    XML needs exactly one root element, so list payloads and multi-key dicts
    are wrapped in the internal envelope tag first.
    """
    ####### Desired
    #if len(json) == 0:
    #    return etree.Element(_internal_json_envelope_tag)
    ####### Current #######
    if isinstance(json, list) or len(json) > 1:
        json = {_internal_json_envelope_tag: json}
    # NOTE(review): Py2-only — next() on dict.iteritems(); on Py3 this would
    # need next(iter(json.items())).
    _, value = next(json.iteritems())
    # A single key whose value is a list still can't unparse to one root;
    # wrap the whole dict again.
    if isinstance(value, list):
        json = {_internal_json_envelope_tag: json}
    return etree.fromstring(xmltodict.unparse(json).encode('utf-8'))
def json_to_table_data(format_id, json, angle_to_gl):
    """Render one ANGLE format row: defaults, JSON overrides, then derived
    fields (read/write/mip functions, per-channel bits, pixel size, flags)."""
    table_data = ""  # NOTE(review): unused local, kept as-is
    parsed = {
        "id": format_id,
        "fastCopyFunctions": "NoCopyFunctions",
    }
    # JSON values override the defaults.
    for k, v in json.iteritems():  # Py2-only API
        parsed[k] = v
    if "glInternalFormat" not in parsed:
        parsed["glInternalFormat"] = angle_to_gl[format_id]
    if "fboImplementationInternalFormat" not in parsed:
        parsed["fboImplementationInternalFormat"] = parsed["glInternalFormat"]
    if "componentType" not in parsed:
        parsed["componentType"] = angle_format.get_component_type(format_id)
    if "channels" not in parsed:
        parsed["channels"] = angle_format.get_channels(format_id)
    if "bits" not in parsed:
        parsed["bits"] = angle_format.get_bits(format_id)
    # Derived values.
    parsed["mipGenerationFunction"] = get_mip_generation_function(parsed)
    parsed["colorReadFunction"] = get_color_read_function(parsed)
    parsed["colorWriteFunction"] = get_color_write_function(parsed)
    # One entry per channel letter; "0" where the format has no such channel.
    for channel in angle_format.kChannels:
        if parsed["bits"] != None and channel in parsed["bits"]:
            parsed[channel] = parsed["bits"][channel]
        else:
            parsed[channel] = "0"
    parsed["namedComponentType"] = get_named_component_type(parsed["componentType"])
    if format_id == "B8G8R8A8_UNORM":
        parsed["fastCopyFunctions"] = "BGRACopyFunctions"
    # NOTE(review): Py2 integer division intended here.
    sum_of_bits = 0
    for channel in angle_format.kChannels:
        sum_of_bits += int(parsed[channel])
    parsed["pixelBytes"] = sum_of_bits / 8
    parsed["componentAlignmentMask"] = get_component_alignment_mask(
        parsed["channels"], parsed["bits"])
    parsed["isBlock"] = "true" if format_id.endswith("_BLOCK") else "false"
    parsed["isFixed"] = "true" if "FIXED" in format_id else "false"
    return format_entry_template.format(**parsed)
def to_json(xml):
    """Parse an lxml element into a dict via xmltodict, unwrapping the value
    of a lone internal envelope tag when present."""
    parsed = xmltodict.parse(
        etree.tostring(xml), expat=expat, force_list=_should_force_list)
    if len(parsed) == 1 and isinstance(parsed, dict):
        tag, body = next(iter(parsed.items()))
        if tag in _internal_envelope_tags:
            ####### Desired
            #parsed = {} if body is None else body
            ####### Current
            parsed = body
            #######
    return parsed
def format_body(json):
    """URL-encode a Jenkins build-trigger body from a {name: value} mapping.

    Returns the encoded string, or None on failure (the error is logged —
    best-effort behavior preserved from the original).

    Fix: `except Exception, e` is Py2-only syntax; `as e` works on 2.6+.
    """
    try:
        param_list = [{"name": k, "value": v} for k, v in json.items()]
        body = {"Submit": "Build", "json": {"parameter": param_list}}
        logging.debug(body)
        # Py2 urllib API; on Py3 this lives at urllib.parse.urlencode.
        return urllib.urlencode(body)
    except Exception as e:
        logging.error(e)
def fetch_xmls():
    """Fetch the main carto XML and each station's XML, annotate the marker
    dicts, and return (now, response dict with markers + updated timestamp)."""
    main_xml = urllib2.urlopen(URL_CARTO, timeout=10).read()
    markers = _parse_main_xml(main_xml)
    for station_id, marker in markers.items():
        station_xml = urllib2.urlopen(URL_STATION % station_id, timeout=10).read()
        station = _parse_station_xml(station_xml)
        marker["station"] = station
        # A station with zero total slots is considered invalid.
        marker["station_valid"] = int(station["total"]) != 0
    now = datetime.datetime.now()
    return now, {"markers": markers, "updated": time.mktime(now.timetuple())}
def _json_to_cfg(json):
    """Render a {section: {key: value}} mapping as INI-format text."""
    config = ConfigParser.RawConfigParser()
    config.optionxform = str  # preserve option-name case
    for section, entries in json.items():
        if section == ConfigParser.DEFAULTSECT:
            continue  # the DEFAULT section must not be add_section'ed
        config.add_section(section)
        for option, value in entries.items():
            config.set(section, option, value)
    buffer = StringIO.StringIO()
    config.write(buffer)
    text = buffer.getvalue()
    buffer.close()
    return text
def fetch_xmls():
    """Download the carto map and per-station XML feeds; return the current
    time plus a response dict of annotated markers."""
    markers = _parse_main_xml(urllib2.urlopen(URL_CARTO, timeout=10).read())
    for key, marker in markers.items():
        raw = urllib2.urlopen(URL_STATION % key, timeout=10).read()
        parsed = _parse_station_xml(raw)
        marker['station'] = parsed
        marker['station_valid'] = int(parsed['total']) != 0
    now = datetime.datetime.now()
    resp = {'markers': markers, 'updated': time.mktime(now.timetuple())}
    return now, resp
def showPage(show):
    """List a show's seasons (when the API returns a season index) or its
    episodes, then close the directory listing."""
    data = getJson(apiUrl + "?seasons=" + show)
    first_key = list(data.items())[0][0]
    if "Stagione" in str(first_key):
        # Season index: name -> link.
        for seasonname, seasonlink in sorted(data.items()):
            addDirectoryItem(seasonname, {"action": "season", "link": seasonlink})
    else:
        # Episode index: each value is [link, img, desc, duration].
        for title in sorted(data):
            info = data[title]
            addDirectoryItem(title, {"action": "episode", "link": info[0]},
                             info[1], False, info[2], info[3])
    xbmcplugin.endOfDirectory(thisPlugin)
def from_json(cls, json, selfObj=None):
    """Populate selfObj (or a fresh cls() when None) from a JSON mapping,
    setting only queryable, non-excluded attributes; returns the instance."""
    if selfObj is None:
        self = cls()
    else:
        self = selfObj
    exclude = (cls.__exclude__ or ()) + Serializable.__exclude__
    include = cls.__include__ or ()
    if json:
        for prop, value in json.iteritems():
            # ignore all non user data, e.g. only
            # NOTE(review): `|` binds tighter than `not`, so this evaluates as
            # `not ((prop in exclude) | (prop in include))` — membership in
            # `include` therefore also SKIPS the property. If `include` was
            # meant as an allow-list, this is a precedence bug; confirm intent
            # before changing.
            if (not (prop in exclude) | (prop in include)) and isinstance(
                    getattr(cls, prop, None), QueryableAttribute):
                setattr(self, prop, value)
    return self
def compress(json):
    """ Translate lumis to lumiranges e.g. [1, 2, 3, 5, 6] -> [[1,3],[5,6]]

    json: {run: [lumi, ...]} -> {run: [[start, end], ...]} with inclusive,
    sorted, duplicate-tolerant ranges.

    Fixes vs. original: NameError for single-lumi runs (loop variable `b` was
    used after a zip over an empty pairing), IndexError for empty lumi lists,
    and Py2-only iteritems().
    """
    cjson = {}
    for run, lumis in json.items():
        if not lumis:
            cjson[run] = []
            continue
        slumis = sorted(lumis)
        ranges = []
        start = slumis[0]
        for a, b in zip(slumis, slumis[1:]):
            # A gap (not consecutive, not a duplicate) closes the open range.
            if b != a + 1 and b != a:
                ranges.append([start, a])
                start = b
        # Close the final (or only) range.
        ranges.append([start, slumis[-1]])
        cjson[run] = ranges
    return cjson
def probe(self):
    """Yield one nagiosplugin Metric per key of the cluster-health JSON,
    routing each key to its context (and unit where applicable)."""
    plain_keys = ('active_primary_shards', 'active_shards', 'relocating_shards',
                  'initializing_shards', 'unassigned_shards',
                  'delayed_unassigned_shards', 'number_of_pending_tasks',
                  'number_of_in_flight_fetch')
    for key, value in self.get_json().items():
        if key == 'status':
            yield nagiosplugin.Metric(key, value)
        elif key == 'number_of_nodes':
            yield nagiosplugin.Metric(key, value, context='nodes')
        elif key == 'number_of_data_nodes':
            yield nagiosplugin.Metric(key, value, context='data_nodes')
        elif key in plain_keys:
            yield nagiosplugin.Metric(key, value, context='default')
        elif key == 'task_max_waiting_in_queue_millis':
            yield nagiosplugin.Metric(key, value, uom='ms', context='default')
        elif key == 'active_shards_percent_as_number':
            yield nagiosplugin.Metric(key, value, uom='%', context='default')
def _cleanup_json(cls, json): for k, v in json.iteritems(): try: v = int(v) except (ValueError, TypeError): pass if k.startswith('time_') or k in ['since']: v = datetime.fromtimestamp(v) if v else None if isinstance(v, dict): v = cls._cleanup_json(v) json[k] = v return json
def print_json(json):
    """Debug-print a JSON dict with nesting markers: "0)" for the whole
    structure, "1)" per key, "2)" per list element; recurses into nested
    dicts found in lists and into tuple values.

    Fixes vs. original: Py2-only `print` statements replaced with the
    parenthesized form (valid on 2.x and 3.x for a single argument);
    `in ('list')` was a string-containment test (missing tuple comma), now
    an explicit equality check; iteritems() -> items().
    """
    print("0) " + str(json) + " - " + type(json).__name__)
    for key, val in json.items():
        print("1) " + key + " - " + str(val) + " - " + type(val).__name__)
        # list
        if type(val).__name__ == 'list':
            for subkey in val:
                print("2) " + str(subkey) + " - " + type(subkey).__name__)
                if type(subkey).__name__ == 'dict':
                    print_json(subkey)
        # tuple
        elif type(val).__name__ == 'tuple':
            print_json(val)
    return
def search_key_in_json(self, json, searchkey):
    """Search a nested JSON structure for `searchkey` and return its value.

    NOTE(review): Python 2 (`print` statements, iteritems). Several suspect
    behaviors preserved as-is: the prints concatenate `val` assuming it is a
    string (TypeError otherwise); when iterating a dict via `for item in val`
    only KEYS are compared; and the recursion RETURNS on the first dict/list
    item even if the key might appear later — presumably bugs, confirm before
    relying on this.
    """
    for key, val in json.iteritems():
        #print "1) " + str(key) + " - " + str(val)
        if searchkey == key:
            print searchkey + ":" + val
            return val
        # list
        if type(val).__name__ in ('list', 'dict'):
            for item in val:
                if searchkey == item:
                    print searchkey + ":" + val
                    return val
                else:
                    if type(item).__name__ in ('dict', 'list'):
                        return self.search_key_in_json(item, searchkey)
def _parseJSONForAPI(self, json): """ Formats the JSON to be in a more usable syntax, collecting by date. """ cleanJSON = {} for hitType, records in json.iteritems(): for date, hits in records.iteritems(): if date not in cleanJSON: cleanJSON[date] = {} cleanJSON[date][hitType] = int(hits) return cleanJSON
def fetch_xmls():
    """Retrieve the main map XML and every station feed, attach station data
    to each marker, and return (now, {'markers': ..., 'updated': ...})."""
    markers = _parse_main_xml(urlopen(URL_CARTO, timeout=10).read())
    for key in markers:
        marker = markers[key]
        station = _parse_station_xml(urlopen(URL_STATION % key, timeout=10).read())
        marker['station'] = station
        marker['station_valid'] = int(station['total']) != 0
    now = datetime.datetime.now()
    return now, {
        'markers': markers,
        'updated': time.mktime(now.timetuple()),
    }
def format_body(json):
    """Encode a Jenkins parameterized-build POST body from {name: value}.

    On any failure the exception is logged and None returned (original
    best-effort contract preserved).

    Fix: `except Exception, e` is invalid on Python 3; `as e` is valid 2.6+.
    """
    try:
        param_list = []
        for k, v in json.items():
            param_list.append({"name": k, "value": v})
        body = {"Submit": "Build", "json": {"parameter": param_list}}
        logging.debug(body)
        # Py2 urllib API (urllib.parse.urlencode on Py3).
        return urllib.urlencode(body)
    except Exception as e:
        logging.error(e)
def to_xwwwform(json):
    """Convert a JSON value into the internal x-www-form list representation:
    mappings and pair-shaped values become per-key dicts, anything else is
    wrapped under the internal envelope tag."""
    tostring = _InternalXWWWFORMJSONConverter._tostring
    tuplify = _InternalXWWWFORMJSONConverter._tuplify
    if isinstance(json, (dict, OrderedDict)):
        return _XWWWFormList([{tostring(k): tuplify(v)} for k, v in json.iteritems()])
    try:
        # Anything iterable as key/value pairs is treated like a form.
        return _XWWWFormList([
            {tostring(k): tuplify(v)}
            for k, v in _InternalXWWWFORMJSONConverter._try_treat_as_xwwform(json)
        ])
    except (TypeError, AttributeError):
        # Scalar or otherwise un-pair-able: single enveloped entry.
        return _XWWWFormList([{_internal_json_envelope_tag: tuplify(json)}])
def json_to_table_data(format_id, json, angle_to_gl):
    """Assemble template parameters for one ANGLE format (defaults, JSON
    overrides, derived functions, per-channel bits) and render the row."""
    parsed = {"id": format_id, "fastCopyFunctions": "NoCopyFunctions"}
    parsed.update(json)
    # Fall back to table/helper values for anything the JSON omitted.
    if "glInternalFormat" not in parsed:
        parsed["glInternalFormat"] = angle_to_gl[format_id]
    if "fboImplementationInternalFormat" not in parsed:
        parsed["fboImplementationInternalFormat"] = parsed["glInternalFormat"]
    if "componentType" not in parsed:
        parsed["componentType"] = angle_format.get_component_type(format_id)
    if "channels" not in parsed:
        parsed["channels"] = angle_format.get_channels(format_id)
    if "bits" not in parsed:
        parsed["bits"] = angle_format.get_bits(format_id)
    # Derived values.
    parsed["mipGenerationFunction"] = get_mip_generation_function(parsed)
    parsed["colorReadFunction"] = get_color_read_function(parsed)
    parsed["colorWriteFunction"] = get_color_write_function(parsed)
    bits = parsed["bits"]
    for channel in "ABDGLRS":
        parsed[channel] = bits[channel] if bits != None and channel in bits else "0"
    parsed["namedComponentType"] = get_named_component_type(parsed["componentType"])
    if format_id == "B8G8R8A8_UNORM":
        parsed["fastCopyFunctions"] = "BGRACopyFunctions"
    return format_entry_template.format(**parsed)
def load_json(self, json, cas_value):
    """Merge a freshly-loaded JSON document into this Document instance.

    For each known field, reconcile the incoming value against the last
    value seen from storage (`_last_json_value`), the current in-memory
    value, and any local modifications; raise a DataCollisionError when the
    three disagree irreconcilably. Stores `cas_value` and returns self.

    NOTE(review): Python 2 (iteritems). The branch structure below is
    order-sensitive; documented but deliberately left untouched.
    """
    for key, val in json.iteritems():
        if key in self._meta['_fields']:
            try:
                # origvalue: the JSON form of the value as last loaded.
                origvalue = self._last_json_value[key]
            except KeyError:
                # First time we see this key from storage.
                # A local modification that differs from the incoming value
                # is a collision; otherwise accept the stored value.
                if key in self._modified and \
                    self._meta['_fields'][key].to_json(
                        getattr(self, key)) != val:
                    raise Document.DataCollisionError(
                        "It's _modified and not equal to current val")
                setattr(self, key, self._meta['_fields'][key].from_json(
                    self, val))
                try:
                    del self._modified[key]
                except KeyError:
                    pass
                self._last_json_value[key] = \
                    self._meta['_fields'][key].to_json(getattr(self, key))
            else:
                from_json_val = self._meta['_fields'][key].from_json(
                    self, val)
                # Incoming equals current: nothing to do.
                if from_json_val == getattr(self, key):
                    continue
                currentvalue = self._meta['_fields'][key].to_json(
                    getattr(self, key))
                # Incoming matches either baseline or current per the field's
                # own change test: nothing to do.
                if not self._meta['_fields'][key].have_values_changed(
                        origvalue, val) or\
                        not self._meta['_fields'][key].have_values_changed(
                        currentvalue, val):
                    continue
                # No local edits since load: accept the incoming value.
                if not self._meta['_fields'][key].have_values_changed(
                        currentvalue, origvalue):
                    setattr(self, key, self._meta['_fields'][key].from_json(
                        self, val))
                    self._last_json_value[key] =\
                        self._meta['_fields'][key].to_json(
                            getattr(self, key))
                    continue
                # Exact raw-value match is also acceptable.
                if val == origvalue or val == currentvalue:
                    continue
                # Local edits AND remote edits diverge: collision.
                raise self.DataCollisionError()
    # Remember the compare-and-swap token for the next store.
    self._cas_value = cas_value
    return self
def json_to_table_data(format_id, json, angle_to_gl):
    """Produce one rendered format-table row: defaults first, JSON overrides
    on top, then the derived function names and per-channel bit counts."""
    params = {"id": format_id, "fastCopyFunctions": "NoCopyFunctions"}
    for key, value in json.items():
        params[key] = value
    if "glInternalFormat" not in params:
        params["glInternalFormat"] = angle_to_gl[format_id]
    if "fboImplementationInternalFormat" not in params:
        params["fboImplementationInternalFormat"] = params["glInternalFormat"]
    if "componentType" not in params:
        params["componentType"] = angle_format.get_component_type(format_id)
    if "channels" not in params:
        params["channels"] = angle_format.get_channels(format_id)
    if "bits" not in params:
        params["bits"] = angle_format.get_bits(format_id)
    # Derived values.
    params["mipGenerationFunction"] = get_mip_generation_function(params)
    params["colorReadFunction"] = get_color_read_function(params)
    params["colorWriteFunction"] = get_color_write_function(params)
    # "0" for channels this format does not have.
    for channel in "ABDGLRS":
        if params["bits"] != None and channel in params["bits"]:
            params[channel] = params["bits"][channel]
        else:
            params[channel] = "0"
    params["namedComponentType"] = get_named_component_type(params["componentType"])
    if format_id == "B8G8R8A8_UNORM":
        params["fastCopyFunctions"] = "BGRACopyFunctions"
    return format_entry_template.format(**params)
def count_duplicates_within_source(json):
    """Count string occurrences per key for one source.

    json: {'source': <name>, key: [strings], ...}. Returns
    {key: {string: {'source': <name>, 'count': n}}}.

    Note: consumes the input dict (pops 'source'), as the original did.

    Fix: the original tracked seen strings in one set shared across ALL keys
    but stored counts per key, so a string appearing under two different keys
    raised KeyError on the second key; duplicates are now tracked per key.
    """
    source = json.pop('source')
    new_map = dict()
    for k, v in json.items():
        per_key = dict()
        for string in v:
            if string in per_key:
                per_key[string]['count'] += 1
            else:
                per_key[string] = {'source': source, 'count': 1}
        new_map[k] = per_key
    return new_map
def _strip_comments(self, json):
    """ Recursively strip comments out of loaded JSON files. The delimiter used to define a comment is defined in a global variable (JSON_COMMENT). Args: json: JSON content to strip comments from Returns: JSON content with comments removed """
    # NOTE(review): Py2-only (`unicode`, iteritems).
    # Scalars and None pass through untouched.
    if type(json) in [str, unicode, int, float, bool, type(None)]:
        return json
    if isinstance(json, list):
        # Filter out comment strings; recurse into the surviving elements.
        return [self._strip_comments(j) for j in json
                if not ((type(j) in [str, unicode]) and j.startswith(DelegateTools.JSON_COMMENT))]
    if isinstance(json, dict):
        # Drop comment keys; recurse into values. Unhandled types fall
        # through and return None implicitly.
        return dict(
            (k, self._strip_comments(v)) for (k,v) in json.iteritems()
            if k != DelegateTools.JSON_COMMENT)  # there would be a dict comprehension from 2.7
def json2yaml(json, level=0):
    """Render a nested mapping as indented YAML-like text.

    Dict values recurse one indent level deeper, list values become "- item"
    lines, everything else is printed inline after the key.

    Fixes vs. original: Py2-only iteritems(); scalars and list items are now
    coerced with str(), so non-string values (ints, None, ...) no longer
    raise TypeError on concatenation.
    """
    spaces = " "
    new_line = "\n"
    pad = spaces * level
    to_print = ""
    for key, value in json.items():
        to_print += pad + key + ":"
        if isinstance(value, dict):
            to_print += new_line + json2yaml(value, level + 1)
        elif isinstance(value, list):
            for item in value:
                to_print += new_line + pad + spaces + "- " + str(item)
            to_print += new_line
        else:
            to_print += " " + str(value) + new_line
    return to_print
def __json_load_helper(self, json):
    """Load one JSON object into the graph and return its node.

    The object's "id" (or a generated one) names the node; every other key
    becomes a link. Namespaced "<ns>:id" keys register external ids; list
    values fan out into multiple links; dict values recurse into sub-nodes.

    NOTE(review): Python 2 only (`unicode`, iteritems).
    """
    if "id" in json:
        node = self.add_node(unicode(json["id"]))
    else:
        node = self.add_node(self.__generate_id())
    for k, v in json.iteritems():
        # Namespaced key? ns_split_id returns (ns, predicate) or None.
        t = self.ns_split_id(k)
        if t is not None:
            ns, pred = t
            if pred == "id":
                #only IDs for now
                if self.ns.get(ns) is None:
                    self.add_ns_adapter(ns, NullAdapter)
                self.__add_ns_id_for_node(ns, v, self.ids[node])
        else:
            if type(v) == type([]):
                # List value: one link per element.
                for target in v:
                    if type(target) == type({}):
                        t = self.__json_load_helper(target)
                        self.add_link((node, k, t))
                    elif type(target) == type([]):
                        # Nested lists are not representable.
                        raise JSONLoadException()
                    else:
                        # ":"-prefixed strings are node references.
                        if target.startswith(":"):
                            target = self.add_node(target)
                        self.add_link((node, k, target))
            elif type(v) == type({}):
                # Dict value: recurse and link to the sub-node.
                t = self.__json_load_helper(v)
                self.add_link((node, k, t))
            elif v is None:
                continue
            elif type(v) == type(3):
                # Integers are stored as their unicode text.
                self.add_link((node, k, unicode(v)))
            else:
                if k == "id":
                    continue  # already consumed above
                if v.startswith(":"):
                    v = self.add_node(v)
                self.add_link((node, k, v))
    return node
def _parseJSONForAPI(self, json, excludeEmptyDates=False): """ Returns a simpler dictionary containing form: {'date' : {'total_hits': x, 'unique_hits': y}} """ cleanJSON = {} for date, hits in json.iteritems(): dayHits = {self._strTotalHits: 0, self._strUniqueHits: 0} if hits: # Piwik returns hits as a list of dictionaries, per date. for metrics in hits: dayHits[self._strTotalHits] += metrics[self._totalHitMetric] dayHits[self._strUniqueHits] += metrics[self._totalUniqueHits] elif excludeEmptyDates: continue cleanJSON[date] = dayHits return cleanJSON
def write_json_key_values(file, json, prefix = 'EB_CONFIG'):
    """Write shell `export` lines for every key/value of a (nested) JSON dict.

    Keys are sanitized (\\W -> _), upper-cased while a prefix applies, and
    joined to the accumulated prefix; nested dicts recurse with an extended
    prefix. Certain *_ENV prefixes drop the prefix so their children are
    exported under their raw names.

    NOTE(review): Python 2 only — `.encode("string_escape")` is a Py2 codec.
    The parameter `file` shadows the Py2 builtin of the same name.
    """
    for key, value in json.iteritems():
        # Sanitize the key into a valid shell identifier fragment.
        key = re.sub('\\W', '_', key)
        # These env-var sections export their children without any prefix.
        if prefix == 'EB_CONFIG_PLUGINS_RDS_ENV' or \
            prefix == 'EB_CONFIG_ENV' or prefix == 'EB_CONFIG_PHP_ENV':
            prefix = None
        elif prefix is not None:
            key = key.upper()
        if isinstance(value, dict):
            # Recurse with the key appended to the prefix chain.
            write_json_key_values(file, value, '_'.join((prefix, key)))
        elif key == 'ENV' and isinstance(value, list):
            # ENV lists are emitted verbatim, one export per element.
            for val in value:
                file.write('export %s\n' % (val))
        else:
            if value is None:
                value = ''
            # Escape backslashes/quotes for safe double-quoted shell output.
            value = str(value).encode("string_escape")
            if prefix is None:
                pdata = ''
            else:
                pdata = prefix + '_'
            file.write('export %s%s="%s"\n' % (pdata, key, value))
def resolve_ref(json, base=None):
    """return a json with resolved internal references
    only support local reference to the same json

    A '$ref' key replaces its entire enclosing object with the (recursively
    resolved) target of the JSON pointer, looked up against `base` (the
    document root, defaulting to the value first passed in).
    """
    if not base:
        base = json
    if type(json) is list:
        return [resolve_ref(item, base=base) for item in json]
    if type(json) is dict:
        resolved = {}
        for key, value in json.items():
            if key == '$ref':
                # The reference supersedes every sibling key.
                return resolve_ref(jsonpointer.resolve_pointer(base, value),
                                   base=base)
            resolved[key] = resolve_ref(value, base=base)
        return resolved
    # Scalars resolve to themselves.
    return json
def json_to_series(json, field, func):
    """Aggregate each track's records with `func` over `field` and return a
    pandas Series indexed by integer track id, in ascending id order."""
    pairs = [(int(track), aggregate_vals(records, field, func))
             for track, records in json.items()]
    pairs.sort(key=lambda pair: pair[0])
    index, values = zip(*pairs)
    return pd.Series(values, index=index)
def __build_message(self, json):
    """Build a mimetools.Message whose headers come from the JSON mapping
    (one "key: value" line per entry)."""
    header_lines = ["%s: %s" % (name, value) for name, value in json.items()]
    return mimetools.Message(StringIO("\n".join(header_lines)))
def json_to_table_data(format_id, json, angle_to_gl):
    """Build the full template-parameter dict for one ANGLE format (defaults,
    JSON overrides, derived functions, channel bits, pixel size and the
    block/fixed/scaled/sRGB/YUV flags) and render the table row."""
    table_data = ""  # NOTE(review): unused local, kept as-is
    parsed = {
        "id": format_id,
        "fastCopyFunctions": "NoCopyFunctions",
    }
    # JSON-supplied values override the defaults above.
    for k, v in json.iteritems():  # Py2-only API
        parsed[k] = v
    if "glInternalFormat" not in parsed:
        parsed["glInternalFormat"] = angle_to_gl[format_id]
    if "fboImplementationInternalFormat" not in parsed:
        parsed["fboImplementationInternalFormat"] = parsed["glInternalFormat"]
    if "componentType" not in parsed:
        parsed["componentType"] = angle_format.get_component_type(format_id)
    if "channels" not in parsed:
        parsed["channels"] = angle_format.get_channels(format_id)
    if "bits" not in parsed:
        parsed["bits"] = angle_format.get_bits(format_id)
    # Derived values.
    parsed["mipGenerationFunction"] = get_mip_generation_function(parsed)
    parsed["colorReadFunction"] = get_color_read_function(parsed)
    parsed["colorWriteFunction"] = get_color_write_function(parsed)
    # Per-channel bit counts, "0" for absent channels.
    for channel in angle_format.kChannels:
        if parsed["bits"] != None and channel in parsed["bits"]:
            parsed[channel] = parsed["bits"][channel]
        else:
            parsed[channel] = "0"
    parsed["namedComponentType"] = get_named_component_type(
        parsed["componentType"])
    if format_id == "B8G8R8A8_UNORM":
        parsed["fastCopyFunctions"] = "BGRACopyFunctions"
    is_block = format_id.endswith("_BLOCK")
    pixel_bytes = 0
    if is_block:
        # Compressed formats must declare their block size in the data file.
        assert 'blockPixelBytes' in parsed, \
            'Compressed format %s requires its block size to be specified in angle_format_data.json' % \
            format_id
        pixel_bytes = parsed['blockPixelBytes']
    else:
        sum_of_bits = 0
        for channel in angle_format.kChannels:
            sum_of_bits += int(parsed[channel])
        pixel_bytes = ceil_int(sum_of_bits, 8)
    parsed["pixelBytes"] = pixel_bytes
    parsed["componentAlignmentMask"] = get_component_alignment_mask(
        parsed["channels"], parsed["bits"])
    parsed["isBlock"] = bool_str(is_block)
    parsed["isFixed"] = bool_str("FIXED" in format_id)
    parsed["isScaled"] = bool_str("SCALED" in format_id)
    parsed["isSRGB"] = bool_str("SRGB" in format_id)
    # For now we only look for the 'PLANE' substring in format string. Expand
    # this condition when adding support for YUV formats that have different
    # identifying markers.
    parsed["isYUV"] = bool_str("PLANE" in format_id)
    parsed["vertexAttribType"] = "gl::VertexAttribType::" + get_vertex_attrib_type(
        format_id)
    return format_entry_template.format(**parsed)
filelist.append(line.strip()) print " > found ", len(filelist), " files in the run range" ################### mask the JSON (if any provided) by the run range skimmedJSON = {} if args.jsonFile: print "... using JSON file: ", args.jsonFile try: json = eval(open(args.jsonFile).read()) except: print "** Error: json", args.jsonFile, "not a valid JSON" sys.exit() print " > doing intersection with run range ", args.runrange[ 0], args.runrange[1] skimmedJSON = dict([ (key, value) for key, value in json.iteritems() if key >= args.runrange[0] and key <= args.runrange[1] ]) # print skimmedJSON if len(skimmedJSON) == 0: print "** Error: json ", args.jsonFile, " has no intersection with run range", args.runrange[ 0], args.runrange[1] sys.exit() ################### prepare input to each job passing as command line parameters # save JSON as txt if args.jsonFile: jsonName = args.tag + "/json_" + args.runrange[ 0] + "_" + args.runrange[1] + ".txt" fJSON = open(jsonName, 'w') fJSON.write('{')
def json_to_table_data(format_id, json, angle_to_gl):
    """Build the template-parameter dict for one ANGLE format — defaults,
    JSON overrides, derived functions, channel bits, pixel size and flags —
    then render the table row."""
    table_data = ""  # NOTE(review): unused local, kept as-is
    parsed = {
        "id": format_id,
        "fastCopyFunctions": "NoCopyFunctions",
    }
    # JSON values override the defaults.
    for k, v in json.iteritems():  # Py2-only API
        parsed[k] = v
    if "glInternalFormat" not in parsed:
        parsed["glInternalFormat"] = angle_to_gl[format_id]
    if "fboImplementationInternalFormat" not in parsed:
        parsed["fboImplementationInternalFormat"] = parsed["glInternalFormat"]
    if "componentType" not in parsed:
        parsed["componentType"] = angle_format.get_component_type(format_id)
    if "channels" not in parsed:
        parsed["channels"] = angle_format.get_channels(format_id)
    if "bits" not in parsed:
        parsed["bits"] = angle_format.get_bits(format_id)
    # Derived values.
    parsed["mipGenerationFunction"] = get_mip_generation_function(parsed)
    parsed["colorReadFunction"] = get_color_read_function(parsed)
    parsed["colorWriteFunction"] = get_color_write_function(parsed)
    # Per-channel bit counts, "0" for absent channels.
    for channel in angle_format.kChannels:
        if parsed["bits"] != None and channel in parsed["bits"]:
            parsed[channel] = parsed["bits"][channel]
        else:
            parsed[channel] = "0"
    parsed["namedComponentType"] = get_named_component_type(
        parsed["componentType"])
    if format_id == "B8G8R8A8_UNORM":
        parsed["fastCopyFunctions"] = "BGRACopyFunctions"
    is_block = format_id.endswith("_BLOCK")
    pixel_bytes = 0
    if is_block:
        # Compressed formats must declare their block size in the data file.
        assert 'blockPixelBytes' in parsed, \
            'Compressed format %s requires its block size to be specified in angle_format_data.json' % \
            format_id
        pixel_bytes = parsed['blockPixelBytes']
    else:
        # NOTE(review): Py2 integer division intended here.
        sum_of_bits = 0
        for channel in angle_format.kChannels:
            sum_of_bits += int(parsed[channel])
        pixel_bytes = sum_of_bits / 8
    parsed["pixelBytes"] = pixel_bytes
    parsed["componentAlignmentMask"] = get_component_alignment_mask(
        parsed["channels"], parsed["bits"])
    parsed["isBlock"] = "true" if is_block else "false"
    parsed["isFixed"] = "true" if "FIXED" in format_id else "false"
    parsed["isScaled"] = "true" if "SCALED" in format_id else "false"
    parsed["vertexAttribType"] = "gl::VertexAttribType::" + get_vertex_attrib_type(
        format_id)
    return format_entry_template.format(**parsed)
def get_timestamps(self, json, itemname):
    """Walk a nested JSON structure collecting version timestamps into the
    module-level `jsonArray` (deduplicated), and return that list.

    NOTE(review): Python 2 only (`unicode`, iteritems). Relies on the
    module-level globals `ts`, `newjson` and `jsonArray`; results therefore
    accumulate across calls — presumably intentional, confirm with callers.
    """
    global ts
    global newjson
    # Strings and lists terminate the walk at this level and are handed back
    # to the caller for inspection.
    if type(json) == unicode:
        return json
    elif type(json) == list:
        return json
    else:
        for key, value in json.iteritems():
            if "Item" in key:
                # "<Name>Item" entries carry their history under
                # "<Name>Versions"; record each version's timestamp.
                name = key.replace('Item', '')
                version = name + "Versions"
                for versions in value[version]:
                    data = versions["data"]
                    ts = versions["timestamp"]
                    newjson = {}
                    newjson.update({"data": {itemname: {}}})
                    newjson.update({"timestamp": ts})
                    if newjson not in jsonArray:
                        jsonArray.append(newjson)
            else:
                if type(value) == unicode:
                    s = self.get_timestamps(value, itemname)
                elif type(value) == list:
                    # Lists may contain dicts with their own "timestamp".
                    s = self.get_timestamps(value, itemname)
                    for i in range(0, len(s)):
                        if type(s[i]) != unicode:
                            for k, v in s[i].iteritems():
                                if k == "timestamp":
                                    newjson = {}
                                    newjson.update(
                                        {"data": {
                                            itemname: {}
                                        }})
                                    newjson.update({"timestamp": v})
                                    if newjson not in jsonArray:
                                        jsonArray.append(newjson)
                elif type(value) == dict:
                    # Recurse into nested objects.
                    for k, v in value.iteritems():
                        s = self.get_timestamps(v, itemname)
    return jsonArray
# Build a ROOT TTree cut string selecting the (run, lumi) pairs listed in a
# certification JSON file, e.g. "( run==1 && ( ( lumi>=1 && lumi<=3) ) )".
# NOTE(review): Python 2 script (`print` statements, iteritems).
import json, sys
from pprint import pprint

if len(sys.argv) != 2 :
    print sys.argv[0], "running with arglist:", str(sys.argv), \
        "\ncorrect syntax :", sys.argv[0], "INPUT_JSON_FILE"
    sys.exit(100)

fileData=open(sys.argv[1]).read()
# NOTE(review): rebinding `json` to the parsed dict shadows the json module
# for the rest of the script.
json = json.loads(fileData)

tCutString = ""
isFirstRun = True
# One "( run==R && ( ranges ) )" clause per run, OR-ed together.
for run, lumiRanges in json.iteritems() :
    if not isFirstRun :
        tCutString += " || "
    isFirstRun = False
    tCutString += "( run==" + str(run) + " && ( "
    isFirstRange = True
    # One inclusive lumi-range predicate per [lo, hi] pair, OR-ed together.
    for lumiRange in lumiRanges :
        if not isFirstRange :
            tCutString += " || "
        isFirstRange = False
        tCutString += "( lumi>=" + str(lumiRange[0]) + " && lumi<=" + str(lumiRange[1]) + ")"
    tCutString += " ) ) "
def showLetter(letter):
    """List every show filed under the given letter, then close the
    directory listing."""
    shows = getJson(apiUrl + "?letter=" + letter)
    for name, link in sorted(shows.items()):
        addDirectoryItem(name, {"action": "show", "link": link})
    xbmcplugin.endOfDirectory(thisPlugin)