def __process_json_command(self, json, session):
    """Dispatch one decoded JSON command from a player connection.

    ``json`` is the command dict (the name shadows the stdlib module);
    ``session`` is accepted but unused here.  Returns a JSON-serialisable
    reply, or raises for unknown/unacceptable commands.
    """
    # Default the repeat count before dispatching.
    if "times" not in json:
        json["times"] = 1
    # "mail" drains the pending notification queue; it is answered even
    # after the player has lost.
    if json["cmd"] == "mail":
        value = self.player_notifications
        self.player_notifications = []
        return value
    if self.lost:
        return {"error": "You have lost the game."}
    # Resolve the addressed nanite by UUID, when one is given.
    nanite = None
    if "nanite" in json:
        nanite = [n for n in self.nanites if n.globalUUID == json["nanite"]][0]
    if json["cmd"] == "message":
        # Broadcast to every player in the game.
        for player in self.game.players:
            player.send_or_schedule({"msg": json["msg"], "special": "message",
                                     "player": player.globalUUID})
        return {}
    elif json["cmd"] == "move":
        return nanite.immediate_or_schedule(nanite.move, json["times"], 1, json["dir"])
    elif json["cmd"] == "mine":
        return nanite.immediate_or_schedule(nanite.mine, json["times"], 1)
    elif json["cmd"] == "duplicate":
        return nanite.immediate_or_schedule(nanite.duplicate, json["times"], 1, json["dir"])
    elif json["cmd"] == "search":
        return nanite.immediate_or_schedule(nanite.search, json["times"], 2, json["resource"])
    elif json["cmd"] == "clear":
        return nanite.immediate_or_schedule(nanite.clear, 1, 0)
    elif json["cmd"] == "count":
        count = -1
        if json["resource"] == "plutonium":
            count = self.plutonium
        elif json["resource"] == "nanomaterial":
            count = self.nanomaterial
        elif json["resource"] == "bandwidth":
            count = self.bandwidth
        # Counting itself costs bandwidth.
        self.decrement_bandwidth()
        return {"count": count, "special": "count", "resource": json["resource"]}
    elif json["cmd"] == "bye":
        self.session.player = None
        self.session = None
        return {"special": "bye"}
    elif json["cmd"] == "ready":
        self.ready = True
        self.game.check_for_tick()
        return {}
    elif self.game.debug:
        # Debug-only introspection commands.
        if json["cmd"] == "listNanites":
            return {"nanites": map(lambda n: n.json(), self.nanites)}
        if json["cmd"] == "listResources":
            return {"nanomaterial": self.nanomaterial,
                    "bandwidth": self.bandwidth,
                    "plutonium": self.plutonium}
        if json["cmd"] == "debugMap":
            return {"map": map(lambda tile: tile.toJson(),
                               self.game.map.gen_area_tiles())}
    raise Exception("Command not known or not acceptable now: %s" % json["cmd"])
def post(self):
    """Handle a user-authentication POST.

    Validates the JSON body carries 'email' and 'password', then looks
    the account up asynchronously via the Tornado coroutine machinery.
    """
    self.check_json_header()
    json = escape.json_decode(self.request.body)
    # BUG FIX: the original tested ``and`` (only rejected when *both*
    # fields were missing) and fell through after send_error, which then
    # raised KeyError below.  Require both fields and stop on failure.
    if 'email' not in json or 'password' not in json:
        self.send_error(httplib.UNAUTHORIZED)
        return
    email = json['email']
    pass_hash = json['password']
    account = yield user.Account.find_by_credentials(
        self.db, email, pass_hash, self._on_auth_complete)
def _getDetailedProgramFromJson(self, program, json):
    """Populate the detail fields of ``program`` from one EPG JSON record.

    Optional free-text fields default to ""; genre/person lists and the
    NICAM warning bitmask are decoded; unhandled keys are logged.
    Returns the (mutated) program.
    """
    program.description = _lineFilter(json["description"]) if "description" in json else ""
    program.subtitle = _lineFilter(json["episodeTitle"]) if "episodeTitle" in json else ""
    program.aspectRatio = _lineFilter(json["aspectratio"]) if "aspectratio" in json else ""
    program.parentalRating = _lineFilter(json["nicamParentalRating"]) if "nicamParentalRating" in json else ""
    program.detailed = True
    program.genres = self._getAllGenresFromJson(program.id, json)
    program.actors = self._getAllPersonsFromJson(program.id, json, "actors", EpgProgramActor)
    program.directors = self._getAllPersonsFromJson(program.id, json, "directors", EpgProgramDirector)
    program.presenters = self._getAllPersonsFromJson(program.id, json, "presenters", EpgProgramPresenter)
    # "nicamWarning" is a bitmask of Dutch NICAM content warnings.
    program.ratings = []
    if "nicamWarning" in json:
        rating = json["nicamWarning"]
        for bit, label in ((1, "angst"), (2, "seks"), (4, "geweld"), (8, "drugs"),
                           (16, "grof_taalgebruik"), (32, "discriminatie")):
            if rating & bit:
                program.ratings.append(label)
    # print if there are keys not yet handled by this class
    for key in json.keys():
        if key not in ["start", "end", "id", "name", "description", "episodeTitle",
                       "actors", "directors", "genres", "presenters", "aspectratio",
                       "nicamWarning", "nicamParentalRating", "disableRestart",
                       "restartPriceTier", "images"]:
            self._logger.warning("Unknown json key: %s: %s" % (key, json[key]))
    return program
def from_json(self, json):
    """Copy known address fields from ``json`` into this mapping.

    ``json`` may be None (no-op); unknown keys are ignored.  The source
    key 'zip' is stored under 'zip_code'.
    """
    if json is None:
        return
    # Fields copied under the same name.
    for field in ('street', 'city', 'areas', 'region', 'state', 'postal_code',
                  'display_name', 'country_code', 'qs'):
        if field in json:
            self[field] = json[field]
    # Renamed field.
    if 'zip' in json:
        self['zip_code'] = json['zip']
def json_wrapper(resp):
    """Normalise an HTTP response into a dict carrying 'code' and 'message'.

    Returns {} for a missing response; aborts with 500 when the body
    cannot be parsed as JSON.
    """
    if resp is None:
        return {}
    try:
        json = resp.json()
        if 'code' not in json:
            json['code'] = resp.status_code
        if 'message' not in json:
            # Prefer an explicit error message from the payload.
            json['message'] = json.get('error', resp.reason)
        return json
    except Exception:
        # NOTE(review): narrowed from a bare ``except:``; also fixed the
        # "paresd" typo in the log message.
        utils.logger.warning('response can not be parsed to json')
        abort(500)
def put_request(self, alias, uri, data=None, json=None, params=None, files=None,
                headers=None, allow_redirects=None, timeout=None):
    """ Send a PUT request on the session object found using the given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded and sent as PUT data if
    data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect
    following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    if json:
        # Best-effort: coerce every value to int; on any failure keep the
        # payload exactly as supplied.
        try:
            json = {str(key): int(val) for key, val in json.items()}
        except Exception:
            pass
        if "Id" in json:
            json['Id'] = int(json['Id'])
    if json:
        if "roles" in json:
            # The service expects roles as a list.
            json["roles"] = [json["roles"]]
    response = self._body_request("put", session, uri, data, json, params,
                                  files, headers, redir, timeout)
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    print('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def check_common(self, json_data_array):
    """Decode and validate a list of raw JSON payloads.

    Each entry must decode, carry 'type' (known to self.m_table_type)
    and 'data'.  Returns (True, decoded_list) on success, otherwise
    (False, reason).
    """
    result = []
    for raw in json_data_array:
        status, json = self.json_decode(raw)
        if not status:
            return status, json
        if "type" not in json:
            return False, "check key error [type] key not found"
        if "data" not in json:
            return False, "check key error [data] key not found"
        if json["type"] not in self.m_table_type:
            # BUG FIX: the original formatted ``json.type``, which is an
            # AttributeError on a dict; use the 'type' value instead.
            return False, "check common not found type=%s" % json["type"]
        result.append(json)
    return True, result
def json_for_graph(self, g):
    """
    Pass in a rdflib.Graph and get back a chunk of JSON using
    the Talis JSON serialization for RDF:
    http://n2.talis.com/wiki/RDF_JSON_Specification
    """
    json = {}
    # go through all the triples in the graph
    for s, p, o in g:
        # create the property dict / object list lazily per combo
        objects = json.setdefault(s, {}).setdefault(p, [])
        # determine the value dictionary for the object
        v = {'value': unicode(o)}
        if isinstance(o, rdflib.URIRef):
            v['type'] = 'uri'
        elif isinstance(o, rdflib.BNode):
            v['type'] = 'bnode'
        elif isinstance(o, rdflib.Literal):
            v['type'] = 'literal'
            if o.language:
                v['lang'] = o.language
            if o.datatype:
                v['datatype'] = unicode(o.datatype)
        # add the triple
        objects.append(v)
    return simplejson.dumps(json, indent=2)
def addVMtoManagedList(json):
    """Register/refresh a VM (keyed by MAC address) in the managed table.

    ``json`` carries 'machine' (dist name tokens + arch), 'hostname',
    'network' ([ip, mac]) and optionally 'template'.  A first-seen MAC
    also triggers a usage-info backfill.
    """
    global vmmInfos
    global lastUsageInfos
    dist_name = ' '.join(json['machine'][:-1])
    dist_arch = json['machine'][-1]
    host_name = json['hostname']
    # Template name is optional.
    template_name = json['template'] if 'template' in json else "Undefined"
    ipaddr = json['network'][0]
    hwaddr = json['network'][1]
    # A MAC we have not seen before forces a usage-info refresh.  (The
    # original also stored a throwaway {} that was immediately replaced.)
    bForcedUpdate = hwaddr not in vmmInfos
    vminfo = {'VMInfo': [host_name, ipaddr, hwaddr, dist_name, dist_arch,
                         template_name],
              'lastupdate': time.time()}
    vmmInfos[hwaddr] = vminfo
    if bForcedUpdate:
        usageInfos = retrieveAllUsageInfo(hwaddr)
        updateUsageInfos(datetime.datetime.now(), usageInfos)
        lastUsageInfos[hwaddr] = usageInfos[hwaddr]
def time_day(json):
    """Return (istdt_string, hour_of_day) from json['source'].

    The hour defaults to "N/A" when 'istdt' is absent; returns None
    (implicitly) when there is no 'source' section at all.
    """
    time_day = "N/A"
    if 'source' in json:
        source = json['source']
        if 'istdt' in source:
            # 'istdt' ends with an 'HH:MM:SS' token; take the hour.
            time_day = int(source['istdt'].split()[-1].split(":")[0])
        # NOTE(review): raises KeyError when 'source' exists without
        # 'istdt' — same as the original.
        return (source['istdt'], time_day)
def handle(self, *args, **options):
    """Backfill BillTitle rows for every distinct post-108th-congress
    bill by querying the OpenCongress API.

    ``options['begin_at']`` allows resuming a previous run.
    """
    print('Starting...')
    unique_bills = Bill.objects.exclude(bill_type__isnull=True) \
        .filter(congress_no__gt=108) \
        .values('congress_no', 'bill_type', 'bill_no').distinct()
    unique_bills_count = unique_bills.count()
    for i, bill in enumerate(unique_bills):
        print(i + 1)
        # Skip ahead when resuming.
        if i + 1 < int(options['begin_at']):
            continue
        url = self.format_url('http://www.opencongress.org/api/bills',
                              congress=bill['congress_no'],
                              type=bill['bill_type'],
                              number=bill['bill_no'])
        print("{0}/{1}: {2}".format(i + 1, unique_bills_count, url))
        json = self.get_url_json(url)
        title = ''
        if 'bills' in json:
            titles = json['bills'][0]['bill_titles']
            title = self.get_right_title(titles)
        print('------> {0}'.format(title))
        BillTitle.objects.create(congress_no=bill['congress_no'],
                                 bill_type=bill['bill_type'],
                                 bill_no=bill['bill_no'],
                                 title=title)
def to_XMLtree(self, g):
    """
    Pass in a rdflib.Graph and get back a XML tree (a "diff" element).

    NOTE(review): the serialisation loop at the end looks broken — it
    iterates ``json[s]`` using the loop variable ``s`` left over from
    the triples loop (instead of iterating ``json``), and the inner
    loops then iterate the *strings* ``s``/``p`` character by character.
    Left untouched; confirm the intent against callers before fixing.
    """
    json = {}
    # go through all the triples in the graph
    for s, p, o in g:
        # initialize property dictionary if we've got a new subject
        if not json.has_key(s):
            json[s] = {}
        # initialize object list if we've got a new subject-property combo
        if not json[s].has_key(p):
            json[s][p] = []
        # determine the value dictionary for the object
        v = {'value': unicode(o)}
        if isinstance(o, rdflib.URIRef):
            v['type'] = 'uri'
        elif isinstance(o, rdflib.BNode):
            v['type'] = 'bnode'
        elif isinstance(o, rdflib.Literal):
            v['type'] = 'literal'
            if o.language:
                v['lang'] = o.language
            if o.datatype:
                v['datatype'] = unicode(o.datatype)
        # add the triple
        json[s][p].append(v)
    diff = ET.Element("diff")
    # NOTE(review): ``s`` here is the last subject seen above, so this
    # iterates that subject's property dict — probably meant ``json``.
    for s in json[s]:
        element = ET.Element(s)
        # NOTE(review): ``s`` is a string here, so this walks characters.
        for p in s:
            for o in p:
                element.set(p, o)
        diff.append(element)
    return diff
def _get_json(self, url, ignore_errors=None):
    """GET a (possibly paginated) Foreman API endpoint.

    Merges the 'results' lists across pages (250 per page).  Payloads
    without 'results', or whose 'results' is a dict, are returned as-is.
    HTTP status codes listed in ``ignore_errors`` end paging silently.
    """
    page = 1
    results = []
    while True:
        ret = requests.get(url,
                           auth=HTTPBasicAuth(self.foreman_user, self.foreman_pw),
                           verify=self.foreman_ssl_verify,
                           params={'page': page, 'per_page': 250})
        if ignore_errors and ret.status_code in ignore_errors:
            break
        ret.raise_for_status()
        json = ret.json()
        # Non-paginated endpoints: hand back the raw payload.
        if 'results' not in json:
            return json
        if isinstance(json['results'], dict):
            return json['results']
        results = results + json['results']
        if len(results) >= json['total']:
            break
        page += 1
    return results
def addVMtoManagedList(json):
    """Register/refresh a VM (keyed by MAC address) in the managed table.

    ``json`` carries 'machine' (dist name tokens + arch), 'hostname',
    'network' ([ip, mac]) and optionally 'template'.  A first-seen MAC
    also triggers a usage-info backfill.
    """
    global vmmInfos
    global lastUsageInfos
    dist_name = ' '.join(json['machine'][:-1])
    dist_arch = json['machine'][-1]
    host_name = json['hostname']
    # Template name is optional.
    template_name = json['template'] if 'template' in json else "Undefined"
    ipaddr = json['network'][0]
    hwaddr = json['network'][1]
    # A MAC we have not seen before forces a usage-info refresh.  (The
    # original also stored a throwaway {} that was immediately replaced.)
    bForcedUpdate = hwaddr not in vmmInfos
    vminfo = {
        'VMInfo': [host_name, ipaddr, hwaddr, dist_name, dist_arch,
                   template_name],
        'lastupdate': time.time()
    }
    vmmInfos[hwaddr] = vminfo
    if bForcedUpdate:
        usageInfos = retrieveAllUsageInfo(hwaddr)
        updateUsageInfos(datetime.datetime.now(), usageInfos)
        lastUsageInfos[hwaddr] = usageInfos[hwaddr]
def _getAllPersonsFromJson(self, id_, json, key, keyClass):
    """Parse the person list stored under ``key`` (e.g. 'actors') into
    ``keyClass`` records; entries the per-person parser rejects are
    skipped.  Returns [] when the key is absent."""
    programPersons = []
    if key in json:
        for person in json[key]:
            programPerson = self._getPersonFromJson(id_, person, key, keyClass)
            if programPerson:
                programPersons.append(programPerson)
    return programPersons
def _getAllGenresFromJson(self, id_, json):
    """Parse json['genres'] into genre records; entries the per-genre
    parser rejects are skipped.  Returns [] when there are no genres."""
    programGenres = []
    if "genres" in json:
        for genre in json["genres"]:
            programGenre = self._getGenreFromJson(id_, genre)
            if programGenre:
                programGenres.append(programGenre)
    return programGenres
def _getAllGenresFromJson(self, id_, json):
    """Parse json['genres'] into genre records; entries the per-genre
    parser rejects are skipped.  Returns [] when there are no genres."""
    programGenres = []
    if "genres" in json:
        for genre in json["genres"]:
            programGenre = self._getGenreFromJson(id_, genre)
            if programGenre:
                programGenres.append(programGenre)
    return programGenres
def findAllReferences(json):
    """Recursively walk ``json`` and record every dict carrying a
    'reference' key into the module-level ``references`` map.

    NOTE: isinstance() replaces the original ``type(...) is`` checks —
    identical for JSON-decoded data (plain dict/list).
    """
    if isinstance(json, dict):
        if "reference" in json:
            references[json["reference"]] = json
        for value in json.values():
            findAllReferences(value)
    elif isinstance(json, list):
        for value in json:
            findAllReferences(value)
def _getAllPersonsFromJson(self, id_, json, key, keyClass):
    """Parse the person list stored under ``key`` (e.g. 'actors') into
    ``keyClass`` records; entries the per-person parser rejects are
    skipped.  Returns [] when the key is absent."""
    programPersons = []
    if key in json:
        for person in json[key]:
            programPerson = self._getPersonFromJson(id_, person, key, keyClass)
            if programPerson:
                programPersons.append(programPerson)
    return programPersons
def getCandidates(json):
    """Return every local/remote ICE candidate entry from a WebRTC stats
    blob; [] when there is no 'stats' section.

    Uses .values() instead of the Py2-only .itervalues() — it iterates
    the same entries.
    """
    ret = []
    if 'stats' not in json:
        return ret
    for stat in json['stats'].values():
        if stat['type'] in ("localcandidate", "remotecandidate"):
            ret.append(stat)
    return ret
def extract_authors(json):
    """Return [(givenname, surname), ...] from a res:resource record's
    bibo:authorList; non-dict entries are skipped, missing name parts
    become None.  Returns [] when there is no author list."""
    json = json["res:resource"]
    if "bibo:authorList" not in json:
        return []
    return [(d.get("foaf:givenname"), d.get("foaf:surname"))
            for d in json["bibo:authorList"].values()
            if isinstance(d, dict)]
def loadCancelBuySell(self, json):
    """Parse a cancel-order request: requires an integer 'id'.

    Returns the downstream processing result, or an error string.
    """
    try:
        if "id" not in json:
            return "No query id"
        self.id = int(json["id"])
        return RequestOperations.processReq(self)
    except Exception as e:
        # print_exc() prints the traceback itself and returns None.
        print(traceback.print_exc())
        return str(e)
def nicerow(self, json):
    """Flatten one GitHub event: scalar columns copied verbatim, nested
    objects JSON-encoded via self.encoder (None when absent)."""
    row = {}
    for att in ('id', 'type', 'public', 'created_at'):
        row[att] = json[att]
    for jatt in ('actor', 'repo', 'org', 'payload'):
        row[jatt] = self.encoder.encode(json[jatt]) if jatt in json else None
    return row
def __init__(self, name, json, parent, port_type):
    """Parse one property definition.

    ``json`` is either a bare type string, or a dict with 'type' and
    optional 'source' / 'default' / 'fixed-value' entries whose template
    strings may reference other properties (collected into
    ``referenced_prop_values``).
    """
    self.name = name
    self.qualified_name = parent + "." + self.name
    self.json = json
    self.port_type = port_type
    self.referenced_prop_values = []  # list of other properties referenced
    if isinstance(json, str) or isinstance(json, unicode):
        # Shorthand form: the value *is* the type name.
        self.prop_type = json
    else:
        if not isinstance(json, dict) or "type" not in json:
            raise ParseException("Property %s has invalid definition. Value was %s"
                                 % (self.qualified_name, json.__repr__()))
        self.prop_type = json["type"]
        if "source" in json:
            self.referenced_prop_values.append(json["source"])
        if "default" in json and _is_string(json["default"]):
            self.referenced_prop_values.extend(
                _find_prop_refs_in_template_string(json["default"]))
        if "fixed-value" in json and _is_string(json["fixed-value"]):
            self.referenced_prop_values.extend(
                _find_prop_refs_in_template_string(json["fixed-value"]))
def call(self, command):
    """Run ``command`` and return its 'result' field.

    Returns the whole payload when no 'result' key is present, and None
    when the underlying call itself produced nothing.
    """
    json = self.call_fullresult(command)
    if json is None:
        return
    if 'result' in json:
        return json['result']
    # TODO: or raise an exception?
    return json
def parseStructure(self, json):
    """Hydrate a raw route-JSON response in place.

    Stage/step endpoints become MMLatLon, addresses become MMAddress.
    Geocoding failures are normalised under 'geocoding_errors' with
    MMAddress/MMLocation values.  Returns the same (mutated) dict.
    """
    if 'stages' in json:
        # json['bounds'] = MMBounds(json['bounds'])  # disabled upstream
        for stage in json['stages']:
            # the start_point and end_point of each stage become MMLatLon
            stage['start_point'] = MMLatLon(float(stage['start_point']['lat']),
                                            float(stage['start_point']['lon']))
            stage['end_point'] = MMLatLon(float(stage['end_point']['lat']),
                                          float(stage['end_point']['lon']))
            # the StartAddress and EndAddress become MMAddress objects
            stage['start_address'] = MMAddress(stage['start_address'])
            stage['end_address'] = MMAddress(stage['end_address'])
            # per-stage bounds conversion is disabled upstream
            # the start_point and end_point of each step become MMLatLon
            if stage['steps']:
                for step in stage['steps']:
                    step['start_point'] = MMLatLon(float(step['start_point']['lat']),
                                                   float(step['start_point']['lon']))
                    step['end_point'] = MMLatLon(float(step['end_point']['lat']),
                                                 float(step['end_point']['lon']))
    elif 'errors' in json:
        json['error_code'] = 'MM_ROUTE_GEOCODING_ERRORS'
        json['geocoding_errors'] = json['errors']
        for error in json['geocoding_errors']:
            error['address'] = MMAddress(error['address'])
            if error['results']:
                locations = []
                for result in error['results']:
                    location = MMLocation()
                    location.from_json(result)
                    locations.append(location)
                error['results'] = locations
    return json
def _tsv(json):
    """Render one record as a UTF-8 encoded TSV row in
    util.DWC_HEADER_LIST column order; missing fields become empty
    strings, present ones are right-stripped."""
    json['datasource_and_rights'] = json.get('url')
    header = util.DWC_HEADER_LIST
    values = []
    for x in header:
        if x in json:
            values.append(unicode(json[x]).rstrip())
        else:
            values.append(u'')
    return u'\t'.join(values).encode('utf-8')
def write_profile(worksheet, row, json, excel_number_format):
    """Write one profile row: module-level filename/date/run labels
    followed by the numeric PROFILE_FIELDS present in ``json``.

    NOTE(review): ``col`` only advances for fields that are present, so a
    missing field shifts later columns left — confirm that is intended.
    """
    worksheet.write_string(row, 0, filename)
    date, run = parseDateAndRun(filename)
    worksheet.write_string(row, 1, date)
    worksheet.write_string(row, 2, run)
    col = 3
    for i in PROFILE_FIELDS:
        if i in json:
            worksheet.write_number(row, col, json[i], excel_number_format)
            col = col + 1
def tsv(self):
    """Render self.json as a tab-separated row in util.DWC_HEADER_LIST
    column order; absent fields become empty strings."""
    json = self.json
    json['datasource_and_rights'] = json.get('url')
    header = util.DWC_HEADER_LIST
    values = []
    for x in header:
        if x in json:
            values.append(unicode(json[x]))
        else:
            values.append('')
    return '\t'.join(values)  # .encode('utf-8')
def write_profile(worksheet, row, json, excel_number_format):
    """Write one profile row: module-level filename/date/run labels
    followed by the numeric PROFILE_FIELDS present in ``json``.

    NOTE(review): ``col`` only advances for fields that are present, so a
    missing field shifts later columns left — confirm that is intended.
    """
    worksheet.write_string(row, 0, filename)
    date, run = parseDateAndRun(filename)
    worksheet.write_string(row, 1, date)
    worksheet.write_string(row, 2, run)
    col = 3
    for i in PROFILE_FIELDS:
        if i in json:
            worksheet.write_number(row, col, json[i], excel_number_format)
            col = col + 1
def verify_acl_config(json, acl_name):
    """Print whether ``acl_name`` appears in an NX-OS 'result' payload.

    Output only; always returns None.  A null result means the ACL is
    not configured; a malformed body prints a diagnostic.
    """
    if "result" in json:
        result = json.get("result")
        if result is None:
            print("ACL " + acl_name + " is not configured")
        else:
            try:
                found_acl = result['body']['TABLE_ip_ipv6_mac']['ROW_ip_ipv6_mac']['acl_name']
                if found_acl == acl_name:
                    print("ACL " + acl_name + " was configured")
            except TypeError:
                print("Unexpected JSON output")
def extract_year(json):
    """Extract a publication year from json['issued']['raw'].

    Tries the module-level ``year_re`` regex first, then a dateutil
    parse; returns "" when nothing usable is present.
    """
    if "issued" not in json:
        return ""
    if "raw" not in json["issued"]:
        return ""
    regexed = year_re.findall(json["issued"]["raw"])
    if len(regexed) > 0:
        return regexed[0]
    try:
        return parse(json["issued"]["raw"]).year
    except Exception:
        # NOTE(review): narrowed from a bare ``except:``.
        return ""
def getTweets(tweetCriteria, refreshCursor='', receiveBuffer=None, bufferLength=100, proxy=None):
    """Scrape tweets matching ``tweetCriteria``, paging with Twitter's
    min_position cursor until exhausted or maxTweets is reached.

    ``receiveBuffer`` (if given) is called with batches of buffered
    results.  Returns the list of tweet __dict__ records.
    """
    results = []
    resultsAux = []
    cookieJar = cookielib.CookieJar()
    # Strip one layer of surrounding quotes from the username, if any.
    if hasattr(tweetCriteria, 'username') and \
            (tweetCriteria.username.startswith("\'") or tweetCriteria.username.startswith("\"")) and \
            (tweetCriteria.username.endswith("\'") or tweetCriteria.username.endswith("\"")):
        tweetCriteria.username = tweetCriteria.username[1:-1]
    active = True
    while active:
        json = TweetManager.getJsonReponse(tweetCriteria, refreshCursor, cookieJar, proxy)
        if len(json['items_html'].strip()) == 0:
            break
        if 'min_position' not in json:
            break
        refreshCursor = json['min_position']
        if refreshCursor is None:
            break
        tweets = PyQuery(json['items_html'])('div.js-stream-tweet')
        if len(tweets) == 0:
            break
        for tweetHTML in tweets:
            tweet = getTweet(tweetHTML)
            if hasattr(tweetCriteria, 'sinceTimeStamp'):
                if tweet.created_at < tweetCriteria.sinceTimeStamp:
                    active = False
                    break
            if hasattr(tweetCriteria, 'untilTimeStamp'):
                if tweet.created_at <= tweetCriteria.untilTimeStamp:
                    results.append(tweet.__dict__)
            else:
                results.append(tweet.__dict__)
            # resultsAux.append(tweet)  # buffering currently disabled
            if receiveBuffer and len(resultsAux) >= bufferLength:
                receiveBuffer(resultsAux)
                resultsAux = []
            if tweetCriteria.maxTweets > 0 and len(results) >= tweetCriteria.maxTweets:
                active = False
                break
    if receiveBuffer and len(resultsAux) > 0:
        receiveBuffer(resultsAux)
    return results
def loadQueryMarket(self, json):
    """Parse a market-query request: requires a 'commodity' known to
    MarketState.  Returns the processing result or an error string."""
    try:
        if "commodity" not in json:
            return "No commodity"
        if json["commodity"] not in MarketState.commodities:
            return "Bad commodity"
        self.commodity = int(json["commodity"])
        return RequestOperations.processReq(self)
    except Exception as e:
        print(traceback.print_exc())
        return str(e)
def loadBuySell(self, json):
    """Parse a buy/sell order: requires 'price', 'commodity', 'amount'.

    Values are coerced to (positive) ints and validated; on success a
    fresh request id is assigned and the order forwarded for processing.
    """
    try:
        if "price" not in json or "commodity" not in json or "amount" not in json:
            return "No price or commodity type/amount"
        self.price = abs(int(json["price"]))
        self.commodity = int(json["commodity"])
        self.amount = abs(int(json["amount"]))
        if self.commodity not in MarketState.commodities:
            return "Bad commodity"
        if self.amount <= 0:
            return "Bad amount"
        if self.price <= 0:
            return "Bad price"
        self.id = MarketState.generateRequestId()
        return RequestOperations.processReq(self)
    except Exception as e:
        print(traceback.print_exc())
        return str(e)
def verify_acl_config(json, acl_name):
    """Print whether ``acl_name`` appears in an NX-OS 'result' payload.

    Output only; always returns None.  A null result means the ACL is
    not configured; a malformed body prints a diagnostic.
    """
    if "result" in json:
        result = json.get("result")
        if result is None:
            print("ACL " + acl_name + " is not configured")
        else:
            try:
                found_acl = result['body']['TABLE_ip_ipv6_mac'][
                    'ROW_ip_ipv6_mac']['acl_name']
                if found_acl == acl_name:
                    print("ACL " + acl_name + " was configured")
            except TypeError:
                print("Unexpected JSON output")
def handle_message(json):
    """Socket handler: remove the category named in ``json`` and rebuild,
    emitting 'remove_result' with 'Succeed' or 'Failed'."""
    try:
        if 'name' in json:
            print('receive message: ' + str(json))
            name = json['name']
            if handler.removeCategory(name) and handler.build():
                emit('remove_result', 'Succeed')
            else:
                emit('remove_result', 'Failed')
        else:
            emit('remove_result', 'Failed')
    except Exception:
        # NOTE(review): narrowed from a bare ``except:``.
        emit('error', printException())
        emit('remove_result', 'Failed')
def _tsv(json):
    """Render one record as a UTF-8 encoded TSV row in vnutil
    download-field order; 'dynamicproperties' is pretty-printed,
    missing fields become empty strings."""
    # These should be the names of the original fields in the index document.
    download_fields = vnutil.download_field_list()
    values = []
    for x in download_fields:
        if x in json:
            if x == 'dynamicproperties':
                dp = vnutil.format_json(json[x])
                values.append(unicode(dp.rstrip()))
            else:
                values.append(unicode(json[x]).rstrip())
        else:
            values.append(u'')
    return u'\t'.join(values).encode('utf-8')
def getDialog(original, screen_name, conversation_id, refreshCursor='', receiveBuffer=None, bufferLength=100, proxy=None):
    """Scrape the conversation thread around ``original``, paging with
    the min_position cursor.

    Returns {'original': <tweet dict>, 'conversation': [[tweet dicts]]}.
    """
    results = {'original': original.__dict__, 'conversation': []}
    resultsAux = []
    cookieJar = cookielib.CookieJar()
    active = True
    while active:
        json = DialogManager.getJsonReponse(screen_name, conversation_id,
                                            refreshCursor, cookieJar, proxy)
        if len(json['items_html'].strip()) == 0:
            break
        if 'min_position' not in json:
            break
        refreshCursor = json['min_position']
        items = PyQuery(json['items_html'])('ol.stream-items')
        if len(items) == 0:
            break
        for item in items:
            tweets = []
            for tweet in PyQuery(item)('div.js-stream-tweet'):
                tweets.append(getTweet(tweet).__dict__)
            results['conversation'].append(tweets)
            # resultsAux.append(tweets)  # buffering currently disabled
            if receiveBuffer and len(resultsAux) >= bufferLength:
                receiveBuffer(resultsAux)
                resultsAux = []
        if refreshCursor is None:
            break
    if receiveBuffer and len(resultsAux) > 0:
        receiveBuffer(resultsAux)
    return results
def _getProgramFromJson(self, epgId, json):
    """Build a minimal EpgProgram from one schedule entry.

    The title falls back to "Onbekend" (Dutch for "unknown") when the
    record has no 'name'.
    """
    startTime = json["start"]
    endTime = json["end"]
    if "name" in json:
        title = _lineFilter(json["name"])
    else:
        title = "Onbekend"
    program = EpgProgram()
    program.epgId = epgId
    program.originalId = json["id"]
    program.startTime = startTime
    program.endTime = endTime
    program.title = title
    return program
def handle_message(json):
    """Socket handler: answer a 'question' via handler.reply, or emit
    category info for a 'name'."""
    try:
        if 'question' in json:
            print('receive message: ' + str(json))
            emit('get', json)
            question = json['question']
            print(question)
            answer = handler.reply(question, emit)
            emit('reply', answer.replace(u'\n', u'<br/>'))
        else:
            name = json['name']
            # Normalise byte strings to unicode before the lookup.
            if type(name) is not unicode:
                name = name.decode('utf-8')
            emit('info', handler.getCategoryInfo(name))
    except Exception:
        # NOTE(review): narrowed from a bare ``except:``.
        emit('error', printException())
def _getProgramFromJson(self, epgId, json):
    """Build a minimal EpgProgram from one schedule entry.

    The title falls back to "Onbekend" (Dutch for "unknown") when the
    record has no 'name'.
    """
    startTime = json["start"]
    endTime = json["end"]
    if "name" in json:
        title = _lineFilter(json["name"])
    else:
        title = "Onbekend"
    program = EpgProgram()
    program.epgId = epgId
    program.originalId = json["id"]
    program.startTime = startTime
    program.endTime = endTime
    program.title = title
    return program
def _simplify_json(self, json):
    """Resolve intra-document URI references in a Calais-style response
    and group the entries by their '_typeGroup'.

    Mutates the input: resolved references point at the referenced
    entries, and each grouped entry loses '_typeGroup' but gains
    '__reference' (its original key).
    """
    result = {}
    # First, resolve references: any unicode value that is itself a key
    # of the document is replaced by the entry it points at.
    for element in json.values():
        for k, v in element.items():
            if isinstance(v, unicode) and v.startswith("http://") and v in json:
                element[k] = json[v]
    for k, v in json.items():
        if "_typeGroup" in v:
            group = v["_typeGroup"]
            if group not in result:
                result[group] = []
            del v["_typeGroup"]
            v["__reference"] = k
            result[group].append(v)
    return result
def _get_json(self, url):
    """GET a paginated Foreman API endpoint, merging 'results' across
    pages (250 per page); payloads without 'results' are returned
    unchanged."""
    page = 1
    results = []
    while True:
        ret = requests.get(url,
                           auth=HTTPBasicAuth(self.foreman_user, self.foreman_pw),
                           verify=self.foreman_ssl_verify,
                           params={'page': page, 'per_page': 250})
        ret.raise_for_status()
        json = ret.json()
        # Non-paginated endpoints: hand back the raw payload.
        if 'results' not in json:
            return json
        results = results + json['results']
        if len(results) >= json['total']:
            break
        page += 1
    return results
def _tsv(json):
    """Render one record as a UTF-8 encoded TSV row in vnutil
    download-field order; 'dynamicproperties' is pretty-printed,
    missing fields become empty strings."""
    # These should be the names of the original fields in the index document.
    download_fields = vnutil.download_field_list()
    values = []
    for x in download_fields:
        if x in json:
            if x == 'dynamicproperties':
                dp = vnutil.format_json(json[x])
                values.append(unicode(dp.rstrip()))
            else:
                values.append(unicode(json[x]).rstrip())
        else:
            values.append(u'')
    return u'\t'.join(values).encode('utf-8')
def deserialize_file(self, json):
    """Build a rest.model.VersionedFile from a JSON dict.

    Every field is optional; 'state' defaults to ''.  The local was
    renamed from ``file`` to avoid shadowing the builtin.
    """
    vfile = rest.model.VersionedFile()
    if 'id' in json:
        vfile.id = int(json['id'])
    if 'source_path' in json:
        vfile.source_path = json['source_path']
    if 'storage_path' in json:
        vfile.storage_path = json['storage_path']
    if 'md5' in json:
        vfile.md5 = json['md5']
    if 'metadata' in json:
        vfile.metadata = unicode(json['metadata'])
    if 'version' in json:
        vfile.version = int(json['version'])
    if 'category' in json:
        vfile.category = unicode(json['category'])
    if 'updated' in json:
        # fix wrong date format on reading: ' ' -> 'T' (ISO 8601)
        updated = re.sub(r" ", "T", json['updated'])
        vfile.updated = updated
    vfile.state = ''
    if 'state' in json:
        vfile.state = json['state']
    return vfile
def tsv(self):
    """Render self.json as a UTF-8 encoded TSV row in vnutil
    download-field order.

    Note similar functionality in download.py _tsv(json).
    """
    json = self.json
    download_fields = vnutil.download_field_list()
    values = []
    for x in download_fields:
        if x in json:
            if x == 'dynamicproperties':
                # dynamicproperties is pretty-printed before output
                dp = vnutil.format_json(json[x])
                values.append(unicode(dp.rstrip()))
            else:
                values.append(unicode(json[x]).rstrip())
        else:
            values.append(u'')
    return u'\t'.join(values).encode('utf-8')
def _tsv(json): # These should be the names of the original fields in the index document. download_fields = vnutil.download_field_list() values = [] for x in download_fields: if json.has_key(x): if x == 'dynamicproperties': dp = vnutil.format_json(json[x]) values.append(unicode(dp.rstrip())) else: values.append(unicode(json[x]).rstrip()) else: values.append(u'') # logging.debug('%s: JSON: %s' % (DOWNLOAD_VERSION, json)) # logging.debug('%s: DOWNLOAD_FIELDS: %s' % (UTIL_VERSION, download_fields)) # logging.debug('%s: VALUES: %s' % (DOWNLOAD_VERSION, values)) return u'\t'.join(values).encode('utf-8')
def populate_object_from_mapping_and_conversions(obj, json, mapping, conversions=None):
    """Set attributes on ``obj`` from ``json`` via ``mapping``
    (json key -> attribute name), applying per-key ``conversions``.

    Raises CgdCommunicationException listing every missing key; returns
    ``obj`` on success.
    """
    # Default moved out of the signature to avoid the shared-mutable-
    # default pitfall (behaviour unchanged for callers).
    if conversions is None:
        conversions = {}
    errors = []
    for k, v in mapping.items():
        if k not in json:
            errors.append("missing key '%s' corresponding to property '%s'" % (k, v))
            continue
        value = json[k]
        if k in conversions:
            value = conversions[k](value)
        setattr(obj, v, value)
    if any(errors):
        message = """
Errors while parsing from %s
Errors:
\t%s""" % (pformat(json), "\n\t".join(errors))
        raise CgdCommunicationException(message)
    return obj
def args_to_json(args, measurement_type):
    """
    Convert the given arguments to a JSON dictionary.

    Dotted keys 'section.var' become json[section] = [{var: value}];
    keys without a dot are ignored.  The measurement type is stamped on
    json['definitions'][0] — assumes at least one 'definitions.*' arg
    was supplied (KeyError otherwise, as in the original).
    """
    json = {}
    for key in args.keys():
        if '.' not in key:
            continue
        section, var = key.split('.')
        if section not in json:
            json[section] = [{var: args[key]}]
        else:
            json[section][0][var] = args[key]
    json["definitions"][0]["type"] = measurement_type
    return json
def args_to_json(args, measurement_type):
    """
    Convert the given arguments to a JSON dictionary.

    Dotted keys 'section.var' become json[section] = [{var: value}];
    keys without a dot are ignored.  The measurement type is stamped on
    json['definitions'][0] — assumes at least one 'definitions.*' arg
    was supplied (KeyError otherwise, as in the original).
    """
    json = {}
    for key in args.keys():
        if '.' not in key:
            continue
        section, var = key.split('.')
        if section not in json:
            json[section] = [{var: args[key]}]
        else:
            json[section][0][var] = args[key]
    json["definitions"][0]["type"] = measurement_type
    return json
def _get_json(self, url, ignore_errors=None):
    """GET a (possibly paginated) Foreman API endpoint.

    Merges the 'results' lists across pages (250 per page).  Payloads
    without 'results', or whose 'results' is a dict, are returned as-is.
    HTTP status codes listed in ``ignore_errors`` end paging silently.
    """
    page = 1
    results = []
    while True:
        ret = requests.get(url,
                           auth=HTTPBasicAuth(self.foreman_user, self.foreman_pw),
                           verify=self.foreman_ssl_verify,
                           params={'page': page, 'per_page': 250})
        if ignore_errors and ret.status_code in ignore_errors:
            break
        ret.raise_for_status()
        json = ret.json()
        # Non-paginated endpoints: hand back the raw payload.
        if 'results' not in json:
            return json
        if isinstance(json['results'], dict):
            return json['results']
        results = results + json['results']
        if len(results) >= json['total']:
            break
        page += 1
    return results
def recursive_keys(json, keys, log):
    """Walk ``json`` following the template ``keys`` (in sorted key
    order) and collect leaf values into a flat list.

    Missing keys are logged and recorded as u'不公示' ("not disclosed");
    dict templates recurse and extend the result, list templates map the
    template's first element over each item and append the list.
    """
    data_list = []
    # sorted() replaces the Py2-only list().sort() dance; same order.
    for jk in sorted(keys.keys()):
        if jk not in json:
            log.error(u'没有键%s', jk)
            data_list.append(u'不公示')
        elif isinstance(keys[jk], dict):
            data_list.extend(recursive_keys(json[jk], keys[jk], log))
        elif isinstance(keys[jk], list):
            temp_list = []
            for k in json[jk]:
                temp_list.append(recursive_keys(k, keys[jk][0], log))
            data_list.append(temp_list)
        else:
            log.debug(u'key:value %s:%s' % (jk, json[jk]))
            data_list.append(json[jk])
    return data_list
def hasError(json):
    """Return True when the payload carries an 'error' key."""
    return 'error' in json
def json_has(self, json, key):
    """Return json[key], or "" when the key is absent."""
    return json.get(key, "")
def _test_json(self, json, required_keys=None):
    """Verify every required key is present in ``json``.

    ``required_keys`` is a list of (key, description) pairs; it defaults
    to requiring "kT".  Raises IOError naming the first missing key.
    """
    # Default moved out of the signature to avoid a shared mutable
    # default (behaviour unchanged for callers).
    if required_keys is None:
        required_keys = [("kT", "Boltzmann's constant times temperature")]
    for rk in required_keys:
        if rk[0] not in json:
            raise IOError("Error in input file, could not find %s\n. Set using %s keyword"
                          % (rk[1], rk[0]))
def loadFromJson(self, json):
    """Authenticate and dispatch one signed market request.

    ``json["auth"]`` must carry 'user' and 'token' (an RSA PKCS#1 v1.5
    signature over the user name, or over "user_nonce" when a 'nonce'
    is supplied for replay protection).  On success the request named by
    ``json["type"]`` is dispatched; nonce-based requests get their
    response RSA-encrypted and base64-encoded.  Returns an error string
    on any failure.

    NOTE(review): Python 2 only — relies on the 'base64' str codec and
    print statements.  Rate limiting sleeps 120s while releasing the
    MarketState semaphore.
    """
    try:
        if not json.has_key("auth"):
            return "No auth key"
        if not json["auth"].has_key("user") or not json["auth"].has_key("token"):
            return "No user or auth token"
        nonce = None
        if json["auth"].has_key("nonce"):
            nonce = json["auth"]["nonce"]
        user = json["auth"]["user"].encode("utf8")
        token = json["auth"]["token"]
        keys = MarketState.userPublicKeys
        if not keys.has_key(user):
            return "Bad user"
        public_key_object = RSA.importKey(keys[user])
        cipher = PKCS1_v1_5.new(public_key_object)
        digest = SHA256.new()
        if nonce is not None:
            # Signature covers "user_nonce"; each nonce is single-use and
            # only the last 100 per user are remembered.
            digest.update("%s_%s" % (user, nonce))
            if nonce in userNonces[user]:
                return "Non unique nonce"
            userNonces[user].append(nonce)
            userNonces[user] = userNonces[user][-100:]
        else:
            digest.update(user)
        verified = cipher.verify(digest, token.decode('base64'))
        if not verified:
            return "Verification failure"
        self.user = user
        # Sliding window of the user's last 20 request times; more than
        # 20 requests in 10s triggers a 120s penalty sleep.
        userTimings[self.user].append(time.time())
        userTimings[self.user] = userTimings[self.user][-20:]
        if userTimings[self.user][-1] - userTimings[self.user][0] < 10:
            print "Penalty for user %s" % (self.user)
            MarketState.semaphore.release()
            time.sleep(120)
            MarketState.semaphore.acquire()
        if not json.has_key("type"):
            return "No type key"
        self.type = json["type"].encode("utf8")
        print "User %s Request %s" % (user, self.type)
        if not self.requestTypes.has_key(self.type):
            return "Bad request type"
        resp = self.requestTypes[self.type](self, json)
        if nonce is not None:
            # Encrypt the reply in 64-byte chunks and base64 it.
            cipher = pkcs.new(public_key_object)
            resp = "".join(map(lambda i: cipher.encrypt(resp[i:i + 64]),
                               range(0, len(resp), 64)))
            return resp.encode('base64')
        else:
            return resp
    except Exception as e:
        print traceback.print_exc()
        return str(e)