def encode_json(request, obj):
    """Serialize *obj* to JSON, wrapping it in a JSONP callback when the
    request carries a 'callback' parameter."""
    callback = get_request_value(request, 'callback')
    encoded = json.encode(obj)
    # JSONP: wrap the payload in the caller-supplied function invocation.
    return callback + '(' + encoded + ');' if callback else encoded
def check_for_package():
    # Gather packaging-environment facts and emit them as UTF-8 JSON on
    # stdout for the calling process to parse (Python 2: print statement).
    props = {}
    check_itunes_version(props)   # records iTunes version info into props
    check_certs(props)            # records certificate info into props
    sdks, ipad = get_sdks()
    props['sdks'] = sdks
    props['ipad'] = ipad
    print json.encode(props).encode('utf-8')
def check_for_package():
    # Collect environment properties (iTunes version, certificates,
    # available SDKs + iPad support) and print them as a UTF-8 JSON blob.
    props = {}
    check_itunes_version(props)
    check_certs(props)
    sdks, ipad = get_sdks()
    props['sdks'] = sdks
    props['ipad'] = ipad
    print json.encode(props).encode('utf-8')
def jsonReply(s):
    # Emit a minimal CGI response with a JSON body, then terminate the
    # process. `s` may already be a serialized JSON string, or an object
    # to pass through encode().
    print 'Status: 200 OK';
    print "Content-type: application/json\r\n";
    if type(s) == str:
        print s
    else:
        print encode(s)
    exit(0)
def DoJsonGet(): entries = ReadFromDatabase() json = "{\n\t" json = json + "\"status\": 200,\n\t" json = json + " \"entries\": [\n" for entry in entries: json = json + entry.toJson() + ",\n" json = json[:-2] json = json + "\n\t]\n}" #HTTP-Header print "Content-Type: application/json; charset=utf-8" print "Status:200" print print json.encode('utf-8')
def getHash(self):
    """Return the hex md5 digest of this parameter set's JSON form."""
    import hashlib
    serialized = self.getJSON()
    digest = hashlib.md5(serialized.encode())
    return digest.hexdigest()
def get(self):
    # GET handler: return all Mark entities inside the requested lat/long
    # bounding box as a JSON payload.
    min_latitude = float(self.request.get('min_latitude'))
    min_longitude = float(self.request.get('min_longitude'))
    max_latitude = float(self.request.get('max_latitude'))
    max_longitude = float(self.request.get('max_longitude'))

    # Restrict latitude/longitude to restrict bulk downloads.
    #if (max_latitude - min_latitude) > 1:
    #  max_latitude = min_latitude + 1
    #if (max_longitude - min_longitude) > 1:
    #  max_longitude = min_longitude + 1

    # Sync the add cache.
    min_geopt = db.GeoPt(min_latitude, min_longitude)
    max_geopt = db.GeoPt(max_latitude, max_longitude)
    # NOTE(review): GeoPt inequality filtering is coarse (it compares the
    # composite property, not a true 2-D box) -- presumably acceptable
    # here; confirm against the datastore model.
    query = datamodel.Mark.gql('WHERE geopt > :min_geopt AND geopt < :max_geopt ',
                               min_geopt = min_geopt, max_geopt = max_geopt)
    add_events = query.fetch(1000)
    output = { 'timestamp': time.time(), 'adds': add_events }
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.out.write(json.encode(output));
def serialize_string(
        json: str,
        sign: typing.Callable[[bytes], bytes],
        headers: typing.Optional[typing.Dict[str, typing.Any]] = None
) -> bytes:
    """Build a compact JWS-style token (header.payload.signature).

    `sign` receives the signing input built from header and payload and
    returns raw signature bytes; `headers` overrides the default
    protected header.
    """
    if headers is None:
        header = PROTECTED_HEADER
    else:
        header = encode_headers(headers)
    payload = base64.urlsafe_b64encode(json.encode(ENCODING))
    signature = sign(signing_message(payload, header=header))
    encoded_sig = base64.urlsafe_b64encode(signature)
    return header + b"." + payload + b"." + encoded_sig
def get(self):
    """GET dispatcher: route the 'type' query parameter to the matching
    do_* handler; unknown types fall through to an empty JSON object."""
    # TODO(mbelshe): the dev server doesn't properly handle logins?
    #if not user:
    #  self.redirect(users.create_login_url(self.request.uri))
    #  return

    resource_type = self.request.get("type")
    if not resource_type:
        # BUG FIX: the original passed the `type` builtin here instead of
        # the request's value.
        self.send_json_error("Could not find type: ", resource_type)
        return

    # Do a query for the appropriate resource type.
    if resource_type == "summary":
        self.do_summary()
        return
    elif resource_type == "result":
        # TODO(mbelshe): implement me!
        return
    elif resource_type == "set":
        self.do_set()
        return
    elif resource_type == "set_search":
        self.do_set_search()
        return
    elif resource_type == "filters":
        self.do_filters()
        return
    elif resource_type == "latestresults":
        self.do_latestresults()
        return
    self.response.out.write(json.encode({}))
def do_set(self):
    """Lookup a specific TestSet and write it (plus related data) as a
    single JSON blob."""
    set_id = self.request.get("id")
    if not set_id:
        self.send_json_error("Bad request, no id param")
        return

    test_set = models.TestSet.get_by_id(int(set_id))
    if not test_set:
        # BUG FIX: the original passed the `id` builtin instead of the
        # requested set id.
        self.send_json_error("Could not find id: ", set_id)
        return

    # We do manual coalescing of multiple data structures
    # into a single json blob.
    json_output = {}
    json_output['obj'] = test_set
    json_output['version'] = test_set.version
    json_output['cpu'] = test_set.cpu
    json_output['network'] = test_set.network
    summaries_query = test_set.summaries
    summaries_query.order("url")
    json_output['summaries'] = [s for s in summaries_query]

    # There is no data; go ahead and pull the individual runs.
    if len(json_output['summaries']) == 0:
        query = models.TestResult.all()
        query.filter("set = ", db.Key.from_path('TestSet', int(set_id)))
        query.order('url')
        json_output['summaries'] = [r for r in query]

    self.response.out.write(json.encode(json_output))
def do_filters(self):
    """Lookup the distinct values in the TestSet data, for use in
    filtering. Responses are memcached for ten minutes."""
    cached_response = memcache.get("filters")
    if cached_response is not None:
        self.response.out.write(cached_response)
        return

    # Collect (display value, key id) pairs per filterable dimension.
    versions = {(item.version, str(item.key().id()))
                for item in models.Version.all()}
    cpus = {(item.cpu, str(item.key().id()))
            for item in models.Cpu.all()}
    networks = {(item.network_type, str(item.key().id()))
                for item in models.Network.all()}

    filters = {
        "versions": sorted(versions),
        "cpus": sorted(cpus),
        "networks": sorted(networks),
    }
    response = json.encode(filters)
    memcache.add("filters", response, 60 * 10)  # Cache for 10 mins
    self.response.out.write(response)
def do_GET(self):
    """Serve the static files of the plotting UI.

    BUG FIX: the original called end_headers() before send_header(), so
    the Content-Type headers were emitted after the header block was
    already closed; it also read style.css from a hardcoded absolute
    'D:/...' path unlike the other routes.
    """
    print(self.path)
    # Route table: request path -> (relative file path, content type).
    routes = {
        '/plot/index.html': ('plot/index.html', 'text/html'),
        '/plot/bigData.json': ('plot/bigData.json', 'application/json'),
        '/plot/app.js': ('plot/app.js', 'application/javascript'),
        '/plot/style.css': ('plot/style.css', 'application/x-css'),
    }
    route = routes.get(self.path)
    if route is None:
        # Preserve original behavior: unknown paths get an empty 200.
        self.send_response(200)
        self.end_headers()
        return
    rel_path, content_type = route
    self.send_response(200)
    self.send_header('Content-type', content_type)
    self.end_headers()
    with open(os.path.abspath(rel_path)) as f:
        body = f.read()
    self.wfile.write(body.encode('utf-8'))
    return
def transformNighthawkJson(self, json, format="human"): """Use to obtain one of the supported output from Nighthawk's raw json output. Arguments: json: String containing raw json output obtained via nighthawk_client --output-format=json format: String that specifies the desired output format. Must be one of [human|yaml|dotted-string|fortio]. Optional, defaults to "human". """ # TODO(oschaaf): validate format arg. args = [] if os.getenv("NH_DOCKER_IMAGE", "") != "": args = [ "docker", "run", "--rm", "-i", os.getenv("NH_DOCKER_IMAGE") ] args = args + [ self._nighthawk_output_transform_path, "--output-format", format ] logging.info("Nighthawk output transform popen() args: %s" % args) client_process = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) logging.info("Nighthawk client popen() args: [%s]" % args) stdout, stderr = client_process.communicate(input=json.encode()) # We suppress declared but not used warnings below, as these may produce helpful # in test failures (via pytests introspection and logging). logs = stderr.decode('utf-8') # noqa(F841) output = stdout.decode('utf-8') # noqa(F841) assert (client_process.returncode == 0) return stdout.decode('utf-8')
def do_summary(self):
    """Lookup a specific TestSummary and write it (with its results) as
    JSON, caching the response for one minute."""
    set_id = self.request.get("id")
    if not set_id:
        self.send_json_error("Bad request, no id param")
        return

    memcache_key = "summary." + set_id
    cached_response = memcache.get(memcache_key)
    if cached_response is not None:
        self.response.out.write(cached_response)
        return

    test_summary = models.TestSummary.get_by_id(int(set_id))
    if not test_summary:
        # BUG FIX: the original passed the `id` builtin instead of the
        # requested id.
        self.send_json_error("Could not find id: ", set_id)
        return

    json_output = {}
    json_output['obj'] = test_summary
    test_set = models.TestSet.get_by_id(test_summary.set.key().id())
    test_results = test_set.results
    test_results.filter("url =", test_summary.url)
    json_output['results'] = [r for r in test_results]

    response = json.encode(json_output)
    memcache.add(memcache_key, response, 60)  # Cache for 1min
    self.response.out.write(response)
def scrape_api(self, object_type, p, g):
    """Run the json generator and store the json records in the database.
    """
    import time
    from sqlalchemy.exc import IntegrityError
    lr = self.init_log_rate(1000)
    # Ids already present in the partition; skip re-inserting them.
    extant = set([row.id for row in p.rows])
    with p.inserter(cache_size=1) as ins:
        for idn, object_id, url, response_code, json in self.generate_json(g):
            if object_id in extant:
                self.log('Duplicate {} {}'.format(object_type, object_id))
                continue
            lr("{} {} ".format(idn, url))
            d = dict(
                id=object_id,
                type=object_type,
                object_id=object_id,
                access_time=time.time(),
                response_code=response_code,
                # Python 2 codec chain: utf8 bytes, then zlib compression.
                data=json.encode('utf8').encode('zlib')
            )
            try:
                ins.insert(d)
            except IntegrityError:
                self.log("Duplicate for {} {} ".format(d['type'], d['id']))
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            except Exception:
                self.error("Failed for {} {} ".format(d['type'], d['id']))
def do_filters(self):
    """Lookup the distinct values in the TestSet data, for use in
    filtering."""
    cached = memcache.get("filters")
    if cached is not None:
        self.response.out.write(cached)
        return

    filters = {}
    # Each filter dimension: (output key, model class, attribute name).
    dimensions = (
        ("versions", models.Version, "version"),
        ("cpus", models.Cpu, "cpu"),
        ("networks", models.Network, "network_type"),
    )
    for key, model, attr in dimensions:
        values = set()
        for item in model.all():
            values.add((getattr(item, attr), str(item.key().id())))
        filters[key] = sorted(values)

    response = json.encode(filters)
    memcache.add("filters", response, 60 * 10)  # Cache for 10 mins
    self.response.out.write(response)
def get(self, user):
    """Write the cached JSON stats for *user* as an application/json
    response."""
    user = cgi.escape(user, True)  # quote-escape to neutralize markup
    self.key = self.setKey(user)
    json_data = self.getJson(user)
    stats = memcache.get_stats()  # NOTE(review): fetched but unused here
    headers = self.response.headers
    headers['Content-Type'] = 'application/json'
    self.response.out.write(json.encode(json_data))
def do_set_search(self):
    # Search TestSets matching the request's filter params, newest first,
    # caching each distinct query string for 30 seconds.
    memcache_key = "set_search." + self.request.query
    cached_response = memcache.get(memcache_key)
    if cached_response is not None:
        self.response.out.write(cached_response)
        return

    query = models.TestSet.all()
    query.order("-date")

    # Apply filters. Each *_filter param is a comma-separated list of
    # datastore key ids for the corresponding kind.
    networks = self.request.get("networks_filter")
    if networks:
        query.filter("network IN ",
                     [db.Key.from_path('Network', int(k))
                      for k in set(networks.split(","))])
    versions = self.request.get("version_filter")
    if versions:
        query.filter("version IN ",
                     [db.Key.from_path('Version', int(k))
                      for k in set(versions.split(","))])
    cpus = self.request.get("cpus_filter")
    if cpus:
        query.filter("cpu IN ",
                     [db.Key.from_path('Cpu', int(k))
                      for k in set(cpus.split(","))])

    if self.request.get("set_id"):
        test_set = models.TestSet.get_by_id(int(self.request.get("set_id")))
        results = test_set.summaries
    # NOTE(review): the line below unconditionally overwrites `results`,
    # so the set_id branch above has no effect -- looks like a bug, but
    # the intended behavior is unclear; confirm before changing.
    results = query.fetch(500)
    response = json.encode(results)
    memcache.add(memcache_key, response, 30)  # Cache for 30secs
    self.response.out.write(response)
def get(self, user):
    """Respond with the user's JSON payload under a JSON content type."""
    safe_user = cgi.escape(user, True)
    user = safe_user
    self.key = self.setKey(user)
    json_data = self.getJson(user)
    stats = memcache.get_stats()  # unused result; kept to preserve behavior
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(json.encode(json_data))
def generate(css_file, json_file):
    """Generate a file that contains code for character names.

    Parses icon-name/codepoint pairs out of *css_file* (using the module
    regex CSS_ICON_NAME_PARSER) and writes the mapping to *json_file* as
    JSON.

    Raises:
        FileNotFoundError: if *css_file* does not exist.
    """
    # check css_file exists
    if not os.path.isfile(css_file):
        raise FileNotFoundError("File '{}' not found".format(css_file))

    # load css file
    with open(css_file, "r") as file:
        css_content = file.read()

    # parse css file
    css_matcher = re.findall(CSS_ICON_NAME_PARSER, css_content, re.S)

    # convert icons: css escapes like "\f00c" become "0xf00c"; literal
    # characters are converted through ord().
    icon_dict = {}
    for name, code in css_matcher:
        if code.startswith("\\"):
            code_hex = "0x" + code[1:]
        else:
            code_hex = hex(ord(code))
        icon_dict[name] = code_hex

    # write json file
    # BUG FIX: the stdlib json module has no encode(); use json.dumps().
    with open(json_file, "w") as file:
        file.write(json.dumps(icon_dict))
def command_menu():
    """ This displays a menu of available commands. """
    # Only authenticated users may see the command menu.
    if not is_valid_user(request):
        return redirect(url_for('index'))
    encoded_machines = json.encode(EvilMommy.Config.machines)
    return render_template('command_menu.html', machines=encoded_machines)
def test_sanity(self):
    """Smoke test: the delegating encoder serializes a plain list."""
    encoder = JSONEncoderDelegator()
    serialized = encoder.encode([42, 42])
    self.assertEqual(serialized, "[42, 42]",
                     "Standard items serialized correctly.")
def send(self, log_info):
    """Fetch the log described by *log_info* and push it to the user as a
    {name, value} JSON message."""
    contents = self.get_logs(log_info)
    encoder = JSONEncoder()
    # Normalize the log name into an env-var style identifier.
    name = log_info['name'].replace('program_', '').upper()
    name = name.replace('-', '_').replace('.', '_')
    self.send_log_to_user(encoder.encode(dict(name=name, value=contents)))
def _valid_payload():
    """
    Builds a valid HTTP header payload – Base64 encoded JSON string with
    valid data.
    """
    identity = __class__._valid_identity()
    serialized = dumps({"identity": identity._asdict()})
    return b64encode(serialized.encode())
def __call__ (self, request=None, *args, **kw):
    """Render this view: JSON for ajax requests, a template response for
    other requests; with no request, render the template to a string."""
    if request:
        if request.is_ajax():
            return json.encode (self.context)
        else:
            self.context ['request'] = request # shouldn't need this
            return render (request, self.template, self.context, *args, **kw)
    else: # wants string
        # BUG FIX: the rendered string was computed but never returned.
        return render_to_string (self.template, self.context, RequestContext (request))
def compile(config):
    """Persist the plugin's compile-time config as JSON in the project dir.

    Mutates *config*: unserializable members are nulled out first.
    NOTE(review): this shadows the `compile` builtin, but renaming would
    break callers.
    """
    global project_dir
    project_dir = config['project_dir']

    # remove unserializable data
    config['android_builder'] = None
    config['tiapp'] = None
    config['logger'] = None

    outfile = os.path.join(project_dir, 'plugin_compile.json')
    # BUG FIX: the file handle was opened and never closed; use a context
    # manager so the data is flushed deterministically.
    with open(outfile, "w") as f:
        f.write(json.encode(config))
def save_manifest(self):
    """Write the hashed-files manifest to storage, replacing any previous
    copy."""
    payload = {
        'paths': self.hashed_files,
        'version': self.manifest_version
    }
    if self.exists(self.manifest_name):
        self.delete(self.manifest_name)
    # BUG FIX: the original called json.encode() -- the stdlib json module
    # has no such function -- and dropped the payload entirely.
    contents = json.dumps(payload).encode('utf-8')
    self._save(self.manifest_name, ContentFile(contents))
def setter(path, data):
    """PUT *data* (JSON-encoded) to the service at *path*; raise on any
    non-OK HTTP status."""
    url = "http://{conn_str}/{path}".format(conn_str=conn_str, path=path)
    # BUG FIX: removed the `except Exception as e: raise e` wrapper -- it
    # added nothing and (on Python 2) truncated the traceback.
    json_data = encode(data)
    resp = put(url, data=json_data)
    if not resp.ok:
        resp.raise_for_status()
def admin_apikey():
    """Build the hex-encoded admin API key (Python 2 `.encode('hex')`)."""
    provider = 'admin'
    user_id = ''
    magic_calc = "%s%s%s" % (provider, user_id, MAGIC_KEY)
    magic_calc = md5(magic_calc).hexdigest()
    api_key = {'p': provider, 'u': user_id, 'm': magic_calc}
    api_key = json.dumps(api_key)
    # BUG FIX: the original called json.encode('hex') on the *module*,
    # discarding the serialized key; hex-encode the JSON string itself.
    api_key = api_key.encode('hex')
    return api_key
def unicode_to_html_entities(json):
    """Recursively replace non-ASCII characters with HTML numeric
    entities, descending into lists and dicts; other values pass through
    unchanged."""
    if isinstance(json, dict):
        return {key: unicode_to_html_entities(val) for key, val in json.items()}
    if isinstance(json, list):
        return [unicode_to_html_entities(item) for item in json]
    if isinstance(json, str):
        escaped = json.encode('ascii', 'xmlcharrefreplace')
        return escaped.decode('ascii')
    return json
def main():
    # For each antenna row in the 'gentdata' file (pipe-separated fields:
    # lat|lon|azimuth|beamwidth|freq), project signal loss onto grid
    # cells and keep the max value seen per cell; dump the grid as JSON.
    gridcells = {}
    for line in open('gentdata').read().split('\n'):
        lat, lon, azimuth, beamwidth, freq = line.split('|')
        lat = float(lat)
        lon = float(lon)
        azimuth = float(azimuth)
        beamwidth = float(beamwidth)
        freq = float(freq)
        # 2000 is the range passed to gridcalc -- units unclear from this
        # file; presumably meters. TODO confirm.
        cells = gridcalc.gridcalc(lon, lat, azimuth, 2000, beamwidth)
        for cell in cells:
            print azimuth, beamwidth, freq, cell.angle, cell.dist
            value = loss.project_loss(azimuth, beamwidth, freq, cell.angle, cell.dist)
            othervalue = value
            key = (cell.x, cell.y)
            if key in gridcells:
                othervalue = gridcells[key]
            # keep the strongest (max) value per grid cell
            gridcells[key] = max(value, othervalue)
    print json.encode(gridcells)
def get(self, heritage):
    """Write all feedback entries for the requested heritage as a JSON
    object keyed by datastore key."""
    heritage = urllib.unquote(heritage)
    feedbacks = db.GqlQuery("SELECT * FROM Feedback WHERE heritage = :1", heritage)
    feedback_map = {str(item.key()): item.toDict() for item in feedbacks}
    self.response.out.write(json.encode(feedback_map))
def send_incident(self):
    """POST the current incident as JSON to the incident service.

    Failures are logged with a traceback rather than raised, so a flaky
    network never takes the caller down.
    """
    json = self._current_incident.to_JSON()
    self._logger.info(json)
    try:
        # NOTE(review): endpoint is hardcoded; consider making it
        # configurable.
        url = 'http://192.168.2.1:3000/incidents'
        req = urllib.request.Request(url, data=json.encode('utf8'),
                                     headers={'content-type': 'application/json'})
        response = urllib.request.urlopen(req)
        print(response)
    # BUG FIX: was a bare `except:`, which also caught SystemExit and
    # KeyboardInterrupt.
    except Exception:
        print(traceback.format_exc())
def get_logs(self, log_info):
    """Tail the remote log described by *log_info* over SSH and return
    the captured lines as a JSON array string."""
    print(f"Found changes on {log_info['name']}...")
    command = 'sudo tail -n +%d -q %s | tail -n %d -q' % (
        log_info['start'], log_info['longname'], self.maxlines)
    stdin, stdout, stderr = self.ssh.exec_command(command)
    lines = [str(entry) for entry in stdout.readlines()]
    return JSONEncoder().encode(lines)
def post_json(self, json, url, headers: dict, second=30):
    """POST *json* (a string, or falsy for a body-less request) to *url*.

    Returns the raw response body bytes, or the string "{}" on an HTTP
    error.  BUG FIX: the original never built a request when both
    `headers` and `json` were empty, crashing with NameError at
    urlopen().
    """
    kwargs = {}
    if headers:
        kwargs['headers'] = headers
    if json:
        kwargs['data'] = json.encode()
    req = urllib.request.Request(url=url, **kwargs)
    try:
        date = urllib.request.urlopen(req, timeout=second).read()
    except HTTPError:
        return "{}"
    return date
def sendPacket(self, parsedPacket: ParsedPacket):
    """Forward the packet's state to the Kafka producer as a UTF-8 JSON
    document."""
    super().sendPacket(parsedPacket)
    # Hand-built JSON body; field order matches the original template.
    template = "\"date\": \"{}\",\"tv\": {},\"bluray\": {},\"appleTv\": {},\"ipTv\": {}"
    body = template.format(parsedPacket.date.isoformat(), parsedPacket.tv,
                           parsedPacket.bluray, parsedPacket.appleTv,
                           parsedPacket.ipTv)
    document = "{" + body + "}"
    self.producer.send("AppliancesBucket", value=document.encode("UTF-8"))
def run_dbconsistency(json):
    """Feed *json* to the dbconsistency tool over stdin and return a
    Result built from the process handle and its captured stdout/stderr."""
    cmd = ['python3', 'extras/tools/dbconsistency.py', '/dev/stdin']
    proc = subprocess.Popen(cmd,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate(input=json.encode('utf8'))
    return Result(proc, out, err)
def postRequest(api, authToken=None, **kwds):
    # POST form-encoded kwds to the given API endpoint and decode the
    # response. Uses the module's CreatedHandler opener.
    opener = urllib2.build_opener(CreatedHandler)
    data = encode(kwds)  # presumably form-encodes kwds (project helper) -- confirm
    request = urllib2.Request(call(api, authToken))
    request.add_data(data)
    # Force the verb explicitly in addition to setting the body.
    request.get_method = lambda: 'POST'
    request.add_header("Content-Type", "application/x-www-form-urlencoded")
    request.add_header("Content-Length", str(len(data)))
    response = opener.open(request)
    return decode(response.read())
def write(self, packets):
    """Frame *packets* as an 'orbited' server-sent event and write it to
    the underlying request."""
    payload = json.encode(packets)
    body_lines = ['data: %s' % line for line in payload.splitlines()]
    data = 'Event: orbited\n' + '\n'.join(body_lines) + '\n\n'
    # print 'WRITE:', data.replace('\n', '\\n\n').replace('\r', '\\r')
    # print '==='
    self.request.write(data)
def processLang(lang, q, outfile):
    # Emit a JavaScript data file for one language: interface map, key
    # map, akshara pattern regex, and zwj/zwnj metadata, each bound to a
    # <lang>_-prefixed global variable.
    print "Attempting to generate", outfile, "... ",
    f = open(outfile, "w")
    f.write(lang + "_interfacemap = " + json.encode(q.virtualInterfaceMap).encode('utf-8') + ";\n")
    f.write(lang + "_keymap = " + json.encode({"map": q.getVirtualKB()}).encode('utf-8') + ";\n")
    # repr(...)[2:-1] presumably strips the u'...' wrapper of a Python 2
    # unicode repr, leaving escaped text for a JS regex literal -- confirm.
    f.write(lang + "_pattern = /" + repr(q.dumpAksharaPattern())[2:-1] + "/g ;\n")
    f.write(lang + "_zwnjmap = " + json.encode({
        "zwjSignificant": q.zwjSignificant,
        "zwnjSignificant": q.zwnjSignificant,
        "zwjCode": repr(q.zwjCode)[2:-1],
        "zwnjCode": repr(q.zwnjCode)[2:-1],
        "halanth": repr(q.halanth)[2:-1],
        "nukta": repr(q.nukta)[2:-1]
    }) + ";")
    f.close()
    print "done"
def apply_friend_offline(entityID, applyID):
    # Record a friend request against an offline player by writing the
    # apply timestamp into the player's "friend_applys" hash.
    now = int(time.time())
    field = "friend_applys"
    # Bail out when the target player does not exist at all.
    if not Player.simple_load(entityID, []):
        return msgTips.FAIL_MSG_PLAYER_NOT_FOUND
    encode = Player.fields[field].encoder
    # Key layout: "<field>_p{<entityID>}" -- the braces look like a redis
    # hash-tag for cluster slot pinning; confirm against Player.pool.
    key = "_".join([field, "p{%d}" % entityID])
    # HSET-style command returning falsy presumably means the field
    # already existed, i.e. a duplicate apply -- confirm.
    if not Player.pool.execute(
            "HSET", key, applyID, encode({"applytime": now})):
        return msgTips.FAIL_MSG_FRIEND_ALREADY_APPLYED
    return OK
def district(request, pid):
    """Return the child districts of location *pid* as a JSON list of
    {id, name} records, ordered by name."""
    json_data = []
    if pid:
        loc = Location.objects.get(pk=pid)
        children = Location.objects.order_by('name').filter(parent=loc.lid)
        json_data = [{'id': d.id, 'name': d.name} for d in children]
    return HttpResponse(json.encode(json_data))
def __call__(self, request=None, *args, **kw):
    """Render the view: JSON for ajax, a template response otherwise;
    with no request, render the template to a string."""
    if request:
        if request.is_ajax():
            return json.encode(self.context)
        else:
            self.context['request'] = request  # shouldn't need this
            return render(request, self.template, self.context, *args, **kw)
    else:  # wants string
        # BUG FIX: the rendered string was discarded; return it.
        return render_to_string(self.template, self.context, RequestContext(request))
def admin_apikey():
    """Return the admin API key: MD5-signed JSON, hex-encoded (Python 2)."""
    provider = 'admin'
    user_id = ''
    magic_calc = "%s%s%s" % (provider, user_id, MAGIC_KEY)
    magic_calc = md5(magic_calc).hexdigest()
    api_key = {'p': provider, 'u': user_id, 'm': magic_calc}
    api_key = json.dumps(api_key)
    # BUG FIX: json.encode('hex') called the json *module*, discarding
    # the serialized key; hex-encode the JSON string itself.
    api_key = api_key.encode('hex')
    return api_key
def save_json(self, file_path, dataset):
    """ save a python structure to json """
    # Resolve the target path relative to the exporter's output dir.
    file_path = os.path.join(self.export_dir, file_path)
    with open(file_path, "w") as fh:
        # Pretty-printed, key-sorted output for stable diffs.
        fh.write(json.encode(dataset, sort_keys=True, indent=4,
                             separators=(",", ": ")))
    print "\nfile saved to: %s" % file_path
    return self  # returns self so exporter calls can be chained
def main():
    """Parse every file in the input directory (argv[1]) and write the
    combined rows as JSON to the output path (argv[2])."""
    datadir = argv[1]
    out = argv[2]
    rows = [parse(datadir + '/' + name) for name in listdir(datadir)]
    serialized = encode(rows)
    sink = open(out, 'w')
    sink.write(serialized)
    sink.close()
def _encode_view_options(options):
    """
    Encode any items in the options dict that are sent as a JSON string
    to a view/list function.
    """
    json_keys = ('key', 'startkey', 'endkey')
    encoded = {}
    for name, value in options.items():
        # Key-range params are always JSON-encoded; so is any non-string.
        needs_json = name in json_keys or not isinstance(value, str)
        encoded[name] = json.encode(value) if needs_json else value
    return encoded
def get(self):
    # GET handler: return chat and move events inside the requested
    # bounding box that occurred after the 'since' timestamp.
    global sync_interval
    global last_sync
    global chat_cache
    global move_cache

    min_latitude = float(self.request.get('min_latitude'))
    min_longitude = float(self.request.get('min_longitude'))
    max_latitude = float(self.request.get('max_latitude'))
    max_longitude = float(self.request.get('max_longitude'))
    zoom = self.request.get('zoom')
    if self.request.get('since') == '':
        since = 0
    else:
        since = float(self.request.get('since'))
    since_datetime = datetime.datetime.fromtimestamp(since)

    # Restrict latitude/longitude to restrict bulk downloads.
    if (max_latitude - min_latitude) > 1:
        max_latitude = min_latitude + 1
    if (max_longitude - min_longitude) > 1:
        max_longitude = min_longitude + 1

    chat_events = []
    move_events = []
    # since == 0 (no 'since' param) skips the scan entirely: the client
    # gets empty event lists plus a fresh timestamp to poll from.
    if since > 0:
        RefreshCache()
        # chat_cache entries are accessed via attributes ...
        for entry in chat_cache:
            if (entry.timestamp > since_datetime and
                    entry.latitude > min_latitude and
                    entry.latitude < max_latitude and
                    entry.longitude > min_longitude and
                    entry.longitude < max_longitude):
                chat_events.append(entry)
        # ... while move_cache entries are plain dicts (key access).
        for entry in move_cache:
            if (entry['timestamp'] > since_datetime and
                    entry['latitude'] > min_latitude and
                    entry['latitude'] < max_latitude and
                    entry['longitude'] > min_longitude and
                    entry['longitude'] < max_longitude):
                move_events.append(entry)

    output = {
        'timestamp': time.time(),
        'chats': chat_events,
        'moves': move_events,
    }
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.out.write(json.encode(output));
def validate(self):
    # Validate the API description held in the module-level `dictionary`:
    # accumulate integer error codes, then either render the cached
    # description as JSON or report each error as HTML.
    global types
    global validtypes
    global dictionary
    global errorcodes

    # check resources and operations
    if (dictionary[ "Resources" ] == []):
        errorcodes.append(1)
    else:
        for i in (dictionary[ "Resources" ]):
            if (i[ "Operations" ] == []):
                errorcodes.append(2)

    #print "Types: ", types
    #print "Valid Types: ", validtypes

    # check datatypes: every declared type must be known, and the
    # "Unspecified"/"Unknown" placeholders are themselves errors.
    for i in types:
        if (i.lower()) not in validtypes:
            errorcodes.append(-1)
        if (i == "Unspecified"):
            errorcodes.append(-2)
        if (i == "Unknown"):
            errorcodes.append(-3)

    # if api description is valid, display it
    if (errorcodes == []):
        data = memcache.get("apidescription")
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.encode(data, indent=4, separators=(',', ': ')))
    # if api description is not valid, display the errors
    else:
        self.response.write(("API description was not generated.\n").replace("\n", "<br />"))
        self.response.write(("The following error(s) were found:\n").replace("\n", "<br />"))
        # Map each collected code to its human-readable message.
        # NOTE(review): codes 100 and 200 are reported here but never
        # appended in this method -- presumably set elsewhere; confirm.
        for i in errorcodes:
            if (i == 1):
                self.response.write(("- ERROR: No resources found.\n").replace("\n", "<br />"))
            if (i == 2):
                self.response.write(("- ERROR: Resource with no operations.\n").replace("\n", "<br />"))
            if (i == 100):
                self.response.write(("- ERROR: Incorrect number of arguments.\n").replace("\n", "<br />"))
            if (i == 200):
                self.response.write(("- ERROR: Invalid number of return types.\n").replace("\n", "<br />"))
            if (i == -1):
                self.response.write(("- ERROR: Undefined data type.\n").replace("\n", "<br />"))
            if (i == -2):
                self.response.write(("- ERROR: Information unspecified.\n").replace("\n", "<br />"))
            if (i == -3):
                self.response.write(("- ERROR: Information unknown.\n").replace("\n", "<br />"))