def exploit(request):
    """Probe each GET parameter of *request* for SQL injection.

    Detects error-based SQLi (DBMS error signatures in the response body)
    and boolean-based SQLi (response length differs between "AND 1=1" and
    "AND 1=2" payloads).  The first finding is reported via util.report and
    the scan stops.
    """
    if not request.url.params:
        return
    for param in request.url.params:  # only GET parameters are probed
        # --- error-based detection ---
        for poc in SQL_POCS:
            req_tmp = request.copy()
            req_tmp.__class__ = GetRequest
            req_tmp.setParam(param, req_tmp.getParam(param) + poc)
            # Fetch once per payload instead of once per regex.
            response = req_tmp.fetch()
            for (dbms, regex) in ((dbms, regex)
                                  for dbms in SQLI_ERROR_SINGS
                                  for regex in SQLI_ERROR_SINGS[dbms]):
                if re.search(regex, response, re.I):
                    util.report({"type": "sqli",
                                 "content": util.json_encode({
                                     "sqli_type": "%s Error Based" % dbms,
                                     "param": param,
                                     "detail": "%s" % req_tmp})})
                    return
        # --- boolean-based detection ---
        for prefix, suffix in itertools.product(SQL_PREFIXES, SQL_SUFFIXES):
            poc1 = "%s AND 1=1 %s" % (prefix, suffix)
            poc2 = "%s AND 1=2 %s" % (prefix, suffix)
            req_tmp1 = request.copy()
            req_tmp1.__class__ = GetRequest
            req_tmp1.setParam(param, req_tmp1.getParam(param) + poc1)
            req_tmp2 = request.copy()
            req_tmp2.__class__ = GetRequest
            req_tmp2.setParam(param, req_tmp2.getParam(param) + poc2)
            if len(req_tmp1.fetch()) != len(req_tmp2.fetch()):
                # BUG FIX: report the request actually sent (req_tmp2), not
                # the leftover req_tmp from the error-based loop above.
                util.report({"type": "sqli",
                             "content": util.json_encode({
                                 "sqli_type": "UNION query",
                                 "param": param,
                                 "detail": "%s" % req_tmp2})})
                return
def process(self, **kwargs):
    """Send one APNs push notification over the HTTP/2 connection.

    Raises ApnsException when APNs replies with status >= 400.
    """
    alert = kwargs.get("alert", None)
    # BUG FIX: the default must be a dict — a missing "apns" kwarg left
    # apns as None and "**apns" below raised TypeError.
    apns = kwargs.get("apns", {})
    token = kwargs["token"]
    # A plain-string alert is used as both title and body.
    if alert is not None and not isinstance(alert, dict):
        alert = {"body": alert, "title": alert}
    payload_data = {"aps": {"alert": alert, **apns}}
    payload = json_encode(payload_data)
    PATH = "/3/device/{0}".format(token)
    headers = self.build_headers()
    logging.info(payload)
    self.http2.request("POST", PATH, payload, headers=headers)
    resp = self.http2.get_response()
    if resp.status >= 400:
        # headers = resp.headers
        # for k, v in headers.items():
        #     logging.error("%s: %s" % (k.decode("utf-8"), v.decode("utf-8")))
        body = resp.read().decode("utf-8")
        logging.error(body)
        # NOTE(review): status is hard-coded to 400 regardless of resp.status.
        raise ApnsException(400, body)
def areas():
    """Return a GeoJSON FeatureCollection of areas containing (lat, lon)."""
    lat = float(request.args['lat'])
    lon = float(request.args['lon'])

    def _flag(name, default):
        # BUG FIX: bool() on a query-string value is True for ANY non-empty
        # string, so "?include_geom=false" used to come out True.  Parse the
        # text explicitly; absent parameter keeps the documented default.
        raw = request.args.get(name)
        if raw is None:
            return default
        return raw.lower() not in ('', '0', 'false', 'no')

    include_geom = _flag('include_geom', True)
    pretty = _flag('pretty', False)
    json_callback = request.args.get('callback', None)

    # This. Is. Python.
    ogr.UseExceptions()

    features = []
    point = ogr.Geometry(wkt='POINT(%f %f)' % (lon, lat))
    args = point, include_geom

    #
    # Look at four files in turn
    #
    for (dataname, shpname, zipname) in filenames:
        features += get_intersecting_features(ogr.Open(shpname), dataname, *args)

    geojson = dict(type='FeatureCollection', features=features)
    body, mime = json_encode(geojson, pretty=pretty), 'application/json'

    if json_callback:
        # JSON-P: wrap the body in the requested callback.
        body = '%s(%s);\n' % (json_callback, body)
        mime = 'text/javascript'

    return Response(body, headers={'Content-type': mime})
def areas():
    ''' Retrieve geographic areas. '''
    is_census = is_census_datasource(environ)
    lat = float(request.args['lat'])
    lon = float(request.args['lon'])
    # BUG FIX: bool() on a query-string value is True for any non-empty
    # string (including "false"); parse the text explicitly instead.
    raw_geom = request.args.get('include_geom')
    include_geom = True if raw_geom is None \
        else raw_geom.lower() not in ('', '0', 'false', 'no')
    json_callback = request.args.get('callback', None)
    # Layer filtering only applies to the census datasource.
    layer_names = is_census and request.args.get('layers', '')
    layer_names = layer_names and set(layer_names.split(','))

    # This. Is. Python.
    ogr.UseExceptions()

    point = ogr.Geometry(wkt='POINT(%f %f)' % (lon, lat))

    if is_census:
        features = census_features(point, include_geom, layer_names)
    else:
        datasource = get_datasource(environ)
        features = get_intersecting_features(datasource, point, include_geom)

    geojson = dict(type='FeatureCollection', features=features)
    body, mime = json_encode(geojson), 'application/json'

    if json_callback:
        # JSON-P: wrap the body in the requested callback.
        body = '%s(%s);\n' % (json_callback, body)
        mime = 'text/javascript'

    return Response(body, headers={'Content-type': mime,
                                   'Access-Control-Allow-Origin': '*'})
def select():
    ''' Retrieve features. '''
    if is_census_datasource(environ):
        error = "Can't select individual features from " + census_url
        return Response(render_template('error.html', error=error), status=404)

    where_clause = request.args.get('where', None)
    where_clause = where_clause and str(where_clause)
    page_number = int(request.args.get('page', 1))
    # NOTE(review): bool() on a query string is True for any non-empty value
    # (even "false") — left as-is for compatibility; confirm intent.
    include_geom = bool(request.args.get('include_geom', True))
    json_callback = request.args.get('callback', None)

    # This. Is. Python.
    ogr.UseExceptions()

    try:
        datasource = get_datasource(environ)
        features = get_matching_features(datasource, where_clause,
                                         page_number, include_geom)
    except QueryError as e:
        # Modernized from "except QueryError, e" — valid on Py2.6+ and Py3.
        body, mime = json_encode({'error': str(e)}), 'application/json'
        if json_callback:
            body = '%s(%s);\n' % (json_callback, body)
            mime = 'text/javascript'
        return Response(body, status=400,
                        headers={'Content-type': mime, cors: '*'})
    # NOTE(review): the success path (returning `features`) appears to be
    # truncated in this chunk.
def build_request(self, token, alert, **kwargs):
    """Serialize an FCM v1 message for *token* and return it as JSON text."""
    # A plain-string alert becomes both the title and the body.
    if alert is not None and not isinstance(alert, dict):
        alert = {"body": alert, "title": alert}
    fcm_param = kwargs.get("payload", {})
    data = fcm_param.get("data", {})
    # data structure: https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages
    message = {"token": token}
    if alert:
        message["notification"] = self.format_values(alert)
    if data:
        message["data"] = self.format_values(data)
    # Optional platform-specific sections are passed through untouched.
    for section in ("android", "webpush", "apns"):
        value = fcm_param.get(section, {})
        if value:
            message[section] = value
    return json_encode({"message": message})
def post(self):
    """Create a comment on an entry and write back a JS-consumable snippet."""
    id = self.get_argument("id", None)
    content = self.get_argument("content", None)
    # Silently ignore requests missing the entry id, the content, or a session.
    if not (id and content and self.current_user):
        return
    page = self.get_argument("page", None)
    user_id = self.current_user["_id"]
    comment = {
        "_id": self.comment_dal.get_id(),
        "user_id": user_id,
        "user": self.comment_dal.dbref("users", user_id),
        "entry_id": int(id),
        "entry": self.comment_dal.dbref("entries", int(id)),
        "content": content,
        "published": datetime.datetime.now()
    }
    cid = self.comment_dal.save(comment)
    self.entry_dal.update_comments_count(int(id))
    # Re-read the saved comment so the template renders the stored form.
    comment = self.comment_dal.get({"_id": cid})
    # "page" selects the full comment template; otherwise the mini one.
    if not page:
        html = self.render_string("modules/mini_comment.html", comment=comment)
    else:
        html = self.render_string("modules/comment.html", comment=comment)
    # NOTE(review): the hand-built response is not valid JSON (unquoted key,
    # json_encode output wrapped in extra quotes) — confirm the client
    # really expects this shape before changing it.
    self.write("{id: %s, html: '%s'}" % (id, util.json_encode(html)))
async def remind(ctx, time, *, reminder):
    """Parse a time spec, persist a reminder row, and confirm to the user."""
    reminder = reminder.strip()
    if len(reminder) > 512:
        await ctx.send(embed=util.error_embed(
            "Maximum reminder length is 512 characters", "Foolish user error"))
        return
    extra_data = {
        "author_id": ctx.author.id,
        "channel_id": ctx.message.channel.id,
        "message_id": ctx.message.id,
        # guild is None in DMs; store None rather than raising.
        "guild_id": ctx.message.guild and ctx.message.guild.id,
        "original_time_spec": time
    }
    try:
        now = datetime.now(tz=timezone.utc)
        time = util.parse_time(time)
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed SystemExit
        # and KeyboardInterrupt; keep it broad but bounded.
        await ctx.send(embed=util.error_embed(
            "Invalid time (wrong format/too large/non-integer months or years)"
        ))
        return
    await bot.database.execute(
        "INSERT INTO reminders (remind_timestamp, created_timestamp, reminder, expired, extra) VALUES (?, ?, ?, ?, ?)",
        (time.timestamp(), now.timestamp(), reminder, 0,
         util.json_encode(extra_data)))
    await bot.database.commit()
    await ctx.send(
        f"Reminder scheduled for {util.format_time(time)} ({util.format_timedelta(now, time)})."
    )
async def remind(self, ctx, time, *, reminder):
    """Parse a time spec in the user's timezone, persist the reminder,
    confirm to the user, and register it with the in-process scheduler."""
    reminder = reminder.strip()
    if len(reminder) > 512:
        await ctx.send(embed=util.error_embed(
            "Maximum reminder length is 512 characters", "Foolish user error"))
        return
    extra_data = {
        "author_id": ctx.author.id,
        "channel_id": ctx.message.channel.id,
        "message_id": ctx.message.id,
        # guild is None in DMs; store None rather than raising.
        "guild_id": ctx.message.guild and ctx.message.guild.id,
        "original_time_spec": time
    }
    tz = await util.get_user_timezone(ctx)
    try:
        now = datetime.now(tz=timezone.utc)
        time = util.parse_time(time, tz)
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed SystemExit
        # and KeyboardInterrupt; keep it broad but bounded.
        await ctx.send(embed=util.error_embed(
            "Invalid time (wrong format/too large months or years)"))
        return
    utc_time, local_time = util.in_timezone(time, tz)
    id = (await self.bot.database.execute_insert(
        "INSERT INTO reminders (remind_timestamp, created_timestamp, reminder, expired, extra) VALUES (?, ?, ?, ?, ?)",
        (utc_time.timestamp(), now.timestamp(), reminder, 0,
         util.json_encode(extra_data))))["last_insert_rowid()"]
    await self.bot.database.commit()
    await ctx.send(
        f"Reminder scheduled for {util.format_time(local_time)} ({util.format_timedelta(now, utc_time)})."
    )
    self.insert_reminder(id, utc_time.timestamp())
def post(self):
    """Save a comment on an entry and respond with a rendered HTML snippet."""
    id = self.get_argument("id", None)
    content = self.get_argument("content", None)
    # Bail out quietly when id, content, or an authenticated user is missing.
    if not (id and content and self.current_user):
        return
    page = self.get_argument("page", None)
    user_id = self.current_user["_id"]
    comment = {
        "_id": self.comment_dal.get_id(),
        "user_id": user_id,
        "user": self.comment_dal.dbref("users", user_id),
        "entry_id": int(id),
        "entry": self.comment_dal.dbref("entries", int(id)),
        "content": content,
        "published": datetime.datetime.now()
    }
    cid = self.comment_dal.save(comment)
    self.entry_dal.update_comments_count(int(id))
    # Re-fetch so the rendered comment reflects exactly what was stored.
    comment = self.comment_dal.get({"_id": cid})
    if not page:
        html = self.render_string(
            "modules/mini_comment.html", comment=comment)
    else:
        html = self.render_string(
            "modules/comment.html", comment=comment)
    # NOTE(review): hand-assembled response is not strict JSON (bare key,
    # extra quoting around json_encode output) — verify client expectations.
    self.write("{id: %s, html: '%s'}" % (id, util.json_encode(html)))
def process(self, **kwargs):
    """Send one APNs push over the persistent HTTP/2 connection.

    Raises ApnsException when APNs replies with status >= 400.
    """
    alert = kwargs.get("alert", None)
    apns = kwargs.get("apns", {})
    token = kwargs["token"]
    # A plain-string alert is used as both title and body.
    if alert is not None and not isinstance(alert, dict):
        alert = {"body": alert, "title": alert}
    # data structure:
    # https://developer.apple.com/library/archive/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/PayloadKeyReference.html#//apple_ref/doc/uid/TP40008194-CH17-SW1
    payload_data = {"aps": {"alert": alert, **apns}}
    payload = json_encode(payload_data)
    self.payload = payload  # kept on the instance, presumably for debugging/inspection
    PATH = "/3/device/{0}".format(token)
    self.headers = self.build_headers()
    self.http2.request("POST", PATH, payload, headers=self.headers)
    resp = self.http2.get_response()
    if resp.status >= 400:
        # headers = resp.headers
        # for k, v in headers.items():
        #     logging.error("%s: %s" % (k.decode("utf-8"), v.decode("utf-8")))
        body = resp.read().decode("utf-8")
        logging.error(body)
        # NOTE(review): status code is hard-coded to 400 even when
        # resp.status is something else — confirm this is intended.
        raise ApnsException(400, body)
def read_config(args): if not os.path.isfile("config.json"): print "config.json does not exist! Please copy config-sample.json to config.json and edit to your liking, then run the script." sys.exit(1) countries = args.country if isinstance(countries, basestring): countries = [countries] countries = [country.lower().strip() for country in countries] for country in countries: if not os.path.isfile("proxies/proxies-%s.json" % country): print "The proxy configuration file proxies-%s.json does not exist! Exiting." % country sys.exit(1) content = util.get_contents("config.json") config = util.json_decode(content) if args.ip: config["public_ip"] = args.ip if args.bind_ip: config["bind_ip"] = args.ip if args.base_ip: config["base_ip"] = args.base_ip if args.base_port: config["base_port"] = args.base_port if not config["public_ip"]: try: print("Autodetecting public IP address...") public_ip = urllib2.urlopen("http://l2.io/ip").read().strip() print("Detected public IP as %s. If it's wrong, please cancel the script now and set it in config.json or specify with --ip" % public_ip) time.sleep(1) config["public_ip"] = public_ip except: print("Could not detect public IP. Please update the public_ip setting in config.json or specify with --ip.") sys.exit(1) if args.save: util.put_contents('config.json', util.json_encode(config)) groups = {} for country in countries: groups.update(util.json_decode(util.get_contents("proxies/proxies-%s.json" % country))) if args.only: only = set(args.only) for item in args.only: if item not in groups: print "Nonexistent Item: %s, exiting" % item sys.exit() for item in groups.keys(): if item not in only: del groups[item] elif args.skip: for item in args.skip: del groups[item] config["groups"] = groups return config
def exploit(request):
    """Probe each GET parameter for error-based and boolean-based SQLi.

    The first finding is reported through util.report and the scan stops.
    """
    if not request.url.params:
        return
    for param in request.url.params:  # only GET parameters are probed
        # Error-based: look for DBMS error signatures in the response.
        for poc in SQL_POCS:
            req_tmp = request.copy()
            req_tmp.__class__ = GetRequest
            req_tmp.setParam(param, req_tmp.getParam(param) + poc)
            # Fetch once per payload instead of once per regex.
            response = req_tmp.fetch()
            for (dbms, regex) in ((dbms, regex)
                                  for dbms in SQLI_ERROR_SINGS
                                  for regex in SQLI_ERROR_SINGS[dbms]):
                if re.search(regex, response, re.I):
                    util.report({
                        "type": "sqli",
                        "content": util.json_encode({
                            "sqli_type": "%s Error Based" % dbms,
                            "param": param,
                            "detail": "%s" % req_tmp
                        })
                    })
                    return
        # Boolean-based: compare response sizes of 1=1 vs 1=2 payloads.
        for prefix, suffix in itertools.product(SQL_PREFIXES, SQL_SUFFIXES):
            poc1 = "%s AND 1=1 %s" % (prefix, suffix)
            poc2 = "%s AND 1=2 %s" % (prefix, suffix)
            req_tmp1 = request.copy()
            req_tmp1.__class__ = GetRequest
            req_tmp1.setParam(param, req_tmp1.getParam(param) + poc1)
            req_tmp2 = request.copy()
            req_tmp2.__class__ = GetRequest
            req_tmp2.setParam(param, req_tmp2.getParam(param) + poc2)
            if (len(req_tmp1.fetch()) != len(req_tmp2.fetch())):
                # BUG FIX: report req_tmp2 (actually sent), not the stale
                # req_tmp left over from the error-based loop.
                util.report({
                    "type": "sqli",
                    "content": util.json_encode({
                        "sqli_type": "UNION query",
                        "param": param,
                        "detail": "%s" % req_tmp2
                    })
                })
                return
def send_response(self, status_code=200, data=None, headers=None):
    """ Set REST API response """
    self.set_status(status_code, None)
    if headers is not None:
        self.set_headers(headers)
    # Encode a payload when present; otherwise finish with an empty body.
    body = json_encode(data) if data else ""
    self.finish(body)
def features_geojson(features, json_callback):
    ''' Build a (body, mime) pair for a GeoJSON FeatureCollection. '''
    body = json_encode(dict(type='FeatureCollection', features=features))
    if not json_callback:
        return body, 'application/json'
    # JSON-P: wrap in the requested callback and switch the mime type.
    return '%s(%s);\n' % (json_callback, body), 'text/javascript'
def ssh_write(host, state):
    """Serialize *state* to JSON and scp it to ~/glidein_state on *host*."""
    tmpdir = tempfile.mkdtemp()
    try:
        state_path = os.path.join(tmpdir, 'glidein_state')
        with open(state_path, 'w') as out:
            out.write(json_encode(state))
        # scp returns non-zero on failure.
        if subprocess.call(['scp', state_path, host + ':~/glidein_state']):
            raise Exception('error in ssh copy of state')
    finally:
        # Always remove the temporary directory, even on failure.
        shutil.rmtree(tmpdir)
def ssh_write(host, state):
    """Copy a JSON-encoded *state* file to ~/glidein_state on *host* via scp."""
    workdir = tempfile.mkdtemp()
    try:
        path = os.path.join(workdir, 'glidein_state')
        with open(path, 'w') as fh:
            fh.write(json_encode(state))
        rc = subprocess.call(['scp', path, host + ':~/glidein_state'])
        if rc:
            raise Exception('error in ssh copy of state')
    finally:
        # Clean up the scratch directory regardless of outcome.
        shutil.rmtree(workdir)
def scanner():
    """Scan the target's configured web and app ports; report each finding
    via util.report and return the collected results."""
    info = {"app": [], "web": []}
    for port in g.O["web-ports"]:
        data = scan_web_server(g.O["target"], int(port))
        if data is not None:
            util.report({"type": "sys_info", "content": util.json_encode(data)})
            info["web"].append(data)
    for port in g.O["app-ports"]:
        data = scan_app_service(g.O["target"], int(port))
        if data is not None:
            util.report({"type": "sys_info", "content": util.json_encode(data)})
            info["app"].append(data)
    return info
def exploit(request):
    """Probe request parameters (GET) or form fields (POST) for reflected
    XSS; report the first finding per request via util.report."""
    if not request.url.params and not request.fields:
        return
    if isinstance(request, GetRequest):
        for param, poc in itertools.product(request.url.params, XSS_POCS):
            probe = request.copy()
            probe.__class__ = GetRequest
            probe.setParam(param, probe.getParam(param) + poc)
            # Payload echoed back verbatim => reflected XSS.
            if poc in probe.fetch():
                util.report({"type": "xss", "content": util.json_encode({"xss_type": "GET", "param": param, "detail": "%s" % probe})})
                break
    else:
        for field, poc in itertools.product(request.fields, XSS_POCS):
            probe = request.copy()
            probe.__class__ = PostRequest
            probe.setField(field, probe.getField(field) + poc)
            if poc in probe.fetch():
                util.report({"type": "xss", "content": util.json_encode({"xss_type": "POST", "field": field, "detail": "%s" % probe})})
                break
def build_request(self, regids, data, collapse_key, ttl):
    """Serialize a legacy GCM downstream message for *regids* as JSON text."""
    payload = {"registration_ids": regids}
    if data:
        payload["data"] = data
    # A negative TTL means "use the server default"; only send explicit ones.
    if ttl >= 0:
        payload["time_to_live"] = ttl
    if collapse_key:
        payload["collapse_key"] = collapse_key
    return json_encode(payload)
def post(self):
    """Render one page of a user's friends or followers as HTML snippets."""
    result = {"code": 200, "msg": "OK", "end": 0, "html": ""}
    offset = int(self.get_argument("offset", "0"))
    p = int(self.get_argument("p", "1"))
    user_id = self.get_argument("user_id", None)
    filter = self.get_argument("filter", "None")
    # NOTE(review): this rejects only when filter is invalid AND user_id is
    # present; an invalid filter with no user_id falls through to int(None)
    # below — confirm whether "or not user_id" was intended.
    if not filter in ("friends", "followers") and user_id:
        self._render_error(result)
        return
    p = 1 if p < 1 else p
    if filter == "friends":
        total = self.relation.get_friends_count(int(user_id))
    else:
        total = self.relation.get_followers_count(int(user_id))
    if total <= 0:
        self._render_error(result)
        return
    tmp = offset
    # Absolute row offset: start of page p plus the caller-supplied offset.
    offset = (p - 1) * MAX_PAGE_SIZE + offset
    limit = PAGE_SIZE
    if filter == "friends":
        users = self.relation.get_friends(int(user_id), offset, limit)
    else:
        users = self.relation.get_followers(int(user_id), offset, limit)
    """Get Relations for the current user."""
    if self.current_user:
        ids = [u["_id"] for u in users]
        ifriends = self.relation.get_relations_by_ids(
            self.current_user["_id"], ids)
        for user in users:
            user["ifollow"] = True if user["_id"] in ifriends else False
    tmp = tmp + len(users)
    htmls = []
    for i in range(0, len(users)):
        # "odd" alternates row styling in the template.
        args = {'user': users[i], 'odd': True if i % 2 == 0 else False}
        html = self.render_string("modules/person.html", **args)
        htmls.append(util.json_encode(html))
    result["html"] = htmls
    # end=1: page boundary reached; end=2: no more results at all.
    pager = tmp % MAX_PAGE_SIZE == 0
    final = (offset + len(users)) >= total
    if pager:
        result["end"] = 1
    if final:
        result["end"] = 2
    self.render("ajax/pubu.json", result=result)
def status():
    ''' Report service health as a JSON response. '''
    datasource = get_datasource(environ)
    ok = bool(datasource)
    body = json_encode({
        'status': 'ok' if ok else 'Bad datasource: %s' % repr(datasource),
        'updated': int(time()),
        'dependencies': [],
        'resources': {}
    })
    return Response(body, headers={'Content-type': 'application/json', cors: '*'})
def scanner():
    """Probe configured web/app ports on the target, reporting every hit."""
    info = {'app': [], 'web': []}
    # (kind, ports-option, probe) pairs keep the two loops symmetric.
    for kind, opt, probe in (('web', 'web-ports', scan_web_server),
                             ('app', 'app-ports', scan_app_service)):
        for port in g.O[opt]:
            data = probe(g.O['target'], int(port))
            if data is not None:
                util.report({"type": "sys_info",
                             "content": util.json_encode(data)})
                info[kind].append(data)
    return info
def post(self):
    """Render one page of a user's friends or followers as HTML snippets."""
    result = {"code": 200, "msg": "OK", "end": 0, "html": ""}
    offset = int(self.get_argument("offset", "0"))
    p = int(self.get_argument("p", "1"))
    user_id = self.get_argument("user_id", None)
    filter = self.get_argument("filter", "None")
    # NOTE(review): rejects only when filter is invalid AND user_id is set;
    # an invalid filter without user_id reaches int(None) below — confirm
    # whether "or not user_id" was intended.
    if not filter in ("friends", "followers") and user_id:
        self._render_error(result)
        return
    p = 1 if p < 1 else p
    if filter == "friends":
        total = self.relation.get_friends_count(int(user_id))
    else:
        total = self.relation.get_followers_count(int(user_id))
    if total <= 0:
        self._render_error(result)
        return
    tmp = offset
    # Absolute row offset: start of page p plus the caller-supplied offset.
    offset = (p - 1) * config.MAX_PAGE_SIZE + offset
    limit = config.MINI_PAGE_SIZE
    if filter == "friends":
        users = self.relation.get_friends(int(user_id), offset, limit)
    else:
        users = self.relation.get_followers(int(user_id), offset, limit)
    """Get Relations for the current user."""
    if self.current_user:
        ids = [u["_id"] for u in users]
        ifriends = self.relation.get_relations_by_ids(
            self.current_user["_id"], ids)
        for user in users:
            user["ifollow"] = True if user["_id"] in ifriends else False
    tmp = tmp + len(users)
    htmls = []
    for i in range(0, len(users)):
        # "odd" alternates row styling in the template.
        args = {'user': users[i], 'odd': True if i % 2 == 0 else False}
        html = self.render_string("modules/person.html", **args)
        htmls.append(util.json_encode(html))
    result["html"] = htmls
    # end=1: page boundary reached; end=2: no more results at all.
    pager = tmp % config.MAX_PAGE_SIZE == 0
    final = (offset + len(users)) >= total
    if pager:
        result["end"] = 1
    if final:
        result["end"] = 2
    self.render("ajax/pubu.json", result=result)
def request(self, methodname, kwargs):
    """Send request to RPC Server.

    Performs a JSON-RPC 2.0 call over HTTP and returns the 'result' field
    (or None).  Raises Exception for private methods, transport failures,
    undecodable responses, and server-side errors.
    """
    # check method name for bad characters
    if methodname[0] == '_':
        logger.warning('cannot use RPC for private methods')
        raise Exception('Cannot use RPC for private methods')
    # translate request to json
    body = json_encode({
        'jsonrpc': '2.0',
        'method': methodname,
        'params': kwargs,
        'id': Client.newid()
    })
    headers = {'Content-type': 'application/json'}
    request = urllib2.Request(self._address, data=body, headers=headers)
    # make request to server
    try:
        response = urllib2.urlopen(request, timeout=self._timeout)
    except Exception:
        # logger.warn is a deprecated alias; use warning
        logger.warning('error making jsonrpc request', exc_info=True)
        raise
    # translate response from json
    try:
        cb_data = response.read()
        data = json_decode(cb_data)
    except Exception:
        try:
            logger.info('json data: %r', cb_data)
        except Exception:
            pass
        raise
    if 'error' in data:
        # BUG FIX: previously the detailed exception was raised INSIDE the
        # try and immediately caught by its own "except", so the fallback
        # message always replaced it.  Only the formatting is attempted now.
        try:
            message = 'Error %r: %r %r' % data['error']
        except Exception:
            message = 'Error %r' % data['error']
        raise Exception(message)
    if 'result' in data:
        return data['result']
    else:
        return None
def status():
    """Return a JSON health-check response for the service."""
    datasource = get_datasource(environ)
    healthy = bool(datasource)
    report = {
        'status': 'ok' if healthy else 'Bad datasource: %s' % repr(datasource),
        'updated': int(time()),
        'dependencies': [],
        'resources': {}
    }
    return Response(json_encode(report), headers={
        'Content-type': 'application/json',
        cors: '*'
    })
def request(self, methodname, kwargs):
    """Send request to RPC Server.

    Issues a JSON-RPC 2.0 call and returns the server's 'result' (or None);
    raises Exception on private-method names, transport errors, bad JSON,
    or a server-reported error.
    """
    # check method name for bad characters
    if methodname[0] == '_':
        logger.warning('cannot use RPC for private methods')
        raise Exception('Cannot use RPC for private methods')
    # translate request to json
    body = json_encode({'jsonrpc': '2.0', 'method': methodname,
                        'params': kwargs, 'id': Client.newid()})
    headers = {'Content-type': 'application/json'}
    request = urllib2.Request(self._address, data=body, headers=headers)
    # make request to server
    try:
        response = urllib2.urlopen(request, timeout=self._timeout)
    except Exception:
        # logger.warn is a deprecated alias; use warning
        logger.warning('error making jsonrpc request', exc_info=True)
        raise
    # translate response from json
    try:
        cb_data = response.read()
        data = json_decode(cb_data)
    except Exception:
        try:
            logger.info('json data: %r', cb_data)
        except Exception:
            pass
        raise
    if 'error' in data:
        # BUG FIX: the detailed exception used to be raised inside the try
        # and swallowed by its own except, so the fallback always won.
        # Only attempt the tuple formatting inside the try.
        try:
            message = 'Error %r: %r %r' % data['error']
        except Exception:
            message = 'Error %r' % data['error']
        raise Exception(message)
    if 'result' in data:
        return data['result']
    else:
        return None
def format_values(self, data=None):
    """Return *data* with every dict value converted to a string.

    Booleans become "1"/"0", nested dicts are recursively formatted and
    JSON-encoded, None values are dropped, everything else goes through
    str().  Non-dict inputs are returned unchanged.
    """
    if not isinstance(data, dict):
        return data
    # Try to convert all fields to string.
    formatted = {}
    for (k, v) in data.items():
        if isinstance(v, bool):
            formatted[k] = "1" if v else "0"
        elif isinstance(v, dict):
            try:
                formatted[k] = json_encode(self.format_values(v))
            except Exception:
                # BUG FIX: narrowed from a bare "except:" which also caught
                # SystemExit/KeyboardInterrupt; the field is still skipped.
                logging.error("Error treating field " + k)
        elif v is not None:
            formatted[k] = str(v)
    return formatted
def select():
    ''' Retrieve features. '''
    if is_census_datasource(environ):
        error = "Can't select individual features from " + census_url
        return Response(render_template('error.html', error=error), status=404)

    where_clause = request.args.get('where', None)
    where_clause = where_clause and str(where_clause)
    page_number = int(request.args.get('page', 1))
    # NOTE(review): bool() on a query string is True for any non-empty value
    # (even "false") — left unchanged for compatibility; confirm intent.
    include_geom = bool(request.args.get('include_geom', True))
    json_callback = request.args.get('callback', None)

    # This. Is. Python.
    ogr.UseExceptions()

    try:
        datasource = get_datasource(environ)
        features = get_matching_features(datasource, where_clause,
                                         page_number, include_geom)
    except QueryError as e:
        # Modernized from "except QueryError, e" — valid on Py2.6+ and Py3.
        body, mime = json_encode({'error': str(e)}), 'application/json'
        if json_callback:
            body = '%s(%s);\n' % (json_callback, body)
            mime = 'text/javascript'
        return Response(body, status=400, headers={
            'Content-type': mime,
            cors: '*'
        })
    # NOTE(review): the success path (returning `features`) appears to be
    # truncated in this chunk.
def post(self):
    """Render one page of entries as HTML snippets.

    The view is chosen by precedence: user's likes > user's own entries >
    category > full-text search > global timeline.
    """
    result = {"code": 200, "msg": "OK", "end": 0, "html": ""}
    offset = int(self.get_argument("offset", "0"))
    p = int(self.get_argument("p", "1"))
    user_id = self.get_argument("user_id", None)
    filter = self.get_argument("filter", "-1")
    category = self.get_argument("category", None)
    q = self.get_argument("q", None)
    p = 1 if p < 1 else p
    # Count total rows for the requested view first (needed for paging).
    if filter == "likes":
        if not user_id:
            result["code"] = 404
            self.render("ajax/pubu.json", result=result)
            return
        total = self.fav_dal.get_user_like_entries_count(int(user_id))
    elif user_id:
        total = self.entry_dal.get_user_entries_count(int(user_id))
    elif category:
        total = self.entry_dal.get_entries_count_by_category(int(category))
    elif q:
        total = self.searchEngine.search_entries_count(q)
    else:
        total = self.entry_dal.get_count(None)
    if total <= 0:
        result["code"] = 404
        self.render("ajax/pubu.json", result=result)
        return
    tmp = offset
    # Absolute row offset: start of page p plus the caller-supplied offset.
    offset = (p - 1) * MAX_PAGE_SIZE + offset
    limit = PAGE_SIZE
    if filter == "likes":
        entries = self.fav_dal.get_user_like_entries(
            int(user_id), offset, limit)
    elif user_id:
        entries = self.entry_dal.get_user_entries(
            int(user_id), offset, limit)
    elif category:
        entries = self.entry_dal.get_entries_by_category(int(category), offset, limit)
    elif q:
        entries = self.searchEngine.search_entries(q, offset, limit)
    else:
        entries = self.entry_dal.query(None, offset, limit)
    """Get entry's comments"""
    tweet_ids = [t["_id"] for t in entries]
    comments = self.comment_dal.get_comments_by_ids(tweet_ids)
    # Attach each comment to its parent entry.
    for tweet in entries:
        tweet["comment_list"] = []
        for comment in comments:
            if tweet["_id"] == comment["entry_id"]:
                tweet["comment_list"].append(comment)
    """Validate the current user is like this entry"""
    if self.current_user:
        fids = self.fav_dal.get_user_isliked(
            self.current_user["_id"], tweet_ids)
        for tweet in entries:
            tweet["iliked"] = True if tweet["_id"] in fids else False
    tmp = tmp + len(entries)
    htmls = []
    for entry in entries:
        html = self.render_string("modules/entry.html", entry=entry)
        htmls.append(util.json_encode(html))
    result["html"] = htmls
    # end=1: page boundary reached; end=2: no more results at all.
    pager = tmp % MAX_PAGE_SIZE == 0
    final = (offset + len(entries)) >= total
    if pager:
        result["end"] = 1
    if final:
        result["end"] = 2
    self.render("ajax/pubu.json", result=result)
def save(self, dir, msg):
    """Persist a FIX message dict and track the highest sequence number
    seen per direction ('in' -> slot 0, 'out' -> slot 1)."""
    lkey = {'in': 0, 'out': 1}[dir]
    # BUG FIX: read MsgSeqNum *before* JSON-encoding — the original
    # replaced msg with the encoded string and then indexed the string
    # with a key, which raises TypeError.
    seqnum = msg['MsgSeqNum']
    if self._last[lkey] < seqnum:
        self._last[lkey] = seqnum
    self.db.update([util.json_encode(msg)])
def write(self, chunk):
    """Append *chunk* to the output buffer; dicts are JSON-encoded and the
    Content-Type header is switched to application/json."""
    if isinstance(chunk, dict):
        chunk = json_encode(chunk)
        self.set_header("Content-Type", "application/json; charset=UTF-8")
    self._write_buffer.append(utf8(chunk))
# NOTE(review): the indented lines below are the tail of checkpalflip();
# its "def checkpalflip(gnm): if 'final' in gnm['xforms']: f = ..." opening
# lies outside this chunk (a complete copy appears later in the file).
        fcv, fcsp = f['color'], f['color_speed']
    else:
        fcv, fcsp = SplEval(0), SplEval(0)
    sansfinal = [v for k, v in gnm['xforms'].items() if k != 'final']
    # Per-xform blended color at t=0 (lc) and t=1 (rc): each xform's own
    # color mixed with the final xform's color by color_speed.
    lc, rc = [
        np.array([
            v['color'](t) * (1 - fcsp(t)) + fcv(t) * fcsp(t)
            for v in sansfinal
        ]) for t in (0, 1)
    ]
    rcrv = 1 - rc
    # TODO: use spline integration instead of L2
    # Density weight per xform: L2 norm of its weight at t=0 and t=1.
    dens = np.array(
        [np.hypot(v['weight'](0), v['weight'](1)) for v in sansfinal])
    # True when the flipped right-hand colors are closer (density-weighted).
    return np.sum(np.abs(dens * (rc - lc))) > np.sum(np.abs(dens * (rcrv - lc)))


def palflip(gnm):
    """Mirror the t=1 end of every xform's color spline and reverse the
    second palette of *gnm* in place."""
    for v in gnm['xforms'].values():
        c = v['color']
        # New spline: same start value/derivative, mirrored end value,
        # negated end derivative.
        v['color'] = SplEval([0, c(0), 1, 1 - c(1)], c(0, 1), -c(1, 1))
    pal = genome.palette_decode(gnm['palettes'][1])
    gnm['palettes'][1] = genome.palette_encode(np.flipud(pal))


if __name__ == "__main__":
    # Python 2 script entry point: blend genomes from a.json/b.json/c.json.
    import sys, json
    a, b, c = [json.load(open(f + '.json')) for f in 'abc']
    print json_encode(blend(a, b, c))
def __init__(self, object):
    """Serialize *object* to JSON and build an application/json response."""
    body = json_encode(object)
    super(JsonResponse, self).__init__(body, mimetype='application/json')
def post(self):
    """Render one page of entries as HTML snippets.

    View precedence: user's likes > user's own entries > category >
    full-text search > global timeline.
    """
    result = {"code": 200, "msg": "OK", "end": 0, "html": ""}
    offset = int(self.get_argument("offset", "0"))
    p = int(self.get_argument("p", "1"))
    user_id = self.get_argument("user_id", None)
    filter = self.get_argument("filter", "-1")
    category = self.get_argument("category", None)
    q = self.get_argument("q", None)
    p = 1 if p < 1 else p
    # Count total rows for the requested view first (needed for paging).
    if filter == "likes":
        if not user_id:
            result["code"] = 404
            self.render("ajax/pubu.json", result=result)
            return
        total = self.fav_dal.get_user_like_entries_count(int(user_id))
    elif user_id:
        total = self.entry_dal.get_user_entries_count(int(user_id))
    elif category:
        total = self.entry_dal.get_entries_count_by_category(int(category))
    elif q:
        total = self.searchEngine.search_entries_count(q)
    else:
        total = self.entry_dal.get_count(None)
    if total <= 0:
        result["code"] = 404
        self.render("ajax/pubu.json", result=result)
        return
    tmp = offset
    # Absolute row offset: start of page p plus the caller-supplied offset.
    offset = (p - 1) * config.MAX_PAGE_SIZE + offset
    limit = config.MINI_PAGE_SIZE
    if filter == "likes":
        entries = self.fav_dal.get_user_like_entries(
            int(user_id), offset, limit)
    elif user_id:
        entries = self.entry_dal.get_user_entries(int(user_id), offset, limit)
    elif category:
        entries = self.entry_dal.get_entries_by_category(
            int(category), offset, limit)
    elif q:
        entries = self.searchEngine.search_entries(q, offset, limit)
    else:
        entries = self.entry_dal.query(None, offset, limit)
    """Get entry's comments"""
    tweet_ids = [t["_id"] for t in entries]
    comments = self.comment_dal.get_comments_by_ids(tweet_ids)
    # Attach each comment to its parent entry.
    for tweet in entries:
        tweet["comment_list"] = []
        for comment in comments:
            if tweet["_id"] == comment["entry_id"]:
                tweet["comment_list"].append(comment)
    """Validate the current user is like this entry"""
    if self.current_user:
        fids = self.fav_dal.get_user_isliked(self.current_user["_id"],
                                             tweet_ids)
        for tweet in entries:
            tweet["iliked"] = True if tweet["_id"] in fids else False
    tmp = tmp + len(entries)
    htmls = []
    for entry in entries:
        html = self.render_string("modules/entry.html", entry=entry)
        htmls.append(util.json_encode(html))
    result["html"] = htmls
    # end=1: page boundary reached; end=2: no more results at all.
    pager = tmp % config.MAX_PAGE_SIZE == 0
    final = (offset + len(entries)) >= total
    if pager:
        result["end"] = 1
    if final:
        result["end"] = 2
    self.render("ajax/pubu.json", result=result)
# NOTE(review): the "for sd ..." lines below are the tail of a generator
# whose "def" precedes this chunk; it yields src/dst pairs padded with None
# by izip_longest.
    for sd in izip_longest(ssort, dsort):
        yield sd


def checkpalflip(gnm):
    """Heuristic: return True when flipping palette 1 of *gnm* would give a
    closer (density-weighted) color match between the blend endpoints."""
    if 'final' in gnm['xforms']:
        f = gnm['xforms']['final']
        fcv, fcsp = f['color'], f['color_speed']
    else:
        # No final xform: neutral color / zero color_speed splines.
        fcv, fcsp = SplEval(0), SplEval(0)
    sansfinal = [v for k, v in gnm['xforms'].items() if k != 'final']
    # Per-xform blended color at t=0 (lc) and t=1 (rc).
    lc, rc = [np.array([v['color'](t) * (1 - fcsp(t)) + fcv(t) * fcsp(t)
              for v in sansfinal]) for t in (0, 1)]
    rcrv = 1 - rc
    # TODO: use spline integration instead of L2
    dens = np.array([np.hypot(v['weight'](0), v['weight'](1))
                     for v in sansfinal])
    return np.sum(np.abs(dens * (rc - lc))) > np.sum(np.abs(dens * (rcrv - lc)))


def palflip(gnm):
    """Mirror the t=1 end of each xform's color spline and reverse the
    second palette of *gnm* in place."""
    for v in gnm['xforms'].values():
        c = v['color']
        # Same start value/derivative; mirrored end value, negated end slope.
        v['color'] = SplEval([0, c(0), 1, 1 - c(1)], c(0, 1), -c(1, 1))
    pal = genome.palette_decode(gnm['palettes'][1])
    gnm['palettes'][1] = genome.palette_encode(np.flipud(pal))


if __name__ == "__main__":
    # Python 2 script entry point: blend genomes from a.json/b.json/c.json.
    import sys, json
    a, b, c = [json.load(open(f+'.json')) for f in 'abc']
    print json_encode(blend(a, b, c))