async def ls(request, full_path):
    full_path = '/' + full_path.strip()
    if full_path == '/favicon.ico':
        return response.raw(b'')
    print(request.headers, full_path)
    ls = x115.path[full_path]
    if not ls or len(ls) <= 2:
        ls = x115.listdir(full_path)
    if request.method == "HEAD":
        code = 200
        if not ls:
            code = 404
            size = 0
            mod_time = ''
        else:
            size = ls.get('size', 0)
            mod_time = datetime.utcfromtimestamp(
                ls['time']).strftime('%a, %d %b %Y %H:%M:%S GMT')
        return response.raw(b'', headers={
            'Content-Length': str(size),
            'Last-Modified': mod_time
        }, status=code)
    if 'pickcode' in ls:
        r = x115._get_link(ls['pickcode'])
        url = r.json()['file_url']
        cookie = r.headers.get('Set-Cookie')
        # if cookie:
        #     cookie = cookie.replace('domain=115.com', 'domain=' + urlparse(url).netloc)
        #     cookie = cookie.split(';', 1)[0] + '; domain=' + urlparse(url).netloc
        return response.redirect(url.replace('http://', 'https://'),
                                 headers={'Set-Cookie': cookie})
    else:
        return response.json({'root': full_path, 'ls': ls})
async def get_one_file(request, id, cid):
    try:
        query = await db_model.filemeta_query()
        file_meta = await db_objects.get(query, id=id)
        query = await db_model.callback_query()
        callback = await db_objects.get(query, id=cid)
    except Exception as e:
        print(e)
        return json({'status': 'error', 'error': 'file not found'})
    # now that we have the file metadata, get the file if it's done downloading
    if file_meta.complete and not file_meta.deleted:
        with open(file_meta.path, 'rb') as f:
            encoded_data = base64.b64encode(f.read())
        # if this is an auto-generated file from the load command, remove it afterwards
        if "/app/payloads/operations/" in file_meta.path and "load-" in file_meta.path:
            os.remove(file_meta.path)
            file_meta.deleted = True
            await db_objects.update(file_meta)
        if callback.encryption_type != "" and callback.encryption_type is not None:
            # encrypt the message before returning it
            if callback.encryption_type == "AES256":
                raw_encrypted = await crypt.encrypt_AES256(
                    data=encoded_data,
                    key=base64.b64decode(callback.encryption_key))
                return raw(base64.b64encode(raw_encrypted), status=200)
        return raw(encoded_data)
    elif file_meta.deleted:
        return json({'status': 'error', 'error': 'temporary file deleted'})
    else:
        return json({'status': 'error', 'error': 'file not done downloading'})
async def handler(self, request, path=None):
    if path is None:
        abort(404)
    if "/" in path or "\\" in path:
        abort(404)
    if path == "toolbox.json":
        # TODO: add additional blocks and categories
        return res.raw(self.get_toolbox_json(), content_type="application/json")
    blockdef_match = re.match("^blockdef_([0-9a-zA-Z_]+)\\.json$", path)
    if blockdef_match is not None:
        block_name = blockdef_match.group(1)
        block = self._blocks.get(block_name, None)
        if block is None:
            abort(404)
        return res.raw(block.json, content_type="application/json")
    blockpygen_match = re.match("^blockpygen_([0-9a-zA-Z_]+)\\.js$", path)
    if blockpygen_match is not None:
        block_name = blockpygen_match.group(1)
        block = self._blocks.get(block_name, None)
        if block is None:
            abort(404)
        return res.raw(block.python_generator, content_type="text/javascript")
    abort(404)
def image_tiles(request, map_path, layer, z, y, x):
    try:
        mbtiles = os.path.join(maps_root, map_path, '{}.mbtiles'.format(layer))
        reader = MBTilesReader(mbtiles)
        return response.raw(reader.tile(int(z), int(y), int(x)),
                            headers={'Content-Type': 'image/png'})
    except ExtractionError:
        pass
    except (InvalidFormatError, sqlite3.OperationalError):
        exceptions.abort(404, 'Cannot read tile database')
    return response.raw(blank_tile(), headers={'Content-Type': 'image/png'})
async def AESPSK_Create_Callback(request, uuid):
    # get payload associated with UUID
    try:
        query = await db_model.payload_query()
        payload = await db_objects.get(query, uuid=uuid)
        # get the AESPSK parameter from C2ProfileParametersInstance for the actual value to use
        try:
            query = await db_model.c2profileparameters_query()
            c2_param = await db_objects.get(query, c2_profile=payload.c2_profile, key="AESPSK")
            query = await db_model.c2profileparametersinstance_query()
            c2_param_instance = await db_objects.get(
                query, c2_profile_parameters=c2_param, payload=payload)
            AESPSK_String = c2_param_instance.value
        except Exception as e:
            print(str(e))
            return raw(b"")
        # decrypt request.body with this AESPSK
        try:
            AESPSK = base64.b64decode(AESPSK_String)
            encrypted_request = base64.b64decode(request.body)
            message = await crypt.decrypt_AES256(encrypted_request, AESPSK)
            decrypted_message_json = js.loads(message.decode('utf-8'))
            # pass this information along to the /callbacks API
            decrypted_message_json['encryption_key'] = AESPSK_String
            decrypted_message_json['decryption_key'] = AESPSK_String
            decrypted_message_json['encryption_type'] = "AES256"
            response = await create_callback_func(decrypted_message_json)
            # turn the json response into a string, encrypt it, and return it
            response_message = js.dumps(response)
            encrypted_response = await crypt.encrypt_AES256(
                response_message.encode(), AESPSK)
            encrypted_response_string = base64.b64encode(encrypted_response)
            return raw(encrypted_response_string, status=200)
        except Exception as e:
            print(str(e))
            return raw(b"")
    except Exception as e:
        print("failed to find payload")
        print(str(e))
        return raw(b"")
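# The crypt.encrypt_AES256 / crypt.decrypt_AES256 helpers awaited above are not
# shown in this section. A minimal sketch of what they could look like, assuming
# AES-256-CBC with a random IV prepended to the ciphertext (the real module may
# differ), using the `cryptography` package:
import os
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

async def encrypt_AES256(data: bytes, key: bytes) -> bytes:
    iv = os.urandom(16)  # fresh IV per message
    padder = padding.PKCS7(128).padder()
    padded = padder.update(data) + padder.finalize()
    encryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).encryptor()
    return iv + encryptor.update(padded) + encryptor.finalize()

async def decrypt_AES256(data: bytes, key: bytes) -> bytes:
    iv, ciphertext = data[:16], data[16:]  # IV travels with the message
    decryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).decryptor()
    padded = decryptor.update(ciphertext) + decryptor.finalize()
    unpadder = padding.PKCS7(128).unpadder()
    return unpadder.update(padded) + unpadder.finalize()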
async def get_online_supplier(request):
    logger = logging.getLogger(__name__)
    wb = xlwt.Workbook(encoding="utf-8")
    st = wb.add_sheet("sheet1")
    st.write(0, 0, 'cmsID')
    st.write(0, 1, '酒店名称')
    st.write(0, 2, '英文名称')
    st.write(0, 3, '酒店地址')
    st.write(0, 4, '酒店所属城市')
    st.write(0, 14, '酒店所属国家')
    st.write(0, 5, 'hotelbeds')
    st.write(0, 6, 'hotelspro')
    st.write(0, 7, 'relux')
    st.write(0, 8, 'bonotel')
    st.write(0, 9, 'jactravel')
    st.write(0, 10, 'roomsxml')
    st.write(0, 11, 'weegotr')
    st.write(0, 12, 'whotel')
    st.write(0, 13, 'travflex')
    hotels = await get_supplier()
    for row, data in enumerate(hotels):
        for col, msg in data.items():
            st.write(row + 1, col, msg)
    excel = BytesIO()
    wb.save(excel)
    excel.seek(0)
    logger.info('下载酒店信息成功,filename=上线酒店供应商统计.xls')
    return raw(
        excel.getvalue(),
        headers={"Content-Disposition": "attachment;filename=上线酒店供应商统计.xls"},
        content_type="application/vnd.ms-excel",
    )
async def start_crawl_hcom(request):
    logger = logging.getLogger(__name__)
    form = request.form
    secret = form.get('secret')
    if not secret or secret not in secret_map:
        return html('<h1>身份验证失败,请重新访问页面</h1>')
    start_time = form.get('start_time')
    if not start_time:
        # the times are adjusted for timezone later; default to Beijing time for now
        start_time = datetime.now() + timedelta(hours=7)
    end_time = form.get('end_time')
    if not end_time:
        end_time = datetime.now() + timedelta(hours=8)
    stages = form.getlist('stages', [])
    logger.info(f"start_time: {start_time}")
    logger.info(f"end_time: {end_time}")
    logger.info(f"stages: {stages}")
    logger.info(f"{secret_map[secret]} download data with condition: \n"
                f"start_time: {start_time}\nend_time: {end_time}\nstages: {stages}")
    data = await extract_data(start_time, end_time, stages)
    wb = save_excel(data)
    excel = BytesIO()
    wb.save(excel)
    excel.seek(0)
    return raw(
        excel.getvalue(),
        headers={
            "Content-Disposition":
                f"attachment;filename={start_time}-{end_time}{'/'.join(stages)}.xls"
        },
        content_type="application/vnd.ms-excel",
    )
def show_metrics(request):
    """Display metrics for Prometheus."""
    registry = core.REGISTRY
    output = exposition.generate_latest(registry)
    resp = response.raw(output, 200)
    resp.headers["Content-type"] = exposition.CONTENT_TYPE_LATEST
    return resp
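# generate_latest() above renders every metric registered with the default
# core.REGISTRY. A minimal sketch of registering one (the metric name is
# illustrative, not from the original app):
from prometheus_client import Counter

REQUESTS_TOTAL = Counter("myapp_requests_total", "Total HTTP requests served")
REQUESTS_TOTAL.inc()  # increments show up in show_metrics() output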
async def get_avatar(request, penguin_id: int):
    background = request.args.get('photo', 'true')
    size = request.args.get('size', 120)
    if int(size) not in valid_sizes:
        return response.json({"message": 'Invalid size'}, status=400)
    clothing = await Penguin.select(
        'photo', 'flag', 'color', 'head', 'face', 'body', 'neck', 'hand', 'feet'
    ).where(Penguin.id == penguin_id).gino.first()
    if clothing is None:
        return response.json({'message': 'Not found'}, status=404)
    if background != 'true':
        clothing.pop(0)
    loop = asyncio.get_event_loop()
    try:
        image = await loop.run_in_executor(None, build_avatar, clothing, int(size))
    except Exception:
        return response.json({"message": "Something has gone wrong."}, status=500)
    return response.raw(image, headers={'Content-type': 'image/png'})
async def gen_image(request):
    start = datetime.now()
    encoded_png = generate_image()
    end = datetime.now()
    duration = (end - start).microseconds / 1000
    print("Image generation time: {}".format(int(duration)))
    return response.raw(encoded_png, headers={'Content-Type': 'image/png'})
async def image_proxy(request, path: str):
    r = await image_proxer(path)
    if not isinstance(r, tuple):
        return json({"code": "404", "message": "not_found"}, 404)
    return raw(r[0], content_type=r[1])
async def give_fucks(request, num):
    accept_header_value = request.headers.get('Accept', 'application/json')
    response_mime_type = select_return_type(
        accept_header_value, ['application/json', 'application/xml'])
    try:
        num_asint = int(num)
    except ValueError:
        return error_response(
            'What the f**k kind of number is {}?'.format(num),
            response_mime_type=response_mime_type)
    if request.method == 'OPTIONS':
        return response.raw(b'', headers={
            'Allow': 'GET, OPTIONS',
            'Content-Type': response_mime_type
        })
    if num_asint >= 1000:
        return error_response('No one has that many f*****g f***s to give.',
                              status_code=410,
                              response_mime_type=response_mime_type)
    elif num_asint > 20:
        return error_response("Sorry, that's just too many f***s to give.",
                              status_code=410,
                              response_mime_type=response_mime_type)
    elif num_asint < 0:
        return error_response(
            "Negative f***s? Are you f*****g kidding me? Real cute... asshole.",
            status_code=400,
            response_mime_type=response_mime_type)
    else:
        return fucks_given_response(num_asint,
                                    response_mime_type=response_mime_type)
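# select_return_type above is not shown; it presumably does simple content
# negotiation against the Accept header. A naive sketch (q-values ignored):
def select_return_type(accept_header, supported):
    for candidate in accept_header.split(','):
        mime = candidate.split(';')[0].strip()
        if mime in supported:
            return mime
    return supported[0]  # fall back to the first supported type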
async def options(self, request, *args, **kwargs):
    """
    Returns a default empty response for the OPTIONS method, needed for
    CORS pre-flight. Do not forget to add CORS headers globally!
    """
    return response.raw(b'')
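# The docstring above assumes CORS headers are added globally; one way to do
# that in Sanic is a response middleware. A sketch with illustrative header
# values (the app name and allowed origins are assumptions):
from sanic import Sanic

app = Sanic("cors_example")

@app.middleware("response")
async def add_cors_headers(request, resp):
    resp.headers["Access-Control-Allow-Origin"] = "*"
    resp.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
    resp.headers["Access-Control-Allow-Headers"] = "Content-Type, Authorization"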
async def tile_endpoint(self, request, db_name, table, z, x, y):
    start = time.time()
    if not valid_zoom(z):
        raise NotFound("Invalid zoom value")
    if not await self.datasette.table_exists(db_name, table):
        raise NotFound("Table does not exist")
    geo_column = get_geo_column(self.datasette, db_name, table)
    if geo_column is None:
        raise NotFound("Not a spatial table")
    fetch = time.time()
    data = await self.get_features(db_name, table, geo_column, z, x, y)
    encode = time.time()
    mvt = mapbox_vector_tile.encode(
        data, quantize_bounds=mercantile.xy_bounds(x, y, z))
    now = time.time()
    print("[{}/{}/{}/{}/{}] Total: {:.3}s (init: {:.3}s + fetch: {:.3}s + encode: {:.3}s)"
          .format(db_name, table, z, x, y,
                  now - start, fetch - start, encode - fetch, now - encode))
    ttl = self.datasette.config("default_cache_ttl")
    if int(ttl) == 0:
        ttl_header = "no-cache"
    else:
        ttl_header = "max-age={}".format(ttl)
    return response.raw(mvt, headers={
        "Content-Type": "application/vnd.mapbox-vector-tile",
        "Cache-Control": ttl_header
    })
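# valid_zoom above is not defined in this section; a plausible implementation
# (the 0-22 range is an assumption matching common web-mercator tiling):
def valid_zoom(z):
    try:
        return 0 <= int(z) <= 22
    except (TypeError, ValueError):
        return False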
async def get_tile_tm2(request, x, y, z):
    """Serve a vector tile built from the TM2 query."""
    scale_denominator = zoom_to_scale_denom(z)
    # compute mercator bounds (NOTE from the original author: the computed bounds are wrong)
    bounds = mercantile.xy_bounds(x, y, z)
    bbox = (f"st_makebox2d(st_point({bounds.left}, {bounds.bottom}), "
            f"st_point({bounds.right}, {bounds.top}))")
    sql = Config.tm2query.format(
        bbox=bbox,
        scale_denominator=scale_denominator,
        pixel_width=256,
        pixel_height=256,
    )
    logger.debug(sql)
    async with Config.db.acquire() as conn:
        # join tiles into one bytes string, skipping null tiles
        rows = await conn.fetch(sql)
        pbf = b''.join([row[0] for row in rows if row[0]])
    return response.raw(pbf, headers={"Content-Type": "application/x-protobuf"})
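# zoom_to_scale_denom above presumably maps a web-mercator zoom level to an OGC
# scale denominator (~559082264 at z=0, halving per level); a minimal sketch:
def zoom_to_scale_denom(z):
    return 559082264.028 / (2 ** int(z))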
async def get_excel(request):
    logger = logging.getLogger(__name__)
    body = request.json
    start_time = body.get('start_time', '')
    end_time = body.get('end_time', '')
    stages = body.get('stages', [])
    user = body.get('user', '')
    is_excel = body.get('is_excel', True)
    if not start_time:
        # the times are adjusted for timezone later; default to Beijing time for now
        start_time = datetime.now() + timedelta(hours=7)
    if not end_time:
        end_time = datetime.now() + timedelta(hours=8)
    logger.info(
        f"{user} download data with condition: \nstart_time: {start_time}\n"
        f"end_time: {end_time}\nstages: {stages}"
    )
    data = await extract_data(start_time, end_time, stages)
    if is_excel and len(data.get('info', [])) <= 65534:
        wb = save_excel(data)
        excel = BytesIO()
        wb.save(excel)
        excel.seek(0)
        return raw(
            excel.getvalue(),
            headers={
                "Access-Control-Allow-Origin": '*',
                "Content-Disposition":
                    f"attachment;filename={start_time}-{end_time}{'/'.join(stages)}.xls"
            },
            content_type="application/vnd.ms-excel",
        )
    return json({'data': data})
async def download_zipped_files(request, user):
    if user['auth'] not in ['access_token', 'apitoken']:
        abort(status_code=403,
              message="Cannot access via Cookies. Use CLI or access via JS in browser")
    try:
        data = request.json
        if 'files' not in data:
            return abort(404, "missing 'files' value")
        # make a temporary directory, copy all the files there, zip it,
        # return the archive, and clean up the temp dir
        temp_id = str(uuid.uuid4())
        query = await db_model.operation_query()
        operation = await db_objects.get(query, name=user['current_operation'])
        working_dir = "./app/payloads/operations/{}/{}/".format(operation.name, str(uuid.uuid4()))
        os.makedirs(working_dir, exist_ok=True)
        query = await db_model.filemeta_query()
        for file_id in data['files']:
            try:
                cur_file = await db_objects.get(query, agent_file_id=file_id, operation=operation)
                shutil.copy(cur_file.path, working_dir + os.path.basename(cur_file.path))
            except Exception as e:
                print(str(e))
        shutil.make_archive("./app/payloads/operations/{}/{}".format(operation.name, temp_id),
                            'zip', working_dir)
        zip_path = "./app/payloads/operations/{}/{}.zip".format(operation.name, temp_id)
        with open(zip_path, 'rb') as f:
            zip_data = f.read()
        os.remove(zip_path)
        shutil.rmtree(working_dir)
        return raw(base64.b64encode(zip_data))
    except Exception as e:
        print(str(e))
        return json({'status': 'error', 'error': 'failed to process request'})
async def _lbheartbeat_view(self, request):
    """
    Lets the load balancer know the application is running and available.
    Must return 200 (not 204) for ELB:
    http://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/elb-healthchecks.html
    """
    return response.raw(b"", 200)
async def random_photo_html(req, webp=False):
    res = obj_to_dict(api.photo.random(count=1))[0]
    if webp:
        url = res["urls"]["regular"]
        binary = await http_get(url.replace("fm=jpg", "fm=webp"))
        return response.raw(binary, content_type="image/webp")
    url = res["urls"]["raw"]
    try:
        location = res["location"]
        if isinstance(location, dict):
            location = location["title"]
    except KeyError as e:
        logging.warning(str(e))
        logging.warning(res)
        location = "unknown location"
    html = (
        '<body style="margin: 0; display: inline-block;">'
        '<img src="{url}" alt="{location}" title="{location}" '
        'style="max-width: 100vw; max-height: 100vh; margin: 0 auto;">'
        "</body>"
    )
    return response.html(html.format(url=url, location=location))
async def get_schema(request, id):
    get_object = s3.get_object(Bucket="funnel-data-schema-stage", Key=id + ".json")
    body = get_object['Body'].read()
    return raw(body, content_type="application/json",
               headers={"content-encoding": "gzip"})
async def get_tile_postgis(request, x, y, z, layer):
    """ Direct access to a postgis layer """
    if ' ' in layer:
        return response.text('bad layer name: {}'.format(layer), status=404)
    # get fields given in parameters
    fields = ',' + request.raw_args['fields'] if 'fields' in request.raw_args else ''
    # get geometry column name from query args, defaulting to "geom"
    geom = request.raw_args.get('geom', 'geom')
    # compute mercator bounds
    bounds = mercantile.xy_bounds(x, y, z)
    # make bbox for filtering
    bbox = (f"st_setsrid(st_makebox2d(st_point({bounds.left}, {bounds.bottom}), "
            f"st_point({bounds.right}, {bounds.top})), {OUTPUT_SRID})")
    # compute pixel resolution
    scale = resolution(z)
    sql = single_layer.format(**locals(), OUTPUT_SRID=OUTPUT_SRID)
    logger.debug(sql)
    async with Config.db.acquire() as conn:
        rows = await conn.fetch(sql)
        pbf = b''.join([row[0] for row in rows if row[0]])
    return response.raw(pbf, headers={"Content-Type": "application/x-protobuf"})
async def artwork(request, key):
    app = request.app
    if key not in app.processor.artwork:
        return response.json({"message": "invalid artwork ID"}, status=404)
    art = app.processor.artwork[key]
    return response.raw(art.data, content_type=art.mime)
def update_settings(_: Request, data: dict):
    if wrol_mode_enabled() and 'wrol_mode' not in data:
        # Settings cannot be changed while WROL Mode is enabled,
        # unless the request is disabling WROL Mode.
        raise WROLModeEnabled()
    save_settings_config(data)
    return response.raw(b'', HTTPStatus.NO_CONTENT)
async def post_handler(request):
    # wrap the request body in an Arrow buffer (zero-copy) and
    # deserialize it back into a pandas DataFrame
    # bb = io.BytesIO(request.body)
    # br = pa.BufferReader(request.body)
    bb = pa.py_buffer(request.body)
    df = pa.deserialize_pandas(bb)
    print(df.shape)
    return raw(b'HI')
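# For reference, a client could produce the body consumed above with pyarrow's
# serialize_pandas (the IPC-based counterpart of deserialize_pandas); the URL,
# port, and DataFrame contents are illustrative:
import pandas as pd
import pyarrow as pa
import requests

df = pd.DataFrame({"a": [1, 2, 3], "b": [4.0, 5.0, 6.0]})
buf = pa.serialize_pandas(df)  # returns a pyarrow.Buffer
requests.post("http://localhost:8000/", data=buf.to_pybytes())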
def send_image(self, size: str = 'o') -> BaseHTTPResponse:
    if not self.make_path(size).is_file():
        return self.not_found()
    # hand the actual file transfer off to the fronting server (e.g. nginx)
    # via the X-Accel-Redirect internal-redirect header
    return raw(b'', content_type='image',
               headers={'X-Accel-Redirect': self.make_url(size)},
               status=200)
async def get_photos(request):
    params = dict(photoreference=request.args.get("place_id"),
                  key=app.config["API_KEY"],
                  maxwidth=request.args.get("max_width", 1600))
    resp = await QueryHandler.get(app.config["PICTURES_URL"],
                                  resp_type="read", params=params)
    return raw(resp)
async def get_valuation(self, request):
    try:
        secs = request.json.get("secs")
        date = arrow.get(request.json.get("date")).date()
        fields = request.json.get("fields")
        n = request.json.get("n", 1)
    except Exception as e:
        logger.exception(e)
        logger.error("problem params:%s", request.json)
        return response.empty(status=400)
    try:
        valuation = await aq.get_valuation(secs, date, fields, n)
        body = pickle.dumps(valuation, protocol=cfg.pickle.ver)
        return response.raw(body)
    except Exception as e:
        logger.exception(e)
        return response.raw(pickle.dumps(None, protocol=cfg.pickle.ver))
async def get_bars_handler(self, request):
    try:
        sec = request.json.get("sec")
        frame_type = FrameType(request.json.get("frame_type"))
        end = arrow.get(request.json.get("end"), tzinfo=cfg.tz)
        end = end.date() if frame_type in tf.day_level_frames else end.datetime
        n_bars = request.json.get("n_bars")
        include_unclosed = request.json.get("include_unclosed", False)
        bars = await aq.get_bars(sec, end, n_bars, frame_type, include_unclosed)
        body = pickle.dumps(bars, protocol=cfg.pickle.ver)
        return response.raw(body)
    except Exception as e:
        logger.exception(e)
        return response.raw(pickle.dumps(None, protocol=cfg.pickle.ver))
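# On the client side, the pickled payload returned above can be recovered like
# this (the endpoint URL and field values are illustrative assumptions; only
# unpickle data from a service you trust):
import pickle
import requests

resp = requests.post("http://localhost:8000/bars", json={
    "sec": "000001.XSHE",   # hypothetical security code
    "frame_type": "1d",     # must match a FrameType value
    "end": "2020-01-10",
    "n_bars": 10,
})
bars = pickle.loads(resp.content)  # None on server-side failure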
async def badges_instances(request, extension):
    instances = await request.app.db.users.count_documents({})
    url = (f"https://img.shields.io/badge/instances-{instances}-green."
           f"{extension}?style=for-the-badge")
    async with request.app.session.get(url) as resp:
        file = await resp.read()
    return response.raw(file, content_type='image/svg+xml',
                        headers={'cache-control': 'no-cache'})
async def tag_handler(request, tag):
    if tag in shared.framebuffer:
        _, jpg = cv2.imencode('.jpg', shared.framebuffer[tag])
        # imencode returns a numpy array; convert it to bytes for the response body
        return response.raw(jpg.tobytes(), content_type='image/jpeg',
                            headers={'Cache-Control': 'no-store'})
    else:
        return response.html('No image')
async def artwork(request, key):
    app = request.app
    if key not in app.processor.artwork:
        return response.json({"message": "invalid artwork ID"}, status=404)
    print("getting artwork")
    art = app.processor.artwork[key]
    return response.raw(art.data, content_type=art.mime)
async def handle_request(request, exception):
    start_time = time.time()
    format = 'html'
    url = request.path
    headers = dict()
    if url.startswith('/http'):
        url = url[1:]
    elif url.startswith('/html/http'):
        url = url[6:]
    elif url.startswith('/mhtml/http'):
        format = 'mhtml'
        url = url[7:]
    elif url.startswith('/pdf/http'):
        format = 'pdf'
        url = url[5:]
    elif url.startswith('/jpeg/http'):
        format = 'jpeg'
        url = url[6:]
    elif url.startswith('/png/http'):
        format = 'png'
        url = url[5:]
    if request.query_string:
        url = url + '?' + request.query_string
    parsed_url = urlparse(url)
    proxy = request.headers.get('X-Prerender-Proxy', '')
    if not parsed_url.hostname:
        return response.text('Bad Request', status=400)
    if ALLOWED_DOMAINS:
        if parsed_url.hostname not in ALLOWED_DOMAINS:
            return response.text('Forbidden', status=403)
    skip_cache = request.method == 'POST'
    if not skip_cache:
        try:
            data = await cache.get(url, format)
            modified_since = await cache.modified_since(url) or time.time()
            headers['Last-Modified'] = formatdate(modified_since, usegmt=True)
            try:
                if_modified_since = parsedate(request.headers.get('If-Modified-Since'))
                if_modified_since = time.mktime(if_modified_since)
            except TypeError:
                if_modified_since = 0
            if modified_since and if_modified_since >= modified_since:
                logger.info('Got 304 for %s in cache in %dms',
                            url, int((time.time() - start_time) * 1000))
                return response.text('', status=304, headers=headers)
            if data is not None:
                headers['X-Prerender-Cache'] = 'hit'
                logger.info('Got 200 for %s in cache in %dms',
                            url, int((time.time() - start_time) * 1000))
                if format == 'html':
                    return response.html(
                        apply_filters(data.decode('utf-8'), HTML_FILTERS),
                        headers=headers)
                return response.raw(data, headers=headers)
        except Exception:
            logger.exception('Error reading cache')
            if sentry:
                sentry.captureException()
    if CONCURRENCY <= 0:
        # read from cache only
        logger.warning('Got 502 for %s in %dms, prerender unavailable',
                       url, int((time.time() - start_time) * 1000))
        return response.text('Bad Gateway', status=502)
    try:
        if _ENABLE_CB:
            user_agent = request.headers.get('user-agent', '')
            _os, browser = httpagentparser.simple_detect(user_agent)
            breaker = _BREAKERS[browser]
            data, status_code = await breaker.run(
                lambda: _render(request.app.prerender, url, format, proxy))
        else:
            data, status_code = await _render(request.app.prerender, url, format, proxy)
        headers.update({'X-Prerender-Cache': 'miss',
                        'Last-Modified': formatdate(usegmt=True)})
        logger.info('Got %d for %s in %dms',
                    status_code, url, int((time.time() - start_time) * 1000))
        if format == 'html':
            if 200 <= status_code < 300:
                executor.submit(_save_to_cache, url, data.encode('utf-8'), format)
            return response.html(apply_filters(data, HTML_FILTERS),
                                 headers=headers, status=status_code)
        if 200 <= status_code < 300:
            executor.submit(_save_to_cache, url, data, format)
        return response.raw(data, headers=headers, status=status_code)
    except (asyncio.TimeoutError, asyncio.CancelledError,
            TemporaryBrowserFailure, RetriesExhausted):
        logger.warning('Got 504 for %s in %dms',
                       url, int((time.time() - start_time) * 1000))
        return response.text('Gateway timeout', status=504)
    except TooManyResponseError:
        logger.warning('Too many response error for %s in %dms',
                       url, int((time.time() - start_time) * 1000))
        return response.text('Service unavailable', status=503)
    except CircuitOpen:
        logger.warning('Circuit breaker open for %s', browser)
        return response.text('Service unavailable', status=503)
    except Exception:
        logger.exception('Internal Server Error for %s in %dms',
                         url, int((time.time() - start_time) * 1000))
        if sentry:
            sentry.captureException()
        return response.text('Internal Server Error', status=500)
async def discogs_proxy(request):
    url = request.args["url"][0]
    headers = {"Authorization": DISCOGS_AUTH}
    # note: requests.get is blocking and will stall the event loop for the
    # duration of the upstream call
    res = requests.get(url, headers=headers)
    return response.raw(res.content, content_type=res.headers["content-type"])
def handle_request(request):
    return response.raw(b'raw data')  # raw() expects a bytes body