def format_response(response: Response):
    """Rewrite a Flask response body into a uniform JSON envelope.

    The outgoing body becomes ``{"success", "status_code", "message",
    "data"}``. A plain-text body is treated as the message; a dict body may
    contribute ``message``/``data`` keys; any other JSON value becomes
    ``data``.

    :param response: the Flask response to rewrite in place
    :return: the same response object, now carrying the JSON envelope
    """
    status_code = response.status_code
    success = 200 <= status_code <= 299
    content_type = response.headers.get('content-type', 'plain/text')
    # BUG FIX: the original exact `==` comparison missed parameterized
    # content types such as 'application/json; charset=utf-8'.
    is_json = content_type.startswith('application/json')
    if is_json:
        body = json.loads(response.get_data())
    else:
        body = response.get_data().decode('utf-8')
    message = None
    data = None
    if isinstance(body, str):
        message = body
    elif isinstance(body, dict):
        # Pull optional envelope fields out of the dict; a dict without a
        # 'data' key is treated wholesale as the payload.
        if 'message' in body:
            message = body.pop('message')
        if 'data' in body:
            data = body.pop('data')
        else:
            data = body
    else:
        data = body
    response.headers['content-type'] = 'application/json'
    response.set_data(
        json.dumps(
            dict(success=success,
                 status_code=status_code,
                 message=message,
                 data=data)))
    return response
def return_analytics_report():
    """Run a Google Analytics report and return it as a JSON Flask response.

    Collapses each report row's dimension/metric pairs into a flat
    {dimension: value} dict, mapping the GA placeholder '(not set)' to
    'Other'.

    NOTE(review): relies on module-level initialize_analytics_reporting()
    and get_report() helpers not visible in this chunk.
    """
    analytics = initialize_analytics_reporting()
    response = get_report(analytics)
    result = {}
    for report in response.get('reports', []):
        columnHeader = report.get('columnHeader', {})
        dimensionHeaders = columnHeader.get('dimensions', [])
        metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])
        for row in report.get('data', {}).get('rows', []):
            dimensions = row.get('dimensions', [])
            dateRangeValues = row.get('metrics', [])
            for header, dimension in zip(dimensionHeaders, dimensions):
                # print(header + ': ' + dimension)
                for i, values in enumerate(dateRangeValues):
                    for metricHeader, value in zip(metricHeaders, values.get('values')):
                        # print(metricHeader.get('name') + ': ' + value)
                        if dimension == '(not set)':
                            dimension = 'Other'
                        # Later metrics overwrite earlier ones for the same
                        # dimension key.
                        result.update({dimension: value})
    resp = Response()
    resp.status_code = 200
    resp.set_data(json.dumps(result))
    # Debug echo of the serialized payload.
    print(json.dumps(json.loads(resp.get_data()), indent=4, sort_keys=True))
    return resp
def helloworld():
    """Build a demo response with a custom header, replaced body and cookie."""
    reply = Response("test for res")
    reply.headers.add('program-name', 'the second flask book')
    # Overwrites the body passed to the constructor above.
    reply.set_data("eeeeeeeeeeeboooooooooook")
    reply.set_cookie("accccesslevel", 'efe')
    print(reply.get_data())
    return reply
def request_finished_handler(self, sender, response: Response, **extra):
    """Encrypt successful JSON responses as a JWE (application/jose).

    :param sender: signal sender (unused)
    :param response: the outgoing Flask response, mutated in place
    """
    # BUG FIX: the original condition read `200 >= response.status_code < 300`,
    # which matches status codes <= 200 rather than the intended 2xx range.
    if 200 <= response.status_code < 300 and response.content_type == 'application/json':
        data = response.get_data(as_text=True)
        jwe = JWE(data, alg='A256KW', enc='A256CBC-HS512', cty='application/json')
        # Encrypt for the first key's kid using the configured key set.
        encrypted = jwe.encrypt(self._keys, kid=self._keys[0].kid)
        response.content_type = 'application/jose'
        response.data = encrypted
def wrap_response(response: flask.Response) -> None:
    """
    Wrap response with the homogenized response dictionary, containing a
    ``status`` key.

    :param response: The flask response en route to user
    """
    try:
        payload = json.loads(response.get_data())
    except ValueError:
        # Non-JSON body: leave the response untouched.
        return response
    is_2xx = response.status_code // 100 == 2
    wrapped = {
        'status': 'success' if is_2xx else 'failed',
        'response': payload,
    }
    user = flask.g.user
    if user and user.has_permission(SitePermissions.MANAGE_CACHE_KEYS):
        # We can't encode sets to JSON.
        wrapped['cache_keys'] = {
            key: list(values)
            for key, values in flask.g.cache_keys.items()
        }
    response.set_data(json.dumps(wrapped))
def after(response: Response) -> Response:
    """Persist a request/response audit record to Mongo, then pass through.

    Requests to the 'requestlogs' endpoint itself are skipped to avoid
    logging the log viewer.
    """
    if request.endpoint != 'requestlogs':
        req_data = {
            'url': request.url,
            'method': request.method,
            'headers': dict(request.headers),
        }
        if request.data:
            req_data['body'] = json.loads(request.data)
        resp_data = {
            'status_code': response.status_code,
            'headers': dict(response.headers),
        }
        if response.mimetype == 'application/json':
            resp_data['body'] = json.loads(response.get_data())
        mongo.db.logs.insert_one({
            'date': datetime.datetime.utcnow(),
            'request': req_data,
            'response': resp_data,
        })
    return response
def scrapeAll():
    """Scrape 10-K filings for every (ticker, cik) pair and collect each
    company's JSON result into one application/json Flask response.

    NOTE(review): depends on module globals pathname_10k, validate,
    ticker_cik_df and the scrapeCompany() helper not visible in this chunk.
    """
    responses = []
    timeNow = datetime.now()
    # =========== 10K DIR =============
    if os.path.exists(pathname_10k) and validate:
        print('MODE: validate\n')
    else:
        print('MODE: scrape\n')
    # tqdm(ticker_cik_df['cik'])
    for ticker, cik in tqdm(ticker_cik_df.itertuples(index=False),
                            desc='scrape Progress',
                            leave=True,
                            total=len(ticker_cik_df),
                            position=1):
        # NOTE(review): hard stop at 'msft' looks like leftover debug — confirm.
        if ticker == 'msft':
            break
        try:
            flaskResponse = scrapeCompany(ticker, cik)
        except Exception as e:
            # On failure, synthesize a per-ticker error payload instead of
            # aborting the whole run.
            flaskResponse = Response(
                response=json.dumps({
                    "ticker": ticker.upper(),
                    "status": 500,
                    "message": str(e)
                }))
        responses.append(json.loads(flaskResponse.get_data(as_text=True)))
    print('Execution time: {}'.format(datetime.now() - timeNow))
    return Response(mimetype='application/json',
                    status=200,
                    response=json.dumps(responses))
def pesquisa_especie(especie=None):
    """Search species matching *especie* and return them serialized as XML."""
    rows = Especie.find_pesquisa(especie)
    root = txml.Element('especies')
    for (idespecie, especie, genero, habitat, coordenadas, notas,
         detalhes, nomecomum, codigo, validacao, datacriacao) in rows:
        node = txml.SubElement(root, 'c_especie')
        # Only a subset of the row's columns is exposed in the XML output.
        for tag, value in (('idespecie', idespecie),
                           ('especie', especie),
                           ('nomecomum', nomecomum),
                           ('habitat', habitat)):
            child = txml.SubElement(node, tag)
            child.text = "{}".format(value)
    resp = Response()
    resp.set_data(txml.tostring(root))
    resp.status = "200"
    # Kept from the original: parses (and discards) the body as JSON.
    resp.get_json(force=False, silent=True, cache=True)
    resp.mimetype = "application/xml"
    return resp.get_data()
def decorator(*args, **kwargs):
    """Invoke the wrapped view and gzip its response when appropriate.

    Compression is skipped unless the client sent Accept-Encoding: gzip,
    the response is a 2xx, and no Content-Encoding is already set.
    """
    response = func(*args, **kwargs)
    if not isinstance(response, Response):
        response = Response(response)

    # Check if the response is appropriate for gzipping.
    if ("gzip" not in request.headers.get("Accept-Encoding", "").lower()
            or not 200 <= response.status_code < 300
            or "Content-Encoding" in response.headers):
        return response

    # Disable passthrough so get_data() can materialize the body.
    response.direct_passthrough = False

    # GZIP the response.
    gzip_buffer = BytesIO()
    with GzipFile(mode="wb", compresslevel=9, fileobj=gzip_buffer) as gzip_file:
        gzip_file.write(response.get_data())
    response.set_data(gzip_buffer.getvalue())
    response.headers["Content-Encoding"] = "gzip"
    # FIX: removed a second, redundant `response.direct_passthrough = False`
    # assignment that appeared here in the original.

    # Let caches know the representation varies by Accept-Encoding.
    vary = response.headers.get("Vary")
    if vary:
        if "accept-encoding" not in vary.lower():
            response.headers["Vary"] = "{}, Accept-Encoding".format(vary)
    else:
        response.headers["Vary"] = "Accept-Encoding"

    return response
def test_no_methods_after_patch_error(self):
    """Test that no methods are added for errors."""
    body = "somethingsomething error"
    response = Response(body)
    response.status_code = 400
    add_permitted_methods_after_update('fale', None, response)
    # The error response body must pass through completely unchanged.
    self.assertEqual(body, response.get_data().decode('utf-8'))
def delete_reino(idreino):
    """Delete the kingdom with the given id, if it exists, and report it."""
    if Reino.find_by_id(idreino) is not None:
        Reino.delete(idreino)
    reply = Response()
    reply.set_data("apagado id= {}".format(idreino))
    reply.status = "200"
    # Kept from the original: parses (and discards) the body as JSON.
    reply.get_json(force=False, silent=True, cache=True)
    reply.mimetype = "application/xml"
    return reply.get_data()
def after_request(response: Response):
    """Roll back any open transaction, apply CORS/cache headers, and hide
    the Swagger models section on API version root pages."""
    db.session.rollback()
    merged = dict(response.headers)
    merged["Cache-Control"] = "no-transform"
    merged.update(**cors_headers)
    response.headers = Headers(merged)
    path = request.path
    is_api_root = (path.startswith("/api/v") and path.endswith("/")
                   and path.count("/") == 3)
    if not is_api_root:
        return response
    # Inject CSS that hides the Swagger UI models section.
    body = response.get_data().replace(
        b"<head>",
        b"<head><style>.models {display: none !important}</style>")
    return Response(body, response.status_code, response.headers)
def sign_response(self, response: Response) -> Response:
    """Add Pop Token to response"""
    signed_payload = {
        "data": response.get_data(),
        "headers": {"Content-Type": response.headers["Content-Type"]},
    }
    token = self.token_signer.issue_pop_token(response=signed_payload,
                                              include_headers=[])
    response.headers["Authentication"] = "Pop {}".format(token)
    if response.status_code == 401:
        # Advertise the Pop scheme on unauthorized responses.
        response.headers["WWW-Authenticate"] = "Pop"
    return response
def compress(content):
    """Compress str/bytes (or a Response body) with gzip.

    :param content: raw body text/bytes, or a Response whose body to compress
    :return: a Response carrying the gzipped body plus Content-Encoding/Vary
    """
    resp = Response()
    if isinstance(content, Response):
        resp = content
        content = resp.get_data()
    #logger.debug("Response compress: {}".format(resp))
    before_len = len(content)
    if not IS_PYTHON_3 and isinstance(content, unicode):
        content = content.encode('utf8')
    if IS_PYTHON_3:
        # BUG FIX: on Python 3 a plain str crashes GzipFile.write(), which
        # requires bytes; encode text to UTF-8 first.
        if isinstance(content, str):
            content = content.encode('utf-8')
        gzip_buffer = BytesIO()
        gzip_file = gzip.GzipFile(fileobj=gzip_buffer, mode='wb')
        gzip_file.write(content)
    else:
        gzip_buffer = StringIO()
        gzip_file = gzip.GzipFile(fileobj=gzip_buffer, mode='wb')
        gzip_file.write(content)
    gzip_file.close()
    resp.headers['Content-Encoding'] = 'gzip'
    resp.headers['Vary'] = 'Accept-Encoding'
    resp.set_data(gzip_buffer.getvalue())
    logger.debug("Reduce size by {} bytes".format(before_len - len(resp.get_data())))
    return resp
def after_request(response: Response):
    """Log the request/response pair (body truncated to 1000 chars) and
    record request timing; OPTIONS preflights are not logged."""
    app_logger = log.getLogger('access.logger')
    msg = ''
    try:
        if request.method == 'OPTIONS':
            # Preflight: return immediately without logging.
            return response
        msg = '{}, {}, {}, {}'.format(request.remote_addr, request.method,
                                      request.url, response.status_code)
        body = response.get_data(True)
        truncated = body if len(body) < 1000 else str(body)[:1000]
        app_logger.debug('[resp], {}, {}'.format(msg, truncated))
    except Exception as ex:
        # Logging must never break the response path.
        app_logger.exception('Error on after_request. {}'.format(ex))
    log_performance(app_logger, msg)
    return response
def gzip_response(resp: Response) -> Response:
    """Gzip a JSON response body when the client accepts gzip.

    :param resp: outgoing response, possibly mutated in place
    :return: the (possibly compressed) response
    """
    if "gzip" not in request.headers.get("Accept-Encoding", ""):
        return resp

    # not worth to gzip < 1500 bytes
    # BUG FIX: headers.get("Content-Length") returns None when the header is
    # absent, making int() raise TypeError; default to "0" instead.
    if resp.is_json and int(resp.headers.get("Content-Length", "0")) > 1500:
        # Get the content and gzip it
        out = io.BytesIO()
        with gzip.GzipFile(fileobj=out, mode="wb") as fp:
            fp.write(resp.get_data())

        # Update resp
        resp.set_data(out.getvalue())
        resp.headers["Content-Encoding"] = "gzip"

    return resp
def parseAll(fileName):
    """Parse downloaded 10-K filings for each ticker in *fileName*'s
    ticker->CIK mapping and return the aggregated results as JSON.

    NOTE(review): tqdm's ncols (terminal width) is set to len(tickers);
    this was probably meant to be total=len(tickers) — confirm.
    """
    responses = []
    time = datetime.now()
    tickers_cik_dict = load_dict_ticker_cik(fileName)
    # tickers = sorted(filter(lambda ti: os.path.isdir(os.path.join('10K', ti)), os.listdir('10K')))
    tickers = tickers_cik_dict.keys()
    parsed = 0
    for ticker in tqdm(tickers,
                       desc='parse Progress',
                       leave=True,
                       ncols=len(tickers),
                       position=1):
        # if list(tickers).index(ticker) < list(tickers).index('fb'):  # start from
        #     continue
        # if ticker != 'cost':
        #     continue
        # NOTE(review): hard stop after 3 successful parses — looks like
        # leftover debug code; confirm before removing.
        if parsed == 3:
            break
        try:
            flaskResponse = parseCompany(ticker.lower(), tickers_cik_dict)
            parsed = parsed + 1
        except Exception as e:
            # Synthesize a per-ticker error payload instead of aborting.
            flaskResponse = Response(
                response=json.dumps({
                    "ticker": ticker.upper(),
                    "status": 500,
                    "message": str(e)
                }))
        responses.append(json.loads(flaskResponse.get_data(as_text=True)))
    print('Execution time: {}'.format(datetime.now() - time))
    return Response(mimetype='application/json',
                    status=200,
                    response=json.dumps({
                        "status": 200,
                        "message": responses
                    }))
def from_response_json(response: Response) -> dict:
    """Decode a response body as JSON and return the resulting object."""
    body = response.get_data(as_text=True)
    return json.loads(body)
def show_uri(path, datetime=None):
    """Replay an archived URI (optionally at a 14-digit datetime) from IPFS.

    Looks the URI up in the CDXJ index, fetches header and payload by hash
    from IPFS (with a 10s alarm-based timeout on non-Windows), optionally
    AES-decrypts them, rebuilds the original HTTP response, and injects the
    IPWB web-UI script into HTML payloads.
    """
    global IPFS_API

    daemonAddress = '{0}:{1}'.format(IPFSAPI_HOST, IPFSAPI_PORT)
    if not ipwbUtils.isDaemonAlive(daemonAddress):
        errStr = ('IPFS daemon not running. '
                  'Start it using $ ipfs daemon on the command-line '
                  ' or from the <a href="/">'
                  'IPWB replay homepage</a>.')
        return Response(errStr, status=503)

    path = getCompleteURI(path)
    cdxjLine = ''
    try:
        surtedURI = surt.surt(
            path, path_strip_trailing_slash_unless_empty=False)
        indexPath = ipwbUtils.getIPWBReplayIndexPath()
        searchString = surtedURI
        if datetime is not None:
            searchString = surtedURI + ' ' + datetime
        cdxjLine = getCDXJLine_binarySearch(searchString, indexPath)
    except Exception as e:
        print(sys.exc_info()[0])
        respString = ('{0} not found :(' +
                      ' <a href="http://{1}:{2}">Go home</a>').format(
            path, IPWBREPLAY_HOST, IPWBREPLAY_PORT)
        return Response(respString)

    if cdxjLine is None:  # Resource not found in archives
        return generateNoMementosInterface(path, datetime)

    # CDXJ line: "<surt> <14-digit datetime> <json>"
    cdxjParts = cdxjLine.split(" ", 2)
    jObj = json.loads(cdxjParts[2])
    datetime = cdxjParts[1]

    digests = jObj['locator'].split('/')

    class HashNotFoundError(Exception):
        pass

    payload = None
    header = None
    try:
        def handler(signum, frame):
            raise HashNotFoundError()

        if os.name != 'nt':  # Bug #310
            signal.signal(signal.SIGALRM, handler)
            signal.alarm(10)

        payload = IPFS_API.cat(digests[-1])
        header = IPFS_API.cat(digests[-2])

        if os.name != 'nt':  # Bug #310
            signal.alarm(0)
    except ipfsapi.exceptions.TimeoutError:
        print("{0} not found at {1}".format(cdxjParts[0], digests[-1]))
        respString = ('{0} not found in IPFS :(' +
                      ' <a href="http://{1}:{2}">Go home</a>').format(
            path, IPWBREPLAY_HOST, IPWBREPLAY_PORT)
        return Response(respString)
    except TypeError as e:
        print('A type error occurred')
        print(e)
        abort(500)
    except HTTPError as e:
        print("Fetching from the IPFS failed")
        print(e)
        abort(503)
    except HashNotFoundError:
        if payload is None:
            print("Hashes not found:\n\t{0}\n\t{1}".format(
                digests[-1], digests[-2]))
            abort(404)
        else:  # payload found but not header, fabricate header
            # NOTE(review): the fabricated header is a str, but
            # header.decode() below requires bytes — confirm this branch
            # is exercised on Python 3.
            print("HTTP header not found, fabricating for resp replay")
            header = ''
    except Exception as e:
        print('Unknown exception occurred while fetching from ipfs.')
        print(e)
        abort(500)

    if 'encryption_method' in jObj:
        # Decrypt payload/header with AES-CTR; the key is either embedded in
        # the CDXJ JSON or requested interactively.
        keyString = None
        while keyString is None:
            if 'encryption_key' in jObj:
                keyString = jObj['encryption_key']
            else:
                askForKey = ('Enter a path for file',
                             ' containing decryption key: \n> ')
                keyString = raw_input(askForKey)

        paddedEncryptionKey = pad(keyString, AES.block_size)
        key = base64.b64encode(paddedEncryptionKey)

        nonce = b64decode(jObj['encryption_nonce'])
        cipher = AES.new(key, AES.MODE_CTR, nonce=nonce)
        header = cipher.decrypt(base64.b64decode(header))
        payload = cipher.decrypt(base64.b64decode(payload))

    # Drop the HTTP status line; remaining lines are header fields.
    hLines = header.decode().split('\n')
    hLines.pop(0)

    status = 200
    if 'status_code' in jObj:
        status = jObj['status_code']

    resp = Response(payload, status=status)

    for idx, hLine in enumerate(hLines):
        k, v = hLine.split(': ', 1)

        if k.lower() == 'transfer-encoding' and v.lower() == 'chunked':
            try:
                unchunkedPayload = extractResponseFromChunkedData(payload)
            except Exception as e:
                print('Error while dechunking')
                print(sys.exc_info()[0])
                continue  # Data may have no actually been chunked
            resp.set_data(unchunkedPayload)

        # Preserve original headers under an X-Archive-Orig- prefix except
        # the few that must stay live for replay.
        if k.lower() not in ["content-type", "content-encoding", "location"]:
            k = "X-Archive-Orig-" + k

        resp.headers[k] = v.strip()

    # Add ipwb header for additional SW logic
    newPayload = resp.get_data()

    lineJSON = cdxjLine.split(' ', 2)[2]
    mime = json.loads(lineJSON)['mime_type']

    if 'text/html' in mime:
        ipwbjsinject = """<script src="/ipwbassets/webui.js"></script>
                          <script>injectIPWBJS()</script>"""

        newPayload = newPayload.decode('utf-8').replace(
            '</html>', ipwbjsinject + '</html>')

        resp.set_data(newPayload)

    resp.headers['Memento-Datetime'] = ipwbUtils.digits14ToRFC1123(datetime)

    if header is None:
        resp.headers['X-Headers-Generated-By'] = 'InterPlanetary Wayback'

    # Get TimeMap for Link response header
    # respWithLinkHeader = getLinkHeaderAbbreviatedTimeMap(path, datetime)
    # resp.headers['Link'] = respWithLinkHeader.replace('\n', ' ')

    # NOTE(review): status defaults to int 200, and status[0] would raise
    # TypeError on an int — presumably the CDXJ status_code is a string.
    if status[0] == '3' and isUri(resp.headers.get('Location')):
        # Bad assumption that the URI-M will contain \d14 but works for now.
        uriBeforeURIR = request.url[:re.search(r'/\d{14}/', request.url).end()]
        newURIM = uriBeforeURIR + resp.headers['Location']
        resp.headers['Location'] = newURIM

    return resp
def get_data(self):
    """Return the response body decoded as text rather than raw bytes."""
    return Response.get_data(self, as_text=True)
def show_uri(path, datetime=None):
    """Replay an archived URI from IPFS (Python 2-era variant).

    Serves the web UI for the empty path and local SW/JS scripts directly;
    otherwise resolves the URI via the CDXJ index, fetches and optionally
    XOR-decrypts header/payload from IPFS, and rebuilds the HTTP response
    with a TimeMap Link header.
    """
    global IPFS_API

    if len(path) == 0:
        return showWebUI('index.html')

    # TODO: Use a better approach to serve static contents
    #  instead of using the same logic for every JS file as the SW script
    localScripts = [
        'serviceWorker.js',
        'reconstructive.js',
        'reconstructive-banner.js'
    ]
    if path in localScripts:
        return getServiceWorker(path)

    daemonAddress = '{0}:{1}'.format(IPFSAPI_IP, IPFSAPI_PORT)
    if not ipwbConfig.isDaemonAlive(daemonAddress):
        errStr = ('IPFS daemon not running. '
                  'Start it using $ ipfs daemon on the command-line '
                  ' or from the <a href="/">'
                  'IPWB replay homepage</a>.')
        return Response(errStr, status=503)

    path = getCompleteURI(path)

    cdxjLine = ''
    try:
        surtedURI = surt.surt(path,
                              path_strip_trailing_slash_unless_empty=False)
        indexPath = ipwbConfig.getIPWBReplayIndexPath()
        searchString = surtedURI
        if datetime is not None:
            searchString = surtedURI + ' ' + datetime
        cdxjLine = getCDXJLine_binarySearch(searchString, indexPath)
        print('CDXJ Line: {0}'.format(cdxjLine))
    except Exception as e:
        print(sys.exc_info()[0])
        respString = ('{0} not found :(' +
                      ' <a href="http://{1}:{2}">Go home</a>').format(
            path, IPWBREPLAY_IP, IPWBREPLAY_PORT)
        return Response(respString)

    if cdxjLine is None:  # Resource not found in archives
        return generateNoMementosInterface(path, datetime)

    # CDXJ line: "<surt> <14-digit datetime> <json>"
    cdxjParts = cdxjLine.split(" ", 2)
    jObj = json.loads(cdxjParts[2])
    datetime = cdxjParts[1]

    digests = jObj['locator'].split('/')

    class HashNotFoundError(Exception):
        pass

    try:
        def handler(signum, frame):
            raise HashNotFoundError()

        signal.signal(signal.SIGALRM, handler)
        signal.alarm(10)

        payload = IPFS_API.cat(digests[-1])
        header = IPFS_API.cat(digests[-2])

        signal.alarm(0)
    except ipfsapi.exceptions.TimeoutError:
        print("{0} not found at {1}".format(cdxjParts[0], digests[-1]))
        respString = ('{0} not found in IPFS :(' +
                      ' <a href="http://{1}:{2}">Go home</a>').format(
            path, IPWBREPLAY_IP, IPWBREPLAY_PORT)
        return Response(respString)
    except TypeError:
        # NOTE(review): this branch falls through with payload/header
        # possibly unbound, which would raise below — confirm intended.
        print('A type error occurred')
        print(traceback.format_exc())
        print(sys.exc_info()[0])
    except HashNotFoundError:
        print("Hashes not found")
        return '', 404
    except Exception as e:
        # NOTE(review): sys.exit() inside a request handler kills the
        # whole replay server — confirm this is deliberate.
        print('Unknown exception occurred while fetching from ipfs.')
        print(sys.exc_info()[0])
        sys.exit()

    if 'encryption_method' in jObj:
        # XOR-decrypt payload/header; the key is either embedded in the
        # CDXJ JSON or requested interactively.
        keyString = None
        while keyString is None:
            if 'encryption_key' in jObj:
                keyString = jObj['encryption_key']
            else:
                askForKey = ('Enter a path for file',
                             ' containing decryption key: \n> ')
                keyString = raw_input(askForKey)

        encryptionMethod = None
        if jObj['encryption_method'] == 'xor':
            encryptionMethod = XOR

        pKey = encryptionMethod.new(keyString)
        payload = pKey.decrypt(base64.b64decode(payload))
        hKey = encryptionMethod.new(keyString)
        header = hKey.decrypt(base64.b64decode(header))

    # Drop the HTTP status line; remaining lines are header fields.
    hLines = header.split('\n')
    hLines.pop(0)

    resp = Response(payload)

    for idx, hLine in enumerate(hLines):
        k, v = hLine.split(': ', 1)

        if k.lower() == 'transfer-encoding' and v.lower() == 'chunked':
            try:
                unchunkedPayload = extractResponseFromChunkedData(payload)
            except Exception as e:
                continue  # Data may have no actually been chunked
            resp.set_data(unchunkedPayload)

        # Preserve original headers under an X-Archive-Orig- prefix except
        # the ones that must stay live for replay.
        if k.lower() not in ["content-type", "content-encoding"]:
            k = "X-Archive-Orig-" + k

        resp.headers[k] = v

    # Add ipwb header for additional SW logic
    newPayload = resp.get_data()
    ipwbjsinject = """<script src="/webui/webui.js"></script>
                      <script>injectIPWBJS()</script>"""
    newPayload = newPayload.replace('</html>', ipwbjsinject + '</html>')
    resp.set_data(newPayload)

    resp.headers['Memento-Datetime'] = ipwbConfig.datetimeToRFC1123(datetime)

    # Get TimeMap for Link response header
    respWithLinkHeader = getLinkHeaderAbbreviatedTimeMap(path, datetime)
    resp.headers['Link'] = respWithLinkHeader.replace('\n', ' ')

    return resp
def after_request(self, response: Response):
    """Log the outgoing response body at debug level, then pass the
    response through unchanged.

    :param response: the Flask response about to be returned
    :return: the same response object
    """
    body = response.get_data()
    self.__logger.debug(f'{self.__module}|after_request()|body:{body}')
    return response
def index():
    """Materialize the generator-backed response body and return it as text."""
    resp = Response(gen())
    body = resp.get_data()
    return body.decode('utf-8')
def github():
    """GitHub webhook endpoint.

    Validates delivery headers and (when configured) the HMAC signature,
    then dispatches push / release / create-tag / ping events to the
    matching deploy helpers; successful deploys are mailed via the
    temporarily-lowered mail handler level.
    """
    settings = APISettings("github")
    headers = request.headers
    jsonData = request.get_json(silent=True)

    if CONTEXT.DEBUG and not CONTEXT.TESTING:
        # check local HMAC calculation
        headers = {
            "User-Agent": "GitHub-Hookshot/a837270",
            "X-GitHub-Delivery": "0aa67100-1b16-11e7-8bbf-ee052e733e39",
            "X-GitHub-Event": "push",
            "X-Hub-Signature": "sha1=c6498e16d2fa649cb8d92bea4c2a7f1dabfb7643"
        }
        rawData = open("../payload_github_push.json", "rb").read()
        jsonData = json.loads(rawData.decode())
    else:
        rawData = request.get_data(as_text=False)

    if not jsonData:
        LOGGER.critical(
            "No json data in request! Expected a request with an application/json body."
        )
        return requestError(
            "No json data in request! Expected a request with an application/json body."
        )

    if "X-GitHub-Delivery" not in headers or "X-GitHub-Event" not in headers or "GitHub-Hookshot" not in headers[
            "User-Agent"]:
        LOGGER.critical("Unsupported Header combination:\n" + "\n".join(
            map(lambda x: " \"%s\": \"%s\"" % (x[0], x[1]), headers.items())))
        return requestError("Invalid Request")

    # calculate HMAC
    if settings.hmacSecret and "X-Hub-Signature" in headers:
        secret = settings.hmacSecret
        mac = hmac.new(secret, rawData, hashlib.sha1).hexdigest()
        if "sha1=" + mac != headers["X-Hub-Signature"]:
            LOGGER.critical(
                "Invalid GitHub signature, automatic deploy failed!")
            LOGGER.debug("Computed Mac: sha1=" + mac + "\n" + "Sent Mac : " +
                         headers["X-Hub-Signature"])
            LOGGER.debug("type(data) = " + str(type(rawData)) +
                         "\nPayload:\n" + rawData.decode())
            return requestError("Invalid Signature 401", 401)
    else:
        LOGGER.warning("Skip signature validation!")

    # setup git operation
    repoName = jsonData["repository"]["name"]
    release = Release(settings)
    event = headers["X-GitHub-Event"]
    mailHeader = ""

    if event == "push":
        if "refs/heads" not in jsonData["ref"]:
            # Tag pushes etc. are ignored here; only branch pushes deploy.
            LOGGER.debug("No branch push detected, ignore push event.")
            return Response("No branch push detected, ignore push event",
                            content_type=contenttype)
        branch = jsonData["ref"].replace("refs/heads/", "")
        mailHeader = "Push event successful!\n\n"
        resp = logErrorRespToLevel(
            downloadFromGit(repoName,
                            settings,
                            branch=str(branch),
                            release=release,
                            webhook=True), LOGGER.critical)
    elif event == "release":
        tag = jsonData["release"]["tag_name"]
        mailHeader = "Release event successful!\n\n"
        resp = logErrorRespToLevel(
            releaseEvent(repoName, tag, settings, release), LOGGER.critical)
    elif event == "create" and jsonData["ref_type"] == "tag":
        tag = jsonData["ref"]
        mailHeader = "Create event successful!\n\n"
        resp = logErrorRespToLevel(
            createTagEvent(repoName, tag, settings, release), LOGGER.critical)
    elif event == "ping":
        # Sanity-check the hook configuration GitHub reports on ping.
        events = jsonData["hook"]["events"]
        supportedEvents = ["push", "release", "create"]
        contentType = jsonData["hook"]["config"]["content_type"]
        for a in supportedEvents:
            try:
                events.remove(a)
            except:
                pass
        output = ""
        error = False
        if len(events) > 0:
            output += "[-] Unsupported webhook events configured: " + ", ".join(
                events) + "\n"
            error = True
        if contentType != "json":
            output += "[-] Unsupported content type. Expected application/json content type in configuration."
            error = True
        if error:
            LOGGER.critical("Error during github ping event...\n" + output)
            return requestError(output, code=400)
        else:
            mailHeader = "Ping event successful!\n\n"
            resp = Response("Everything looks good!", content_type=contenttype)
    else:
        LOGGER.critical("Received an unsupported GitHub Event: " +
                        headers["X-GitHub-Event"])
        return requestError("Received an unsupported GitHub Event", code=405)

    if resp.status_code == 200 and CONTEXT.MAIL_HANDLER:
        # Temporarily lower the mail handler level so the success summary
        # is actually mailed, then restore critical-only mailing.
        CONTEXT.MAIL_HANDLER.setLevel(logger.logging.INFO)
        LOGGER.info(mailHeader + resp.get_data(as_text=True).strip())
        CONTEXT.MAIL_HANDLER.setLevel(logger.logging.CRITICAL)

    return resp
def new_post():
    """ Mattermost new post event handler.

    Validates the Mattermost token, resolves the search text (trigger word
    or slash command), asks Giphy for a gif, and always returns a JSON
    response — errors are reported back to the channel as ephemeral text.

    NOTE(review): dict.has_key() and err.message are Python 2 only; this
    handler predates Python 3.
    """
    try:
        # NOTE: common stuff
        slash_command = False
        resp_data = {}
        resp_data['username'] = settings.USERNAME
        resp_data['icon_url'] = settings.ICON_URL

        data = request.form
        if not 'token' in data:
            raise Exception('Missing necessary token in the post data')
        if data['token'] not in settings.MATTERMOST_GIPHY_TOKEN:
            raise Exception(
                'Tokens did not match, it is possible that this request came from somewhere other than Mattermost'
            )

        # NOTE: support the slash command
        if 'command' in data:
            slash_command = True
            resp_data['response_type'] = 'in_channel'

        channel = data['channel_name']
        if channel in settings.BLACK_LISTED_CHANNELS:
            raise Exception(
                ':trollface: Pierre says "no gif in ~~{}~~"'.format(channel))

        # Per-channel content rating, defaulting to pg-13.
        if settings.RATING_PER_CHANNELS.has_key(channel):
            settings.RATING = settings.RATING_PER_CHANNELS.get(channel)
        else:
            settings.RATING = 'pg-13'

        translate_text = data['text']
        if not slash_command:
            # Strip the trigger word prefix from the raw message.
            translate_text = data['text'][len(data['trigger_word']):]
        if not translate_text:
            raise Exception("No translate text provided, not hitting Giphy")

        gif_url = giphy_translate(translate_text)
        if not gif_url:
            raise Exception('No gif url found for `{}`'.format(translate_text))

        app.logger.info(gif_url)
        resp_data[
            'text'] = "`{}` searched for {}\r\n {}redirect/{}.gif".format(
                data.get('user_name', 'unknown').title(), translate_text,
                request.host_url, base64.urlsafe_b64encode(gif_url))
    except Exception as err:
        msg = err.message
        logging.error('unable to handle new post :: {}'.format(msg))
        resp_data['response_type'] = 'ephemeral'
        resp_data['text'] = msg
    finally:
        # NOTE(review): returning from finally swallows any exception that
        # escapes the except block — confirm this is intended.
        resp = Response(content_type='application/json')
        resp.set_data(json.dumps(resp_data))
        app.logger.info(resp.get_data())
        return resp
def json_from_response(self, response: flask.Response) -> Dict[str, Any]:
    """Parses the json returned in a response."""
    body = response.get_data(as_text=True)
    # Fail with the body text as the message so bad responses are debuggable.
    self.assertEqual(200, response.status_code, msg=body)
    parsed = json.loads(body)
    return typing.cast(Dict[str, Any], parsed)
def show_uri(path, datetime=None):
    """Replay an archived URI (optionally at a 14-digit datetime) from IPFS.

    Variant with folded multi-line header handling: continuation lines in
    the archived HTTP header are collapsed before parsing. Fetches header
    and payload by hash, optionally AES-decrypts them, rebuilds the HTTP
    response, and injects the IPWB web-UI script into HTML payloads.
    """
    global IPFS_API

    daemonAddress = '{0}:{1}'.format(IPFSAPI_HOST, IPFSAPI_PORT)
    if not ipwbUtils.isDaemonAlive(daemonAddress):
        errStr = ('IPFS daemon not running. '
                  'Start it using $ ipfs daemon on the command-line '
                  ' or from the <a href="/">'
                  'IPWB replay homepage</a>.')
        return Response(errStr, status=503)

    path = getCompleteURI(path)
    cdxjLine = ''
    try:
        surtedURI = surt.surt(
            path, path_strip_trailing_slash_unless_empty=False)
        indexPath = ipwbUtils.getIPWBReplayIndexPath()
        searchString = surtedURI
        if datetime is not None:
            searchString = surtedURI + ' ' + datetime
        cdxjLine = getCDXJLine_binarySearch(searchString, indexPath)
    except Exception as e:
        print(sys.exc_info()[0])
        respString = ('{0} not found :(' +
                      ' <a href="http://{1}:{2}">Go home</a>').format(
            path, IPWBREPLAY_HOST, IPWBREPLAY_PORT)
        return Response(respString)

    if cdxjLine is None:  # Resource not found in archives
        return generateNoMementosInterface(path, datetime)

    # CDXJ line: "<surt> <14-digit datetime> <json>"
    cdxjParts = cdxjLine.split(" ", 2)
    jObj = json.loads(cdxjParts[2])
    datetime = cdxjParts[1]

    digests = jObj['locator'].split('/')

    class HashNotFoundError(Exception):
        pass

    payload = None
    header = None
    try:
        def handler(signum, frame):
            raise HashNotFoundError()

        if os.name != 'nt':  # Bug #310
            signal.signal(signal.SIGALRM, handler)
            signal.alarm(10)

        payload = IPFS_API.cat(digests[-1])
        header = IPFS_API.cat(digests[-2])

        if os.name != 'nt':  # Bug #310
            signal.alarm(0)
    except ipfsapi.exceptions.TimeoutError:
        print("{0} not found at {1}".format(cdxjParts[0], digests[-1]))
        respString = ('{0} not found in IPFS :(' +
                      ' <a href="http://{1}:{2}">Go home</a>').format(
            path, IPWBREPLAY_HOST, IPWBREPLAY_PORT)
        return Response(respString)
    except TypeError as e:
        print('A type error occurred')
        print(e)
        abort(500)
    except HTTPError as e:
        print("Fetching from the IPFS failed")
        print(e)
        abort(503)
    except HashNotFoundError:
        if payload is None:
            print("Hashes not found:\n\t{0}\n\t{1}".format(
                digests[-1], digests[-2]))
            abort(404)
        else:  # payload found but not header, fabricate header
            # NOTE(review): the fabricated header is a str, but
            # header.decode() below requires bytes — confirm on Python 3.
            print("HTTP header not found, fabricating for resp replay")
            header = ''
    except Exception as e:
        print('Unknown exception occurred while fetching from ipfs.')
        print(e)
        abort(500)

    if 'encryption_method' in jObj:
        # Decrypt payload/header with AES-CTR; the key is either embedded in
        # the CDXJ JSON or requested interactively.
        keyString = None
        while keyString is None:
            if 'encryption_key' in jObj:
                keyString = jObj['encryption_key']
            else:
                askForKey = ('Enter a path for file',
                             ' containing decryption key: \n> ')
                keyString = raw_input(askForKey)

        paddedEncryptionKey = pad(keyString, AES.block_size)
        key = base64.b64encode(paddedEncryptionKey)

        nonce = b64decode(jObj['encryption_nonce'])
        cipher = AES.new(key, AES.MODE_CTR, nonce=nonce)
        header = cipher.decrypt(base64.b64decode(header))
        payload = cipher.decrypt(base64.b64decode(payload))

    # Collapse folded (multi-line) header fields, then drop the status line.
    hLines = header.decode() \
        .replace('\r', '') \
        .replace('\n\t', '\t') \
        .replace('\n ', ' ') \
        .split('\n')
    hLines.pop(0)

    status = 200
    if 'status_code' in jObj:
        status = jObj['status_code']

    resp = Response(payload, status=status)

    for idx, hLine in enumerate(hLines):
        k, v = hLine.split(':', 1)

        if k.lower() == 'transfer-encoding' and \
                re.search(r'\bchunked\b', v, re.I):
            try:
                unchunkedPayload = extractResponseFromChunkedData(payload)
            except Exception as e:
                print('Error while dechunking')
                print(sys.exc_info()[0])
                continue  # Data may have no actually been chunked
            resp.set_data(unchunkedPayload)

        # Preserve original headers under an X-Archive-Orig- prefix except
        # the few that must stay live for replay.
        if k.lower() not in ["content-type", "content-encoding", "location"]:
            k = "X-Archive-Orig-" + k

        resp.headers[k] = v.strip()

    # Add ipwb header for additional SW logic
    newPayload = resp.get_data()

    lineJSON = cdxjLine.split(' ', 2)[2]
    mime = json.loads(lineJSON)['mime_type']

    if 'text/html' in mime:
        ipwbjsinject = """<script src="/ipwbassets/webui.js"></script>
                          <script>injectIPWBJS()</script>"""

        newPayload = newPayload.decode('utf-8').replace(
            '</html>', ipwbjsinject + '</html>')

        resp.set_data(newPayload)

    resp.headers['Memento-Datetime'] = ipwbUtils.digits14ToRFC1123(datetime)

    if header is None:
        resp.headers['X-Headers-Generated-By'] = 'InterPlanetary Wayback'

    # Get TimeMap for Link response header
    # respWithLinkHeader = getLinkHeaderAbbreviatedTimeMap(path, datetime)
    # resp.headers['Link'] = respWithLinkHeader.replace('\n', ' ')

    # NOTE(review): status defaults to int 200, and status[0] would raise
    # TypeError on an int — presumably the CDXJ status_code is a string.
    if status[0] == '3' and isUri(resp.headers.get('Location')):
        # Bad assumption that the URI-M will contain \d14 but works for now.
        uriBeforeURIR = request.url[:re.search(r'/\d{14}/', request.url).end()]
        newURIM = uriBeforeURIR + resp.headers['Location']
        resp.headers['Location'] = newURIM

    return resp
def show_uri(path, datetime=None):
    """Replay an archived URI (optionally at a 14-digit datetime) from IPFS.

    Snake_case variant: uses ipwb_utils/ipfs_client helpers, returns
    (body, status) tuples instead of abort() on errors, and has the
    SIGALRM timeout logic commented out.
    """
    try:
        ipwb_utils.check_daemon_is_alive(ipwb_utils.IPFSAPI_MUTLIADDRESS)
    except IPFSDaemonNotAvailable:
        errStr = ('IPFS daemon not running. '
                  'Start it using $ ipfs daemon on the command-line '
                  ' or from the <a href="/">'
                  'IPWB replay homepage</a>.')
        return Response(errStr, status=503)

    cdxj_line = ''
    try:
        surted_uri = surt.surt(path,
                               path_strip_trailing_slash_unless_empty=False)
        index_path = ipwb_utils.get_ipwb_replay_index_path()
        search_string = surted_uri
        if datetime is not None:
            search_string = f'{surted_uri} {datetime}'
        cdxj_line = get_cdxj_line_binarySearch(search_string, index_path)
    except Exception as e:
        print(sys.exc_info()[0])
        resp_string = (
            f'{path} not found :('
            f' <a href="http://{IPWBREPLAY_HOST}:{IPWBREPLAY_PORT}">'
            f'Go home</a>')
        return Response(resp_string)

    if cdxj_line is None:  # Resource not found in archives
        return generate_no_mementos_interface(path, datetime)

    # CDXJ line: "<surt> <14-digit datetime> <json>"
    cdxj_parts = cdxj_line.split(" ", 2)
    json_object = json.loads(cdxj_parts[2])
    datetime = cdxj_parts[1]

    digests = json_object['locator'].split('/')

    class HashNotFoundError(Exception):
        pass

    payload = None
    header = None
    try:
        def handler(signum, frame):
            raise HashNotFoundError()

        # if os.name != 'nt':  # Bug #310
        #     signal.signal(signal.SIGALRM, handler)
        #     signal.alarm(10)

        payload = ipfs_client().cat(digests[-1])
        header = ipfs_client().cat(digests[-2])

        # if os.name != 'nt':  # Bug #310
        #     signal.alarm(0)
    except ipfsapi.exceptions.TimeoutError:
        print(f"{cdxj_parts[0]} not found at {digests[-1]}")
        resp_string = (
            f'{path} not found in IPFS :('
            f' <a href="http://{IPWBREPLAY_HOST}:{IPWBREPLAY_PORT}">'
            f'Go home</a>')
        return Response(resp_string)
    except TypeError as e:
        print('A type error occurred')
        print(e)
        return "A Type Error Occurred", 500
    except HTTPError as e:
        print("Fetching from the IPFS failed")
        print(e)
        return "Fetching from IPFS failed", 503
    except HashNotFoundError:
        if payload is None:
            print(f"Hashes not found:\n\t{digests[-1]}\n\t{digests[-2]}")
            return "Hashed not found", 404
        else:  # payload found but not header, fabricate header
            # NOTE(review): the fabricated header is a str, but
            # header.decode() below requires bytes — confirm on Python 3.
            print("HTTP header not found, fabricating for resp replay")
            header = ''
    except Exception as e:
        print('Unknown exception occurred while fetching from ipfs.')
        print(e)
        return "An unknown exception occurred", 500

    if 'encryption_method' in json_object:
        # Decrypt payload/header with AES-CTR; the key is either embedded in
        # the CDXJ JSON or requested interactively.
        key_string = None
        while key_string is None:
            if 'encryption_key' in json_object:
                key_string = json_object['encryption_key']
            else:
                ask_for_key = ('Enter a path for file',
                               ' containing decryption key: \n> ')
                key_string = raw_input(ask_for_key)

        padded_encryption_key = pad(key_string, AES.block_size)
        key = base64.b64encode(padded_encryption_key)

        nonce = b64decode(json_object['encryption_nonce'])
        cipher = AES.new(key, AES.MODE_CTR, nonce=nonce)
        header = cipher.decrypt(base64.b64decode(header))
        payload = cipher.decrypt(base64.b64decode(payload))

    # Collapse folded (multi-line) header fields, then drop the status line.
    h_lines = header.decode() \
        .replace('\r', '') \
        .replace('\n\t', '\t') \
        .replace('\n ', ' ') \
        .split('\n')
    h_lines.pop(0)

    status = 200
    if 'status_code' in json_object:
        status = json_object['status_code']

    resp = Response(payload, status=status)

    for idx, hLine in enumerate(h_lines):
        k, v = hLine.split(':', 1)

        if k.lower() == 'transfer-encoding' and \
                re.search(r'\bchunked\b', v, re.I):
            try:
                unchunked_payload = extract_response_from_chunked_data(payload)
            except Exception as e:
                continue  # Data not chunked
            resp.set_data(unchunked_payload)

        # Preserve original headers under an X-Archive-Orig- prefix except
        # the few that must stay live for replay.
        if k.lower() not in ["content-type", "content-encoding", "location"]:
            k = "X-Archive-Orig-" + k

        resp.headers[k] = v.strip()

    # Add ipwb header for additional SW logic
    new_payload = resp.get_data()

    line_json = cdxj_line.split(' ', 2)[2]
    mime = json.loads(line_json)['mime_type']

    if 'text/html' in mime:
        ipwb_js_inject = """<script src="/ipwbassets/webui.js"></script>
                            <script>injectIPWBJS()</script>"""

        new_payload = new_payload.decode('utf-8').replace(
            '</html>', f'{ipwb_js_inject}</html>')

        resp.set_data(new_payload)

    resp.headers['Memento-Datetime'] = ipwb_utils.digits14_to_rfc1123(datetime)

    if header is None:
        resp.headers['X-Headers-Generated-By'] = 'InterPlanetary Wayback'

    # Get TimeMap for Link response header
    # respWithlink_header = get_link_header_abbreviated_timemap(path, datetime)
    # resp.headers['Link'] = respWithlink_header.replace('\n', ' ')

    # NOTE(review): status defaults to int 200, and status[0] would raise
    # TypeError on an int — presumably the CDXJ status_code is a string.
    if status[0] == '3' and isUri(resp.headers.get('Location')):
        # Bad assumption that the URI-M will contain \d14 but works for now.
        uri_before_urir = request.url[:re.search(r'/\d{14}/', request.url).end(
        )]
        new_urim = uri_before_urir + resp.headers['Location']
        resp.headers['Location'] = new_urim

    return resp