def lambda_handler(request):
    """
    Proxy the request to the elastic search.

    `action` selects the query:
      * 'search' — simple_query_string over text fields
      * 'stats'  — size/extension aggregations over all documents
      * 'images' — latest version of each preview-image key
      * 'sample' — latest version of each sample-able data file
    Any other value returns a 400 JSON response.
    """
    action = request.args.get('action')
    indexes = request.args.get('index')
    # Environment variables are strings; cast so Elasticsearch receives an
    # integer for terminate_after (None when the variable is unset).
    terminate_after = os.getenv('MAX_DOCUMENTS_PER_SHARD')
    if terminate_after is not None:
        terminate_after = int(terminate_after)
    if action == 'search':
        query = request.args.get('query', '')
        body = {
            "query": {
                "simple_query_string": {
                    "query": query,
                    "fields": ['content', 'comment', 'key_text', 'meta_text']
                }
            }
        }
        # TODO: should be user settable; we should probably forbid `content` (can be huge)
        _source = ['key', 'version_id', 'updated', 'last_modified', 'size',
                   'user_meta']
        size = 1000
    elif action == 'stats':
        body = {
            "query": {"match_all": {}},
            "aggs": {
                "totalBytes": {"sum": {"field": 'size'}},
                "exts": {
                    "terms": {"field": 'ext'},
                    "aggs": {"size": {"sum": {"field": 'size'}}},
                },
            }
        }
        size = 0  # aggregations only — no hits needed
        _source = []
        # Consider all documents when computing counts, etc.
        terminate_after = None
    elif action == 'images':
        body = {
            'query': {'terms': {'ext': IMG_EXTS}},
            'collapse': {
                # One hit per object key, keeping only the newest version.
                'field': 'key',
                'inner_hits': {
                    'name': 'latest',
                    'size': 1,
                    'sort': [{'last_modified': 'desc'}],
                    '_source': ['key', 'version_id'],
                },
            },
        }
        size = NUM_PREVIEW_IMAGES
        _source = []
    elif action == 'sample':
        body = {
            'query': {
                'bool': {
                    'must': [{'terms': {'ext': SAMPLE_EXTS}}],
                    # Exclude README/summarize helper objects at any depth.
                    'must_not': [
                        {'terms': {'key': README_KEYS + [SUMMARIZE_KEY]}},
                        {'wildcard': {'key': '*/' + SUMMARIZE_KEY}},
                    ],
                },
            },
            'collapse': {
                'field': 'key',
                'inner_hits': {
                    'name': 'latest',
                    'size': 1,
                    'sort': [{'last_modified': 'desc'}],
                    '_source': ['key', 'version_id'],
                },
            },
        }
        size = NUM_PREVIEW_FILES
        _source = []
    else:
        return make_json_response(400, {"title": "Invalid action"})

    es_host = os.environ['ES_HOST']
    region = os.environ['AWS_REGION']
    index_overrides = os.getenv('INDEX_OVERRIDES', '')

    # Sign requests with credentials discovered by botocore (IAM role/env).
    auth = BotoAWSRequestsAuth(
        aws_host=es_host,
        aws_region=region,
        aws_service='es'
    )

    es_client = Elasticsearch(
        hosts=[{'host': es_host, 'port': 443}],
        http_auth=auth,
        use_ssl=True,
        verify_certs=True,
        connection_class=RequestsHttpConnection
    )

    to_search = f"{indexes},{index_overrides}" if index_overrides else indexes
    # Use keyword arguments: the positional parameter order of
    # Elasticsearch.search() differs between client versions
    # (index/doc_type/body vs body/index), so positional (to_search, body)
    # can silently bind the query body to the wrong parameter.
    result = es_client.search(
        index=to_search,
        body=body,
        _source=_source,
        size=size,
        terminate_after=terminate_after,
        timeout=MAX_QUERY_DURATION
    )
    return make_json_response(200, result)
def signed_api_call(service: str, path: str = "/", method: str = 'GET',
                    configuration: Configuration = None,
                    secrets: Secrets = None,
                    params: Dict[str, Any] = None) -> requests.Response:
    """
    Perform an API call against an AWS service.

    This should only be used when boto does not already implement the service
    itself. See https://boto3.readthedocs.io/en/latest/reference/services/index.html
    for a list of supported services by boto. This function does not claim
    being generic enough to support the whole range of AWS API.

    The `configuration` object should look like this:

    ```json
    {
        "aws_region": "us-east-1",
        "aws_host": "amazonaws.com"
    }
    ```

    While both are optional, and default to the values shown in this snippet,
    you should make sure to be explicit about them to avoid confusion.

    The endpoint being called is built from the given `service` name, the
    given region and host as well as the `path` of the action being called on
    the service. By default, the call is made over `HTTPS` but this can be
    changed by setting `aws_endpoint_scheme` in the configuration dictionary.

    Pass any parameters of the API itself as part of the remaining `params`
    parameter as a dictionary. It should match the signature of the service
    you are trying to call and will be sent as a query-string when `method` is
    `"GET"` or `"DELETE"`, or as a JSON payload otherwise. Refer to the AWS
    documentation for each service type.

    This function does not support profile names so you must provide the
    credentials in secrets.
    """  # noqa: E501
    configuration = configuration or {}
    region = configuration.get("aws_region", "us-east-1") or ""
    host = configuration.get("aws_host", "amazonaws.com")
    scheme = configuration.get("aws_endpoint_scheme", "https")
    # Default endpoint shape is <service>.<region>.<host>; the .replace('..', '.')
    # collapses the double dot produced when region is explicitly empty.
    host = "{s}.{r}.{h}".format(s=service, r=region, h=host)
    endpoint = configuration.get(
        "aws_endpoint", '{scheme}://{h}'.format(
            scheme=scheme, h=host)).replace('..', '.')
    endpoint = "{e}{p}".format(e=endpoint, p=path)
    creds = get_credentials(secrets)

    # when creds weren't provided via secrets, we let boto search for them
    # from the process environment
    if creds["aws_access_key_id"] and creds["aws_secret_access_key"]:
        auth = AWSRequestsAuth(
            aws_access_key=creds["aws_access_key_id"],
            aws_secret_access_key=creds["aws_secret_access_key"],
            aws_host=host,
            aws_region=region,
            aws_service=service)
    else:
        auth = BotoAWSRequestsAuth(
            aws_host=host,
            aws_region=region,
            aws_service=service)

    headers = {"Accept": "application/json"}
    # GET/DELETE carry parameters in the query string; other verbs send JSON.
    if method in ('DELETE', 'GET'):
        return requests.request(
            method, endpoint, headers=headers, auth=auth, params=params)
    return requests.request(
        method, endpoint, headers=headers, auth=auth, json=params)
    # NOTE(review): this span is the tail of an if/elif that begins above the
    # visible chunk — presumably `if AWS_ACCESS_KEY_ID and ...:`; confirm
    # against the full settings file.
    from aws_requests_auth.aws_auth import AWSRequestsAuth
    # Sign Wagtail's Elasticsearch traffic with the explicit keys (and
    # optional session token) taken from settings / environment.
    WAGTAILSEARCH_BACKENDS['default']['HOSTS'][0][
        'http_auth'] = AWSRequestsAuth(
            aws_access_key=AWS_ACCESS_KEY_ID,
            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
            aws_token=os.getenv('AWS_SESSION_TOKEN', ''),
            aws_host=ELASTICSEARCH_ENDPOINT,
            aws_region=AWS_REGION,
            aws_service='es',
        )
elif AWS_REGION:
    # No explicit keys: let botocore discover credentials (env vars, config
    # files, or instance role).
    from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
    WAGTAILSEARCH_BACKENDS['default']['HOSTS'][0][
        'http_auth'] = BotoAWSRequestsAuth(
            aws_host=ELASTICSEARCH_ENDPOINT,
            aws_region=AWS_REGION,
            aws_service='es',
        )

# Add whitenoise for static file serving.
MIDDLEWARE.append('whitenoise.middleware.WhiteNoiseMiddleware')
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# Configure S3 media storage only when a bucket is named in the environment.
if 'AWS_STORAGE_BUCKET_NAME' in os.environ:
    AWS_STORAGE_BUCKET_NAME = os.getenv('AWS_STORAGE_BUCKET_NAME')
    AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
    AWS_AUTO_CREATE_BUCKET = True
    INSTALLED_APPS.append('storages')
    MEDIA_URL = "https://%s/" % AWS_S3_CUSTOM_DOMAIN
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth

# Module-level AWS setup: DynamoDB table holding WebSocket connection ids,
# and the API Gateway (v2) API id looked up by name.
awsregion = os.environ.get('AWS_REGION')
ddbtable = 'ddb-connections'
dynamodb = boto3.client('dynamodb', region_name=awsregion)
apiid = None
apiname = os.environ.get('WSAPINAME')
# Find the ApiId whose Name matches WSAPINAME; apiid stays None if absent.
apis = boto3.client('apigatewayv2').get_apis()['Items']
for api in apis:
    if api['Name'] == apiname:
        apiid = api['ApiId']
        break
# SigV4 signer for calling the execute-api management endpoint.
# NOTE(review): if no API matched, apiid is None and this concatenation
# raises TypeError at import time — confirm WSAPINAME is always valid.
awsauth = BotoAWSRequestsAuth(aws_host=apiid + '.execute-api.' + awsregion + '.amazonaws.com',
                              aws_region=awsregion,
                              aws_service='execute-api')


##
def addId(connid):
    # Record a connection id in DynamoDB; returns the put_item response.
    return dynamodb.put_item(TableName=ddbtable,
                             Item={'Connections': {'S': connid}})


##
def removeId(connid):
    # Delete a connection id from DynamoDB; returns the delete_item response.
    return dynamodb.delete_item(TableName=ddbtable,
                                Key={'Connections': {'S': connid}})


##
def connect(event, context):
    # $connect route handler. NOTE(review): body appears truncated at this
    # chunk boundary — the success path presumably continues below. The bare
    # `except:` swallows all errors (including KeyboardInterrupt); narrowing
    # to KeyError would be safer — confirm before changing.
    try:
        connid = event['requestContext']['connectionId']
    except:
        return {"body": "error", "statusCode": 400}
def get_auth():
    """Build a SigV4 request signer from the module-level host/service/region config."""
    signer = BotoAWSRequestsAuth(
        aws_host=host_name,
        aws_region=AWS_REGION,
        aws_service=AWS_SERVICE,
    )
    return signer
    # NOTE(review): tail of an if/elif whose `if` line (presumably testing
    # for explicit AWS keys) lies above this chunk — confirm in the full file.
    # Sign Wagtail's Elasticsearch backend with explicit credentials.
    WAGTAILSEARCH_BACKENDS['default']['HOSTS'][0]['http_auth'] = AWSRequestsAuth(
        aws_access_key=AWS_ACCESS_KEY_ID,
        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
        aws_token=os.getenv('AWS_SESSION_TOKEN', ''),
        aws_host=ELASTICSEARCH_ENDPOINT,
        aws_region=AWS_REGION,
        aws_service='es',
    )
elif AWS_REGION:
    # No API keys in the environ, so attempt to discover them with Boto instead, per:
    # http://boto3.readthedocs.io/en/latest/guide/configuration.html#configuring-credentials
    # This may be useful if your credentials are obtained via EC2 instance meta data.
    from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
    WAGTAILSEARCH_BACKENDS['default']['HOSTS'][0]['http_auth'] = BotoAWSRequestsAuth(
        aws_host=ELASTICSEARCH_ENDPOINT,
        aws_region=AWS_REGION,
        aws_service='es',
    )

# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
MIDDLEWARE.append('whitenoise.middleware.WhiteNoiseMiddleware')
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# Use S3 for media storage only when a bucket is configured.
if 'AWS_STORAGE_BUCKET_NAME' in os.environ:
    AWS_STORAGE_BUCKET_NAME = os.getenv('AWS_STORAGE_BUCKET_NAME')
    AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
    AWS_AUTO_CREATE_BUCKET = True
    INSTALLED_APPS.append('storages')
def lambda_handler(request):
    """
    Proxy the request to the elastic search.

    Dispatches on `action` ('packages', 'search', 'stats', 'images',
    'sample'); unknown actions return a 400 JSON response. Results are
    post-processed before being returned as JSON.

    Raises ValueError when `index` is missing/not a string or `from` is
    negative.
    """
    action = request.args.get('action')
    user_body = request.args.get('body', {})
    user_fields = request.args.get('fields', [])
    user_indexes = request.args.get('index', "")
    user_size = request.args.get('size', DEFAULT_SIZE)
    user_source = request.args.get('_source', [])
    # 0-indexed starting position (for pagination)
    user_from = int(request.args.get('from', 0))
    user_retry = int(request.args.get('retry', 0))
    # Per-shard document cap; env vars are strings, so cast to int.
    terminate_after = int(os.environ.get('MAX_DOCUMENTS_PER_SHARD', 10_000))

    if not user_indexes or not isinstance(user_indexes, str):
        raise ValueError(
            "Request must include index=<comma-separated string of indices>")
    if user_from < 0:
        raise ValueError("'from' must be a non-negative integer")

    if action == 'packages':
        # Package search: caller may supply a full body/fields; otherwise use
        # a query_string over the package fields.
        query = request.args.get('query', '')
        body = user_body or {
            "query": {
                "query_string": {
                    "analyze_wildcard": True,
                    "lenient": True,
                    "query": query,
                    # see enterprise/**/bucket.py for mappings
                    "fields": user_fields or [
                        # package
                        'comment', 'handle', 'handle_text^2', 'metadata',
                        'tags'
                    ]
                }
            }
        }
        # Guard: this action must only touch package indexes.
        if not all(i.endswith('_packages') for i in user_indexes.split(',')):
            raise ValueError(
                "'packages' action searching indexes that don't end in '_packages'"
            )
        _source = user_source
        size = user_size
        terminate_after = None
    elif action == 'search':
        query = request.args.get('query', '')
        my_fields = user_fields or [
            # object
            'content', 'comment', 'ext', 'key', 'key_text', 'meta_text',
            # package, and boost the fields
            'handle^2', 'handle_text^2', 'metadata^2', 'tags^2'
        ]
        # Retry ladder: strict query_string first, then lenient, then the
        # more forgiving simple_query_string (wildcards off at retry >= 3).
        if user_retry <= 1:
            body = {
                "query": {
                    "query_string": {
                        "analyze_wildcard": True,
                        "lenient": user_retry > 0,
                        "query": query,
                        # more precise searches vs OR
                        "default_operator": "AND",
                        # see enterprise/**/bucket.py for mappings
                        "fields": my_fields
                    }
                }
            }
        else:
            body = {
                "query": {
                    "simple_query_string": {
                        "query": query,
                        "analyze_wildcard": user_retry < 3,
                        "default_operator": "AND",
                        "fields": my_fields,
                        "lenient": True,
                    }
                }
            }
        _source = user_source or [
            'key', 'version_id', 'updated', 'last_modified', 'size',
            'user_meta', 'comment', 'handle', 'hash', 'tags', 'metadata',
            'pointer_file'
        ]
        size = DEFAULT_SIZE
    elif action == 'stats':
        # Aggregate total bytes, per-extension counts/sizes, and the number
        # of package handles across all documents.
        body = {
            "query": {
                "match_all": {}
            },
            "aggs": {
                "totalBytes": {
                    "sum": {
                        "field": 'size'
                    }
                },
                "exts": {
                    "terms": {
                        "field": 'ext'
                    },
                    "aggs": {
                        "size": {
                            "sum": {
                                "field": 'size'
                            }
                        }
                    },
                },
                "totalPackageHandles": {
                    "value_count": {
                        "field": "handle"
                    }
                },
            }
        }
        size = 0  # We still get all aggregates, just don't need the results
        _source = False
        # Consider all documents when computing counts, etc.
        terminate_after = None
    elif action == 'images':
        # Latest version of each image object, one hit per key via collapse.
        body = {
            'query': {
                'regexp': {
                    'ext': IMG_EXTS
                }
            },
            'collapse': {
                'field': 'key',
                'inner_hits': {
                    'name': 'latest',
                    'size': 1,
                    'sort': [{
                        'last_modified': 'desc'
                    }],
                    '_source': ['key', 'version_id'],
                },
            },
        }
        size = NUM_PREVIEW_IMAGES
        _source = False
    elif action == 'sample':
        # Latest version of sample-able files, excluding README/summarize
        # helper objects at any path depth.
        body = {
            'query': {
                'bool': {
                    'must': [{
                        'regexp': {
                            'ext': SAMPLE_EXTS
                        }
                    }],
                    'must_not': [
                        {
                            'terms': {
                                'key': README_KEYS + [SUMMARIZE_KEY]
                            }
                        },
                        {
                            'wildcard': {
                                'key': '*/' + SUMMARIZE_KEY
                            }
                        },
                    ],
                },
            },
            'collapse': {
                'field': 'key',
                'inner_hits': {
                    'name': 'latest',
                    'size': 1,
                    'sort': [{
                        'last_modified': 'desc'
                    }],
                    '_source': ['key', 'version_id'],
                },
            },
        }
        size = NUM_PREVIEW_FILES
        _source = False
    else:
        return make_json_response(400, {"title": "Invalid action"})

    es_host = os.environ['ES_HOST']
    region = os.environ['AWS_REGION']
    index_overrides = os.getenv('INDEX_OVERRIDES', '')

    # SigV4 signing with botocore-discovered credentials (e.g. Lambda role).
    auth = BotoAWSRequestsAuth(aws_host=es_host,
                               aws_region=region,
                               aws_service='es')

    es_client = Elasticsearch(
        hosts=[{
            'host': es_host,
            'port': 443
        }],
        http_auth=auth,
        use_ssl=True,
        verify_certs=True,
        connection_class=RequestsHttpConnection,
        timeout=MAX_QUERY_DURATION,
    )

    # Optionally append operator-configured extra indexes to the search list.
    to_search = f"{user_indexes},{index_overrides}" if index_overrides else user_indexes
    result = es_client.search(
        index=to_search,
        body=body,
        _source=_source,
        size=size,
        from_=user_from,
        # try turning this off to consider all documents
        terminate_after=terminate_after,
    )
    return make_json_response(200, post_process(result, action))
import os
import requests
from pprint import pprint
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth

# API location and signing region, overridable via the environment.
api_host = os.getenv("API_HOST", "simple-todo.jaehyeon.me")
api_base_path = os.getenv("API_BASE_PATH", "api")
aws_region = os.getenv("AWS_REGION", "ap-southeast-2")
base_url = "https://{0}/{1}".format(api_host, api_base_path)

# Sign every request with SigV4 for the API Gateway execute-api service.
auth = BotoAWSRequestsAuth(aws_host=api_host,
                           aws_region=aws_region,
                           aws_service="execute-api")

# Only these keys are understood by the API; everything else is dropped.
_ALLOWED_KEYS = ("username", "created_at", "all_items", "todo")


def set_data(**kwargs):
    """Filter keyword arguments down to the request fields the API accepts."""
    return {key: value for key, value in kwargs.items() if key in _ALLOWED_KEYS}


## hello world
resp = requests.get(base_url, auth=auth)
# resp.json()
# {'status': 'ok'}

## all items
params = set_data(username="******", created_at="2020-01-01", all_items=True)
resp = requests.get("{0}/items".format(base_url), auth=auth, params=params)
from pygments import highlight
from pygments.lexers import guess_lexer
from pygments.formatters import TerminalFormatter
import requests

# This line will fail if you do not have both aws-requests-auth and botocore installed
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth

if __name__ == '__main__':
    # --- AWS basics: region, service, and the derived query endpoint ---
    aws_region = 'us-east-1'
    aws_service = 'ec2'
    aws_endpoint = '{}.{}.amazonaws.com'.format(aws_service, aws_region)

    # Credentials are discovered automatically (env vars, config files, IAM role).
    auth = BotoAWSRequestsAuth(
        aws_host=aws_endpoint,
        aws_region=aws_region,
        aws_service=aws_service,
    )

    # --- EC2 query-API call: DescribeVpcs at a fixed API version ---
    params = {'Action': 'DescribeVpcs', 'Version': '2015-10-01'}
    url = 'https://{}'.format(aws_endpoint)

    # Fire the signed GET request and report what came back.
    response = requests.get(url=url, params=params, auth=auth)
    print('Response code: {}'.format(response.status_code))
    print('Headers: {}'.format(response.headers))
#!/usr/bin/env python3
# https://github.com/DavidMuller/aws-requests-auth
import requests
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth

# API Gateway host for the IAM-authorized test endpoint.
API_HOST = '4f4ldfr0oe.execute-api.us-west-2.amazonaws.com'

# SigV4 signer using credentials discovered by botocore.
auth = BotoAWSRequestsAuth(
    aws_host=API_HOST,
    aws_region='us-west-2',
    aws_service='execute-api')

# Call the IAM-protected route and show the signed headers plus the body.
response = requests.get(
    'https://4f4ldfr0oe.execute-api.us-west-2.amazonaws.com/test/iamauth',
    auth=auth)
print(response.request.headers)
print(response.content)
# Also, use a ~/.aws/config file for private config such as aws_access_key_id, aws_secret_access_key, region and output (BotoAWSRequestAuth will read these automatically if the file is present) #IMPORTS import json import boto3 # pip3 install boto3 import requests from web3 import Web3, HTTPProvider from aws_requests_auth.boto_utils import BotoAWSRequestsAuth # pip3 install aws_requests_auth from elasticsearch import Elasticsearch, RequestsHttpConnection # pip3 install elasticsearch # CONFIG web3 = Web3(HTTPProvider('https://testnet-rpc.cybermiles.io:8545')) host = 'search-smart-contract-search-engine-cdul5cxmqop325ularygq62khi.ap-southeast-2.es.amazonaws.com' auth = BotoAWSRequestsAuth( aws_host= 'search-smart-contract-search-engine-cdul5cxmqop325ularygq62khi.ap-southeast-2.es.amazonaws.com', aws_region='ap-southeast-2', aws_service='es') es = Elasticsearch(hosts=[{ 'host': host, 'port': 443 }], region='ap-southeast-2', use_ssl=True, verify_certs=True, http_auth=auth, connection_class=RequestsHttpConnection) # FUNCTIONS def fetchAbi():
def main(argv=None):
    """Run when invoked from the command-line.

    Parses CLI arguments, signs a GET request to the selected AWS query API
    with SigV4 (explicit keys via -c, otherwise botocore discovery), and
    pretty-prints the response with syntax highlighting.

    Returns an ExitStatus value (SUCCESS or ERROR).
    """
    colorama.init(autoreset=True)

    # Create an argparse argument parser for parsing command-line arguments
    prog = 'httpaws'
    desc = '{} v{}: A CLI HTTP client for AWS services with syntax highlighting'.format(
        prog, version)
    epilog = 'See the AWS Documentation for API references for each service: https://docs.aws.amazon.com'
    parser = argparse.ArgumentParser(description=desc, epilog=epilog, prog=prog)
    parser.add_argument(
        '-r', '--region',
        help='AWS region. Overrides config/env - e.g. us-east-1')
    parser.add_argument('-s', '--service',
                        help='AWS service - e.g. ec2, s3, etc.',
                        default='ec2')
    parser.add_argument(
        '-e', '--endpoint',
        help="override command's default URL with the given URL - e.g. ec2.us-east-1.amazonaws.com"
    )
    parser.add_argument(
        '-c', '--creds',
        help="override AWS Access Key Id and AWS Secret Access Key - i.e. <Access_Key>:<Secret_Key>"
    )
    parser.add_argument('-v', '--version',
                        help='API version to use for the service',
                        default='2015-10-01')
    parser.add_argument('-p', '--paginate',
                        action='store_true',
                        help='paginate long output')
    parser.add_argument(
        '-w', '--wrap',
        action='store_true',
        help='wrap long lines in paginated output (instead of chop)')
    parser.add_argument('api',
                        help='name of the API to call - e.g. "DescribeVpcs"')
    args = parser.parse_args(argv)

    # --- Configure AWS basics ---

    # Configure AWS region
    aws_region = 'us-east-1'  # Default if not overriden on command-line or specified in config file
    if args.region:
        aws_region = args.region
    else:
        import os

        # Read the region from the ~/.aws/config file if it exists
        try:
            with open(os.path.expanduser('~/.aws/config')) as f:
                for line in f.read().splitlines():
                    if line.startswith('region = '):
                        aws_region = line.split('region = ')[1]
                        break
        except (FileNotFoundError, PermissionError):
            pass

    # Configure AWS service ('-s' has default='ec2', so args.service is
    # always truthy; no fallback branch needed)
    aws_service = args.service

    # Configure AWS endpoint
    if args.endpoint:
        aws_endpoint = args.endpoint
    else:
        aws_endpoint = '{}.{}.amazonaws.com'.format(aws_service, aws_region)

    if args.creds:
        from aws_requests_auth.aws_auth import AWSRequestsAuth

        # Use the specified AWS access and secret key. Keep the try body to
        # the one statement that can raise ValueError (a malformed -c value).
        try:
            access_key, secret_key = args.creds.split(':')
        except ValueError:
            # Fixed message: "proviced" -> "provided" and the closing quote
            # around the expected format was missing.
            perror(
                'Credentials must be provided in the format "<AWS_Access_Key_Id>:<AWS_Secret_Access_key>"'
            )
            return ExitStatus.ERROR
        auth = AWSRequestsAuth(aws_access_key=access_key,
                               aws_secret_access_key=secret_key,
                               aws_host=aws_endpoint,
                               aws_region=aws_region,
                               aws_service=aws_service)
    else:
        # This line will fail if you do not have both aws-requests-auth and botocore installed
        from aws_requests_auth.boto_utils import BotoAWSRequestsAuth

        # Use Boto to automatically gather AWS credentials from environment
        # variables, AWS config files, or IAM Role
        auth = BotoAWSRequestsAuth(aws_host=aws_endpoint,
                                   aws_region=aws_region,
                                   aws_service=aws_service)

    # Configure details of the API call
    params = {'Action': args.api, 'Version': args.version}
    url = 'https://{}'.format(aws_endpoint)

    # Send a GET request
    try:
        # TODO: Support PUT requests for Mutating API calls
        response = requests.get(url=url, params=params, auth=auth)
    except requests.exceptions.ConnectionError:
        perror('Error connecting to host {!r}'.format(url))
        return ExitStatus.ERROR

    # Gather response details
    response_text = 'Response code: {}'.format(response.status_code)
    header_text = 'Headers: {}'.format(response.headers)

    # Convert the response content from an encoded byte string to a Unicode string
    response_bytes = response.content
    response_str = response_bytes.decode()

    # If the response is XML, ensure that it is nicely formatted with good
    # indenting and newlines (best-effort: lxml is optional)
    if response_str.startswith('<?xml'):
        try:
            import lxml.etree as etree
            response_bytes = etree.tostring(etree.fromstring(response.content),
                                            pretty_print=True)
        except ImportError:
            pass

    # Pretty-print the content of the response with syntax highlighting for readability
    highlighted_text = highlight(response_bytes, guess_lexer(response_str),
                                 TerminalFormatter())
    output_text = '{}\n{}\n{}'.format(response_text, header_text,
                                      highlighted_text)
    if args.paginate:
        ppaged(output_text, wrap=args.wrap)
    else:
        print(output_text)
    return ExitStatus.SUCCESS
def boto_auth():
    """Return a SigV4 signer for S3 in us-west-2 using botocore-discovered credentials."""
    region = 'us-west-2'
    return BotoAWSRequestsAuth(
        aws_host='s3-us-west-2.amazonaws.com',
        aws_region=region,
        aws_service='s3',
    )
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
from elasticsearch import Elasticsearch, RequestsHttpConnection
import time


def delete_index(index_name):
    # Delete an index, ignoring "bad request"/"not found" so repeated runs
    # are idempotent. Relies on the module-level es_client defined below
    # (resolved at call time, so the forward reference is fine).
    es_client.indices.delete(index=index_name, ignore=[400, 404])


# NOTE(review): 'ES_ENDPOINT' looks like a placeholder to be replaced with
# the real domain endpoint — confirm before deploying.
es_host = 'ES_ENDPOINT'
auth = BotoAWSRequestsAuth(aws_host=es_host,
                           aws_region='eu-west-1',
                           aws_service='es')

# use the requests connection_class and pass in our custom auth class
es_client = Elasticsearch(host=es_host,
                          port=80,
                          connection_class=RequestsHttpConnection,
                          http_auth=auth)


def lambda_handler(event, context):
    # Scan every index's settings and compute the cutoff timestamp (now minus
    # 7 days, in milliseconds — 604800000 ms = 7 * 24 * 3600 * 1000).
    # NOTE(review): the body is truncated at this chunk boundary; the loop
    # presumably compares creation_date to _7_days_back and deletes old
    # indexes via delete_index.
    indices = es_client.indices.get_settings("*")
    current_milli_time = lambda: int(round(time.time() * 1000))
    _7_days_back = current_milli_time() - 604800000
    for key in indices:
        index_creation_date = indices[key]['settings']['index'][
            'creation_date']