def _create_database(cursor):
    """Create the configured database, but only when running in test mode.

    Guarded twice: is_test() must be true AND the configured database name
    must end with 'test', so a production database can never be created here.
    """
    if not (is_test() and get('database').endswith('test')):
        return
    try:
        cursor.execute("CREATE DATABASE %s ENCODING 'UTF8'" % get('database'))
    except psycopg2.ProgrammingError as ex:
        # Typically "database already exists" -- log and carry on
        logger.error(str(ex))
def backup_api_gateway(s3_resource, aws_account_id, current_date):
    # type: ('boto3.client("s3")', str, str) -> float
    """Export every API Gateway stage as a swagger JSON file into S3.

    Returns the elapsed wall-clock time in seconds.
    """
    logger.info("backup apigateway ran at " + str(datetime.datetime.now()))
    started = time.time()
    bucket = get_S3_bucket(s3_resource,
                           "backup-apigateway-%s" % aws_account_id,
                           get('aws_region_name'))
    # http://boto3.readthedocs.io/en/latest/reference/services/apigateway.html#APIGateway.Client.get_export
    client = boto3.client('apigateway', region_name=get('aws_region_name'))
    for api in client.get_rest_apis(limit=500)['items']:
        # NOTE: get_stages returns its list under the key 'item' (not 'items')
        for stage in client.get_stages(restApiId=api['id'])['item']:
            export = client.get_export(
                restApiId=api['id'],
                stageName=stage['stageName'],
                exportType='swagger',
                parameters={'extensions': 'integrations'},
                accepts='application/json')
            # Upload the exported definition to S3
            key = '%s-%s-%s-backup.json' % (
                current_date, api['name'], stage['stageName'])
            bucket.put_object(Key=key, Body=export['body'].read())
    return time.time() - started
def _drop_database(cursor):
    """Drop the configured database, but only when running in test mode.

    Guarded twice: is_test() must be true AND the database name must end
    with 'test', so a production database can never be dropped here.
    """
    if is_test() and get('database').endswith('test'):
        try:
            cursor.execute("DROP DATABASE %s" % get('database'))
        except psycopg2.Error as ex:
            # Most commonly: the database does not exist yet, or there are
            # still open connections to it.  Log and keep going -- callers
            # treat a failed drop as best-effort cleanup.  (Narrowed from a
            # bare `except Exception`; removed dead commented-out code.)
            logger.error(str(ex))
def http_post(request_params, request_body):
    # type: (dict, dict) -> list
    """Search media by tags / likes / username, return presigned URLs.

    request_body may contain:
      tags     -- dict of hstore key/value pairs that must all match
      likes    -- minimum number of likes
      username -- restrict results to that user's media
    """
    logger.info("http_post")
    cognito_idp_client = boto3.client('cognito-idp')
    user_profile = User_profile.get(
        User_profile.username == request_body['username'])
    # noinspection PyUnresolvedReferences
    s3_client = boto3.client('s3', config=Config(
        signature_version='s3v4', region_name=get('aws_region_name')))
    # SECURITY FIX: request_body is untrusted input -- build the WHERE
    # clause with bound parameters instead of interpolating values into
    # the SQL text (previous version was injectable via tag keys/values).
    where = []
    params = []
    if request_body:
        if "tags" in request_body:
            for k, v in request_body["tags"].items():
                where.append("tags -> %s = %s")
                params.extend([k, v])
        if "likes" in request_body:
            where.append("likes >= %s")
            params.append(int(request_body["likes"]))
        if "username" in request_body:
            # noinspection PyUnresolvedReferences
            where.append("username_id = %s")
            params.append(user_profile.id)
    if where:
        sql = ("select * from media where %s order by media_created desc;"
               % " and ".join(where))
    else:
        sql = "select * from media;"
    logger.info(sql)
    urls = []
    bucket_name = "media-%s" % get('aws_account_id')
    for media in Media.raw(sql, *params):
        logger.info(media)
        urls.append(s3_client.generate_presigned_url(
            ClientMethod='get_object',
            Params={'Bucket': bucket_name, 'Key': media.media_uuid.hex}))
    logger.info(urls)
    return urls
def http_post(request_params, request_body):
    # type: (dict, dict) -> dict
    """Authenticate a Cognito user by username/password and return tokens.

    Raises Exception("NEW_PASSWORD_REQUIRED") when the account still has a
    temporary password.
    """
    # NOTE(review): this log line was scrubbed in the original; reconstructed
    # as logging the username being authenticated.
    logger.info('http_post username=' + request_body['username'])
    cognito_app_client_id = get_cognito_app_client_id(
        cognito_idp_client, cognito_user_pool_id=get('aws_user_pools_id'))
    # If the Cognito user pool was created by Amplify or manually, it may not
    # have an auth flow usable from python -- enable the ones we need.
    response = cognito_idp_client.describe_user_pool_client(
        UserPoolId=get('aws_user_pools_id'), ClientId=cognito_app_client_id)
    explicit_auth_flows = response['UserPoolClient']['ExplicitAuthFlows']
    original_count = len(explicit_auth_flows)
    # BUG FIX: list.extend('USER_PASSWORD_AUTH') appended the string one
    # CHARACTER at a time; append() adds the flow name as a single entry.
    if 'USER_PASSWORD_AUTH' not in explicit_auth_flows:
        explicit_auth_flows.append('USER_PASSWORD_AUTH')
    if 'ADMIN_NO_SRP_AUTH' not in explicit_auth_flows:
        explicit_auth_flows.append('ADMIN_NO_SRP_AUTH')
    if original_count != len(explicit_auth_flows):
        # BUG FIX: pass the merged list so flows that were already enabled
        # are preserved (previously they were overwritten with just two).
        cognito_idp_client.update_user_pool_client(
            UserPoolId=get('aws_user_pools_id'),
            ClientId=cognito_app_client_id,
            ExplicitAuthFlows=explicit_auth_flows)
    auth_response = cognito_idp_client.initiate_auth(
        ClientId=cognito_app_client_id,
        AuthFlow='USER_PASSWORD_AUTH',
        AuthParameters={
            'USERNAME': request_body['username'],
            'PASSWORD': request_body['password']
        })
    logger.info('Auth successful')
    if auth_response.get("ChallengeName") == "NEW_PASSWORD_REQUIRED":
        raise Exception("NEW_PASSWORD_REQUIRED")
    result = auth_response['AuthenticationResult']
    logger.info('return result tokens')
    return {
        "user": {},
        "access_token": result['AccessToken'],
        "refresh_token": result['RefreshToken'],
        "id_token": result['IdToken'],
        "token_type": result['TokenType'],
        "expires_in": result['ExpiresIn']
    }
def http_put(request_params, request_body):
    # type: (dict, dict) -> dict
    """Provision a user in Cognito and return the stored user record."""
    logger.info("http_put")
    # Don't email the new user when provisioning under test
    message_action = 'SUPPRESS' if is_test() else 'RESEND'
    created = user_persist(cognito_idp_client, get('aws_user_pools_id'),
                           request_body, True, ['EMAIL'], message_action)
    # Re-read the user so the response reflects exactly what Cognito stored
    cognito_user = cognito_idp_client.admin_get_user(
        UserPoolId=get('aws_user_pools_id'),
        Username=created['Username'])
    return remove_cruft(cognito_user)
def get_secure_event(lambda_function, aws=False):
    """Load canned test data for a lambda and substitute test identities.

    When aws=True the current session's AWS credentials and account id are
    injected into the request so the lambda can call AWS itself.
    Returns (event, fullpath).
    """
    global _ID_TOKEN
    fullpath = get_lambda_fullpath(lambda_function)
    event = get_lambda_test_data(fullpath, authorization_token=_ID_TOKEN)
    param = 'body' if 'body' in event else 'queryStringParameters'
    if aws:
        # noinspection PyTypeChecker
        session = Session(region_name=get('aws_region_name'))
        credentials = session.get_credentials()
        event[param]['aws_access_key_id'] = credentials.access_key
        event[param]['aws_secret_access_key'] = credentials.secret_key
        sts_client = boto3.client(
            "sts",
            aws_access_key_id=credentials.access_key,
            aws_secret_access_key=credentials.secret_key)
        event[param]['aws_account_id'] = \
            sts_client.get_caller_identity()["Account"]
    # ROBUSTNESS FIX: the fallback key 'queryStringParameters' is not
    # guaranteed to be present; previously only the 'to_username' lookup
    # was guarded with `param in event` -- now they all are.
    if param in event:
        if 'username' in event[param]:
            event[param]['username'] = TESTER1
        if 'recipient_username' in event[param]:
            event[param]['recipient_username'] = TESTER1
        if 'blocked_username' in event[param]:
            event[param]['blocked_username'] = TESTER2
        if 'from_username' in event[param]:
            event[param]['from_username'] = TESTER2
        if 'to_username' in event[param]:
            event[param]['to_username'] = TESTER1
    return event, fullpath
def http_get(request_params, request_body):
    # type: (dict, dict) -> dict
    """Run every backup job in turn and report the total duration."""
    # NOTE(review): `context` is not a parameter here -- presumably the
    # lambda context object is exposed at module level; confirm before reuse.
    logger.debug('entered:' + context.function_name)
    current_time = datetime.datetime.now()
    current_date = str(current_time.strftime("%Y-%m-%d"))
    name = context.function_name
    logger.info("Your backup" + name + " ran at " + str(current_time))
    s3_resource = boto3.resource('s3')
    logger.info("Got s3 client")
    sts_client = boto3.client("sts", region_name=get('aws_region_name'))
    logger.info("Got sts client")
    aws_account_id = sts_client.get_caller_identity()["Account"]
    duration = 0
    # Each job returns its elapsed seconds; accumulate and log after each
    for backup_job in (backup_cognito, backup_iam, backup_api_gateway,
                       backup_cloudfront, backup_route53):
        duration += backup_job(s3_resource, aws_account_id, current_date)
        logger.info("duration= %d" % duration)
    return {"duration": duration}
def get_api_url(apigateway_client, rest_api_name, stage, endpoint):
    # type: ('boto3.client("apigateway")', str, str, str) -> str
    """Compose the invoke URL for a deployed API Gateway endpoint."""
    rest_api_id = get_rest_api_id(apigateway_client, rest_api_name)
    region = get('aws_region_name')
    return 'https://%s.execute-api.%s.amazonaws.com%s%s' % (
        rest_api_id, region, stage, endpoint)
def _create_extensions():
    """Ensure the hstore extension exists in the configured database."""
    conn = psycopg2.connect(database=get('database'))
    conn.set_isolation_level(0)  # autocommit: DDL takes effect immediately
    try:
        conn.cursor().execute("CREATE EXTENSION IF NOT EXISTS hstore ")
    finally:
        conn.close()
def http_get(request_params, request_body):
    # type: (dict, dict) -> dict
    """Return a presigned download URL for an existing media object."""
    logger.info("http_get")
    s3_client = boto3.client('s3', config=Config(
        signature_version='s3v4', region_name=get('aws_region_name')))
    bucket_name = "media-%s" % get('aws_account_id')
    # Bucket is created on demand before signing the URL
    s3_client.create_bucket(ACL='private', Bucket=bucket_name)
    url = s3_client.generate_presigned_url(
        ClientMethod='get_object',
        Params={'Bucket': bucket_name,
                'Key': request_params['media_uuid']})
    logger.info(url)
    return {"url": url}
def backup_cloudfront(s3_resource, aws_account_id, current_date):
    # type: ('boto3.client("s3")', str, str) -> float
    """Save every CloudFront distribution config to S3 as JSON.

    Returns the elapsed wall-clock time in seconds.
    """
    logger.info("backup cloudfront ran at " + str(datetime.datetime.now()))
    started = time.time()
    s3_bucket = get_S3_bucket(s3_resource,
                              "backup-cloudfront-%s" % aws_account_id,
                              get('aws_region_name'))
    cloudfront_client = boto3.client('cloudfront',
                                     region_name=get('aws_region_name'))
    distributions = cloudfront_client.list_distributions()
    # 'Items' is absent when there are no distributions at all
    for item in distributions['DistributionList'].get('Items', []):
        config = cloudfront_client.get_distribution_config(Id=item['Id'])
        body = bytearray()
        body.extend(map(ord, json.dumps(config, indent=4)))
        # Upload the file to S3
        s3_bucket.put_object(
            Key=current_date + '-' + item['Id'] + '-backup.json',
            Body=body)
    return time.time() - started
def http_delete(request_params, request_body):
    # type: (dict, dict) -> dict
    """Delete a media row and its backing S3 object."""
    logger.info("http_delete")
    try:
        with Media.atomic():
            delete_query = Media.delete().where(
                Media.media_uuid == request_params["media_uuid"])
            delete_query.execute()
    except Exception as ex:
        # Best-effort: log and still try to remove the S3 object below
        logger.error(str(ex))
    s3_client = boto3.client('s3', config=Config(
        signature_version='s3v4', region_name=get('aws_region_name')))
    s3_client.delete_object(Bucket="media-%s" % get('aws_account_id'),
                            Key=request_params["media_uuid"])
    return {}
def backup_iam(s3_resource, aws_account_id, current_date):
    # type: ('boto3.client("s3")', str, str) -> float
    """Back up IAM users, group policies and role policies to S3.

    Returns the elapsed wall-clock time in seconds.
    """
    logger.info("backup iam ran at " + str(datetime.datetime.now()))
    t0 = time.time()
    bucket_name = "backup-iam-%s" % aws_account_id
    s3_bucket = get_S3_bucket(s3_resource, bucket_name,
                              get('aws_region_name'))
    iam_client = boto3.client('iam')
    user_dict = {}
    # BUG FIX: `groups` was reset per user, so the group-policy backup
    # below only covered the LAST user's groups.  Collect the union of
    # every user's groups instead.
    all_groups = set()
    for user in iam_client.list_users()['Users']:
        user_groups = iam_client.list_groups_for_user(
            UserName=user['UserName'])
        # NOTE(review): these prints were scrubbed in the original;
        # reconstructed as a username + group listing.
        print("Username: " + user['UserName'])
        print("Assigned groups: ")
        groups = []
        for group in user_groups['Groups']:
            print(group)
            groups.append(group['GroupName'])
        all_groups.update(groups)
        user_dict[user['UserName']] = json.dumps(groups)
    b = bytearray()
    b.extend(map(ord, json.dumps(user_dict, indent=4)))
    # Upload the users backup to S3
    s3_bucket.put_object(Key=current_date + '-users-backup.json', Body=b)
    # group -> attached managed policies
    policies = []
    for group in sorted(all_groups):
        policies.append({
            group: iam_client.list_attached_group_policies(GroupName=group,
                                                           MaxItems=500)
        })
    s3_bucket.put_object(Key=current_date + '-group-policies-backup.json',
                         Body=json.dumps(policies, indent=4))
    # role -> attached managed policies (paginated via Marker)
    role_policies = []
    kwargs = {'MaxItems': 100}
    while True:
        response = iam_client.list_roles(**kwargs)
        for role in response['Roles']:
            role_policies.append({
                role['RoleName']: iam_client.list_attached_role_policies(
                    RoleName=role['RoleName'])
            })
        if 'Marker' not in response:
            break
        kwargs['Marker'] = response['Marker']
    if role_policies:
        s3_bucket.put_object(Key=current_date + '-roles-backup.json',
                             Body=json.dumps(role_policies, indent=4))
    t1 = time.time()
    return t1 - t0
def http_get(request_params, request_body):
    # type: (dict, dict) -> dict
    """Return a presigned URL for UPLOADING a new media object.

    A fresh UUID is generated as the object key and returned alongside the
    URL so the caller can reference the object after uploading.
    """
    logger.info("http_get")
    s3_resource = boto3.resource('s3', region_name=get('aws_region_name'))
    s3_client = boto3.client('s3', config=Config(
        signature_version='s3v4', region_name=get('aws_region_name')))
    bucket_name = "media-%s" % get('aws_account_id')
    logger.info("before create bucket")
    s3_resource.create_bucket(ACL='private', Bucket=bucket_name)
    logger.info("before generate_presigned_url")
    key = str(uuid.uuid4())
    # BUG FIX: an upload URL must be signed for put_object.  Signing
    # get_object and overriding HttpMethod="put" yields a signature S3
    # rejects under SigV4.
    url = s3_client.generate_presigned_url(
        ClientMethod='put_object',
        Params={'Bucket': bucket_name, 'Key': key})
    return {"url": url, "media_uuid": key}
def http_post(request_params, request_body):
    # type: (dict, dict) -> dict
    """Look up a Cognito user; optionally complete its NEW_PASSWORD challenge.

    When 'newpassword' is present in the request body, authenticates with the
    current (temporary) password and answers the NEW_PASSWORD_REQUIRED
    challenge with the new one.
    """
    logger.info("http_post")
    cognito_user = cognito_idp_client.admin_get_user(
        UserPoolId=get('aws_user_pools_id'),
        Username=request_body['username'])
    if "cognito_user_pool_app_client_id" in request_body:
        cognito_app_client_id = request_body['cognito_user_pool_app_client_id']
    else:
        cognito_app_client_id = get_cognito_app_client_id(
            cognito_idp_client,
            cognito_user_pool_id=get('aws_user_pools_id'))
    # TODO: enable ADMIN_NO_SRP_AUTH and USER_PASSWORD_AUTH auth flows
    if 'newpassword' in request_body:
        # noinspection PyBroadException
        auth_response = cognito_idp_client.admin_initiate_auth(
            UserPoolId=get('aws_user_pools_id'),
            AuthFlow='ADMIN_NO_SRP_AUTH',
            AuthParameters={
                'USERNAME': cognito_user['Username'],
                'PASSWORD': request_body['password']
            },
            ClientId=cognito_app_client_id)
        # https://github.com/capless/warrant/issues/14
        cognito_idp_client.respond_to_auth_challenge(
            ClientId=cognito_app_client_id,
            ChallengeName='NEW_PASSWORD_REQUIRED',
            Session=auth_response['Session'],
            ChallengeResponses={
                'NEW_PASSWORD': request_body['newpassword'],
                'USERNAME': request_body['username']
            })
        logger.info('newpassword successful, return result tokens')
    return remove_cruft(cognito_user)
def http_delete(request_params, request_body):
    # type: (dict, dict) -> dict
    """Disable and, in test or forced mode, delete a Cognito user."""
    logger.info("http_delete")
    if request_params and 'force' in request_params:
        logger.info(request_params['force'])
    # if test, delete the user; if production, de-activate
    # BUG FIX: the conditional expression needed parentheses -- %-formatting
    # binds tighter than `if/else`, so production previously logged just
    # the bare string 'production'.
    logger.info("http_delete in %s mode"
                % ('test' if is_test() else 'production'))
    if is_test() or "force" in request_params:
        # disable the user in Cognito before deleting
        try:
            cognito_idp_client.admin_disable_user(
                UserPoolId=get('aws_user_pools_id'),
                Username=request_params['username'])
            sleep(2)
        except cognito_idp_client.exceptions.UserNotFoundException:
            return {}
        logger.info("Deleting user %s for real" % request_params['username'])
        cognito_idp_client.admin_delete_user(
            UserPoolId=get('aws_user_pools_id'),
            Username=request_params['username'])
        logger.info("Deleted user from Cognito")
    return {}
def create_users(delete_users):
    # Provision each tester account (TESTERS) through the signup lambda,
    # then point its User_profile row at a placeholder avatar.
    # NOTE(review): `delete_users` is accepted but never used in this body.
    for tester in TESTERS:
        if is_test():
            # Test mode: invoke the lambda in-process with canned data
            fullpath = get_lambda_fullpath("LambdaApiUserSignUp")
            event = get_lambda_test_data(fullpath)
            event['body']['username'] = tester
            response = invoke(fullpath, event)
            assert response['body']
            body = json.loads(response['body'])
            if response['statusCode'] == STATUS_BAD_REQUEST:
                # Already provisioned on a previous run -- fetch instead
                assert body['Code'] == "UsernameExistsException"
                payload = {"httpMethod": "GET",
                           "queryStringParameters": event['body']}
                # noinspection PyTypeChecker,PyUnusedLocal
                response = invoke(fullpath, payload)
            else:
                assert response['statusCode'] == STATUS_OK
                assert body['Username']
        if is_production():
            # Production mode: go through the deployed API Gateway endpoint
            # noinspection PyBroadException,PyUnusedLocal
            event = get_lambda_test_data(
                get_lambda_fullpath("LambdaApiUserSignUp"))
            event['body']['username'] = tester
            event['body'].pop('newpassword', None)
            url = get_api_url(apigateway_client, 'API', '/v1', '/user/signup')
            response = requests.put(url, headers=event['headers'],
                                    data=json.dumps(event['body']))
            response_data = json.loads(response.text)
            if response.status_code == STATUS_BAD_REQUEST:
                # Already provisioned -- fetch instead
                assert response_data['Code'] == "UsernameExistsException"
                # noinspection PyUnusedLocal
                response = requests.get(url, params=event['body'])
            else:
                assert response.status_code == STATUS_OK
        # update database
        session = Session(region_name=get('aws_region_name'))
        data = {
            "username": tester,
            "avatar": "00000000-0000-0000-0000-000000000000"
        }
        # noinspection PyBroadException,PyUnusedLocal
        with User_profile.atomic():
            query = User_profile.update(**data).where(
                User_profile.username == tester)
            count = query.execute()
            # exactly one profile row must exist per tester
            assert count == 1
def raw_database():
    """Recreate the test database from scratch and return the shared cursor.

    Only acts when running tests against a '*test' database; connects to the
    maintenance database 'postgres' so the target database can be dropped.
    """
    if is_test() and get('database').endswith('test'):
        conn = psycopg2.connect(database='postgres')
        conn.set_isolation_level(0)  # autocommit for DROP/CREATE DATABASE
        # noinspection PyUnusedLocal
        try:
            maintenance_cursor = conn.cursor()
            _drop_database(maintenance_cursor)
            _create_database(maintenance_cursor)
        except psycopg2.OperationalError as ex:
            # deliberately best-effort: a connection hiccup is not fatal here
            pass
        finally:
            conn.close()
        _create_extensions()
    return db_cursor
def backup_route53(s3_resource, aws_account_id, current_date):
    # type: ('boto3.client("s3")', str, str) -> float
    """Save every Route53 hosted zone's record sets to S3 as JSON.

    Returns the elapsed wall-clock time in seconds.
    """
    logger.info("backup route53 ran at " + str(datetime.datetime.now()))
    started = time.time()
    s3_bucket = get_S3_bucket(s3_resource,
                              "backup-route53-%s" % aws_account_id,
                              get('aws_region_name'))
    route53_client = boto3.client('route53')
    for hosted_zone in route53_client.list_hosted_zones()['HostedZones']:
        # Id looks like '/hostedzone/Z123...' -- take the bare zone id
        zone_id = hosted_zone['Id'].split('/')[2]
        records = route53_client.list_resource_record_sets(
            HostedZoneId=zone_id)
        body = bytearray()
        body.extend(map(ord, json.dumps(records, indent=4)))
        # Upload the file to S3
        s3_bucket.put_object(
            Key=current_date + '-' + hosted_zone['Name'] + '-backup.json',
            Body=body)
    return time.time() - started
def http_get(request_params, request_body):
    # type: (dict, dict) -> dict
    """Fetch a single Cognito user by username."""
    logger.info("http_get")
    user = cognito_idp_client.admin_get_user(
        UserPoolId=get('aws_user_pools_id'),
        Username=request_params['username'])
    return remove_cruft(user)
def backup_cognito(s3_resource, aws_account_id, current_date):
    # type: ('boto3.client("s3")', str, str) -> float
    """Back up Cognito user pools, app clients and identity pools to S3.

    Returns the elapsed wall-clock time in seconds.
    """
    logger.info("backup cognito ran at " + str(datetime.datetime.now()))
    started = time.time()
    s3_bucket = get_S3_bucket(s3_resource,
                              "backup-cognito-%s" % aws_account_id,
                              get('aws_region_name'))
    cognito_idp_client = boto3.client('cognito-idp',
                                      region_name=get('aws_region_name'))
    user_pool_list = []
    user_pool_client_list = []
    identity_pool_list = []
    pools = cognito_idp_client.list_user_pools(MaxResults=60)
    for user_pool in pools['UserPools']:
        response = cognito_idp_client.describe_user_pool(
            UserPoolId=user_pool['Id'])
        # datetimes are not JSON serializable -- blank them out
        response['UserPool']['LastModifiedDate'] = None
        response['UserPool']['CreationDate'] = None
        user_pool_list.append(response['UserPool'])
        user_pool_clients = cognito_idp_client.list_user_pool_clients(
            UserPoolId=response['UserPool']['Id'], MaxResults=60)
        for user_pool_client in user_pool_clients['UserPoolClients']:
            response = cognito_idp_client.describe_user_pool_client(
                UserPoolId=user_pool_client['UserPoolId'],
                ClientId=user_pool_client['ClientId'])
            response['UserPoolClient']['LastModifiedDate'] = None
            response['UserPoolClient']['CreationDate'] = None
            user_pool_client_list.append(response['UserPoolClient'])
    cognito_identity_client = boto3.client(
        'cognito-identity', region_name=get('aws_region_name'))
    identity_pools = cognito_identity_client.list_identity_pools(
        MaxResults=60)
    for identity_pool in identity_pools['IdentityPools']:
        response = cognito_identity_client.describe_identity_pool(
            IdentityPoolId=identity_pool['IdentityPoolId'])
        response.pop('ResponseMetadata', None)
        identity_pool_list.append(response)

    def _upload(suffix, payload):
        # Serialize to a byte buffer exactly as the inline code did
        buf = bytearray()
        buf.extend(map(ord, json.dumps(payload, indent=4)))
        s3_bucket.put_object(Key=current_date + suffix, Body=buf)

    # save cognito user pool configuration
    _upload('-user_pool-backup.json', user_pool_list)
    # save cognito user pool app client configuration
    _upload('-user_pool_app_client-backup.json', user_pool_client_list)
    # save cognito (Federated) identity pool configuration
    _upload('-identity_pool-backup.json', identity_pool_list)
    return time.time() - started
def http_get(request_params, request_body):
    # type: (dict, dict) -> dict
    """List all users in the Cognito user pool."""
    logger.info("http_get")
    users_response = cognito_idp_client.list_users(
        UserPoolId=get('aws_user_pools_id'))
    return users_response['Users']