def get_access_key_for_user(self, new_key=True, delete_keys=True):
    """Return an AWS access key pair for the current IAM user.

    :param new_key:     when True (default) a new key is created on every call;
                        when False a previously cached key is reused if the
                        temp credentials file exists
    :param delete_keys: when True (default) all existing access keys for the
                        user are deleted before the new one is created
    :return: tuple of (AccessKeyId, SecretAccessKey)
    """
    if new_key or file_not_exists(self.path_temp_credentials):    # create a key when forced to, or when no cached file exists
        if delete_keys:
            self.iam.user_access_keys_delete_all()                # ensure only one valid key remains for this user
        credentials = self.iam.user_access_key_create()           # provision the new access key
        del credentials['CreateDate']                             # datetime objects are not JSON serializable
        json_save(self.path_temp_credentials, credentials)        # cache the credentials in the temp file
    else:
        credentials = json_load(self.path_temp_credentials)       # reuse the cached credentials
    return credentials.get('AccessKeyId'), credentials.get('SecretAccessKey')
def test_GET_bytes_to_file(self):
    # Download a PNG over HTTP into a fresh temp file and validate the result.
    download_path = temp_file(extension="png")
    assert file_not_exists(download_path)                   # target must not exist yet
    assert GET_bytes_to_file(self.url_png, download_path)   # fetch bytes and write them to disk
    assert file_exists(download_path)                       # file was created
    assert file_size(download_path) == 17575                # known size of the test image
    assert file_bytes(download_path)[:4] == b"\x89PNG"      # PNG magic-number header
def run(event, context=None):
    """Execute an ssh command on a remote host using key material supplied in *event*.

    Expected event keys: target_host, ssh_key, ssh_key_name, ssh_user,
    ssh_command and optionally include_stderr.
    Returns the command's stdout (with stderr appended when include_stderr is truthy).
    """
    target_host    = event.get('target_host')
    ssh_key        = event.get('ssh_key')
    ssh_key_name   = event.get('ssh_key_name')
    ssh_user       = event.get('ssh_user')
    ssh_command    = event.get('ssh_command')
    # port_forward = event.get('port_forwards')             # not implemented
    include_stderr = event.get('include_stderr')

    key_file = f'/tmp/{ssh_key_name}'
    if file_not_exists(key_file):                           # materialise the private key locally on first use
        file_create(key_file, ssh_key)
        start_process('chmod', ['600', key_file])           # ssh refuses keys with loose permissions

    params = ['-o', 'StrictHostKeyChecking=no']             # todo: add support for updating the local hosts file
    if key_file:                                            # NOTE(review): always truthy here; kept to preserve original behavior
        params.append('-i')                                 # select the identity file
        params.append(key_file)
    # if port_forward:  # todo: see if we actually need this (main use case would be direct HTTP access to an internal server)
    #     local_port  = port_forward.get('local_port')      # need to see if Lambda will allow binding ports like this
    #     remote_ip   = port_forward.get('remote_ip')
    #     remote_port = port_forward.get('remote_port')
    params.append(f'{ssh_user}@{target_host}')              # user and target host
    params.append(ssh_command)                              # command to execute

    result = start_process("ssh", params)                   # run the ssh command
    if include_stderr:                                      # optionally merge stderr into the returned output
        return result.get('stdout') + result.get('stderr')
    return result.get('stdout')
def test_set_hd3_location(self):
    # Point hd3 at a brand-new folder and confirm it is created on assignment.
    new_location = path_combine(temp_folder(), 'cccc')
    assert file_not_exists(new_location)                    # folder must not exist yet
    self.config.set_hd3_location(new_location)
    assert self.config.hd3_location == new_location         # setter stored the value
    assert file_exists(new_location)                        # and created the folder
    self.config.load_values()                               # reload restores the default location
    assert abspath(self.config.hd3_location) == abspath('./test_data/scenario-1/hd3')
    assert folder_delete_all(new_location)                  # clean up the temp folder
def setUp(self) -> None:
    # Build the ssh config from environment variables; skip when no key is available.
    load_dotenv()
    self.ssh_config = {"user"   : os.environ.get('TEST_SSH_USER'  ),
                       "server" : os.environ.get('TEST_SSH_SERVER'),
                       "ssh_key": os.environ.get('TEST_SSH_KEY'   )}
    if file_not_exists(self.ssh_config.get('ssh_key')):     # can't run ssh tests without a key file
        skip('no ssh key in current test environment')
    self.ssh = Ssh(ssh_config=self.ssh_config)
    print()
def test_save(self):
    save_target = temp_file()                                                   # temp file to receive the saved data
    assert file_not_exists(save_target)                                         # must not exist beforehand
    with patch.object(Hash_Json, 'get_file_path', return_value=save_target):    # redirect saves to the temp file
        assert self.hash_json.get_file_path() == save_target                    # confirm the patch is active
        self.hash_json.save()                                                   # write the data to disk
        assert file_exists(save_target)                                         # save created the file
        assert self.hash_json.load() == self.hash_json.data()                   # reloaded data round-trips
        assert json_load_file(save_target) == self.hash_json.data()             # raw json on disk matches too
    assert self.hash_json.get_file_path() != save_target                        # patch is gone once 'with' ends
    file_delete(save_target)                                                    # clean up the temp file
def test_save_png_bytes_to_file(self):
    # Case 1: explicit target path — bytes are written to the requested file and logged.
    with Log_To_String(logger_png) as log_to_string:
        assert file_not_exists(self.path_temp_png)
        png_file = save_png_bytes_to_file(bytes=TEST_PNG_BYTES, png_file=self.path_temp_png)
        assert png_file == self.path_temp_png               # returns the path it was given
        assert file_exists(self.path_temp_png)
        assert log_to_string.contents() == f'Png data with size 148 saved to {png_file}\n'
    # Case 2: no target path — a temp file is created automatically.
    with Log_To_String(logger_png) as log_to_string:
        png_file = save_png_bytes_to_file(bytes=TEST_PNG_BYTES)
        assert file_exists(png_file)
        assert log_to_string.contents() == f'Png data with size 148 saved to {png_file}\n'
def test_set_hd2_location(self):
    # Point hd2 at a brand-new folder and confirm the derived sub-locations follow it.
    new_location = path_combine(temp_folder(), 'bbb')
    assert file_not_exists(new_location)                    # folder must not exist yet
    self.config.set_hd2_location(new_location)
    assert self.config.hd2_location           == new_location
    assert self.config.hd2_data_location      == path_combine(new_location, DEFAULT_HD2_DATA_NAME)
    assert self.config.hd2_status_location    == path_combine(new_location, DEFAULT_HD2_STATUS_NAME)
    assert self.config.hd2_processed_location == path_combine(new_location, DEFAULT_HD2_PROCESSED_NAME)
    assert file_exists(new_location)                        # setter created the folder
    self.config.load_values()                               # reload restores the default location
    assert abspath(self.config.hd2_location) == abspath('./test_data/scenario-1/hd2')
    assert folder_delete_all(new_location)                  # clean up the temp folder
def load_dependency(target):
    """Download an S3-hosted dependency zip and add its unpacked folder to sys.path.

    No-op outside AWS (when AWS_REGION is not set). Raises Exception when the
    dependency zip is missing from the configured lambda S3 bucket.
    Returns True when the dependency folder exists locally.
    """
    if os.getenv('AWS_REGION') is None:                     # only relevant when running inside AWS
        return
    from osbot_aws.apis.S3 import S3
    import shutil
    import sys
    s3        = S3()
    s3_bucket = AWS_Config().lambda_s3_bucket()
    s3_key    = 'lambdas-dependencies/{0}.zip'.format(target)
    tmp_dir   = Files.path_combine('/tmp/lambdas-dependencies', target)
    if s3.file_exists(s3_bucket, s3_key) is False:          # fail fast when the dependency zip is missing
        raise Exception("In Lambda load_dependency, could not find dependency for: {0}".format(target))
    if file_not_exists(tmp_dir):                            # download and unpack only once per container
        zip_file = s3.file_download(s3_bucket, s3_key, False)
        shutil.unpack_archive(zip_file, extract_dir=tmp_dir)
    if tmp_dir not in sys.path:                             # make the dependency importable
        sys.path.append(tmp_dir)
    return Files.exists(tmp_dir)
def test_file_not_exists(self):
    # file_not_exists flips from True to False once the file is created.
    path = temp_file()
    assert file_not_exists(path) is True
    file_create(path, 'asd')
    assert file_not_exists(path) is False
def download_iso(self):
    """Return the local path of the core ISO, downloading it first when not cached."""
    iso_path = self.path_core_iso
    if file_not_exists(iso_path):                           # download only on first use
        Http.GET_bytes_to_file(self.url_core_iso, iso_path)
    return iso_path
def save_icon_locally(self, name, icon_path, force_reload=False):
    """Download the GitHub-hosted icon *name* into *icon_path* (unless cached) and report success.

    :param name:         icon identifier used to build the GitHub URL
    :param icon_path:    local path where the icon is stored
    :param force_reload: when True, re-download even if the file already exists
    :return: True when the icon file exists locally after the call
    """
    if file_not_exists(icon_path) or force_reload:          # fetch only when missing or a refresh is forced
        GET_bytes_to_file(self.github_url(name), icon_path)
    return file_exists(icon_path)
def download_ova_file(self, url, target_ova_path):
    """Download *url* to *target_ova_path* when not already present; return the target path."""
    if file_not_exists(target_ova_path):                    # skip the download when the ova is already cached
        Http.GET_bytes_to_file(url, target_ova_path)
    return target_ova_path