def test_image_build(self):
    # Build the 'centos' test image end-to-end and verify its metadata,
    # build logs, registered tags and final deletion.
    source_image = 'centos'
    centos_size  = 209348126                      # known size of the built image
    image_folder = path_combine(self.path_docker_images, source_image)
    dockerfile   = path_combine(image_folder, 'Dockerfile')
    repository   = "osbot_docker__test_image_build"
    tag          = "abc"
    full_name    = f"{repository}:{tag}"

    # pre-conditions: the Dockerfile for the test image must be in place
    assert folder_exists(image_folder)
    assert file_exists(dockerfile)

    build_result = self.api_docker.image_build(image_folder, repository, tag)
    build_logs   = build_result.get('build_logs')
    image        = build_result.get('image')

    # the build reports success and the image is registered locally
    assert self.api_docker.image_exists(repository, tag)
    assert build_result.get('status') == 'ok'
    assert full_name in build_result.get('tags')
    assert full_name in self.api_docker.images_names()
    assert image.get('Size') == centos_size
    assert next(build_logs) == {'stream': 'Step 1/3 : FROM centos:8'}

    # clean-up: delete the image and confirm it is gone
    assert self.api_docker.image_delete(repository, tag) is True
    assert full_name not in self.api_docker.images_names()
def get_processing_docker_config(self, target_file_name):
    """Return the in-container paths used to process *target_file_name*.

    All paths live under the fixed '/icap_folder' root, with the file
    placed in its 'input' and 'output' sub-folders.
    """
    base_folder = '/icap_folder'
    return {
        "icap_folder": base_folder,
        "input_file":  path_combine(path_combine(base_folder, 'input'),  target_file_name),
        "output_file": path_combine(path_combine(base_folder, 'output'), target_file_name),
    }
def test_folder_create_in_parent(self):
    # create a parent folder, add a child inside it, then remove everything
    parent     = '_tmp_folder'
    child      = '_child_folder'
    child_path = path_combine(parent, child)

    assert folder_exists(parent) == False                      # must not pre-exist
    assert folder_create(parent) == parent
    assert create_folder_in_parent(parent, child) == child_path
    assert folder_exists(child_path) == True

    # clean up and confirm nothing is left behind
    assert folder_delete_all(parent) == True
    assert folder_not_exists(child_path) == True
def set_hd2_location(self, hd2_location):
    """Set the hd2 root and derive (and create) its standard sub-folders.

    Derives the data/status/processed/not-processed locations under
    *hd2_location* (with any trailing slash stripped) and creates each
    folder on disk.
    """
    self.hd2_location = self.ensure_last_char_is_not_forward_slash(hd2_location)
    self.hd2_data_location          = path_combine(self.hd2_location, DEFAULT_HD2_DATA_NAME)
    self.hd2_status_location        = path_combine(self.hd2_location, DEFAULT_HD2_STATUS_NAME)
    self.hd2_processed_location     = path_combine(self.hd2_location, DEFAULT_HD2_PROCESSED_NAME)
    self.hd2_not_processed_location = path_combine(self.hd2_location, DEFAULT_HD2_NOT_PROCESSED_NAME)

    folder_create(self.hd2_location)
    folder_create(self.hd2_data_location)
    folder_create(self.hd2_status_location)
    folder_create(self.hd2_processed_location)
    # BUG FIX: this location was derived above but never created, unlike its
    # three siblings — consumers of hd2_not_processed_location would hit a
    # missing folder. (If the omission was deliberate, revert this line.)
    folder_create(self.hd2_not_processed_location)
def save_event_in_tmp_folder(event, events_folder='/tmp/event_logs'):
    """Persist *event* as a timestamped JSON file under *events_folder*.

    The folder is created if missing; the default value preserves the
    original hard-coded '/tmp/event_logs' behavior while letting callers
    redirect the logs elsewhere.

    :param event:         JSON-serializable payload to save
    :param events_folder: destination folder (default '/tmp/event_logs')
    :return: the result of json_save (the path of the file written)
    """
    # microsecond suffix keeps files from the same second distinct
    timestamp     = datetime.now().strftime("%Y-%m-%d__%H-%M-%S__%f")
    target_folder = folder_create(events_folder)
    target_file   = path_combine(target_folder, f'{timestamp}.json')
    return json_save(target_file, event)
def get_processing_local_config(self, target_file_name):
    """Create a per-session local folder layout for processing *target_file_name*.

    A fresh GUID names the session folder under the temp
    'icap_processing_folder'; 'input' and 'output' sub-folders are created
    inside it and the file paths are derived from them.
    """
    session_id      = new_guid()
    processing_root = path_combine(current_temp_folder(), 'icap_processing_folder')
    session_folder  = path_combine(processing_root, session_id)
    return {
        "temp_folder": session_folder,
        "session_id":  session_id,
        "input_file":  path_combine(create_folder_in_parent(session_folder, 'input'),  target_file_name),
        "output_file": path_combine(create_folder_in_parent(session_folder, 'output'), target_file_name),
    }
async def test_args_set_user_data_dir__enable_logging(self):
    # point chrome at a temp profile dir, turn on debug logging, launch it
    profile_dir = temp_folder()
    chrome      = Chrome().headless(True)
    chrome.chrome_args.args_set_user_data_dir(profile_dir).enable_logging()
    await chrome.browser()
    # chrome writes its debug log inside the profile's Default folder
    assert file_exists(path_combine(profile_dir, 'Default/chrome_debug.log'))
def __init__(self, reload_data=False, return_cache_key=False):
    """Set up the on-disk cache folder under the current temp folder.

    :param reload_data:      when True, cached values are recomputed
    :param return_cache_key: when True, calls return the cache key instead
    """
    self.reload_data       = reload_data
    self.return_cache_key  = return_cache_key
    self.cache_folder_name = "osbot_cache_on_tmp"
    self.cache_folder      = path_combine(temp_folder_current(), self.cache_folder_name)
    self.last_cache_path   = None          # updated on every cache-path lookup
    folder_create(self.cache_folder)       # make sure the cache folder exists
def test_lambda_upload_file(self):
    # End-to-end check: package a lambda's source code, verify the package
    # contents, then upload/delete/re-upload the zip to its S3 location.
    deploy_lambda = Deploy_Lambda(self.lambda_handler)
    package       = deploy_lambda.package
    aws_lambda    = package.aws_lambda
    deploy_lambda.add_function_source_code()
    # the handler module/function wiring is what we expect
    assert run.__module__ == self.expected_module
    assert run.__name__ == 'run'
    # the packaged file list matches what is on disk in the code folder
    assert '/osbot_aws/lambdas/dev/hello_world.py' in package.get_files()
    assert len(files_list(aws_lambda.folder_code)) == len(package.get_files())
    assert file_exists(path_combine(aws_lambda.folder_code, 'osbot_aws/lambdas/dev/hello_world.py'))
    # the S3 destination is derived from the account id and module name
    assert aws_lambda.s3_bucket == f'{self.expected_account_id}-osbot-lambdas'
    assert aws_lambda.s3_key == f'lambdas/{self.expected_module}.zip'
    # upload cycle: exists -> delete -> gone -> re-upload -> exists again
    assert self.s3.file_exists(bucket=aws_lambda.s3_bucket, key=aws_lambda.s3_key) is True
    assert self.s3.file_delete(bucket=aws_lambda.s3_bucket, key=aws_lambda.s3_key) is True
    assert self.s3.file_exists(bucket=aws_lambda.s3_bucket, key=aws_lambda.s3_key) is False
    self.s3.folder_upload(folder=aws_lambda.folder_code, s3_bucket=aws_lambda.s3_bucket, s3_key=aws_lambda.s3_key)
    assert self.s3.file_exists(bucket=aws_lambda.s3_bucket, key=aws_lambda.s3_key) is True
def test_deployment(self):
    # load the nginx deployment manifest and apply it to the default namespace
    yaml_path = path_combine('../../test_files/deployment', 'nginx-deployment.yaml')
    assert file_exists(yaml_path)
    manifest = yaml_load(yaml_path)
    response = self.cluster.api_apps().create_namespaced_deployment(body=manifest,
                                                                    namespace="default")
    print("Deployment created. status='%s'" % response.metadata.name)
def test_image_build_scratch(self):
    # a 'scratch' image has no layers, so docker reports its size as zero
    scratch_path = path_combine(self.path_docker_images, 'scratch')
    build_result = self.api_docker.image_build(path=scratch_path,
                                               repository='scratch',
                                               tag='latest')
    assert build_result.get('image').get('Size') == 0
def set_root_folder(self, root_folder=None):
    """Resolve the root folder and wire up the hd1/hd2/hd3 default locations.

    Falls back to a fresh temp folder when *root_folder* is None or does
    not exist. Returns self (fluent interface).
    """
    if folder_not_exists(root_folder):              # covers both None and a missing path
        root_folder = temp_folder()
    self.root_folder  = root_folder
    self.hd1_location = path_combine(root_folder, DEFAULT_HD1_NAME)   # defaults for hd1, hd2, hd3
    self.hd2_location = path_combine(root_folder, DEFAULT_HD2_NAME)
    self.hd3_location = path_combine(root_folder, DEFAULT_HD3_NAME)
    self.set_hd1_location(self.hd1_location)        # each setter also ensures the folders exist
    self.set_hd2_location(self.hd2_location)
    self.set_hd3_location(self.hd3_location)
    return self
def test_add_file(self):
    # Exercise Metadata.add_file across repeated adds: same path, same hash
    # with a different name, and finally with path_hd1 set so virtual
    # (relative) paths are recorded instead of absolute ones.
    metadata = self.metadata
    file_paths = metadata.data.get('original_file_paths')
    assert self.metadata.exists() is False  # metadata folder doesn't exist

    # adding file first time
    assert metadata.add_file(self.file_path) == self.file_hash  # add file and get file hash as return value
    assert metadata.exists() is True  # confirm metadata folder now exists
    assert folder_exists(metadata.metadata_folder_path())  # confirm metadata folder now exists
    assert file_exists(metadata.metadata_file_path())  # confirm metadata json file exists
    assert file_exists(metadata.source_file_path())  # confirm source file was correctly put in place
    assert metadata.file_hash == self.metadata_utils.file_hash(metadata.source_file_path())  # confirm hash of source file matches hash of file_path
    assert metadata.metadata_file_path() == path_combine(metadata.metadata_folder_path(), DEFAULT_METADATA_FILENAME)  # confirm metadata file is placed in the correct location
    file_paths = metadata.data.get('original_file_paths')
    assert file_paths == [self.file_path]  # confirms that in this mode the entire path is preserved

    # adding same file 2nd time (with same hash and same name)
    assert metadata.add_file(self.file_path) == self.file_hash  # adding the same file again
    file_paths = metadata.data.get('original_file_paths')
    assert file_paths == [self.file_path]  # should not impact this value (same as above)

    # adding same file 3rd time (with same hash but different name)
    assert metadata.add_file(self.file_copy_path) == self.file_hash  # adding the same file again (with different name)
    file_paths = metadata.data.get('original_file_paths')
    assert file_paths == [self.file_path, self.file_copy_path]  # will make the new file path be added

    # adding same file 4th time (with self.path_hd1 set to parent folder of path)
    file_parent_folder = parent_folder(self.file_path)  # get parent folder of test file
    self.metadata.path_hd1 = file_parent_folder  # assign it to the metadata variable used to calculate virtual paths
    assert metadata.add_file(self.file_path) == self.file_hash
    file_paths = metadata.data.get('original_file_paths')
    assert file_paths == [self.file_path, self.file_copy_path, file_name(self.file_path)]  # confirm that the virtual file path was added as the 3rd item (in this case the file name)

    # clean up
    assert self.metadata.delete() is True
    assert folder_not_exists(self.metadata.metadata_folder_path())
def __init__(self, headless=True):
    """Wire up the browser and paths used to render the vivagraph test page.

    :param headless: run the browser without a visible window (default True)
    """
    self.web_page      = '/vivagraph/simple.html'
    self.jira_icons    = '/vivagraph/icons'
    self.web_root      = path_combine(Files.parent_folder(__file__), '../web_root')
    self.api_browser   = API_Browser(headless=headless).sync__setup_browser()
    self.browser_width = None           # use browser default until set
    self.render_wait   = None           # optional wait before screenshots
    self.web_server    = None  # Web_Server(self.web_root)
def configure(self, hd1_path=None, hd2_path=None, hd3_path=None):
    """Persist the hd1/hd2/hd3 locations into the environment and .env file.

    hd1_path must already exist (it is only recorded, never created);
    hd2/hd3 are created on demand, with hd2 also getting its data and
    status sub-folders.

    :return: self.env_details() on success, -1 when hd1_path is missing
    :raises ValueError: on any unexpected failure (cause is chained)
    """
    self.reset_last_error()
    try:
        dotenv_file = dotenv.find_dotenv()
        if hd1_path:
            if path.exists(hd1_path):
                environ['HD1_LOCATION'] = hd1_path
                dotenv.set_key(dotenv_file, "HD1_LOCATION", environ["HD1_LOCATION"])
            else:
                self.last_error_message = f"hd1_path did not exist: {hd1_path}"
                log_error(message="hd1_path did not exist", data={"path": hd1_path})   # was a placeholder-less f-string
                return -1
        if hd2_path:
            if not path.exists(hd2_path):
                # hd2 needs its data/status sub-folders pre-created
                folder_create(hd2_path)
                folder_create(path_combine(hd2_path, DEFAULT_HD2_DATA_NAME))
                folder_create(path_combine(hd2_path, DEFAULT_HD2_STATUS_NAME))
            environ['HD2_LOCATION'] = hd2_path
            dotenv.set_key(dotenv_file, "HD2_LOCATION", environ["HD2_LOCATION"])
        if hd3_path:
            if not path.exists(hd3_path):
                folder_create(hd3_path)
            environ['HD3_LOCATION'] = hd3_path
            dotenv.set_key(dotenv_file, "HD3_LOCATION", environ["HD3_LOCATION"])
        self.config.load_values()
        return self.env_details()
    except Exception as e:
        self.last_error_message = f'Configure_Env : configure : {e}'
        log_error(f'Configure_Env : configure : {e}')
        # BUG FIX: chain the original exception so its traceback is preserved
        raise ValueError(str(e)) from e
def __init__(self, image_name, path_images=None, image_tag='latest') -> None:
    """Hold the docker/ECR context for building and publishing *image_name*.

    :param image_name:  name of the docker image to manage
    :param path_images: root folder holding the image definitions; defaults
                        to the '../../images' folder relative to this file
    :param image_tag:   docker tag to use (default 'latest')
    """
    # BUG FIX: __init__ always returns None; it was annotated '-> object'
    self.api_docker  = API_Docker()
    self.ecr         = ECR()
    self.aws_config  = AWS_Config()
    self.image_name  = image_name
    self.image_tag   = image_tag
    self.path_images = path_images or path_combine(__file__, '../../images')
def test_add_file_path(self):
    # Verify add_file_path: a no-op until file_hash is set, then it appends
    # paths to 'original_file_paths' — relative to path_hd1 when the path
    # lives under it, otherwise kept as-is.
    test_path_1 = path_combine(self.metadata.path_hd1, 'aaaa.txt')
    test_path_2 = path_combine(self.metadata.path_hd1, 'bbbb/ccc.txt')
    test_path_3 = 'dddd/eeee.txt'
    test_path_4 = '/fff/gggg.txt'
    # NOTE: file_paths aliases the list held inside metadata.data, so the
    # add_file_path calls below mutate it in place
    file_paths = self.metadata.data.get('original_file_paths')
    assert file_paths == []
    self.metadata.add_file_path(test_path_1)
    assert file_paths == []          # ignored: file_hash not set yet
    self.metadata.file_hash = 'this value needs to be set for .add_file_path to work'
    self.metadata.add_file_path(test_path_1)
    self.metadata.add_file_path(test_path_2)
    self.metadata.add_file_path(test_path_3)
    self.metadata.add_file_path(test_path_4)
    # first two were made relative to path_hd1; last two were stored verbatim
    assert file_paths == ['aaaa.txt', 'bbbb/ccc.txt', 'dddd/eeee.txt', '/fff/gggg.txt']
def deploy_lambda__browser_dev(self):
    """Package the osbot_browser source (minus web_root) into the dev lambda.

    Adds the source folder and the osbot_aws dependency, configures the
    lambda's environment variables, pushes the update and returns the package.
    """
    package       = self.deploy_lambda.package
    browser_source = path_combine(__file__, '../../osbot_browser')   # todo: check path
    package.add_folder(browser_source, ignore='web_root')
    package.add_osbot_aws()
    self.configure_environment_variables()
    package.update()
    return package
def setup_test_environment__Deploy_Lambda(cls):  # todo: refactor into separate class
    # Build an importable 'osbot_test_deploy_lambda' package on disk and load
    # it: create the temp lambda file, move it into a package folder with an
    # __init__.py, put the folder on sys.path and import the module so tests
    # can call its 'run' function.
    STS().check_current_session_credentials()    # fail fast if AWS creds are missing
    cls.lambda_name = "osbot_test_deploy_lambda"
    cls.lambda_code = Temp_Folder_With_Lambda_File(cls.lambda_name)
    cls.code_folder = cls.lambda_code.folder
    lambda_file = cls.lambda_code.tmp_file
    module_folder = path_combine(cls.code_folder, "osbot_test_deploy_lambda")
    lambda_in_module = path_combine(module_folder, file_name(lambda_file))
    folder_create(module_folder)
    # copy + delete emulates a move
    file_copy(lambda_file, lambda_in_module)  # todo add a file_move to OSBot_Utils
    file_delete(lambda_file)
    file_create(path_combine(module_folder, '__init__.py'), "")   # make the folder a package
    sys.path.append(cls.code_folder)
    cls.lambda_module = importlib.import_module("osbot_test_deploy_lambda.osbot_test_deploy_lambda")
    cls.lambda_function = cls.lambda_module.run
def test_folder_copy(self):
    # Build a small tree (folder_a containing folder_b and folder_c, one file
    # in each), then exercise folder_copy (with and without ignore_pattern),
    # zip_files and file_find against it.
    folder_a = temp_folder(prefix='folder_a_')
    folder_b = temp_folder(prefix='folder_b_', parent_folder=folder_a)
    folder_c = temp_folder(prefix='folder_c_', parent_folder=folder_a)
    file_a = temp_file(parent_folder=folder_a, contents='abc')
    file_b = temp_file(parent_folder=folder_b, contents='abc')
    file_c = temp_file(parent_folder=folder_c, contents='abc')
    target_a = path_combine(folder_a, 'target_a')
    assert parent_folder(target_a) == folder_a
    assert parent_folder_combine(target_a, 'target_a') == target_a
    # full copy: all three files appear under target_a with relative paths preserved
    assert folder_copy(source=folder_a, destination=target_a) == target_a
    assert (len(folder_files(target_a)) == 3)
    assert folder_files(target_a) == sorted([
        path_append(target_a, remove(file_a, folder_a + '/')),
        path_append(target_a, remove(file_b, folder_a + '/')),
        path_append(target_a, remove(file_c, folder_a + '/'))
    ])
    # test with ignore_pattern: folder_b_* is skipped, leaving two files
    target_b = path_combine(folder_a, 'target_b')
    assert folder_copy(source=target_a, destination=target_b, ignore_pattern='folder_b_*') == target_b
    assert (len(folder_files(target_b)) == 2)
    # zipping target_a lists the same relative paths as the copy above
    zipped_files = zip_files(target_a)
    assert zip_file_list(zipped_files) == sorted([
        remove(file_a, folder_a + '/'),
        remove(file_b, folder_a + '/'),
        remove(file_c, folder_a + '/')
    ])
    # recursive glob sees originals plus both copies (3 + 3 + 2 = 8)
    path_pattern = f'{folder_a}/**/*.*'
    assert len(file_find(path_pattern)) == 8
def key_pair_create_to_file(self, key_name, target_folder=None, tags=None):
    """Create an EC2 key pair and write its private key to a .pem file.

    :param key_name:      name for the new key pair (also the .pem file name)
    :param target_folder: where to write the file; a temp folder when None
    :param tags:          optional tags passed through to key_pair_create
    :return: dict with 'path_key_pair', 'key_pair_id' and the raw 'key_pair'
    """
    new_key_pair = self.key_pair_create(key_name=key_name, tags=tags)
    if target_folder is None:
        target_folder = temp_folder()
    pem_path = path_combine(target_folder, key_name + ".pem")
    file_create(pem_path, new_key_pair.get('KeyMaterial'))
    chmod(pem_path, 0o400)                    # private key must be owner-read-only
    return {'path_key_pair': pem_path,
            'key_pair_id'  : new_key_pair.get('KeyPairId'),
            'key_pair'     : new_key_pair}
def deploy_lambda__browser_dev(self):
    """Package the osbot_browser source (minus web_root) into the dev lambda.

    Adds the source folder and the osbot_aws dependency, configures the
    lambda's environment variables, pushes the update and returns the package.

    NOTE(review): duplicates another deploy_lambda__browser_dev in this
    codebase — consider consolidating.
    """
    package        = self.deploy_lambda.package
    browser_source = path_combine(__file__, '../../osbot_browser')   # todo: check path
    package.add_folder(browser_source, ignore='web_root')
    package.add_osbot_aws()
    self.configure_environment_variables()
    package.update()
    return package
def __init__(self):
    """Point path_packages at the vendored lambda dependencies folder."""
    # dependencies live three levels up from the current working directory
    dependencies_folder = '../../../_lambda_dependencies/'
    self.path_packages  = path_combine('.', dependencies_folder)
def local_path(self, package):
    """Return the on-disk folder for *package* inside path_packages."""
    package_path = path_combine(self.path_packages, package)
    return package_path
def hd3(self, path=''):
    """Resolve *path* inside the configured hd3 location (absolute path)."""
    hd3_root = self.config.hd3_location
    return path_combine(hd3_root, path)
def create_temp_file(self, new_code=None):
    """Write the lambda code into a fresh temp folder.

    :param new_code: replacement source; when falsy the existing
                     self.lambda_code is reused
    :return: self (fluent interface)
    """
    if new_code:
        self.lambda_code = new_code
    self.folder   = folder_create_temp('tmp_lambda_')
    self.tmp_file = path_combine(self.folder, f'{self.file_name}.py')
    file_create(self.tmp_file, self.lambda_code)
    return self
def test_file_name(self):
    # recombining folder_name + file_name must reproduce the original path
    tmp_path = temp_file()
    rebuilt  = path_combine(folder_name(tmp_path), file_name(tmp_path))
    assert rebuilt == tmp_path
def test_path_combine(self):
    # relative segments resolve against the current working directory
    expected = f"{path_current()}/a/b"
    assert path_combine('a', 'b') == expected  # todo: add more use cases
def test_folder_files(self):
    # the folder holding this test file must list its sibling test modules
    this_folder = parent_folder(__file__)
    files_found = folder_files(this_folder)
    assert path_combine(this_folder, 'test_Files.py') in files_found
    assert path_combine(this_folder, 'test_Json.py') in files_found
def get_cache_in_tmp_path(self, self_obj, function_obj, params):
    """Map a (self, function, params) call onto its cache file path.

    Also records the path in self.last_cache_path for later inspection.
    """
    key        = self.get_cache_in_tmp_key(self_obj, function_obj, params)
    cache_file = path_combine(self.cache_folder, key)
    self.last_cache_path = cache_file
    return cache_file