def save(self):
    """Persist the current status data to the status file as pretty JSON.

    On the first save (file not yet present) the status folder and an
    empty status file are created first. Returns self for chaining.
    """
    target = self.status_file_path()
    if not file_exists(target):
        # first save: make sure the parent folder and the file exist
        folder_create(self.storage.hd2_status())
        file_create(target)
    json_save_file_pretty(self.data(), target)
    return self
def __init__(self, reload_data=False, return_cache_key=False):
    """Set up the on-disk cache folder under the system temp directory.

    The reload_data and return_cache_key flags are only stored here for
    the caching logic to consult later.
    """
    self.cache_folder_name = "osbot_cache_on_tmp"
    self.reload_data       = reload_data
    self.return_cache_key  = return_cache_key
    self.last_cache_path   = None
    self.cache_folder      = path_combine(temp_folder_current(), self.cache_folder_name)
    folder_create(self.cache_folder)        # make sure the cache folder exists on disk
def pip_install_dependency(target):
    """Install *target* via pip3 into the shared _lambda_dependencies folder.

    The install only runs when the package's target folder does not exist
    yet; in all cases the function returns whether the install folder
    exists afterwards, so callers get a consistent bool instead of the
    raw process output. (Previously the first-install path returned the
    Process.run result and skipped the folder_exists verification.)
    """
    path_lambda_dependencies = Files.path_combine('.', '../../../_lambda_dependencies/')
    folder_create(path_lambda_dependencies)
    path_install = Files.path_combine(path_lambda_dependencies, target)
    if folder_not_exists(path_install):
        # -t installs into the target folder so the Lambda can bundle it
        Process.run('pip3', ['install', '-t', path_install, target])
    return folder_exists(path_install)
def create(self, file_path):
    """Create the metadata folder for the current file hash and record the
    original file's details (a copy plus its size, extension and name).

    No-op when no file hash has been computed yet.
    """
    if not self.file_hash:
        return
    folder_create(self.metadata_folder_path())
    file_copy(file_path, self.source_file_path())
    self.set_original_file_size(file_path)
    self.set_original_file_extension(file_path)
    self.set_original_file_name(file_path)
def test_folder_create(self):
    """folder_create is idempotent and folder_delete_all removes the folder."""
    target = '_tmp_folder'
    assert folder_exists(target)     is False
    assert folder_create(target)     == target      # creates the folder
    assert folder_create(target)     == target      # no-op when it already exists
    assert folder_exists(target)     is True
    assert folder_not_exists(target) is False
    assert folder_delete_all(target) is True
    assert folder_not_exists(target) is True
def save_event_in_tmp_folder(event, target_folder='/tmp/event_logs'):
    """Save *event* as a timestamped JSON file under *target_folder*.

    The folder is created when missing. The file name is the current
    timestamp down to microseconds, so successive events do not collide.
    The folder is now a parameter (defaulting to the original hard-coded
    '/tmp/event_logs') so callers can redirect the logs.

    Returns json_save's return value for the written file.
    """
    now_str       = datetime.now().strftime("%Y-%m-%d__%H-%M-%S__%f")
    target_folder = folder_create(target_folder)
    file_name     = f'{now_str}.json'
    target_file   = path_combine(target_folder, file_name)
    return json_save(target_file, event)
def save_file(self, result, processed_path):
    """Decode *result* from base64 and save it at *processed_path*.

    When decoding succeeds the binary content is written and
    processed_path is returned; when it fails the raw result is written
    as '<name>.html' instead and that path is returned, so the caller
    always gets the path actually written.
    """
    self.add_event_log('Saving to: ' + processed_path)
    dirname = ntpath.dirname(processed_path)
    basename = ntpath.basename(processed_path)
    folder_create(dirname)  # make sure the destination folder exists
    decoded = FileService.base64decode(result)
    if decoded:
        # NOTE(review): 'wrtie_*' spelling matches the FileService API as
        # called here — confirm against FileService before "fixing" it
        FileService.wrtie_binary_file(dirname, basename, decoded)
        self.add_event_log('The decoded file has been saved')
        return processed_path
    else:
        FileService.wrtie_file(dirname, basename + ".html", result)  # todo: capture better this workflow
        self.add_event_log('Decoding FAILED. The HTML file has been saved')
        return processed_path + '.html'  # todo: refactor this workflow and how this is calculated
def setup_test_environment__Deploy_Lambda(cls):  # todo: refactor into separate class
    """Build an importable test-lambda package on disk and bind it to cls.

    Creates a temp folder with a lambda file, moves that file into a
    proper 'osbot_test_deploy_lambda' package (with __init__.py), puts
    the code folder on sys.path, imports the module and stores its run
    function on cls.lambda_function.
    """
    STS().check_current_session_credentials()  # fail fast when AWS creds are missing
    cls.lambda_name = "osbot_test_deploy_lambda"
    cls.lambda_code = Temp_Folder_With_Lambda_File(cls.lambda_name)
    cls.code_folder = cls.lambda_code.folder
    lambda_file = cls.lambda_code.tmp_file
    module_folder = path_combine(cls.code_folder, "osbot_test_deploy_lambda")
    lambda_in_module = path_combine(module_folder, file_name(lambda_file))
    folder_create(module_folder)
    # copy-then-delete acts as a move of the lambda file into the package
    file_copy(lambda_file, lambda_in_module)  # todo add a file_move to OSBot_Utils
    file_delete(lambda_file)
    file_create(path_combine(module_folder, '__init__.py'), "")  # make it a package
    sys.path.append(cls.code_folder)  # so importlib can resolve the new package
    cls.lambda_module = importlib.import_module("osbot_test_deploy_lambda.osbot_test_deploy_lambda")
    cls.lambda_function = cls.lambda_module.run
def test_folder_create_in_parent(self):
    """create_folder_in_parent creates the child under the parent, and
    deleting the parent removes the child as well.

    Uses `is True` / `is False` comparisons (PEP 8 E712) to match the
    style of the sibling test_folder_create, and computes the child path
    once instead of repeating path_combine.
    """
    tmp_folder   = '_tmp_folder'
    child_folder = '_child_folder'
    child_path   = path_combine(tmp_folder, child_folder)
    assert folder_exists(tmp_folder)                            is False
    assert folder_create(tmp_folder)                            == tmp_folder
    assert create_folder_in_parent(tmp_folder, child_folder)    == child_path
    assert folder_exists(child_path)                            is True
    assert folder_delete_all(tmp_folder)                        is True
    assert folder_not_exists(child_path)                        is True
def set_hd2_location(self, hd2_location):
    """Set the hd2 root location (trailing slash stripped) and derive and
    create its data/status/processed/not-processed sub-folders.

    Fix: hd2_not_processed_location was assigned but — unlike the three
    sibling sub-folders — never created; its folder_create call is added.
    """
    self.hd2_location               = self.ensure_last_char_is_not_forward_slash(hd2_location)
    self.hd2_data_location          = path_combine(self.hd2_location, DEFAULT_HD2_DATA_NAME)
    self.hd2_status_location        = path_combine(self.hd2_location, DEFAULT_HD2_STATUS_NAME)
    self.hd2_processed_location     = path_combine(self.hd2_location, DEFAULT_HD2_PROCESSED_NAME)
    self.hd2_not_processed_location = path_combine(self.hd2_location, DEFAULT_HD2_NOT_PROCESSED_NAME)
    folder_create(self.hd2_location)
    folder_create(self.hd2_data_location)
    folder_create(self.hd2_status_location)
    folder_create(self.hd2_processed_location)
    folder_create(self.hd2_not_processed_location)      # was missing: path set but never created
def clear_data_and_status_folders(self):
    """Wipe and recreate the data, status and processed folders, then
    reset the status object."""
    # todo: refactor this clean up to the storage class
    targets = [self.storage.hd2_data(),
               self.storage.hd2_status(),
               self.storage.hd2_processed()]
    for target in targets:              # delete all three first ...
        folder_delete_all(target)
    for target in targets:              # ... then recreate them empty
        folder_create(target)
    self.status.reset()
def configure(self, hd1_path=None, hd2_path=None, hd3_path=None):
    """Configure the HD1/HD2/HD3 locations in both os.environ and the .env file.

    hd1_path must already exist (returns -1 and records last_error_message
    when it does not); hd2_path and hd3_path are created when missing.
    On success reloads config values and returns env_details(); any other
    failure is logged and re-raised as ValueError.

    NOTE(review): mixed error conventions — hd1 failure returns -1 while
    other errors raise; callers must handle both.
    """
    self.reset_last_error()
    try:
        dotenv_file = dotenv.find_dotenv()
        if hd1_path:
            if path.exists(hd1_path):
                environ['HD1_LOCATION'] = hd1_path
                dotenv.set_key(dotenv_file, "HD1_LOCATION", environ["HD1_LOCATION"])
            else:
                self.last_error_message = f"hd1_path did not exist: {hd1_path}"
                log_error(message=f"hd1_path did not exist", data={"path": hd1_path})
                return -1
        if hd2_path:
            if not path.exists(hd2_path):
                # data/status sub-folders are only created alongside a brand
                # new hd2 folder — presumably an existing hd2 already has them
                folder_create(hd2_path)
                folder_create(path_combine(hd2_path, DEFAULT_HD2_DATA_NAME))
                folder_create(path_combine(hd2_path, DEFAULT_HD2_STATUS_NAME))
            environ['HD2_LOCATION'] = hd2_path
            dotenv.set_key(dotenv_file, "HD2_LOCATION", environ["HD2_LOCATION"])
        if hd3_path:
            if not path.exists(hd3_path):
                folder_create(hd3_path)
            environ['HD3_LOCATION'] = hd3_path
            dotenv.set_key(dotenv_file, "HD3_LOCATION", environ["HD3_LOCATION"])
        self.config.load_values()       # pick up the new env values
        return self.env_details()
    except Exception as e:
        self.last_error_message = f'Configure_Env : configure : {e}'
        log_error(f'Configure_Env : configure : {e}')
        raise ValueError(str(e))
def hd1_delete_all_files(self):
    """Empty the hd1 folder: delete it entirely, then recreate it."""
    hd1_path = self.hd1()
    folder_delete_all(hd1_path)
    folder_create(hd1_path)
def set_hd3_location(self, hd3_location):
    """Record the hd3 location (trailing slash stripped) and ensure the folder exists."""
    location = self.ensure_last_char_is_not_forward_slash(hd3_location)
    self.hd3_location = location
    folder_create(location)
def do_rebuild(self, endpoint, hash, source_path, dir):
    """Send the file at *source_path* to the rebuild *endpoint* and store the
    rebuilt result (plus size/hash/path metadata and the XML report status)
    via meta_service.

    Returns True on full success; returns False (after recording the error
    on meta_service) on encode failure, empty rebuild response, save error,
    undecodable engine response, or a missing X-Adaptation-File-Id header.
    """
    log_info(message=f"Starting rebuild for file {hash} on endpoint {endpoint}")
    with Duration() as duration:
        event_data = {"endpoint": endpoint, "hash": hash, "source_path": source_path, "dir": dir}  # todo: see if we can use a variable that holds the params data
        self.add_event_log('Starting File rebuild', event_data)
        self.meta_service.set_rebuild_server(dir, endpoint)
        encodedFile = FileService.base64encode(source_path)
        if not encodedFile:
            message = f"Failed to encode the file: {hash}"
            log_error(message=message)
            self.add_event_log(message)
            self.meta_service.set_error(dir, message)
            return False
        response = self.rebuild(endpoint, encodedFile)
        result = response.text
        if not result:
            message = f"Failed to rebuild the file : {hash}"
            log_error(message=message)
            self.add_event_log(message)
            self.meta_service.set_error(dir, message)
            return False
        try:
            # one source file may map to several original paths; save the
            # rebuilt content at the hd3 equivalent of each
            for path in self.meta_service.get_original_file_paths(dir):
                #rebuild_file_path = path
                if path.startswith(self.config.hd1_location):
                    # mirror the hd1 sub-path under hd3
                    rebuild_file_path = path.replace(self.config.hd1_location, self.config.hd3_location)
                else:
                    rebuild_file_path = os.path.join(self.config.hd3_location, path)
                folder_create(parent_folder(rebuild_file_path))  # make sure parent folder exists
                final_rebuild_file_path = self.save_file(result, rebuild_file_path)  # returns actual file saved (which could be .html)
                # todo: improve the performance of these update since each will trigger a save
                file_size = os.path.getsize(final_rebuild_file_path)  # calculate rebuilt file fize
                rebuild_hash = self.meta_service.file_hash(final_rebuild_file_path)  # calculate hash of final_rebuild_file_path
                self.meta_service.set_rebuild_file_size(dir, file_size)
                self.meta_service.set_rebuild_file_path(dir, final_rebuild_file_path)  # capture final_rebuild_file_path
                self.meta_service.set_rebuild_hash(dir, rebuild_hash)  # capture it
                # NOTE(review): decode check happens after the file was already
                # saved (save_file falls back to .html on decode failure)
                if not FileService.base64decode(result):
                    message = f"Engine response could not be decoded"
                    log_error(message=message, data=f"{result}")
                    self.meta_service.set_error(dir, message)
                    return False
        except Exception as error:
            message = f"Error Saving file for {hash} : {error}"
            log_error(message=message)
            self.meta_service.set_xml_report_status(dir, "No Report")
            self.meta_service.set_error(dir, message)
            return False
        headers = response.headers
        fileIdKey = "X-Adaptation-File-Id"
        # get XML report
        if fileIdKey in headers:
            if self.get_xmlreport(endpoint, headers[fileIdKey], dir):
                self.add_event_log('The XML report has been saved')
                self.meta_service.set_xml_report_status(dir, "Obtained")
            else:
                self.meta_service.set_xml_report_status(dir, "No XML Report")
        else:
            self.meta_service.set_xml_report_status(dir, "Failed to obtain")
            message = f'No X-Adaptation-File-Id header found in the response for {hash}'
            log_error(message)
            self.add_event_log(message)
            self.meta_service.set_error(dir, message)
            return False
            #raise ValueError("No X-Adaptation-File-Id header found in the response")
        # todo: add when server side supports this
        # SDKEngineVersionKey = "X-SDK-Engine-Version"
        # SDKAPIVersionKey = "X-SDK-Api-Version"
        #
        # if SDKEngineVersionKey in headers:
        #     self.sdk_engine_version = headers[SDKEngineVersionKey]
        # if SDKAPIVersionKey in headers:
        #     self.sdk_api_version = headers[SDKAPIVersionKey]
        #
        # self.meta_service.set_server_version(dir, "Engine:" + self.sdk_engine_version + " API:" + self.sdk_api_version )
        log_info(message=f"rebuild ok for file {hash} on endpoint {endpoint} took {duration.seconds()} seconds")
    return True
def icons_folder(self):
    """Return the icons folder path under /tmp, creating it when missing."""
    target = f'/tmp/{self.icons_folder_name}'
    return folder_create(target)
def hd2_delete_all_files(self):
    """Wipe the hd2 data and status folders and recreate them empty."""
    for target in (self.hd2_data(), self.hd2_status()):
        folder_delete_all(target)
    for target in (self.hd2_data(), self.hd2_status()):
        folder_create(target)