def test_move_file(self):
    """Moving a file removes the source and creates the target."""
    src_path = temp_file()
    dst_path = temp_file()
    file_copy(self.test_file, src_path)        # seed the source with known content
    self.file_service.move_file(src_path, dst_path)
    assert os.path.exists(src_path) is False   # source is gone after the move
    assert os.path.exists(dst_path) is True    # target now exists
def test_file_copy(self):
    """file_copy recreates a deleted destination with the source's contents."""
    contents_a = random_string()
    contents_b = random_string()
    path_a = temp_file(contents=contents_a)
    path_b = temp_file(contents=contents_b)
    # both temp files exist and hold their respective contents
    assert file_exists(path_a) is True
    assert file_exists(path_b) is True
    assert file_contents(path_a) == contents_a
    assert file_contents(path_b) == contents_b
    # remove the destination, then copy the source over it
    assert file_delete(path_b) is True
    assert file_exists(path_b) is False
    file_copy(path_a, path_b)
    assert file_exists(path_b) is True
    assert file_contents(path_b) == contents_a     # destination now mirrors the source
def setUpClass(cls) -> None:
    """Shared fixtures: a static-content temp file (stable sha256) plus a metadata stub."""
    cls.test_file      = temp_file(contents='Static text so that we have a static hash')
    cls.test_file_name = file_name(cls.test_file)
    cls.test_file_hash = '500286533bf75d769e9180a19414d1c3502dd52093e7351a0a9b1385d8f8961c'
    cls.meta_data      = {'file_name'     : None,                # filled in by the code under test
                          'original_hash' : cls.test_file_hash,  # sha256 of the static contents above
                          'rebuild_hash'  : None}
def test_hd1_add_file(self):
    """A file added to hd1 storage is retrievable by name with identical contents."""
    source_path = temp_file(contents=random_text())
    source_name = file_name(source_path)
    stored_path = self.storage.hd1_add_file(source_path)
    assert file_exists(stored_path)
    assert file_contents(stored_path) == file_contents(source_path)   # lossless copy
    assert self.storage.hd1_file_path(source_name) == stored_path     # lookup by name resolves to it
def test_GET_bytes_to_file(self):
    """Downloading the fixture PNG writes the expected bytes to the target path."""
    target_path = temp_file(extension="png")
    assert file_not_exists(target_path)
    assert GET_bytes_to_file(self.url_png, target_path)
    assert file_exists(target_path)
    assert file_size(target_path) == 17575             # known size of the fixture image
    assert file_bytes(target_path)[:4] == b"\x89PNG"   # PNG magic number
def setUpClass(cls) -> None:
    """Create the static-hash fixture file and a configured Pre_Processor."""
    cls.test_file = temp_file(contents='Static text so that we have a static hash')
    cls.file_hash = '500286533bf75d769e9180a19414d1c3502dd52093e7351a0a9b1385d8f8961c'
    cls.temp_dir  = temp_folder()
    file_copy(cls.test_file, cls.temp_dir)                      # seed the temp folder with the fixture
    cls.pre_processor = Pre_Processor()
    Setup_Testing().configure_pre_processor(cls.pre_processor)  # wire test configuration into it
def code_zip(self):
    """Download the latest layer version's code bundle into a temp .zip file.

    Returns the result of GET_bytes_to_file, or None when no version ARN or
    code location is available.
    """
    version_arn = self.latest().get('LayerVersionArn')
    if not version_arn:
        return None
    layer_version = self.client().get_layer_version_by_arn(Arn=version_arn)
    url_code      = layer_version.get('Content', {}).get('Location')
    if not url_code:
        return None
    path_target = temp_file(extension='.zip')
    return GET_bytes_to_file(url_code, path_target)
def exec_scp_command(self, source_file, target_file=None):
    """Copy source_file via scp; returns target_file on success, None otherwise.

    When target_file is not provided a fresh temp file path is used.
    """
    if target_file is None:
        target_file = temp_file()
    scp_params = self.get_scp_params(source_file, target_file)
    result     = exec_process("scp", scp_params)
    if self.filter_exec_results(result).get('status'):
        return target_file
    return None
def yaml_save(yaml_code, path_file=None):
    # todo: refactor other 'save' methods to have the content as the first param
    #       (to allow for creation of temp files when path_file path is not provided)
    """Dump yaml_code to path_file (a fresh temp .yaml file when not provided); returns the path."""
    target_path = path_file if path_file is not None else temp_file(extension=".yaml")
    with open(target_path, 'w') as yaml_file:
        yaml.dump(yaml_code, yaml_file)
    return target_path
def test_file_write(self):
    """file_write round-trips both text and binary payloads."""
    target_path = temp_file()
    payload     = "this is a string"
    assert file_contents(file_write(target_path, payload)) == payload
    assert file_bytes(file_write(target_path, payload)) == payload.encode()
    # binary mode accepts bytes; text read decodes back to the same string
    assert file_contents(file_write(target_path, payload.encode(), mode='wb')) == payload
    assert file_bytes(file_write(target_path, b"\x89PNG___", mode='wb')) == b"\x89PNG___"
def test_download(self):
    """Screenshots download both to a default and to an explicit target path."""
    downloaded_default = self.screenshot.download()
    assert file_exists(downloaded_default)
    # download to a specific location
    explicit_target     = temp_file(extension='.png')
    downloaded_explicit = VM_Screenshot(self.vm, explicit_target).download()
    assert file_exists(downloaded_explicit)
def test_do_rebuild_bad_file(self):
    """do_rebuild on an undecodable file returns False and records the engine error.

    Fix: the local variable 'dir' shadowed the builtin; renamed to metadata_dir
    (the keyword argument name 'dir=' expected by do_rebuild is unchanged).
    """
    bad_file     = temp_file(contents=random_text())
    file_hash    = file_sha256(bad_file)
    metadata     = self.meta_service.create_metadata(bad_file)
    endpoint     = f'http://{self.sdk_server}:{self.sdk_port}'
    metadata_dir = metadata.metadata_folder_path()
    result = self.file_processing.do_rebuild(endpoint=endpoint, hash=file_hash,
                                             source_path=bad_file, dir=metadata_dir)
    assert result == False                       # rebuild must fail for a bad file
    metadata.load()                              # re-read metadata persisted by do_rebuild
    assert metadata.data.get('error') == 'Engine response could not be decoded'
def test_processDirectory__bad_file(self):
    """processDirectory over a bad file's metadata folder fails and records the error.

    Fix: the local variable 'dir' shadowed the builtin; renamed to metadata_dir
    (the keyword argument name 'dir=' expected by processDirectory is unchanged).
    """
    bad_file     = temp_file(contents=random_text())
    metadata     = self.meta_service.create_metadata(bad_file)
    endpoint     = f'http://{self.sdk_server}:{self.sdk_port}'
    metadata_dir = metadata.metadata_folder_path()
    result = self.file_processing.processDirectory(endpoint=endpoint, dir=metadata_dir)
    assert result == False                       # processing must fail for a bad file
    metadata.load()                              # re-read metadata persisted by the call
    assert metadata.data.get('rebuild_status') == 'Completed with errors'
    assert metadata.data.get('error') == 'Engine response could not be decoded'
def test_update_status(self):
    """update_status changes a file entry's status both in memory and on disk."""
    data_file = temp_file()
    # patch get_file_path so save() writes to our temp file instead of the real location
    with patch.object(Hash_Json, 'get_file_path', return_value=data_file):
        self.hash_json.add_file(self.test_file_hash, self.test_file_name)
        assert self.hash_json.data()[self.test_file_hash]['file_status'] == 'Initial'
        self.hash_json.update_status(self.test_file_hash, 'BBBB')
        self.hash_json.save()
        assert self.hash_json.data()[self.test_file_hash]['file_status'] == 'BBBB'   # in-memory
        assert json_load_file(data_file)[self.test_file_hash]['file_status'] == 'BBBB'   # on disk
        pprint(self.hash_json.load())
def test_save(self):
    """save() writes the in-memory data to the (patched) file path."""
    target_path = temp_file()                                        # temp file to save data
    assert file_not_exists(target_path)                              # confirm it doesn't exist yet
    with patch.object(Hash_Json, 'get_file_path', return_value=target_path):
        assert self.hash_json.get_file_path() == target_path         # patch is in place
        self.hash_json.save()
        assert file_exists(target_path)                              # save created the file
        assert self.hash_json.load() == self.hash_json.data()        # reloaded data matches
        assert json_load_file(target_path) == self.hash_json.data()  # raw json load matches too
    assert self.hash_json.get_file_path() != target_path             # patch is gone after 'with' ends
    file_delete(target_path)                                         # clean up the temp file
def requests_download_from_url(self):
    """GET the server URL (with configured cookie/headers/params) into a temp .png file.

    Returns the path of the file written. NOTE(review): the response status is not
    checked, so an error body would be written as-is — behavior preserved from original.
    """
    download_path = temp_file(extension=".png")
    cookies       = self.get_request_cookie()
    headers       = self.get_headers()
    params        = self.get_params()
    server_url    = self.get_server_url()
    with open(download_path, "wb") as local_file:
        response = requests.get(server_url, params=params, headers=headers,
                                cookies=cookies, verify=self.verify_cert)
        local_file.write(response.content)
    return download_path
def setUpClass(cls) -> None:
    """Create shared fixtures and an Elastic-backed Metadata_Service.

    Skips the whole test class when the Elastic server is not available.
    """
    super().setUpClass()
    cls.test_file = temp_file(contents='Static text so that we have a static hash')
    cls.file_hash = '500286533bf75d769e9180a19414d1c3502dd52093e7351a0a9b1385d8f8961c'
    # build and configure the elastic client, then inject it into the service
    cls.metadata_elastic = Metadata_Elastic()
    Setup_Testing().configure_metadata_elastic(cls.metadata_elastic)
    cls.metadata_service = Metadata_Service()
    cls.metadata_service.metadata_elastic = cls.metadata_elastic
    if cls.metadata_elastic.enabled is False:       # no Elastic: nothing here can run
        pytest.skip('Elastic server not available')
def test_folder_copy(self):
    """End-to-end check of folder_copy (with and without ignore_pattern), zip_files
    and file_find over a nested tree.

    Layout created: folder_a contains folder_b, folder_c and file_a; each
    sub-folder holds one file — 3 files total, all with contents 'abc'.
    """
    folder_a = temp_folder(prefix='folder_a_')
    folder_b = temp_folder(prefix='folder_b_', parent_folder=folder_a)
    folder_c = temp_folder(prefix='folder_c_', parent_folder=folder_a)
    file_a   = temp_file(parent_folder=folder_a, contents='abc')
    file_b   = temp_file(parent_folder=folder_b, contents='abc')
    file_c   = temp_file(parent_folder=folder_c, contents='abc')
    target_a = path_combine(folder_a, 'target_a')
    assert parent_folder(target_a) == folder_a
    assert parent_folder_combine(target_a, 'target_a') == target_a
    # full copy: all 3 files land in target_a with their relative paths preserved
    assert folder_copy(source=folder_a, destination=target_a) == target_a
    assert (len(folder_files(target_a)) == 3)
    assert folder_files(target_a) == sorted([
        path_append(target_a, remove(file_a, folder_a + '/')),
        path_append(target_a, remove(file_b, folder_a + '/')),
        path_append(target_a, remove(file_c, folder_a + '/'))
    ])
    # test with ignore_pattern: folder_b_* is skipped, so only 2 files are copied
    target_b = path_combine(folder_a, 'target_b')
    assert folder_copy(source=target_a, destination=target_b, ignore_pattern='folder_b_*') == target_b
    assert (len(folder_files(target_b)) == 2)
    # zipping target_a lists the 3 relative paths
    zipped_files = zip_files(target_a)
    assert zip_file_list(zipped_files) == sorted([
        remove(file_a, folder_a + '/'),
        remove(file_b, folder_a + '/'),
        remove(file_c, folder_a + '/')
    ])
    # recursive glob sees originals + both copies: 3 + 3 + 2 = 8 files
    path_pattern = f'{folder_a}/**/*.*'
    assert len(file_find(path_pattern)) == 8
def test_upload__download(self):
    """Round-trip a local temp file through the datastore (upload → download → delete).

    Fix: temp_file was called with 'file_contents=', but every other call in this
    codebase uses the 'contents=' keyword — the original keyword would not match
    temp_file's signature.
    """
    local_file = temp_file(contents="This is a local file - " + random_string())   # create local temp file
    self.datastore_file.upload(local_file)                       # upload file to server
    tmp_file = self.datastore_file.download()                    # download file from server
    assert file_exists(tmp_file)                                 # confirm it exists
    assert file_contents(local_file) == file_contents(tmp_file)  # content matches the random payload
    assert self.datastore_file.delete() is True                  # delete temp file from data_store
    file_delete(local_file)                                      # delete local temp file
def test_file_create(self):
    """file_create writes the given text (or an empty file when called with no args)."""
    target_path   = temp_file()
    expected_text = random_string()
    assert file_delete(target_path) is True          # remove the pre-created temp file
    assert file_exists(target_path) is False
    assert file_create(target_path, expected_text) == target_path
    assert file_exists(target_path) is True
    assert file_contents(target_path) == expected_text
    # with no arguments a new empty file is created
    no_args_file = file_create()
    assert file_exists(no_args_file) is True
    assert file_contents(no_args_file) == ''
def save_png_base64_to_file(png_data, png_file=None):
    """Decode a base64 PNG string and write it to png_file (a temp .png when omitted).

    Returns the path written on success, None on bad input or write failure.
    Fix: uses isinstance instead of 'type(...) is not str' (also accepts str
    subclasses); flattened nesting into guard clauses.
    """
    if png_data is None:
        return None
    if not isinstance(png_data, str):
        logger_png.error(f'Png data was not a string: {png_data}')
        return None
    if png_file is None:
        png_file = temp_file('.png')
    try:
        with open(png_file, "wb") as fh:
            fh.write(base64.decodebytes(png_data.encode()))
        logger_png.error(f'Png data with size {len(png_data)} saved to {png_file}')  # note: this is currently set to error because nothing else seems to be picked up by logging.getLogger().addHandler(logging.StreamHandler())
        return png_file
    except Exception as error:
        logger_png.error(f'png save error: {error}')
        logger_png.error(png_data)
        return None
def test_is_hash(self):
    """is_hash accepts exact sha256 strings and rejects everything else."""
    sample_file    = temp_file(contents='aaaa')
    hash_from_file = Metadata_Utils().file_hash(sample_file)   # create hash from file
    hash_from_text = str_sha256('asd')                         # create hash from string
    # both are valid hashes
    assert self.hash_json.is_hash(hash_from_file) is True
    assert self.hash_json.is_hash(hash_from_text) is True
    # corner cases: None, empty, and non-hash strings
    assert self.hash_json.is_hash(None) is False
    assert self.hash_json.is_hash('') is False
    assert self.hash_json.is_hash('aaaa') is False
    # only exact matches work — prefixes/suffixes break it
    assert self.hash_json.is_hash(hash_from_file + 'aaaa') is False
    assert self.hash_json.is_hash(hash_from_text + 'aaaa') is False
    assert self.hash_json.is_hash('aaa' + hash_from_file) is False
    # content on extra lines is also not a match
    assert self.hash_json.is_hash(hash_from_text + '\nb`') is False
    assert self.hash_json.is_hash('a\n' + hash_from_file) is False
    file_delete(sample_file)
def logger_add_handler__file(log_file=None):
    """Attach a FileHandler to the logger; returns the log file path used.

    When log_file is falsy a fresh temp .log file path is created.
    """
    from osbot_utils.utils.Files import temp_file
    target_log = log_file if log_file else temp_file(extension=".log")
    logger_add_handler(logging.FileHandler(filename=target_log))
    return target_log
def test_update_report(self):
    """update_report stores report data against a previously added file hash."""
    report_file = temp_file()
    # patch get_file_path so the json is written to our temp file
    with patch.object(Analysis_Json, 'get_file_path', return_value=report_file):
        self.analysis_json.add_file(self.test_file_hash, self.test_file_name)
        self.analysis_json.update_report(self.test_file_hash, self.report_data)
        pprint(self.analysis_json.get_from_file())
def setUpClass(cls) -> None:
    """Shared fixtures: a static-content temp file, its name, and its known sha256."""
    cls.test_file_hash = '500286533bf75d769e9180a19414d1c3502dd52093e7351a0a9b1385d8f8961c'
    cls.test_file      = temp_file(contents='Static text so that we have a static hash')
    cls.test_file_name = file_name(cls.test_file)
def test_file_name(self):
    """file_name complements folder_name: recombining them yields the original path."""
    full_path = temp_file()
    rebuilt   = path_combine(folder_name(full_path), file_name(full_path))
    assert rebuilt == full_path
def setUp(self):
    """Pick a fresh temp .png path for each test's screenshot."""
    self.path_temp_png = temp_file('.png')
def test_file_write_bytes(self):
    """file_write_bytes round-trips raw bytes to disk.

    Fix: the local variable 'bytes' shadowed the builtin; renamed to payload.
    """
    target_path = temp_file()
    payload     = b"\x89PNG___"                      # PNG magic + filler
    assert file_bytes(file_write_bytes(target_path, contents=payload)) == payload
    assert file_open_bytes(target_path).read() == b'\x89PNG___'
def setUpClass(cls) -> None:
    """Create a test file plus a sibling copy (for adding multiple files)."""
    cls.file_path      = temp_file(contents='some text')    # test file
    cls.file_copy_path = cls.file_path + '_an_copy'         # a copy to test adding multiple files
    file_copy(cls.file_path, cls.file_copy_path)
def test_file_not_exists(self):
    """file_not_exists flips to False once the file is created."""
    candidate = temp_file()
    assert file_not_exists(candidate) is True
    file_create(candidate, 'asd')
    assert file_not_exists(candidate) is False