def test_random_text(self):
    """random_text defaults: 'text_' prefix and 17 chars total; a custom
    prefix is used as-is, with a trailing underscore appended if missing."""
    generated = random_text()
    assert len(generated) == 17
    assert generated.startswith("text_")
    assert len(random_text(length=37)) == 42
    assert random_text(prefix='abc_')[:4] == "abc_"
    assert random_text(prefix='abc')[:4] == "abc_"
def test_info(self):
    """logging.info() should index a record in Elastic whose stored fields
    match the message and extra data we sent."""
    message  = random_text()
    duration = random_text()
    response = self.logging.info(message, duration=duration)
    record_id = response.get('_id')
    assert response.get('_shards').get('successful') == 1
    stored = self.elastic.get_data(record_id)
    assert stored.get('level')    == 'INFO'
    assert stored.get('message')  == message
    assert stored.get('duration') == duration
def test_hd1_add_file(self):
    """Adding a file to hd1 copies it verbatim and makes it resolvable by name."""
    source_file = temp_file(contents=random_text())
    source_name = file_name(source_file)
    stored_path = self.storage.hd1_add_file(source_file)
    assert file_exists(stored_path)
    assert file_contents(stored_path) == file_contents(source_file)
    assert self.storage.hd1_file_path(source_name) == stored_path
def test__start_logging(self):
    """Start the logging worker and push one message per helper."""
    # todo: understand better why this test takes about 1.1 secs to execute
    #       (some of it is caused by the processing process starting, and elastic being setup)
    worker = start_logging()                            # trigger logging process
    log_info()                                          # send 4 log messages
    log_warning()
    log_info(message=random_text(), data={'a': 42})
    log_error(message='an error')
def test_processDirectory__bad_file(self):
    """processDirectory on a metadata folder built from an unrebuildable file
    should return False and persist the decode error into the metadata."""
    bad_file = temp_file(contents=random_text())
    metadata = self.meta_service.create_metadata(bad_file)
    endpoint = f'http://{self.sdk_server}:{self.sdk_port}'
    target_dir = metadata.metadata_folder_path()        # renamed local: 'dir' shadowed the builtin
    result = self.file_processing.processDirectory(endpoint=endpoint, dir=target_dir)
    assert result is False                              # identity check instead of '== False' (pycodestyle E712)
    metadata.load()                                     # re-read the metadata the failed rebuild wrote to disk
    assert metadata.data.get('rebuild_status') == 'Completed with errors'
    assert metadata.data.get('error') == 'Engine response could not be decoded'
def test_do_rebuild_bad_file(self):                     # refactor
    """do_rebuild on an invalid source file should return False and persist
    the engine decode error into the file's metadata."""
    bad_file   = temp_file(contents=random_text())
    file_hash  = file_sha256(bad_file)
    metadata   = self.meta_service.create_metadata(bad_file)
    endpoint   = f'http://{self.sdk_server}:{self.sdk_port}'
    target_dir = metadata.metadata_folder_path()        # renamed local: 'dir' shadowed the builtin
    result = self.file_processing.do_rebuild(endpoint=endpoint, hash=file_hash,
                                             source_path=bad_file, dir=target_dir)
    assert result is False                              # identity check instead of '== False' (pycodestyle E712)
    metadata.load()                                     # re-read metadata persisted by the failed rebuild
    assert metadata.data.get('error') == 'Engine response could not be decoded'
def setUpClass(cls) -> None:
    """Class-level fixture: build one shared test PDF plus its metadata objects."""
    super().setUpClass()
    cls.test_file      = Test_Data().create_test_pdf(text=random_text(prefix="some random text: "))
    cls.test_file_name = file_name(cls.test_file)
    cls.config         = Config()
    #cls.temp_root     = folder_create('/tmp/temp_root') # temp_folder()
    #cls.config.set_root_folder(root_folder=cls.temp_root)
    cls.meta_service   = Metadata_Service()
    cls.metadata       = cls.meta_service.create_metadata(cls.test_file)
    cls.analysis_json  = Analysis_Json()
def test_create_info_exists_delete(self):
    """Full index-pattern lifecycle: create, inspect info keys, check existence, delete."""
    created = self.index_pattern.create()
    #pprint(created)
    assert created.get('attributes').get('title') == self.pattern_name
    assert self.index_pattern.exists() is True
    expected_keys = ['fields', 'id', 'namespaces', 'references',
                     'score', 'title', 'type', 'updated_at']
    assert list_set(self.index_pattern.info()) == expected_keys
    # a pattern that was never created has no info
    assert Index_Pattern(kibana=self.kibana, pattern_name=random_text()).info() == {}
    assert self.index_pattern.delete() is True
def test_critical_debug_error_info_warning(self):
    """One message per log level should be searchable in Elastic, keyed by level."""
    message = random_text()
    for log_method in (self.logging.critical, self.logging.debug,
                       self.logging.error, self.logging.info, self.logging.warning):
        log_method(message)
    by_level = self.elastic.search_using_lucene(message, index_by='level')
    assert list_set(by_level) == ['CRITICAL', 'DEBUG', 'ERROR', 'INFO', 'WARNING']
    assert by_level['ERROR'].get('message') == message
def add_test_files(self, count=5, text_size=10, execute_stage_1=False):
    """Create `count` small test PDFs, add each to hd1 storage, and return
    the stored paths; optionally run stage 1 processing afterwards.

    Padding with '_' * text_size is much faster than asking random_text
    to generate the full-length string.
    """
    stored_paths = []
    for index in range(1, count + 1):
        pdf_text = random_text() + '_' * text_size
        temp_pdf = Test_Data().create_test_pdf(text=pdf_text, file_key=f"temp_file_{index}")
        stored_paths.append(self.storage.hd1_add_file(temp_pdf))
        file_delete(temp_pdf)
    if execute_stage_1:
        self.execute_stage_1()
    return stored_paths
def test_pdf_rebuild(self,):                            # refactor into separate test file
    """Round-trip a generated PDF through the rebuild/base64 API: the rebuilt
    document must keep the original text and gain the Glasswall marker."""
    server   = self.config.test_sdk
    url      = f"http://{server}:8080/api/rebuild/base64"
    headers  = { 'accept': 'application/json', 'Content-Type': 'application/json'}
    # NOTE(review): positional arg — presumably random_text's first parameter is
    # the prefix (keyword form is used elsewhere); confirm against its signature
    pdf_text = random_text("random text - ")
    test_pdf = Test_Data().create_test_pdf(text=pdf_text)
    original_bytes  = file_contents_as_bytes(test_pdf)
    original_base64 = bytes_to_base64(original_bytes)
    response        = POST(url, data={"Base64": original_base64}, headers=headers)
    rebuilt_bytes   = base64_to_bytes(response)
    assert str_to_bytes(pdf_text) in original_bytes
    assert b'Glasswall' not in original_bytes
    assert str_to_bytes(pdf_text) in rebuilt_bytes
    assert b'Glasswall' in rebuilt_bytes
def create_test_pdf(self, text=None, file_key=None):
    """Build a minimal hand-written single-page PDF containing `text` and
    write it to disk, returning whatever file_create_bytes returns
    (presumably the created file's path — confirm against its definition).

    :param text:     text rendered in the page body; defaults to a random string
    :param file_key: base name for the output file; defaults to "test"
    """
    # see https://brendanzagaeski.appspot.com/0004.html for a description of the code bellow
    text = text or random_text(prefix="Some random text in Arial : ")
    font = "Arial"  # "Times-Roman"
    # todo: format the pdf text below better
    # NOTE(review): the '/Length 55' below is hard-coded while the stream text
    # varies in length — most PDF readers tolerate this, but verify if a strict
    # parser is ever used.
    small_pdf_bytes = str_to_bytes(
        '%PDF-1.1\n%\xc2\xa5\xc2\xb1\xc3\xab\n\n1 0 obj\n << /Type /Catalog\n /'
        'Pages 2 0 R\n >>\nendobj\n\n2 0 obj\n << /Type /Pages\n /Kids [3 0 R'
        ']\n /Count 1\n /MediaBox [0 0 300 144]\n >>\nendobj\n\n3 0 obj\n '
        '<< /Type /Page\n /Parent 2 0 R\n /Resources\n << /Font\n '
        ' << /F1\n << /Type /Font\n /Subtype '
        f'/Type1\n /BaseFont /{font}\n >>\n '
        ' >>\n >>\n /Contents 4 0 R\n >>\nendobj\n\n4 0 obj\n << /L'
        f'ength 55 >>\nstream\n BT\n /F1 18 Tf\n 0 0 Td\n ({text}) T'
        'j\n ET\nendstream\nendobj\n\nxref\n0 5\n0000000000 65535 f \n0000000018 000'
        '00 n \n0000000077 00000 n \n0000000178 00000 n \n0000000457 00000 n \ntraile'
        'r\n << /Root 1 0 R\n /Size 5\n >>\nstartxref\n565\n%%EOF\n'
    )
    file_key = file_key or "test"
    # note: the 'extension' argument actually receives "<file_key>.pdf"
    return file_create_bytes(extension=file_key + '.pdf', contents=small_pdf_bytes)
def test_create__time_field(self):
    """Creating a pattern with a custom time field stores it as timeFieldName."""
    field_name = random_text()
    self.index_pattern.create(time_field=field_name)
    assert self.index_pattern.info().get('timeFieldName') == field_name
    assert self.index_pattern.delete() is True