Example 1
    def moveProcessedFiles(self):
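        # move each entry out of hd2_data: COMPLETED folders go to hd2_processed, undecodable FAILED ones to hd2_not_processed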
        json_list = self.hash_json.data()

        for key in json_list:

            source_path = self.storage.hd2_data(key)

            if (FileStatus.COMPLETED == json_list[key]["file_status"]):
                destination_path = self.storage.hd2_processed(key)

                if folder_exists(destination_path):
                    folder_delete_all(destination_path)

                shutil.move(source_path, destination_path)

            if (FileStatus.FAILED == json_list[key]["file_status"]):
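                # for FAILED items, re-read the metadata and park undecodable .xml/.json files in hd2_not_processed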

                meta_service = Metadata_Service()
                meta_service.get_from_file(source_path)
                metadata = meta_service.metadata
                if ("Engine response could not be decoded" == metadata.get_error()) and \
                    metadata.get_original_file_extension() in ['.xml', '.json']:
                    destination_path = self.storage.hd2_not_processed(key)

                    if folder_exists(destination_path):
                        folder_delete_all(destination_path)

                    shutil.move(source_path, destination_path)
Example 2
    def test__init__(self):
        self.pre_processor.clear_data_and_status_folders()  # clear output folders
        self.pre_processor.process_files()  # copy files across

        assert folder_exists(self.config.hd1_location)
        assert folder_exists(self.config.hd2_location)
        assert folder_exists(self.config.hd3_location)
Example 3
 def test_folder_create(self):
     tmp_folder = '_tmp_folder'
     assert folder_exists(tmp_folder) is False
     assert folder_create(tmp_folder) == tmp_folder
     assert folder_create(tmp_folder) == tmp_folder
     assert folder_exists(tmp_folder) is True
     assert folder_not_exists(tmp_folder) is False
     assert folder_delete_all(tmp_folder) is True
     assert folder_not_exists(tmp_folder) is True
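Example 3 above exercises the basic folder helpers end to end. For reference only, here is a hypothetical sketch of how such helpers could be written with os and shutil, consistent with the behaviour the tests assert (folder_create returns the path it was given and is idempotent, folder_delete_all returns True once the folder is gone); this is an assumption, not the library's actual implementation.

# hypothetical sketch of the folder helpers used in these examples (assumed, not the real implementation)
import os
import shutil

def folder_exists(path):
    return os.path.isdir(path)              # True only for an existing directory

def folder_not_exists(path):
    return not os.path.isdir(path)

def folder_create(path):
    os.makedirs(path, exist_ok=True)        # idempotent, so calling it twice is safe
    return path                             # the tests assert the given path is returned

def folder_delete_all(path):
    if os.path.isdir(path):
        shutil.rmtree(path)                 # remove the folder and everything inside it
    return not os.path.isdir(path)          # True once the folder no longer exists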
Example 4
    def test_folder_create_in_parent(self):
        tmp_folder = '_tmp_folder'
        child_folder = '_child_folder'

        assert folder_exists(tmp_folder) == False
        assert folder_create(tmp_folder) == tmp_folder
        assert create_folder_in_parent(tmp_folder,
                                       child_folder) == path_combine(
                                           tmp_folder, child_folder)
        assert folder_exists(path_combine(tmp_folder, child_folder)) == True
        assert folder_delete_all(tmp_folder) == True
        assert folder_not_exists(path_combine(tmp_folder,
                                              child_folder)) == True
Example 5
    def metadata_folder_path(self):
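        # resolve which storage folder (not_processed, processed, or the 'todo' data folder) currently holds this file's metadata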
        if not self.file_hash:
            return

        path = self.storage.hd2_not_processed(self.file_hash)
        if folder_exists(path):
            return path

        path = self.storage.hd2_processed(self.file_hash)
        if folder_exists(path):
            return path

        # never processed - must be in the 'todo' folder
        path = self.storage.hd2_data(self.file_hash)
        return path
Example 6
 def test_get_package(self):
     package = self.deploy.get_package()
     assert package.lambda_name == 'osbot_test_deploy_lambda.osbot_test_deploy_lambda'
     assert package.s3_bucket == self.aws_config.lambda_s3_bucket()
     assert package.s3_key == f'{self.aws_config.lambda_s3_folder_lambdas()}/{package.lambda_name}.zip'
     assert package.role_arn == f"arn:aws:iam::{self.aws_config.aws_session_account_id()}:role/temp_role_for_lambda_invocation"
     assert folder_exists(package.tmp_folder)
Example 7
    def test_copy_file(self):
        assert file_exists(self.test_file)
        assert folder_exists(self.new_folder)
        self.dst = os.path.join(self.new_folder, "image2.jpg")
        self.file_service.copy_file(self.test_file, self.dst)

        assert os.path.exists(self.dst) is True
Example 8
def pip_install_dependency(target):
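    # install the target pip package into the shared _lambda_dependencies folder, skipping the install when it is already present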
    path_lambda_dependencies = Files.path_combine('.', '../../../_lambda_dependencies/')
    folder_create(path_lambda_dependencies)
    path_install = Files.path_combine(path_lambda_dependencies, target)
    if folder_not_exists(path_install):
        return Process.run('pip3', ['install','-t',path_install,target])
    return folder_exists(path_install)
Example 9
 def test_file_create_and_deletion(self):
     with Temp_S3_Zip_With_Lambda_File() as temp_s3_zip:
         assert folder_exists(temp_s3_zip.folder)
         assert file_exists(temp_s3_zip.tmp_file)
         assert temp_s3_zip.s3_prefix == 'lambdas/unit_tests/temp_zips'
         assert 'temp_zip_file_' in temp_s3_zip.file_name
         assert temp_s3_zip.s3_key == f'{temp_s3_zip.s3_prefix}/{temp_s3_zip.file_name}.zip'
Example 10
    def test_image_build(self):
        target_image = 'centos'
        expected_size = 209348126
        folder_dockerFile = path_combine(self.path_docker_images, target_image)
        path_dockerfile = path_combine(folder_dockerFile, 'Dockerfile')
        repository = "osbot_docker__test_image_build"
        tag = "abc"
        image_name = f"{repository}:{tag}"

        assert folder_exists(folder_dockerFile)
        assert file_exists(path_dockerfile)

        result = self.api_docker.image_build(folder_dockerFile, repository,
                                             tag)

        build_logs = result.get('build_logs')
        image = result.get('image')
        status = result.get('status')
        tags = result.get('tags')

        assert self.api_docker.image_exists(repository, tag)
        assert status == 'ok'
        assert image_name in tags
        assert image_name in self.api_docker.images_names()
        assert image.get('Size') == expected_size
        assert next(build_logs) == {'stream': 'Step 1/3 : FROM centos:8'}

        assert self.api_docker.image_delete(repository, tag) is True

        assert image_name not in self.api_docker.images_names()
Example 11
    def test_add_file(self):
        metadata = self.metadata
        file_paths = metadata.data.get('original_file_paths')

        assert self.metadata.exists() is False  # metadata folder doesn't exist

        # adding file first time
        assert metadata.add_file(
            self.file_path
        ) == self.file_hash  # add file and get file hash as return value
        assert metadata.exists() is True  # confirm metadata folder now exists
        assert folder_exists(metadata.metadata_folder_path()
                             )  # confirm metadata folder now exists
        assert file_exists(
            metadata.metadata_file_path())  # confirm metadata json file exists
        assert file_exists(metadata.source_file_path()
                           )  # confirm source file was correctly put in place
        assert metadata.file_hash == self.metadata_utils.file_hash(
            metadata.source_file_path()
        )  # confirm hash of source file matches hash of file_path
        assert metadata.metadata_file_path() == path_combine(
            metadata.metadata_folder_path(), DEFAULT_METADATA_FILENAME
        )  # confirm metadata file is placed in the correct location
        file_paths = metadata.data.get('original_file_paths')
        assert file_paths == [
            self.file_path
        ]  # confirms that in this mode the entire path is preserved

        # adding same file 2nd time (with same hash and same name)
        assert metadata.add_file(
            self.file_path) == self.file_hash  # adding the same file again
        file_paths = metadata.data.get('original_file_paths')
        assert file_paths == [self.file_path
                              ]  # should not impact this value (same as above)

        # adding same file 3rd time (with same hash but different name)
        assert metadata.add_file(
            self.file_copy_path
        ) == self.file_hash  # adding the same file again (with different name)
        file_paths = metadata.data.get('original_file_paths')
        assert file_paths == [self.file_path, self.file_copy_path
                              ]  # will make the new file path be added

        # adding same file 4th time (with self.path_hd1 set to parent folder of path)
        file_parent_folder = parent_folder(
            self.file_path)  # get parent folder of test file
        self.metadata.path_hd1 = file_parent_folder  # assign it to the metadata variable used to calculate virtual paths

        assert metadata.add_file(self.file_path) == self.file_hash
        file_paths = metadata.data.get('original_file_paths')
        assert file_paths == [
            self.file_path, self.file_copy_path,
            file_name(self.file_path)
        ]  # confirm that the virtual file path was added as the 3rd item (in this case the file name)

        #clean up
        assert self.metadata.delete() is True
        assert folder_not_exists(self.metadata.metadata_folder_path())
Example 12
    def test_set_root_folder(self):
        root_folder = temp_folder()
        assert self.config.root_folder != root_folder
        self.config.set_root_folder(root_folder)
        assert self.config.root_folder == root_folder

        assert self.config.hd1_location == path_combine(
            root_folder, DEFAULT_HD1_NAME)
        assert self.config.hd2_location == path_combine(
            root_folder, DEFAULT_HD2_NAME)
        assert self.config.hd3_location == path_combine(
            root_folder, DEFAULT_HD3_NAME)

        assert folder_exists(self.config.root_folder)
        assert folder_exists(self.config.hd1_location)
        assert folder_exists(self.config.hd2_location)
        assert folder_exists(self.config.hd2_data_location)
        assert folder_exists(self.config.hd2_status_location)
        assert folder_exists(self.config.hd2_processed_location)
        assert folder_exists(self.config.hd3_location)

        folder_delete_all(root_folder)

        assert folder_not_exists(self.config.root_folder)
        assert folder_not_exists(self.config.hd1_location)
        assert folder_not_exists(self.config.hd2_location)
        assert folder_not_exists(self.config.hd3_location)
Example 13
    def test_moveProcessedFiles(self):
        Loops.continue_processing = True
        count = 20
        self.add_test_files(count=count, execute_stage_1=True)

        json_data = self.loops.updateHashJson()
        assert len(json_data) > 0

        self.loops.moveProcessedFiles()
        assert folder_exists(self.loops.storage.hd2_processed())
Example 14
    def test_set_config_to_temp_folder__restore_config(self):
        storage         = Storage()
        config          = storage.config
        original_config = config.values()
        self.setup_testing.set_config_to_temp_folder()
        temp_config     = config.values()

        assert parent_folder(config.root_folder  ) == temp_folder_current()
        assert folder_exists(config.root_folder  )
        assert folder_exists(storage.hd1()       )
        assert folder_exists(storage.hd2_status())
        assert folder_exists(storage.hd2_data()  )
        assert folder_exists(storage.hd3()       )
        assert original_config != temp_config

        self.setup_testing.restore_config()
        #self.setup_testing.configure_static_logging()
        assert original_config == config.values()
        assert parent_folder(config.root_folder) != temp_folder_current()
        assert folder_not_exists(temp_config.get('root_folder'))
Example 15
    def test_invalid_hd3(self):
        hd1_path      = "./test_data/scenario-1/hd1"
        hd2_path      = "./test_data/scenario-1/hd2"
        hd3_path      = "./test_data/scenario-1/hd3xyz"

        response=self.configure.configure(hd1_path=hd1_path,
                                          hd2_path=hd2_path,
                                          hd3_path=hd3_path)

        assert self.configure.last_error_message == ""
        assert response is not None
        assert folder_exists(hd3_path)
        folder_delete_all(hd3_path)
Example 16
    def test_load_values(self):
        config = self.config

        self.assertEqual(
            abspath(config.hd1_location),
            abspath(os.environ.get("HD1_LOCATION", DEFAULT_HD1_LOCATION)))
        self.assertEqual(
            abspath(config.hd2_location),
            abspath(os.environ.get("HD2_LOCATION", DEFAULT_HD2_LOCATION)))
        self.assertEqual(
            abspath(config.hd3_location),
            abspath(os.environ.get("HD3_LOCATION", DEFAULT_HD3_LOCATION)))
        self.assertEqual(
            abspath(config.root_folder),
            abspath(os.environ.get("ROOT_FOLDER", DEFAULT_ROOT_FOLDER)))
        self.assertEqual(
            config.endpoints,
            json.loads(os.environ.get("ENDPOINTS", DEFAULT_ENDPOINTS)))
        assert config.endpoints['Endpoints'][0]['IP']
        assert config.endpoints['Endpoints'][0]['Port']

        assert folder_exists(config.root_folder)
        assert folder_exists(config.hd1_location)
        assert folder_exists(config.hd2_location)
        assert folder_exists(config.hd2_data_location)
        assert folder_exists(config.hd2_status_location)
        assert folder_exists(config.hd2_processed_location)
        assert folder_exists(config.hd3_location)

        # check config_cache
        config.root_folder = 'aaa'
        assert Config().root_folder == 'aaa'

        config.load_values()
        assert config.root_folder == DEFAULT_ROOT_FOLDER
        assert Config().root_folder == DEFAULT_ROOT_FOLDER
Example 17
    def LoopHashDirectoriesInternal(self, thread_count, do_single):
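        # validate the inputs, build a work list from the hash JSON, process it on a thread pool, then move the finished folders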

        if folder_exists(self.storage.hd2_data()) is False:
            log_message = "ERROR: rootdir does not exist: " + self.storage.hd2_data(
            )
            log_error(log_message)
            return False

        if not isinstance(thread_count, int):
            raise TypeError("thread_count must be a integer")

        if not isinstance(do_single, bool):
            raise TypeError("thread_count must be a integer")

        log_message = f"LoopHashDirectoriesInternal started with {thread_count} threads"
        self.events.add_log(log_message)
        log_info(log_message)

        json_list = self.updateHashJson()

        log_message = f"LoopHashDirectoriesInternal started with {thread_count} threads"
        self.events.add_log(log_message)
        log_info(log_message)

        threads = list()

        process_index = 0

        log_info(message=f'before mapping thread_data for {len(json_list)} files')
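        # collect a (path, file_hash, index) tuple for every entry that is not yet COMPLETED and still exists on disk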
        thread_data = []
        for key in json_list:
            file_hash = key

            itempath = self.storage.hd2_data(key)
            if (FileStatus.COMPLETED == json_list[key]["file_status"]):
                self.events.add_log("The file processing has already been completed")
                continue

            if not os.path.exists(itempath):
                self.events.add_log(
                    f"ERROR: Path \"{itempath}\" does not exist")
                json_list[key]["file_status"] = FileStatus.FAILED
                continue

            process_index += 1
            thread_data.append((
                itempath,
                file_hash,
                process_index,
            ))
            # # limit the number of parallel threads
            #
            # if process_index % int(thread_count) == 0:                      # todo: refactor this workflow to use multiprocess and queues
            #     # Clean up the threads
            #     for index, thread in enumerate(threads):                    # todo: since at the moment this will block allocating new threads until
            #         thread.join()                                           #       all have finishing execution
            #
            # process_index += 1
            # log_info(message=f"in LoopHashDirectoriesInternal process_index={process_index} , thread #{process_index % int(thread_count) }")
            # x = threading.Thread(target=self.ProcessDirectory, args=(itempath, file_hash, process_index,))
            # threads.append(x)
            # x.start()
            #
            # if do_single:
            #     break
            #
            # if not Loops.continue_processing:
            #     break

        # for index, thread in enumerate(threads):
        #     thread.join()

        log_info(message=f'after mapping thread_data, there are {len(thread_data)} mapped items')
        #thread_data = thread_data[:500]
        #log_info(message=f'to start with only processing {len(thread_data)} thread_data items')
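        # each tuple is handed to ProcessDirectory by the thread pool; moveProcessedFiles then relocates the finished folders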
        pool = ThreadPool(thread_count)
        results = pool.map(self.ProcessDirectory, thread_data)
        pool.close()
        pool.join()

        self.moveProcessedFiles()

        self.events.add_log("LoopHashDirectoriesInternal finished")
        return True
Example 18
 def test_path_images(self):
     assert folder_exists(self._.path_images)
Example 19
 def test_icons_folder(self):
     assert folder_exists(self.jira_icons.icons_folder())
Example 20
 def test__init__(self):
     assert folder_exists(self.test_data.path_test_files)
Example 21
 def test_set_test_root_dir(self):
     self.setup_testing.set_test_root_dir()
     assert folder_exists(path_combine('.', '.git'))
     assert folder_exists(path_combine('.', 'test_data'))
Example 22
 def exists(self):
     return folder_exists(self.metadata_folder_path())
Example 23
 def test_path_folder_with_docker_file(self):
     path = self.icap_client.path_folder_with_docker_file()
     dockerfile = path_combine(path, 'Dockerfile')
     assert folder_exists(path)
     assert file_exists(dockerfile)
Example 24
 def test_path_repo_root(self):
     path_repo = self.setup_testing.path_repo_root()
     assert folder_exists(path_repo)
     assert folder_exists(path_combine(path_repo, '.git'))
     assert folder_exists(path_combine(path_repo, 'test_data'))
Example 25
 def is_in_todo(self):
     return folder_exists(self.storage.hd2_data(self.file_hash))
Example 26
 def is_in_not_processed(self):
     return folder_exists(self.storage.hd2_not_processed(self.file_hash))
Example 27
 def exists_locally(self, package):
     return folder_exists(self.local_path(package))
Example 28
 def test__init__(self):
     assert folder_exists(self.pre_processor.storage.hd2_data())
     assert folder_exists(self.pre_processor.storage.hd2_status())