# Example #1
 def test__using_with_valid_zip_and_target_folder(self):
     """Unzip into an explicit target folder: contents exist inside the
        context manager and are cleaned up once it exits."""
     source_folder = parent_folder(__file__)
     target_folder = '/tmp/unzip_test'
     with Zip_Folder(source_folder) as zip_file:
         with Unzip_File(zip_file, target_folder, True) as temp_folder:
             assert Files.exists(temp_folder) is True
     assert Files.exists(temp_folder) is False       # temp folder removed on exit
    def test_add_file(self):
        """Exercise metadata.add_file: first add creates the metadata tree,
           repeated adds dedupe by hash, and path_hd1 switches to virtual paths."""
        metadata = self.metadata
        recorded_paths = metadata.data.get('original_file_paths')

        assert self.metadata.exists() is False  # metadata folder doesn't exist yet

        # adding file first time: returns the file hash and materialises everything
        assert metadata.add_file(self.file_path) == self.file_hash
        assert metadata.exists() is True                                # metadata folder now exists
        assert folder_exists(metadata.metadata_folder_path())           # confirm folder on disk
        assert file_exists(metadata.metadata_file_path())               # metadata json file exists
        assert file_exists(metadata.source_file_path())                 # source file put in place
        assert metadata.file_hash == self.metadata_utils.file_hash(metadata.source_file_path())  # hashes agree
        assert metadata.metadata_file_path() == path_combine(metadata.metadata_folder_path(), DEFAULT_METADATA_FILENAME)  # correct location
        recorded_paths = metadata.data.get('original_file_paths')
        assert recorded_paths == [self.file_path]   # in this mode the entire path is preserved

        # adding same file 2nd time (same hash and same name)
        assert metadata.add_file(self.file_path) == self.file_hash
        recorded_paths = metadata.data.get('original_file_paths')
        assert recorded_paths == [self.file_path]   # should not impact this value (same as above)

        # adding same file 3rd time (same hash but different name)
        assert metadata.add_file(self.file_copy_path) == self.file_hash
        recorded_paths = metadata.data.get('original_file_paths')
        assert recorded_paths == [self.file_path, self.file_copy_path]  # new file path is appended

        # adding same file 4th time, with self.path_hd1 set to the file's parent folder,
        # so the recorded path becomes the virtual path (here just the file name)
        file_parent_folder = parent_folder(self.file_path)
        self.metadata.path_hd1 = file_parent_folder

        assert metadata.add_file(self.file_path) == self.file_hash
        recorded_paths = metadata.data.get('original_file_paths')
        assert recorded_paths == [self.file_path, self.file_copy_path, file_name(self.file_path)]

        # clean up
        assert self.metadata.delete() is True
        assert folder_not_exists(self.metadata.metadata_folder_path())
# Example #3
    def test_set_config_to_temp_folder__restore_config(self):
        """set_config_to_temp_folder should point storage at a temp tree;
           restore_config should put the original config values back."""
        storage       = Storage()
        config        = storage.config
        config_before = config.values()
        self.setup_testing.set_config_to_temp_folder()
        config_after  = config.values()

        # the whole storage tree now lives under the current temp folder
        assert parent_folder(config.root_folder) == temp_folder_current()
        for storage_folder in [config.root_folder    ,
                               storage.hd1()         ,
                               storage.hd2_status()  ,
                               storage.hd2_data()    ,
                               storage.hd3()         ]:
            assert folder_exists(storage_folder)
        assert config_before != config_after

        self.setup_testing.restore_config()
        assert config_before == config.values()
        assert parent_folder(config.root_folder) != temp_folder_current()
        assert folder_not_exists(config_after.get('root_folder'))   # temp tree was deleted
# Example #4
    def test_folder_copy(self):
        """folder_copy should replicate a folder tree, honour ignore_pattern,
           and the copied tree should round-trip through zip_files."""
        folder_a = temp_folder(prefix='folder_a_')
        folder_b = temp_folder(prefix='folder_b_', parent_folder=folder_a)
        folder_c = temp_folder(prefix='folder_c_', parent_folder=folder_a)
        file_a   = temp_file(parent_folder=folder_a, contents='abc')
        file_b   = temp_file(parent_folder=folder_b, contents='abc')
        file_c   = temp_file(parent_folder=folder_c, contents='abc')

        target_a = path_combine(folder_a, 'target_a')

        assert parent_folder(target_a) == folder_a
        assert parent_folder_combine(target_a, 'target_a') == target_a

        # plain copy: all three files arrive, re-rooted under target_a
        assert folder_copy(source=folder_a, destination=target_a) == target_a
        assert len(folder_files(target_a)) == 3

        relative_paths = [remove(item, folder_a + '/') for item in (file_a, file_b, file_c)]
        assert folder_files(target_a) == sorted(path_append(target_a, relative_path)
                                                for relative_path in relative_paths)

        # copy with ignore_pattern: the folder_b_* subtree is skipped
        target_b = path_combine(folder_a, 'target_b')
        assert folder_copy(source=target_a,
                           destination=target_b,
                           ignore_pattern='folder_b_*') == target_b
        assert len(folder_files(target_b)) == 2

        # zipping target_a lists the same relative paths
        zipped_files = zip_files(target_a)
        assert zip_file_list(zipped_files) == sorted(relative_paths)

        path_pattern = f'{folder_a}/**/*.*'
        assert len(file_find(path_pattern)) == 8
# Example #5
 def test_folder_files(self):
     """folder_files on this test's own folder should list the known test modules."""
     current_folder = parent_folder(__file__)
     assert path_combine(current_folder, 'test_Files.py') in folder_files(current_folder)
     assert path_combine(current_folder, 'test_Json.py' ) in folder_files(current_folder)
# Example #6
    def do_rebuild(self, endpoint, hash, source_path, dir):
        """Rebuild a single file via the given engine endpoint.

        Encodes the file at source_path, posts it to the endpoint, saves the
        rebuilt result for every original file path (mapped from hd1 to hd3),
        records size/hash/paths in the metadata service, and fetches the XML
        report.  Returns True on success; on any failure records the error via
        meta_service.set_error and returns False.

        NOTE(review): the parameter names 'hash' and 'dir' shadow builtins;
        they are kept unchanged for backwards compatibility with callers.
        """
        log_info(
            message=f"Starting rebuild for file {hash} on endpoint {endpoint}")
        with Duration() as duration:
            event_data = {
                "endpoint": endpoint,
                "hash": hash,
                "source_path": source_path,
                "dir": dir
            }  # todo: see if we can use a variable that holds the params data
            self.add_event_log('Starting File rebuild', event_data)

            self.meta_service.set_rebuild_server(dir, endpoint)

            encoded_file = FileService.base64encode(source_path)    # base64 payload for the engine
            if not encoded_file:
                message = f"Failed to encode the file: {hash}"
                log_error(message=message)
                self.add_event_log(message)
                self.meta_service.set_error(dir, message)
                return False

            response = self.rebuild(endpoint, encoded_file)
            result = response.text
            if not result:
                message = f"Failed to rebuild the file : {hash}"
                log_error(message=message)
                self.add_event_log(message)
                self.meta_service.set_error(dir, message)
                return False

            try:
                for path in self.meta_service.get_original_file_paths(dir):
                    # map the original (hd1) location to the rebuild (hd3) location
                    if path.startswith(self.config.hd1_location):
                        rebuild_file_path = path.replace(
                            self.config.hd1_location, self.config.hd3_location)
                    else:
                        rebuild_file_path = os.path.join(
                            self.config.hd3_location, path)

                    folder_create(parent_folder(rebuild_file_path))  # make sure parent folder exists

                    # save_file returns the actual file saved (which could be .html)
                    final_rebuild_file_path = self.save_file(result, rebuild_file_path)

                    # todo: improve the performance of these updates since each will trigger a save
                    file_size    = os.path.getsize(final_rebuild_file_path)              # rebuilt file size
                    rebuild_hash = self.meta_service.file_hash(final_rebuild_file_path)  # rebuilt file hash

                    self.meta_service.set_rebuild_file_size(dir, file_size)
                    self.meta_service.set_rebuild_file_path(dir, final_rebuild_file_path)
                    self.meta_service.set_rebuild_hash(dir, rebuild_hash)
                if not FileService.base64decode(result):
                    message = "Engine response could not be decoded"  # fixed: was an f-string with no placeholders
                    log_error(message=message, data=f"{result}")
                    self.meta_service.set_error(dir, message)
                    return False
            except Exception as error:
                message = f"Error Saving file for {hash} : {error}"
                log_error(message=message)
                self.meta_service.set_xml_report_status(dir, "No Report")
                self.meta_service.set_error(dir, message)
                return False

            headers     = response.headers
            file_id_key = "X-Adaptation-File-Id"

            # get XML report
            if file_id_key in headers:
                if self.get_xmlreport(endpoint, headers[file_id_key], dir):
                    self.add_event_log('The XML report has been saved')
                    self.meta_service.set_xml_report_status(dir, "Obtained")
                else:
                    self.meta_service.set_xml_report_status(
                        dir, "No XML Report")
            else:
                self.meta_service.set_xml_report_status(
                    dir, "Failed to obtain")
                message = f'No X-Adaptation-File-Id header found in the response for {hash}'
                log_error(message)
                self.add_event_log(message)
                self.meta_service.set_error(dir, message)
                return False
                #raise ValueError("No X-Adaptation-File-Id header found in the response")

            # todo: add when server side supports this
            # SDKEngineVersionKey = "X-SDK-Engine-Version"
            # SDKAPIVersionKey = "X-SDK-Api-Version"
            #
            # if SDKEngineVersionKey in headers:
            #     self.sdk_engine_version = headers[SDKEngineVersionKey]
            # if SDKAPIVersionKey in headers:
            #     self.sdk_api_version = headers[SDKAPIVersionKey]
            #
            # self.meta_service.set_server_version(dir, "Engine:" + self.sdk_engine_version + " API:" + self.sdk_api_version )
        log_info(
            message=
            f"rebuild ok for file {hash} on endpoint {endpoint} took {duration.seconds()} seconds"
        )
        return True
 def path_repo_root(self):
     """Return the repository root: the parent folder of the installed
        cdr_plugin_folder_to_folder package location."""
     package_location = cdr_plugin_folder_to_folder.__path__[0]
     return parent_folder(package_location)
# Example #8
 def test__using_with_valid_zip_no_target_folder(self):
     """With no target folder given, Unzip_File extracts into a temp folder
        that is removed once the context manager exits."""
     source_folder = parent_folder(__file__)
     with Zip_Folder(source_folder) as zip_file:
         with Unzip_File(zip_file, None, True) as temp_folder:
             assert Files.exists(temp_folder) is True
     assert Files.exists(temp_folder) is False      # temp folder removed on exit
# Example #9
 def test__using_with_params(self):
     """Zip_Folder yields a zip of the folder and deletes it on exit."""
     folder_to_zip = parent_folder(__file__)
     with Zip_Folder(folder_to_zip) as zip_file:
         assert Files.exists(zip_file) is True
     assert Files.exists(zip_file) is False         # zip file removed on exit