def test_copytree(self):
    """Copying a directory tree must carry its files to the destination."""
    src_dir = gf.tmp_directory()
    src_file = os.path.join(src_dir, "foo.bar")
    with io.open(src_file, "w", encoding="utf-8") as handle:
        handle.write(u"Foo bar")
    dst_dir = gf.tmp_directory()
    gf.copytree(src_dir, dst_dir)
    # the file written into the source tree must appear in the copy
    self.assertTrue(gf.file_exists(os.path.join(dst_dir, "foo.bar")))
    gf.delete_directory(dst_dir)
    gf.delete_directory(src_dir)
def test_decompress(self):
    """Decompressing each sample container must yield the expected entries."""
    for props in self.FILES.values():
        extracted_to = gf.tmp_directory()
        Container(props["path"]).decompress(extracted_to)
        # re-open the extracted tree as an UNPACKED container and compare
        unpacked = Container(extracted_to, ContainerFormat.UNPACKED)
        self.assertEqual(unpacked.entries, self.EXPECTED_ENTRIES)
        gf.delete_directory(extracted_to)
def test_compress_unpacked(self):
    """Compressing into UNPACKED format must produce a directory, not a file."""
    source = self.FILES["unpacked"]["path"]
    target = gf.tmp_directory()
    Container(target, ContainerFormat.UNPACKED).compress(source)
    # UNPACKED output is a directory tree, never a single archive file
    self.assertFalse(os.path.isfile(target))
    verify = Container(target, ContainerFormat.UNPACKED)
    self.assertEqual(verify.entries, self.EXPECTED_ENTRIES)
    gf.delete_directory(target)
def test_exec_tmp_path(self):
    """Running the CLI with an explicit tmp_path runtime option must succeed."""
    custom_tmp = gf.tmp_directory()
    arguments = [
        ("in", "../tools/res/audio.mp3"),
        ("in", "../tools/res/subtitles.txt"),
        ("", "task_language=eng|is_text_type=subtitles|os_task_file_format=srt"),
        ("out", "sonnet.srt"),
        ("", "-r=\"tmp_path=%s\"" % custom_tmp),
    ]
    # expected exit code 0: the run must complete cleanly
    self.execute(arguments, 0)
    gf.delete_directory(custom_tmp)
def execute(self, path):
    """Load a job from the given container, run it, and verify that an output container is written."""
    container_path = gf.absolute_path(path, __file__)
    out_dir = gf.tmp_directory()
    runner = ExecuteJob(job=None)
    runner.load_job_from_container(container_path)
    self.assertIsNotNone(runner.job)
    runner.execute()
    written = runner.write_output_container(out_dir)
    # a successful run returns the path of an existing output container
    self.assertIsNotNone(written)
    self.assertTrue(gf.file_exists(written))
    runner.clean()
    gf.delete_directory(out_dir)
def execute(self, parameters, expected_exit_code):
    """Build a CLI argument list from (type, value) pairs, run ExecuteJobCLI, and check its exit code."""
    tmp_dir = gf.tmp_directory()

    def resolve(kind, value):
        # "in" values are resolved relative to this file; "out" values
        # go into the temporary output directory; anything else is literal
        if kind == "in":
            return gf.absolute_path(value, __file__)
        if kind == "out":
            return os.path.join(tmp_dir, value)
        return value

    args = ["placeholder"] + [resolve(k, v) for k, v in parameters]
    actual_code = ExecuteJobCLI(use_sys=False).run(arguments=args)
    gf.delete_directory(tmp_dir)
    self.assertEqual(actual_code, expected_exit_code)
def test_ensure_parent_directory(self):
    """ensure_parent_directory must create missing parents (or, with ensure_parent=False, the path itself)."""
    root = gf.tmp_directory()
    # parent already exists: call must leave it in place
    target = os.path.join(root, "foo.bar")
    gf.ensure_parent_directory(target)
    self.assertTrue(gf.directory_exists(root))
    # parent missing: it must be created
    target = os.path.join(root, "foo/bar.baz")
    gf.ensure_parent_directory(target)
    self.assertTrue(gf.directory_exists(os.path.join(root, "foo")))
    # ensure_parent=False: the given path itself must be created
    target = os.path.join(root, "bar")
    gf.ensure_parent_directory(target, ensure_parent=False)
    self.assertTrue(gf.directory_exists(target))
    gf.delete_directory(root)
def convert(self, input_file_path, ofp=None, runtime_configuration=None):
    """
    Convert the given audio file with FFMPEGWrapper and check the result path.

    If ``ofp`` is ``None``, a temporary directory is created for the output
    and removed afterwards; otherwise the caller-provided path is used,
    and deleted only on failure (matching the original error-path behavior).

    :param string input_file_path: path of the input file, relative to this file
    :param string ofp: optional explicit output file path
    :param runtime_configuration: optional runtime configuration for the wrapper
    :raises OSError: if the conversion fails
    """
    if ofp is None:
        output_path = gf.tmp_directory()
        output_file_path = os.path.join(output_path, "audio.wav")
    else:
        # no temporary directory is created in this case;
        # the original code raised NameError on success here,
        # since output_path was referenced while unbound
        output_path = None
        output_file_path = ofp
    try:
        converter = FFMPEGWrapper(rconf=runtime_configuration)
        result = converter.convert(
            gf.absolute_path(input_file_path, __file__),
            output_file_path)
        self.assertEqual(result, output_file_path)
        # clean up only the directory we created ourselves
        if output_path is not None:
            gf.delete_directory(output_path)
    except OSError:
        if output_path is not None:
            gf.delete_directory(output_path)
        else:
            gf.delete_file(None, ofp)
        # bare raise preserves the original traceback
        raise
def convert(self, input_file_path, ofp=None, runtime_configuration=None):
    """
    Convert the given audio file with FFMPEGWrapper and check the result path.

    If ``ofp`` is ``None``, a temporary directory is created for the output
    and removed afterwards; otherwise the caller-provided path is used,
    and deleted only on failure (matching the original error-path behavior).

    :param string input_file_path: path of the input file, relative to this file
    :param string ofp: optional explicit output file path
    :param runtime_configuration: optional runtime configuration for the wrapper
    :raises OSError: if the conversion fails
    """
    if ofp is None:
        output_path = gf.tmp_directory()
        output_file_path = os.path.join(output_path, "audio.wav")
    else:
        # no temporary directory is created in this case;
        # the original code raised NameError on success here,
        # since output_path was referenced while unbound
        output_path = None
        output_file_path = ofp
    try:
        converter = FFMPEGWrapper(rconf=runtime_configuration)
        result = converter.convert(
            gf.absolute_path(input_file_path, __file__),
            output_file_path
        )
        self.assertEqual(result, output_file_path)
        # clean up only the directory we created ourselves
        if output_path is not None:
            gf.delete_directory(output_path)
    except OSError:
        if output_path is not None:
            gf.delete_directory(output_path)
        else:
            gf.delete_file(None, ofp)
        # bare raise preserves the original traceback
        raise
def test_delete_directory_existing(self):
    """Deleting an existing directory must actually remove it from disk."""
    victim = gf.tmp_directory()
    self.assertTrue(gf.directory_exists(victim))
    gf.delete_directory(victim)
    self.assertFalse(gf.directory_exists(victim))
def load_job_from_container(self, container_path, config_string=None):
    """
    Load the job from the given :class:`aeneas.container.Container` object.

    If ``config_string`` is ``None``, the container must contain
    a configuration file; otherwise use the provided config string
    (i.e., the wizard case).

    :param string container_path: the path to the input container
    :param string config_string: the configuration string (from wizard)
    :raises: :class:`~aeneas.executejob.ExecuteJobInputError`: if the given
             container does not contain a valid :class:`~aeneas.job.Job`
    """
    self.log(u"Loading job from container...")

    # create working directory where the input container
    # will be decompressed
    self.working_directory = gf.tmp_directory(
        root=self.rconf[RuntimeConfiguration.TMP_PATH])
    self.log([u"Created working directory '%s'", self.working_directory])

    try:
        self.log(u"Decompressing input container...")
        input_container = Container(container_path, logger=self.logger)
        input_container.decompress(self.working_directory)
        self.log(u"Decompressing input container... done")
    except Exception as exc:
        # remove the working directory before reporting the failure
        # NOTE(review): log_exc with critical=True presumably raises
        # ExecuteJobInputError here — confirm against its definition
        self.clean()
        self.log_exc(
            u"Unable to decompress container '%s': %s" % (container_path, exc),
            None, True, ExecuteJobInputError)

    try:
        self.log(u"Creating job from working directory...")
        working_container = Container(self.working_directory, logger=self.logger)
        analyzer = AnalyzeContainer(working_container, logger=self.logger)
        self.job = analyzer.analyze(config_string=config_string)
        self.log(u"Creating job from working directory... done")
    except Exception as exc:
        self.clean()
        self.log_exc(
            u"Unable to analyze container '%s': %s" % (container_path, exc),
            None, True, ExecuteJobInputError)

    if self.job is None:
        # the analyzer returned no Job object: the container is not valid
        self.log_exc(
            u"The container '%s' does not contain a valid Job" % (container_path),
            None, True, ExecuteJobInputError)

    try:
        # set absolute path for text file and audio file
        # for each task in the job
        self.log(u"Setting absolute paths for tasks...")
        for task in self.job.tasks:
            task.text_file_path_absolute = gf.norm_join(
                self.working_directory, task.text_file_path)
            task.audio_file_path_absolute = gf.norm_join(
                self.working_directory, task.audio_file_path)
        self.log(u"Setting absolute paths for tasks... done")
        self.log(u"Loading job from container: succeeded")
    except Exception as exc:
        self.clean()
        self.log_exc(u"Error while setting absolute paths for tasks", exc, True, ExecuteJobInputError)
def write_output_container(self, output_directory_path):
    """
    Write the output container for this job.

    Return the path to output container,
    which is the concatenation of ``output_directory_path``
    and of the output container file or directory name.

    :param string output_directory_path: the path to a directory where
                                         the output container must be created
    :rtype: string
    :raises: :class:`~aeneas.executejob.ExecuteJobOutputError`: if there is a
             problem while writing the output container
    """
    self.log(u"Writing output container for this job")

    # validate preconditions: a job with at least one task must be loaded
    if self.job is None:
        self.log_exc(u"The job object is None", None, True, ExecuteJobOutputError)
    if len(self.job) == 0:
        self.log_exc(u"The job has no tasks", None, True, ExecuteJobOutputError)
    self.log([u"Number of tasks: '%d'", len(self.job)])

    # create temporary directory where the sync map files
    # will be created
    # this temporary directory will be compressed into
    # the output container
    self.tmp_directory = gf.tmp_directory(
        root=self.rconf[RuntimeConfiguration.TMP_PATH])
    self.log([u"Created temporary directory '%s'", self.tmp_directory])

    for task in self.job.tasks:
        custom_id = task.configuration["custom_id"]
        # check if the task has sync map and sync map file path
        if task.sync_map_file_path is None:
            self.log_exc(
                u"Task '%s' has sync_map_file_path not set" % (custom_id),
                None, True, ExecuteJobOutputError)
        if task.sync_map is None:
            self.log_exc(u"Task '%s' has sync_map not set" % (custom_id), None, True, ExecuteJobOutputError)
        try:
            # output sync map
            self.log([u"Outputting sync map for task '%s'...", custom_id])
            task.output_sync_map_file(self.tmp_directory)
            self.log(
                [u"Outputting sync map for task '%s'... done", custom_id])
        except Exception as exc:
            self.log_exc(
                u"Error while outputting sync map for task '%s'" % (custom_id),
                None, True, ExecuteJobOutputError)

    # get output container info
    output_container_format = self.job.configuration["o_container_format"]
    self.log([u"Output container format: '%s'", output_container_format])
    output_file_name = self.job.configuration["o_name"]
    # append the container-format extension unless the format is UNPACKED
    # or the name already carries it
    if ((output_container_format != ContainerFormat.UNPACKED) and
            (not output_file_name.endswith(output_container_format))):
        self.log(u"Adding extension to output_file_name")
        output_file_name += "." + output_container_format
    self.log([u"Output file name: '%s'", output_file_name])
    output_file_path = gf.norm_join(output_directory_path, output_file_name)
    self.log([u"Output file path: '%s'", output_file_path])

    try:
        self.log(u"Compressing...")
        container = Container(output_file_path, output_container_format, logger=self.logger)
        container.compress(self.tmp_directory)
        self.log(u"Compressing... done")
        self.log([u"Created output file: '%s'", output_file_path])
        self.log(u"Writing output container for this job: succeeded")
        # NOTE(review): clean(False) presumably removes only the temporary
        # directory, not the working directory — confirm against clean()
        self.clean(False)
        return output_file_path
    except Exception as exc:
        self.clean(False)
        self.log_exc(u"Error while compressing", exc, True, ExecuteJobOutputError)
        return None
def test_find_entry_empty_directory(self):
    """find_entry on an empty directory container must return None."""
    empty_dir = gf.tmp_directory()
    wanted = self.EXPECTED_ENTRIES[0]
    self.assertIsNone(Container(empty_dir).find_entry(wanted))
    gf.delete_directory(empty_dir)
def test_is_safe_empty_directory(self):
    """An empty directory container must be considered safe."""
    empty_dir = gf.tmp_directory()
    self.assertTrue(Container(empty_dir).is_safe)
    gf.delete_directory(empty_dir)
def test_entries_empty_directory(self):
    """An empty directory container must expose zero entries."""
    empty_dir = gf.tmp_directory()
    self.assertEqual(len(Container(empty_dir).entries), 0)
    gf.delete_directory(empty_dir)
def write_output_container(self, output_directory_path):
    """
    Write the output container for this job.

    Return the path to output container,
    which is the concatenation of ``output_directory_path``
    and of the output container file or directory name.

    :param string output_directory_path: the path to a directory where
                                         the output container must be created
    :rtype: string
    :raises: :class:`~aeneas.executejob.ExecuteJobOutputError`: if there is a
             problem while writing the output container
    """
    self.log(u"Writing output container for this job")

    # validate preconditions: a job with at least one task must be loaded
    if self.job is None:
        self.log_exc(u"The job object is None", None, True, ExecuteJobOutputError)
    if len(self.job) == 0:
        self.log_exc(u"The job has no tasks", None, True, ExecuteJobOutputError)
    self.log([u"Number of tasks: '%d'", len(self.job)])

    # create temporary directory where the sync map files
    # will be created
    # this temporary directory will be compressed into
    # the output container
    self.tmp_directory = gf.tmp_directory(root=self.rconf[RuntimeConfiguration.TMP_PATH])
    self.log([u"Created temporary directory '%s'", self.tmp_directory])

    for task in self.job.tasks:
        custom_id = task.configuration["custom_id"]
        # check if the task has sync map and sync map file path
        if task.sync_map_file_path is None:
            self.log_exc(u"Task '%s' has sync_map_file_path not set" % (custom_id), None, True, ExecuteJobOutputError)
        if task.sync_map is None:
            self.log_exc(u"Task '%s' has sync_map not set" % (custom_id), None, True, ExecuteJobOutputError)
        try:
            # output sync map
            self.log([u"Outputting sync map for task '%s'...", custom_id])
            task.output_sync_map_file(self.tmp_directory)
            self.log([u"Outputting sync map for task '%s'... done", custom_id])
        except Exception as exc:
            self.log_exc(u"Error while outputting sync map for task '%s'" % (custom_id), None, True, ExecuteJobOutputError)

    # get output container info
    output_container_format = self.job.configuration["o_container_format"]
    self.log([u"Output container format: '%s'", output_container_format])
    output_file_name = self.job.configuration["o_name"]
    # append the container-format extension unless the format is UNPACKED
    # or the name already carries it
    if ((output_container_format != ContainerFormat.UNPACKED) and
            (not output_file_name.endswith(output_container_format))):
        self.log(u"Adding extension to output_file_name")
        output_file_name += "." + output_container_format
    self.log([u"Output file name: '%s'", output_file_name])
    output_file_path = gf.norm_join(
        output_directory_path,
        output_file_name
    )
    self.log([u"Output file path: '%s'", output_file_path])

    try:
        self.log(u"Compressing...")
        container = Container(
            output_file_path,
            output_container_format,
            logger=self.logger
        )
        container.compress(self.tmp_directory)
        self.log(u"Compressing... done")
        self.log([u"Created output file: '%s'", output_file_path])
        self.log(u"Writing output container for this job: succeeded")
        # NOTE(review): clean(False) presumably removes only the temporary
        # directory, not the working directory — confirm against clean()
        self.clean(False)
        return output_file_path
    except Exception as exc:
        self.clean(False)
        self.log_exc(u"Error while compressing", exc, True, ExecuteJobOutputError)
        return None
def test_tmp_directory(self):
    """tmp_directory must create a directory that exists on disk."""
    created = gf.tmp_directory()
    self.assertTrue(gf.directory_exists(created))
    gf.delete_directory(created)
def load_job_from_container(self, container_path, config_string=None):
    """
    Load the job from the given :class:`aeneas.container.Container` object.

    If ``config_string`` is ``None``, the container must contain
    a configuration file; otherwise use the provided config string
    (i.e., the wizard case).

    :param string container_path: the path to the input container
    :param string config_string: the configuration string (from wizard)
    :raises: :class:`~aeneas.executejob.ExecuteJobInputError`: if the given
             container does not contain a valid :class:`~aeneas.job.Job`
    """
    self.log(u"Loading job from container...")

    # create working directory where the input container
    # will be decompressed
    self.working_directory = gf.tmp_directory(root=self.rconf[RuntimeConfiguration.TMP_PATH])
    self.log([u"Created working directory '%s'", self.working_directory])

    try:
        self.log(u"Decompressing input container...")
        input_container = Container(container_path, logger=self.logger)
        input_container.decompress(self.working_directory)
        self.log(u"Decompressing input container... done")
    except Exception as exc:
        # remove the working directory before reporting the failure
        # NOTE(review): log_exc with critical=True presumably raises
        # ExecuteJobInputError here — confirm against its definition
        self.clean()
        self.log_exc(u"Unable to decompress container '%s': %s" % (container_path, exc), None, True, ExecuteJobInputError)

    try:
        self.log(u"Creating job from working directory...")
        working_container = Container(
            self.working_directory,
            logger=self.logger
        )
        analyzer = AnalyzeContainer(working_container, logger=self.logger)
        self.job = analyzer.analyze(config_string=config_string)
        self.log(u"Creating job from working directory... done")
    except Exception as exc:
        self.clean()
        self.log_exc(u"Unable to analyze container '%s': %s" % (container_path, exc), None, True, ExecuteJobInputError)

    if self.job is None:
        # the analyzer returned no Job object: the container is not valid
        self.log_exc(u"The container '%s' does not contain a valid Job" % (container_path), None, True, ExecuteJobInputError)

    try:
        # set absolute path for text file and audio file
        # for each task in the job
        self.log(u"Setting absolute paths for tasks...")
        for task in self.job.tasks:
            task.text_file_path_absolute = gf.norm_join(
                self.working_directory,
                task.text_file_path
            )
            task.audio_file_path_absolute = gf.norm_join(
                self.working_directory,
                task.audio_file_path
            )
        self.log(u"Setting absolute paths for tasks... done")
        self.log(u"Loading job from container: succeeded")
    except Exception as exc:
        self.clean()
        self.log_exc(u"Error while setting absolute paths for tasks", exc, True, ExecuteJobInputError)
def test_exists_empty_directory(self):
    """exists() must be True for a freshly created empty directory."""
    empty_dir = gf.tmp_directory()
    self.assertTrue(Container(empty_dir).exists())
    gf.delete_directory(empty_dir)