Example #1
def _get_tool_source(self, source_file_name=None, source_contents=None):
    # Fall back to the defaults defined on the test class.
    if source_file_name is None:
        source_file_name = self.source_file_name
    if source_contents is None:
        source_contents = self.source_contents
    if not os.path.isabs(source_file_name):
        # Relative name: materialize the contents in the temp directory,
        # closing the handle before the file is parsed.
        path = os.path.join(self.temp_directory, source_file_name)
        with open(path, "w") as out:
            out.write(source_contents)
    else:
        path = source_file_name
    tool_source = get_tool_source(path)
    return tool_source
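Standing alone, the same pattern reduces to: write the tool XML into a scratch directory, then hand the path to get_tool_source, which picks a parser from the file extension. A minimal sketch, assuming get_tool_source comes from galaxy.tool_util.parser and using hypothetical tool XML:

import os
import tempfile

from galaxy.tool_util.parser import get_tool_source

TOOL_XML = """<tool id="hello" name="Hello" version="1.0">
    <command>echo 'hello world'</command>
</tool>
"""

# Write the tool definition to a temp directory, mirroring the helper above.
temp_directory = tempfile.mkdtemp()
path = os.path.join(temp_directory, "tool.xml")
with open(path, "w") as out:
    out.write(TOOL_XML)

tool_source = get_tool_source(path)
print(tool_source.parse_id())  # -> "hello"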
Example #2
def _get_tool_source(self, source_file_name=None, source_contents=None, macro_contents=None):
    macro_path = None
    if source_file_name is None:
        source_file_name = self.source_file_name
    if source_contents is None:
        source_contents = self.source_contents
    if not os.path.isabs(source_file_name):
        path = os.path.join(self.temp_directory, source_file_name)
        with open(path, "w") as out:
            out.write(source_contents)
        # Optionally write a macros file next to the tool XML so that
        # macro expansion can be exercised.
        if macro_contents:
            macro_path = os.path.join(self.temp_directory, 'macros.xml')
            with open(macro_path, "w") as out:
                out.write(macro_contents)
    else:
        path = source_file_name
    tool_source = get_tool_source(path, macro_paths=[macro_path])
    return tool_source
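A caller exercising the macro branch passes both the tool XML and the macro definitions; the helper writes macros.xml next to the tool file. A sketch of such a call from inside a test method of the same class, with hypothetical XML contents, assuming source_file_name defaults to a relative name like 'tool.xml':

TOOL_XML = """<tool id="macro_tool" name="MacroTool" version="1.0">
    <macros>
        <import>macros.xml</import>
    </macros>
    <expand macro="my_inputs" />
    <command>cat '$input'</command>
</tool>
"""

MACROS_XML = """<macros>
    <xml name="my_inputs">
        <inputs>
            <param name="input" type="data" />
        </inputs>
    </xml>
</macros>
"""

# <expand macro="my_inputs"/> is resolved against the definitions in macros.xml.
tool_source = self._get_tool_source(source_contents=TOOL_XML, macro_contents=MACROS_XML)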
Example #3
def main(TMPDIR, WORKING_DIRECTORY, IMPORT_STORE_DIRECTORY):
    metadata_params = get_metadata_params(WORKING_DIRECTORY)
    datatypes_config = metadata_params["datatypes_config"]
    if not os.path.exists(datatypes_config):
        datatypes_config = os.path.join(WORKING_DIRECTORY, 'configs', datatypes_config)
    datatypes_registry = validate_and_load_datatypes_config(datatypes_config)
    object_store = get_object_store(WORKING_DIRECTORY)
    import_store = store.imported_store_for_metadata(IMPORT_STORE_DIRECTORY)
    # TODO: clean up random places from which we read files in the working directory
    job_io = JobIO.from_json(os.path.join(IMPORT_STORE_DIRECTORY, 'job_io.json'), sa_session=import_store.sa_session)
    tool_app_config = ToolAppConfig(
        name='tool_app',
        tool_data_path=job_io.tool_data_path,
        galaxy_data_manager_data_path=job_io.galaxy_data_manager_data_path,
        nginx_upload_path=TMPDIR,
        len_file_path=job_io.len_file_path,
        builds_file_path=job_io.builds_file_path,
        root=TMPDIR,
        is_admin_user=lambda _: job_io.user_context.is_admin)
    with open(os.path.join(IMPORT_STORE_DIRECTORY, 'tool_data_tables.json')) as data_tables_json:
        tdtm = ToolDataTableManager.from_dict(json.load(data_tables_json))
    app = ToolApp(
        sa_session=import_store.sa_session,
        tool_app_config=tool_app_config,
        datatypes_registry=datatypes_registry,
        object_store=object_store,
        tool_data_table_manager=tdtm,
        file_sources=job_io.file_sources,
    )
    # TODO: could try to serialize just a minimal tool variant instead of the whole thing ?
    tool_source = get_tool_source(tool_source_class=job_io.tool_source_class, raw_tool_source=job_io.tool_source)
    tool = create_tool_from_source(app, tool_source=tool_source, tool_dir=job_io.tool_dir)
    tool_evaluator = evaluation.RemoteToolEvaluator(app=app, tool=tool, job=job_io.job, local_working_directory=WORKING_DIRECTORY)
    tool_evaluator.set_compute_environment(compute_environment=SharedComputeEnvironment(job_io=job_io, job=job_io.job))
    with open(os.path.join(WORKING_DIRECTORY, 'tool_script.sh'), 'a') as out:
        command_line, version_command_line, extra_filenames, environment_variables = tool_evaluator.build()
        out.write(f'{version_command_line or ""}{command_line}')
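main above expects three directories prepared ahead of time (a temp dir, the job's working directory, and the serialized import store). A hedged sketch of a command-line wrapper around it; the flag names are assumptions, not the script's real interface:

import argparse

def parse_args():
    # Hypothetical CLI for invoking main() standalone.
    parser = argparse.ArgumentParser(description="Evaluate a Galaxy tool remotely")
    parser.add_argument("--tmpdir", required=True)
    parser.add_argument("--working-directory", required=True)
    parser.add_argument("--import-store-directory", required=True)
    return parser.parse_args()

if __name__ == "__main__":
    args = parse_args()
    main(args.tmpdir, args.working_directory, args.import_store_directory)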