Example #1
    def setup_job(self, jiha, archive_source, archive_type):
        """Chown the archive source (unless it is a URL) and the archive
        staging directory to the configured real system user before the
        import job is dispatched.
        """
        if archive_type != "url":
            external_chown(
                archive_source,
                jiha.job.user.system_user_pwent(
                    self.app.config.real_system_username),
                self.app.config.external_chown_script,
                "history import archive")
        external_chown(
            jiha.archive_dir,
            jiha.job.user.system_user_pwent(
                self.app.config.real_system_username),
            self.app.config.external_chown_script,
            "history import archive directory")
Example #2
    def cleanup_after_job(self):
        """ Set history, datasets, collections and jobs' attributes
            and clean up archive directory.
        """

        #
        # Import history.
        #

        jiha = self.sa_session.query(model.JobImportHistoryArchive).filter_by(
            job_id=self.job_id).first()
        if not jiha:
            return None
        user = jiha.job.user

        new_history = None
        try:
            archive_dir = jiha.archive_dir
            if self.app.config.external_chown_script:
                external_chown(
                    archive_dir,
                    jiha.job.user.system_user_pwent(getpass.getuser()),
                    self.app.config.external_chown_script,
                    "history import archive directory")
            model_store = store.get_import_model_store_for_directory(
                archive_dir,
                app=self.app,
                user=user,
                tag_handler=self.app.tag_handler.create_tag_handler_session())
            job = jiha.job
            with model_store.target_history(
                    default_history=job.history) as new_history:

                jiha.history = new_history
                self.sa_session.flush()
                model_store.perform_import(new_history,
                                           job=job,
                                           new_history=True)
                # Cleanup.
                if os.path.exists(archive_dir):
                    shutil.rmtree(archive_dir)

        except Exception as e:
            jiha.job.tool_stderr += f"Error cleaning up history import job: {e}"
            self.sa_session.flush()
            raise

        return new_history
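
Taken together, setup_job and cleanup_after_job bracket the import job. A hypothetical call sequence, assuming the wrapper class takes app and job_id (names inferred from the two methods above, not confirmed by this page):

# Hypothetical driver; the class name and constructor signature are
# assumptions inferred from the methods shown above.
wrapper = JobImportHistoryArchiveWrapper(app, job_id)
wrapper.setup_job(jiha, archive_source, archive_type)  # before dispatch
# ... the job unpacks the archive into jiha.archive_dir ...
new_history = wrapper.cleanup_after_job()  # after the job completes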
Example #3
import os
import tempfile
from json import dump

# external_chown is imported from Galaxy's utilities in the original source.


def create_paramfile(trans, uploaded_datasets):
    """
    Create the upload tool's JSON "param" file.
    """
    tool_params = []
    json_file_path = None
    for uploaded_dataset in uploaded_datasets:
        data = uploaded_dataset.data
        if uploaded_dataset.type == 'composite':
            # we need to init metadata before the job is dispatched
            data.init_meta()
            for meta_name, meta_value in uploaded_dataset.metadata.items():
                setattr(data.metadata, meta_name, meta_value)
            trans.sa_session.add(data)
            trans.sa_session.flush()
            params = dict(
                file_type=uploaded_dataset.file_type,
                dataset_id=data.dataset.id,
                dbkey=uploaded_dataset.dbkey,
                type=uploaded_dataset.type,
                metadata=uploaded_dataset.metadata,
                primary_file=uploaded_dataset.primary_file,
                composite_file_paths=uploaded_dataset.composite_files,
                composite_files={
                    k: v.__dict__
                    for k, v in data.datatype.get_composite_files(
                        data).items()
                })
        else:
            try:
                is_binary = uploaded_dataset.datatype.is_binary
            except Exception:
                is_binary = None
            try:
                link_data_only = uploaded_dataset.link_data_only
            except Exception:
                link_data_only = 'copy_files'
            try:
                uuid_str = uploaded_dataset.uuid
            except Exception:
                uuid_str = None
            try:
                purge_source = uploaded_dataset.purge_source
            except Exception:
                purge_source = True
            try:
                user_ftp_dir = os.path.abspath(trans.user_ftp_dir)
            except Exception:
                user_ftp_dir = None
            if user_ftp_dir and uploaded_dataset.path.startswith(user_ftp_dir):
                uploaded_dataset.type = 'ftp_import'
            params = dict(
                file_type=uploaded_dataset.file_type,
                ext=uploaded_dataset.ext,
                name=uploaded_dataset.name,
                dataset_id=data.dataset.id,
                dbkey=uploaded_dataset.dbkey,
                type=uploaded_dataset.type,
                is_binary=is_binary,
                link_data_only=link_data_only,
                uuid=uuid_str,
                to_posix_lines=getattr(uploaded_dataset, "to_posix_lines",
                                       True),
                auto_decompress=getattr(uploaded_dataset, "auto_decompress",
                                        True),
                purge_source=purge_source,
                space_to_tab=uploaded_dataset.space_to_tab,
                run_as_real_user=trans.app.config.external_chown_script
                is not None,
                check_content=trans.app.config.check_upload_content,
                path=uploaded_dataset.path)
            # TODO: This will have to change when we start bundling inputs.
            # Also, in_place above causes the file to be left behind since the
            # user cannot remove it unless the parent directory is writable.
            if link_data_only == 'copy_files' and trans.user:
                external_chown(uploaded_dataset.path,
                               trans.user.system_user_pwent(
                                   trans.app.config.real_system_username),
                               trans.app.config.external_chown_script,
                               description="uploaded file")
        tool_params.append(params)
    with tempfile.NamedTemporaryFile(mode="w",
                                     prefix='upload_params_',
                                     delete=False) as fh:
        json_file_path = fh.name
        dump(tool_params, fh)
    return json_file_path
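
Because dump writes tool_params as a single JSON array, the tool-side consumer can load it back in one call. A sketch of such a reader (the real upload tool's reader is not shown on this page):

import json
import os

def read_upload_params(json_file_path):
    # Sketch: load the JSON array written by create_paramfile, then
    # remove the temporary file once its contents are in memory.
    with open(json_file_path) as fh:
        tool_params = json.load(fh)
    os.remove(json_file_path)
    return tool_params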