Example #1
    def execute( self, tool, trans, incoming={}, set_output_hid=True, history=None, **kwargs ):
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append( input )
        assert dataset_upload_inputs, "No dataset upload groups were found."

        persisting_uploads_timer = ExecutionTimer()
        precreated_datasets = upload_common.get_precreated_datasets( trans, incoming, trans.app.model.HistoryDatasetAssociation )
        incoming = upload_common.persist_uploads( incoming )
        log.debug("Persisted uploads %s" % persisting_uploads_timer)
        # We can pass an empty string as the cntrller here since it is used to check whether we
        # are in an admin view, and this tool is currently not used there.
        check_and_cleanup_timer = ExecutionTimer()
        uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs, history=history )
        upload_common.cleanup_unused_precreated_datasets( precreated_datasets )

        if not uploaded_datasets:
            return None, 'No data was entered in the upload form, please go back and choose data to upload.'

        log.debug("Checked and cleaned uploads %s" % check_and_cleanup_timer)
        create_job_timer = ExecutionTimer()
        json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
        data_list = [ ud.data for ud in uploaded_datasets ]
        rval = upload_common.create_job( trans, incoming, tool, json_file_path, data_list, history=history )
        log.debug("Created upload job %s" % create_job_timer)
        return rval
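
The timers above are interpolated straight into "%s" log messages, so ExecutionTimer must render its elapsed time via __str__. A minimal stand-in (an assumption for illustration; Galaxy ships its own ExecutionTimer, which may differ) could look like:

    import time

    class ExecutionTimer(object):
        # Hypothetical stand-in: record the start time on construction and
        # render the elapsed wall-clock time when formatted with "%s".
        def __init__(self):
            self.begin = time.time()

        def __str__(self):
            return "(%0.3f ms)" % ((time.time() - self.begin) * 1000)

    # Usage mirroring the snippet above:
    #   timer = ExecutionTimer()
    #   ...work...
    #   log.debug("Persisted uploads %s" % timer)
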
Example #2
    def execute(self, tool, trans, incoming={}, set_output_hid=True, history=None, **kwargs):
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.items():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append(input)
        assert dataset_upload_inputs, "No dataset upload groups were found."

        persisting_uploads_timer = ExecutionTimer()
        precreated_datasets = upload_common.get_precreated_datasets(trans, incoming, trans.app.model.HistoryDatasetAssociation)
        incoming = upload_common.persist_uploads(incoming, trans)
        log.debug("Persisted uploads %s" % persisting_uploads_timer)
        # We can pass an empty string as the cntrller here since it is used to check whether we
        # are in an admin view, and this tool is currently not used there.
        check_and_cleanup_timer = ExecutionTimer()
        uploaded_datasets = upload_common.get_uploaded_datasets(trans, '', incoming, precreated_datasets, dataset_upload_inputs, history=history)
        upload_common.cleanup_unused_precreated_datasets(precreated_datasets)

        if not uploaded_datasets:
            return None, 'No data was entered in the upload form, please go back and choose data to upload.'

        log.debug("Checked and cleaned uploads %s" % check_and_cleanup_timer)
        create_job_timer = ExecutionTimer()
        json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
        data_list = [ud.data for ud in uploaded_datasets]
        rval = upload_common.create_job(trans, incoming, tool, json_file_path, data_list, history=history)
        log.debug("Created upload job %s" % create_job_timer)
        return rval
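
Every variant in this section opens with the same filter over tool.inputs, keeping only the "upload_dataset" grouping parameters. Isolated from the Galaxy objects, the step reduces to the sketch below, with a namedtuple as a hypothetical stand-in for the real input-parameter class:

    from collections import namedtuple

    ToolInput = namedtuple("ToolInput", ["type"])  # hypothetical stand-in

    def collect_upload_inputs(tool_inputs):
        # Keep only the "upload_dataset" grouping parameters, exactly as the
        # for-loops above do, preserving the order of tool.inputs.
        return [inp for inp in tool_inputs.values()
                if inp.type == "upload_dataset"]

    inputs = {"files": ToolInput(type="upload_dataset"),
              "dbkey": ToolInput(type="select")}
    assert collect_upload_inputs(inputs), "No dataset upload groups were found."
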
Example #3
 def _upload_dataset(self, trans, library_id, folder_id, replace_dataset=None, **kwd):
     # Set up the traditional tool state/params
     cntrller = 'api'
     tool_id = 'upload1'
     message = None
     tool = trans.app.toolbox.get_tool(tool_id)
     state = tool.new_state(trans)
     populate_state(trans, tool.inputs, kwd, state.inputs)
     tool_params = state.inputs
     dataset_upload_inputs = []
     for input_name, input in tool.inputs.items():
         if input.type == "upload_dataset":
             dataset_upload_inputs.append(input)
     # Library-specific params
     server_dir = kwd.get('server_dir', '')
     upload_option = kwd.get('upload_option', 'upload_file')
     response_code = 200
     if upload_option == 'upload_directory':
         full_dir, import_dir_desc = validate_server_directory_upload(trans, server_dir)
         message = 'Select a directory'
     elif upload_option == 'upload_paths':
         # Library API already checked this - following check isn't actually needed.
         validate_path_upload(trans)
     # Some error handling should be added to this method.
     try:
         # FIXME: instead of passing params here (which have been processed by util.Params()), the original kwd
         # should be passed so that complex objects that may have been included in the initial request remain.
         library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset)
     except Exception:
         response_code = 500
         message = "Unable to parse upload parameters, please report this error."
     # Proceed with (mostly) regular upload processing if we're still errorless
     if response_code == 200:
         precreated_datasets = upload_common.get_precreated_datasets(trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=cntrller)
         if upload_option == 'upload_file':
             tool_params = upload_common.persist_uploads(tool_params, trans)
             uploaded_datasets = upload_common.get_uploaded_datasets(trans, cntrller, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch)
         elif upload_option == 'upload_directory':
             uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message)
         elif upload_option == 'upload_paths':
             uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(trans, kwd, library_bunch, response_code, message)
         upload_common.cleanup_unused_precreated_datasets(precreated_datasets)
         if upload_option == 'upload_file' and not uploaded_datasets:
             response_code = 400
             message = 'Select a file, enter a URL or enter text'
     if response_code != 200:
         return (response_code, message)
     json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
     data_list = [ud.data for ud in uploaded_datasets]
     job_params = {}
     job_params['link_data_only'] = json.dumps(kwd.get('link_data_only', 'copy_files'))
     job_params['uuid'] = json.dumps(kwd.get('uuid', None))
     job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params)
     trans.sa_session.add(job)
     trans.sa_session.flush()
     return output
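
Note how the method JSON-encodes each job_params value before attaching it to the job: the default 'copy_files' is stored as the JSON string '"copy_files"' and a missing uuid as 'null'. A self-contained sketch of that assembly:

    import json

    def build_job_params(kwd):
        # Mirror the job_params assembly above: every value is JSON-encoded,
        # defaults included, so downstream code can json.loads() uniformly.
        return {
            "link_data_only": json.dumps(kwd.get("link_data_only", "copy_files")),
            "uuid": json.dumps(kwd.get("uuid", None)),
        }

    print(build_job_params({}))
    # -> {'link_data_only': '"copy_files"', 'uuid': 'null'}
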
Example #4
    def execute(self, tool, trans, incoming={}, set_output_hid=True):
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append(input)
        assert dataset_upload_inputs, (
            "No dataset upload groups were found.")

        precreated_datasets = upload_common.get_precreated_datasets(
            trans, incoming, trans.app.model.HistoryDatasetAssociation)
        incoming = upload_common.persist_uploads(incoming)
        uploaded_datasets = upload_common.get_uploaded_datasets(
            trans, incoming, precreated_datasets, dataset_upload_inputs)
        upload_common.cleanup_unused_precreated_datasets(precreated_datasets)

        if not uploaded_datasets:
            return 'No data was entered in the upload form, please go back and choose data to upload.'

        json_file_path = upload_common.create_paramfile(
            trans, uploaded_datasets)
        data_list = [ud.data for ud in uploaded_datasets]
        return upload_common.create_job(trans, incoming, tool, json_file_path,
                                        data_list)
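
Unlike Examples #1 and #2, this older execute() returns a bare error string when the upload form was empty and create_job()'s result otherwise, so a caller has to disambiguate by type. A hypothetical caller-side guard:

    def interpret_execute_result(result):
        # Hypothetical helper: Example #4's execute() yields an error string
        # for an empty upload form and create_job()'s value on success.
        if isinstance(result, str):
            raise ValueError(result)
        return result
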
Example #5
 def _upload_dataset(self,
                     trans,
                     library_id,
                     folder_id,
                     replace_dataset=None,
                     **kwd):
     # Set up the traditional tool state/params
     cntrller = 'api'
     tool_id = 'upload1'
     message = None
     tool = trans.app.toolbox.get_tool(tool_id)
     state = tool.new_state(trans)
     populate_state(trans, tool.inputs, kwd, state.inputs)
     tool_params = state.inputs
     dataset_upload_inputs = []
     for input_name, input in tool.inputs.items():
         if input.type == "upload_dataset":
             dataset_upload_inputs.append(input)
     # Library-specific params
     server_dir = kwd.get('server_dir', '')
     upload_option = kwd.get('upload_option', 'upload_file')
     response_code = 200
     if upload_option == 'upload_directory':
         full_dir, import_dir_desc = validate_server_directory_upload(
             trans, server_dir)
         message = 'Select a directory'
     elif upload_option == 'upload_paths':
         # Library API already checked this - following check isn't actually needed.
         validate_path_upload(trans)
     # Some error handling should be added to this method.
     try:
         # FIXME: instead of passing params here (which have been processed by util.Params()), the original kwd
         # should be passed so that complex objects that may have been included in the initial request remain.
         library_bunch = upload_common.handle_library_params(
             trans, kwd, folder_id, replace_dataset)
     except Exception:
         response_code = 500
         message = "Unable to parse upload parameters, please report this error."
     # Proceed with (mostly) regular upload processing if we're still errorless
     if response_code == 200:
         precreated_datasets = upload_common.get_precreated_datasets(
             trans,
             tool_params,
             trans.app.model.LibraryDatasetDatasetAssociation,
             controller=cntrller)
         if upload_option == 'upload_file':
             tool_params = upload_common.persist_uploads(tool_params, trans)
             uploaded_datasets = upload_common.get_uploaded_datasets(
                 trans,
                 cntrller,
                 tool_params,
                 precreated_datasets,
                 dataset_upload_inputs,
                 library_bunch=library_bunch)
         elif upload_option == 'upload_directory':
             uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(
                 trans, kwd, full_dir, import_dir_desc, library_bunch,
                 response_code, message)
         elif upload_option == 'upload_paths':
             uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(
                 trans, kwd, library_bunch, response_code, message)
         upload_common.cleanup_unused_precreated_datasets(
             precreated_datasets)
         if upload_option == 'upload_file' and not uploaded_datasets:
             response_code = 400
             message = 'Select a file, enter a URL or enter text'
     if response_code != 200:
         return (response_code, message)
     json_file_path = upload_common.create_paramfile(
         trans, uploaded_datasets)
     data_list = [ud.data for ud in uploaded_datasets]
     job_params = {}
     job_params['link_data_only'] = json.dumps(
         kwd.get('link_data_only', 'copy_files'))
     job_params['uuid'] = json.dumps(kwd.get('uuid', None))
     job, output = upload_common.create_job(trans,
                                            tool_params,
                                            tool,
                                            json_file_path,
                                            data_list,
                                            folder=library_bunch.folder,
                                            job_params=job_params)
     trans.sa_session.add(job)
     trans.sa_session.flush()
     return output
Example #6
 def upload_dataset( self, trans, controller, library_id, folder_id, replace_dataset=None, **kwd ):
     # Set up the traditional tool state/params
     tool_id = 'upload1'
     tool = trans.app.toolbox.tools_by_id[ tool_id ]
     state = tool.new_state( trans )
     errors = tool.update_state( trans, tool.inputs_by_page[0], state.inputs, kwd )
     tool_params = state.inputs
     dataset_upload_inputs = []
     for input_name, input in tool.inputs.iteritems():
         if input.type == "upload_dataset":
             dataset_upload_inputs.append( input )
     # Library-specific params
     params = util.Params( kwd ) # is this filetoolparam safe?
     library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
     msg = util.restore_text( params.get( 'msg', ''  ) )
     messagetype = params.get( 'messagetype', 'done' )
     server_dir = util.restore_text( params.get( 'server_dir', '' ) )
     if replace_dataset not in [ None, 'None' ]:
         replace_id = replace_dataset.id
     else:
         replace_id = None
     upload_option = params.get( 'upload_option', 'upload_file' )
     err_redirect = False
     if upload_option == 'upload_directory':
         if server_dir in [ None, 'None', '' ]:
             err_redirect = True
         if controller == 'library_admin':
             import_dir = trans.app.config.library_import_dir
             import_dir_desc = 'library_import_dir'
             full_dir = os.path.join( import_dir, server_dir )
         else:
             import_dir = trans.app.config.user_library_import_dir
             import_dir_desc = 'user_library_import_dir'
             if server_dir == trans.user.email:
                 full_dir = os.path.join( import_dir, server_dir )
             else:
                 full_dir = os.path.join( import_dir, trans.user.email, server_dir )
         if import_dir:
             msg = 'Select a directory'
         else:
             msg = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
     # Proceed with (mostly) regular upload processing
     precreated_datasets = upload_common.get_precreated_datasets( trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=controller )
     if upload_option == 'upload_file':
         tool_params = upload_common.persist_uploads( tool_params )
         uploaded_datasets = upload_common.get_uploaded_datasets( trans, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch )
     elif upload_option == 'upload_directory':
         uploaded_datasets, err_redirect, msg = self.get_server_dir_uploaded_datasets( trans, params, full_dir, import_dir_desc, library_bunch, err_redirect, msg )
     elif upload_option == 'upload_paths':
         uploaded_datasets, err_redirect, msg = self.get_path_paste_uploaded_datasets( trans, params, library_bunch, err_redirect, msg )
     upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
     if upload_option == 'upload_file' and not uploaded_datasets:
         msg = 'Select a file, enter a URL or enter text'
         err_redirect = True
     if err_redirect:
         trans.response.send_redirect( web.url_for( controller=controller,
                                                    action='upload_library_dataset',
                                                    library_id=library_id,
                                                    folder_id=folder_id,
                                                    replace_id=replace_id,
                                                    upload_option=upload_option,
                                                    msg=util.sanitize_text( msg ),
                                                    messagetype='error' ) )
     json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
     data_list = [ ud.data for ud in uploaded_datasets ]
     return upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder )
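
The upload_directory branch above encodes a simple access rule: a library admin reads from the site-wide library_import_dir, while a regular user is confined to a subtree of user_library_import_dir keyed by their email. Extracted as a pure function (a sketch, with the config values passed in explicitly):

    import os

    def resolve_import_dir(controller, library_import_dir,
                           user_library_import_dir, user_email, server_dir):
        # Mirror the branch above: admins get the site-wide tree; users get
        # their own subtree unless server_dir is already their email address.
        if controller == 'library_admin':
            return os.path.join(library_import_dir, server_dir)
        if server_dir == user_email:
            return os.path.join(user_library_import_dir, server_dir)
        return os.path.join(user_library_import_dir, user_email, server_dir)
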
Example #7
 def upload_dataset(self,
                    trans,
                    controller,
                    library_id,
                    folder_id,
                    replace_dataset=None,
                    **kwd):
     # Set up the traditional tool state/params
     tool_id = 'upload1'
     tool = trans.app.toolbox.tools_by_id[tool_id]
     state = tool.new_state(trans)
     errors = tool.update_state(trans, tool.inputs_by_page[0], state.inputs,
                                kwd)
     tool_params = state.inputs
     dataset_upload_inputs = []
     for input_name, input in tool.inputs.iteritems():
         if input.type == "upload_dataset":
             dataset_upload_inputs.append(input)
     # Library-specific params
     params = util.Params(kwd)  # is this filetoolparam safe?
     library_bunch = upload_common.handle_library_params(
         trans, params, folder_id, replace_dataset)
     msg = util.restore_text(params.get('msg', ''))
     messagetype = params.get('messagetype', 'done')
     server_dir = util.restore_text(params.get('server_dir', ''))
     if replace_dataset not in [None, 'None']:
         replace_id = replace_dataset.id
     else:
         replace_id = None
     upload_option = params.get('upload_option', 'upload_file')
     err_redirect = False
     if upload_option == 'upload_directory':
         if server_dir in [None, 'None', '']:
             err_redirect = True
         if controller == 'library_admin':
             import_dir = trans.app.config.library_import_dir
             import_dir_desc = 'library_import_dir'
             full_dir = os.path.join(import_dir, server_dir)
         else:
             import_dir = trans.app.config.user_library_import_dir
             import_dir_desc = 'user_library_import_dir'
             if server_dir == trans.user.email:
                 full_dir = os.path.join(import_dir, server_dir)
             else:
                 full_dir = os.path.join(import_dir, trans.user.email,
                                         server_dir)
         if import_dir:
             msg = 'Select a directory'
         else:
             msg = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
     # Proceed with (mostly) regular upload processing
     precreated_datasets = upload_common.get_precreated_datasets(
         trans,
         tool_params,
         trans.app.model.LibraryDatasetDatasetAssociation,
         controller=controller)
     if upload_option == 'upload_file':
         tool_params = upload_common.persist_uploads(tool_params)
         uploaded_datasets = upload_common.get_uploaded_datasets(
             trans,
             tool_params,
             precreated_datasets,
             dataset_upload_inputs,
             library_bunch=library_bunch)
     elif upload_option == 'upload_directory':
         uploaded_datasets, err_redirect, msg = self.get_server_dir_uploaded_datasets(
             trans, params, full_dir, import_dir_desc, library_bunch,
             err_redirect, msg)
     elif upload_option == 'upload_paths':
         uploaded_datasets, err_redirect, msg = self.get_path_paste_uploaded_datasets(
             trans, params, library_bunch, err_redirect, msg)
     upload_common.cleanup_unused_precreated_datasets(precreated_datasets)
     if upload_option == 'upload_file' and not uploaded_datasets:
         msg = 'Select a file, enter a URL or enter text'
         err_redirect = True
     if err_redirect:
         trans.response.send_redirect(
             web.url_for(controller=controller,
                         action='upload_library_dataset',
                         library_id=library_id,
                         folder_id=folder_id,
                         replace_id=replace_id,
                         upload_option=upload_option,
                         msg=util.sanitize_text(msg),
                         messagetype='error'))
     json_file_path = upload_common.create_paramfile(
         trans, uploaded_datasets)
     data_list = [ud.data for ud in uploaded_datasets]
     return upload_common.create_job(trans,
                                     tool_params,
                                     tool,
                                     json_file_path,
                                     data_list,
                                     folder=library_bunch.folder)