Example no. 1
import os

from bioblend.galaxy.objects import GalaxyInstance
from dotenv import load_dotenv
from flask import Flask


def create_app(test_config=None):
    # create and configure the app
    # Galaxy Config
    load_dotenv()
    GALAXY_INSTANCE_URL = os.getenv("GALAXY_INSTANCE_URL")
    GALAXY_INSTANCE_API_KEY = os.getenv("GALAXY_INSTANCE_API_KEY")
    gi = GalaxyInstance(GALAXY_INSTANCE_URL, GALAXY_INSTANCE_API_KEY)
    # Fails fast (IndexError) if the shared data library does not exist
    file_library = gi.libraries.list(name="VisaTMPiloteData")[0]

    app = Flask(__name__, instance_relative_config=True)

    # UPLOAD_FOLDER, CORPUS_FOLDER and OUTPUTS_FOLDER are module-level
    # constants defined elsewhere in this package
    app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
    app.config['CORPUS_FOLDER'] = CORPUS_FOLDER
    app.config['OUTPUTS_FOLDER'] = OUTPUTS_FOLDER
    app.config['history_id'] = os.getenv("GALAXY_HISTORY_ID")
    app.config['library_name'] = "VisaTMPiloteData"
    app.config['GALAXY_INSTANCE'] = gi

    app.config['LIBRARY_CORPUS_FOLDER'] = "corpus"
    app.config['LIBRARY_EXTRACTION_FOLDER'] = "extraction"
    app.config['LIBRARY_CLUSTERING_FOLDER'] = "clustering"

    from . import clusters
    app.register_blueprint(clusters.bp)
    from . import corpus
    app.register_blueprint(corpus.bp)
    from . import files
    app.register_blueprint(files.bp)
    from . import istex
    app.register_blueprint(istex.bp)
    from . import workflows
    app.register_blueprint(workflows.bp)

    return app
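For context, a minimal sketch of how such a factory is typically consumed; the package name "visatm" is a placeholder (the snippet does not name its package), and the .env file is assumed to define the variables read above:

# Hypothetical driver script; "visatm" stands in for the real package name.
# Assumes a .env file providing GALAXY_INSTANCE_URL, GALAXY_INSTANCE_API_KEY
# and GALAXY_HISTORY_ID, as read by create_app() above.
from visatm import create_app

app = create_app()

if __name__ == '__main__':
    app.run(debug=True)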
Example no. 2
"""
This example retrieves details of all the Workflows in our Galaxy account and lists information on them.

Usage: python list_workflows.py <Galaxy_URL> <Galaxy_API_key>
"""
from __future__ import print_function

import sys

from bioblend.galaxy.objects import GalaxyInstance

if len(sys.argv) != 3:
    print("Usage: python list_workflows.py <Galaxy_URL> <Galaxy_API_key>")
    sys.exit(1)
galaxy_url = sys.argv[1]
galaxy_key = sys.argv[2]

print("Initiating Galaxy connection")

gi = GalaxyInstance(galaxy_url, galaxy_key)

print("Retrieving Workflows list")

workflows = gi.workflows.get_previews()

if len(workflows) == 0:
    print("There are no Workflows in your account.")
else:
    print("\nWorkflows:")
    for wf in workflows:
        print("{0} : {1}".format(wf.name, wf.id))
Example no. 3
import os
import sys

from bioblend.galaxy.objects import GalaxyInstance
from common import get_one  # noqa:I100,I201

URL = 'https://orione.crs4.it'
API_KEY = os.getenv('GALAXY_API_KEY', 'YOUR_API_KEY')
if API_KEY == 'YOUR_API_KEY':
    sys.exit('API_KEY not set, see the README.txt file')
gi = GalaxyInstance(URL, API_KEY)

# Select "W2 - Bacterial re-sequencing | Paired-end" from published workflows

workflow_name = 'W2 - Bacterial re-sequencing | Paired-end'
previews = gi.workflows.get_previews(name=workflow_name, published=True)
p = get_one(_ for _ in previews if _.published)

# Import the workflow to user space

iw = gi.workflows.import_shared(p.id)

# Create a new history

history_name = f"{workflow_name} output"
h = gi.histories.create(history_name)

# Select the "Orione SupMat" library

library_name = 'Orione SupMat'
lib = get_one(gi.libraries.list(name=library_name))
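The snippet stops after selecting the library. A hedged sketch of how it could continue, importing a library dataset into the new history and running the imported workflow via the older objects API used elsewhere on this page; the dataset name and input label are hypothetical, not taken from the "Orione SupMat" library:

# Hypothetical dataset name and input label, for illustration only
lds = get_one(lib.get_datasets(name='reads_1.fastq'))
hda = h.import_dataset(lds)  # copy the library dataset into the history
input_map = {'input_reads': hda}  # keys must match the workflow's input labels
outputs, out_hist = iw.run(input_map, h)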
Example no. 4
    def setUp(self) -> None:
        super().setUp()
        self.conn = GalaxyInstance(self.host, self.key)
Example no. 5
    def import_to_galaxy(self,
                         json_parameter_file,
                         log,
                         hist_id,
                         token=None,
                         config_file=None):
        """
        Import samples and their sample files into Galaxy from IRIDA

        :type json_parameter_file: str
        :param json_parameter_file: a path that Galaxy passes,
        to the stub datasource it created
        :type log: str
        :param log: the name of a file to write the tool's log to.
        :type token: str
        :param token: An access token that can be passed to the tool when it
        is manually run.
        :type config_file: str
        :param config_file: the name of a file to configure from
        """
        collection_array = []
        num_files = 0
        self.pp = pprint.PrettyPrinter(indent=4)

        self.logger.setLevel(logging.INFO)
        self.configure()
        with open(json_parameter_file, 'r') as param_file_handle:
            full_param_dict = json.load(param_file_handle)
            param_dict = full_param_dict['param_dict']
            json_params_dict = json.loads(param_dict['json_params'])

            self.print_logged("Importing files from IRIDA to Galaxy...")

            self.uploaded_files_log = []
            self.skipped_files_log = []

            samples_dict = json_params_dict['_embedded']['samples']
            email = json_params_dict['_embedded']['user']['email']
            addtohistory = json_params_dict['_embedded']['addtohistory']
            desired_lib_name = json_params_dict['_embedded']['library']['name']
            oauth_dict = json_params_dict['_embedded']['oauth2']

            make_paired_collection = True

            if "makepairedcollection" in json_params_dict['_embedded']:
                make_paired_collection = json_params_dict['_embedded'][
                    'makepairedcollection']

            self.token = token
            self.irida = self.get_IRIDA_session(oauth_dict)

            self.gi = GalaxyInstance(self.GALAXY_URL, self.ADMIN_KEY)

            # This is necessary for uploads from arbitrary local paths
            # that require setting the "link_to_files" flag:
            self.reg_gi = galaxy.GalaxyInstance(url=self.GALAXY_URL,
                                                key=self.ADMIN_KEY)

            self.histories = self.reg_gi.histories

            # Each sample contains a list of sample files
            samples = self.get_samples(samples_dict)

            # Set up the library
            self.library = self.get_first_or_make_lib(desired_lib_name, email)
            self.create_folder_if_nec(self.ILLUMINA_PATH)
            self.create_folder_if_nec(self.REFERENCE_PATH)

            # Add each sample's files to the library
            num_files = self.add_samples_if_nec(samples)

            if addtohistory:
                if make_paired_collection:
                    collection_array = self.add_samples_to_history(
                        samples, hist_id)
                    self.print_logged("Samples added to history!")
                    self.logger.debug("Collection items: \n" +
                                      self.pp.pformat(collection_array))
                else:
                    collection_array = self.add_samples_to_history(
                        samples, hist_id, make_paired_collection=False)
                    self.print_logged("Samples added to history!")
            else:
                self.print_logged("Samples not added to history!")

            self.logger.debug("Number of files on galaxy: " + str(num_files))

            self.print_summary()
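get_first_or_make_lib is defined elsewhere in the IRIDA importer; a minimal sketch of the idea using the BioBlend objects API (the real method also takes the email argument, presumably for library permissions, which this sketch omits):

def get_first_or_make_lib(gi, name):
    # Return the first existing library with this name, or create it
    libs = gi.libraries.list(name=name)
    return libs[0] if libs else gi.libraries.create(name)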
Example no. 6
from pathlib import Path

from bioblend.galaxy.objects import GalaxyInstance


def upload(history, path, label=None, type=None):
    """
    Upload a local file to the given history.

    :return: HDA instance
    """
    if not path.is_file():
        # Fail early rather than letting the upload request error out later
        raise ValueError("Invalid file path specified")

    if not label:
        label = path.name

    if type:
        hda = history.upload_file(str(path.resolve()),
                                  file_name=label,
                                  file_type=type)
    else:
        hda = history.upload_file(str(path.resolve()), file_name=label)

    return hda


if __name__ == '__main__':
    conn = GalaxyInstance('localhost:8000', '63ade82a83e02581f076f34522d957b7')
    h = get_upload_history(conn)
    i = 0
    while True:
        upload(h, Path('./LICENSE'), 'test', 'txt')
        i += 1
        if i % 100 == 0:
            h.update()
            if 'error' in h.state_details and h.state_details['error'] > 0:
                print('Error detected.')
                exit(1)
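get_upload_history is project-specific and not shown here; a plausible sketch that finds or creates a dedicated upload history (the history name "Uploads" is an assumption):

def get_upload_history(gi, name='Uploads'):
    # Reuse an existing history with this name, or create a fresh one
    histories = gi.histories.list(name=name)
    return histories[0] if histories else gi.histories.create(name)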
Example no. 7
    def run(self, temp_wf=False, output_file=None):
        """
        Make the connection, set up for the workflow, then run it

        Args:
            temp_wf (bool): Flag to determine whether the workflow should be deleted after use
            output_file (str): Optional path of a file to which the run results are written
        Returns:
            results (tuple): List of output datasets and output history if successful, None if not successful
        """
        self.logger.info("Initiating Galaxy connection")
        gi = GalaxyInstance(self.galaxy_url, self.galaxy_key)

        self.logger.info("Importing workflow '%s' from '%s' source" %
                         (self.workflow, self.workflow_source))
        workflow = self.import_workflow(gi)
        if not workflow.is_runnable:
            self.logger.error("Workflow not runnable, missing required tools")
            raise RuntimeError("Workflow not runnable, missing required tools")

        self.logger.info("Creating output history '%s'" % self.history_name)
        outputhist = gi.histories.create(self.history_name)

        input_map = {}
        if self.dataset_collection:
            self.logger.info("Creating dataset collection")
            dataset_collection = self.create_dataset_collection(gi, outputhist)
            input_map[self.dataset_collection['input_label']] = dataset_collection

        if self.datasets:
            self.logger.info("Importing datasets to history")
            imported_datasets = self.import_datasets('datasets', gi, outputhist)
            for i, imported in enumerate(imported_datasets):
                input_map[self.datasets[i]['input_label']] = imported

        if self.library_name:
            self.logger.info("Creating library '%s'" % self.library_name)
            lib = gi.libraries.create(self.library_name)
            self.logger.info("Copying datasets to library '%s'" %
                             self.library_name)
            for data in outputhist.get_datasets():
                lib.copy_from_dataset(data)

        if self.runtime_params:
            self.logger.info("Setting runtime tool parameters")
            params = self.set_runtime_params(workflow)
            self.logger.info("Initiating workflow")
            results = workflow.run(input_map, outputhist, params)
        else:
            self.logger.info("Checking for missing tool parameters")
            missing_param = self.verify_runtime_params(workflow)
            if missing_param:
                self.logger.error("Missing runtime parameter for '%s'" %
                                  str(missing_param))
                raise RuntimeError("Missing runtime parameter for '%s'" %
                                   str(missing_param))
            self.logger.info("Initiating workflow")
            results = workflow.run(input_map, outputhist)
            if output_file:
                with open(output_file, 'w') as f:
                    f.write(str(results))

        if temp_wf and self.workflow_source != 'id':
            self.logger.info("Deleting workflow: '%s'" % self.workflow)
            workflow.delete()

        return results
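A sketch of how a runner like this might be driven; the class name WorkflowRunner and its constructor are assumptions inferred from the attributes run() reads (galaxy_url, galaxy_key, workflow, history_name and so on):

# Hypothetical: construct the runner, run it, then inspect the outputs
runner = WorkflowRunner(config)  # config supplies galaxy_url, galaxy_key, ...
datasets, out_hist = runner.run(temp_wf=True, output_file='results.txt')
for ds in datasets:
    print(ds.name, ds.state)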
Example no. 8
def main(args: argparse.Namespace):
    """
    Script entrance function
    """
    conn = GalaxyInstance(args.host, args.key)
    if args.command not in ('reference', 'runs', 'results', 'cancel'):
        upload_history = get_upload_history(conn)

    if 'reference_id' in args and args.reference_id:
        # Attempt to recover from the user entering an accession rather than a reference id
        args.reference_id = re.sub(r'\W', '_', args.reference_id)

    if args.command == 'upload':
        print("Dataset ID:", file=sys.stderr)
        sys.stderr.flush()
        hda = upload(upload_history, args.path, args.label)
        print(hda.id)

    elif args.command == 'list':
        print("ID\tLabel", file=sys.stderr)
        sys.stderr.flush()
        uploads = list_data(upload_history)
        if uploads:
            for data in uploads:
                print(f"{data.id}\t{data.name}")
        else:
            print("No datasets found", file=sys.stderr)

    elif args.command == 'delete':
        delete_data(upload_history, args.id)

    elif args.command == 'reference':
        print("Reference ID\tName", file=sys.stderr)
        sys.stderr.flush()
        for genome in list_reference(conn, args.query):
            print(f"{genome[1]}\t{genome[0]}")

    elif args.command == 'run':
        workflow = get_workflow(conn)
        if args.output and not args.output.is_dir():
            main.cmd.error("Output path must be existing folder")

        # Deal with bug in argparse 'extend' by switching to 'append' and flattening
        data = _flatten(args.data)
        newick = None
        if args.newick_accession or args.newick_label:
            newick = upload_history.get_dataset(args.newick_accession or args.newick_label)
        print("Analysis ID:", file=sys.stderr)
        sys.stderr.flush()
        invocation_id, _ = invoke(
            workflow,
            args.label,
            [upload_history.get_dataset(id) for id in data],
            newick,
            'newick_accession' in args,
            args.reference_id,
        )
        print(invocation_id)
        if args.output:
            results(workflow, invocation_id, args.output)

    elif args.command == 'runs':
        workflow = get_workflow(conn)
        print("ID\tLabel\tState", file=sys.stderr)
        sys.stderr.flush()
        for invocation in invocations(workflow):
            print(f"{invocation['id']}\t{invocation['label']}\t{invocation['state']}")  # TODO get actual state

    elif args.command == 'download':
        workflow = get_workflow(conn)
        results(workflow, args.id, args.path)

    elif args.command == 'cancel':
        workflow = get_workflow(conn)
        cancel(workflow, args.id)

    elif args.command == 'errors':
        workflow = get_workflow(conn)
        for e in errors(workflow, args.id).values():
            print(e)

    elif args.command == 'upload_run':
        workflow = get_workflow(conn)
        # Deal with bug in argparse 'extend' by switching to append and flattening
        paths = _flatten(args.paths)
        round_trip(
            upload_history,
            paths,
            workflow,
            args.label,
            args.output_path,
            args.newick_accession or args.newick_label,
            'newick_accession' in args,
            args.reference_id,
        )

    else:
        main.cmd.print_help()
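_flatten is referenced but not shown; given the comments about the argparse 'extend' workaround, a plausible implementation is a one-level flatten:

def _flatten(list_of_lists):
    # Collapse argparse 'append' results (a list of lists) into a flat list
    return [item for sublist in list_of_lists for item in sublist]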