Example #1
 def __init__(self, fsmon=False, **kwargs) -> None:
     super().__init__()
     self.haltables = [
         ("object store", self._shutdown_object_store),
         ("database connection", self._shutdown_model),
     ]
     self._register_singleton(BasicSharedApp, self)
     if not log.handlers:
         # Paste didn't handle it, so we need a temporary basic log
         # configured.  The handler added here gets dumped and replaced with
         # an appropriately configured logger in configure_logging below.
         logging.basicConfig(level=logging.DEBUG)
     log.debug("python path is: %s", ", ".join(sys.path))
     self.name = 'galaxy'
     self.is_webapp = False
     self.new_installation = False
     # Read config file and check for errors
     self.config: Any = self._register_singleton(
         config.Configuration, config.Configuration(**kwargs))
     self.config.check()
     self._configure_object_store(fsmon=True)
     config_file = kwargs.get('global_conf', {}).get('__file__', None)
     if config_file:
         log.debug('Using "galaxy.ini" config file: %s', config_file)
     self._configure_models(
         check_migrate_databases=self.config.check_migrate_databases,
         config_file=config_file)
     # Security helper
     self._configure_security()
     self._register_singleton(IdEncodingHelper, self.security)
     self._register_singleton(SharedModelMapping, self.model)
     self._register_singleton(GalaxyModelMapping, self.model)
     self._register_singleton(galaxy_scoped_session, self.model.context)
     self._register_singleton(install_model_scoped_session,
                              self.install_model.context)
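
Note: all of the examples on this page share the same core pattern: build a config.Configuration from keyword arguments, call check() to validate it, and configure logging from it. A minimal sketch of that pattern, assuming a standard Galaxy ini file (the path is illustrative; load_app_properties is the same helper used in the set-metadata example further below):

from galaxy import config
from galaxy.util.properties import load_app_properties

# Read key/value settings from a Galaxy ini file.
kwargs = load_app_properties(ini_file="config/galaxy.ini")  # path is a placeholder
cfg = config.Configuration(**kwargs)
cfg.check()                    # validate configured paths and settings
config.configure_logging(cfg)  # as most of the examples on this page do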
Example #2
 def __init__( self, **kwargs ):
     # Read config file and check for errors
     self.config = config.Configuration( **kwargs )
     self.config.check()
     config.configure_logging( self.config )
     # Connect up the object model
     if self.config.database_connection:
         self.model = galaxy.model.mapping.init( self.config.file_path,
                                                 self.config.database_connection,
                                                 create_tables = True )
     else:
         self.model = galaxy.model.mapping.init( self.config.file_path,
                                                 "sqlite://%s?isolation_level=IMMEDIATE" % self.config.database,
                                                 create_tables = True )
     # Initialize the tools
     self.toolbox = tools.ToolBox( self.config.tool_config, self.config.tool_path )
     # Start the job queue
     self.job_queue = jobs.JobQueue( self.config.job_queue_workers, self )
     self.heartbeat = None
     # Start the heartbeat process if configured and available
     if self.config.use_heartbeat:
         from galaxy import heartbeat
         if heartbeat.Heartbeat:
             self.heartbeat = heartbeat.Heartbeat()
             self.heartbeat.start()
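
The database-URL fallback used here (and again in later examples) can be written compactly. A small sketch, assuming a config.Configuration instance cfg built as in the sketch after Example #1:

import galaxy.model.mapping

# Prefer the configured connection string; otherwise fall back to a local SQLite file.
db_url = cfg.database_connection or (
    "sqlite:///%s?isolation_level=IMMEDIATE" % cfg.database
)
model = galaxy.model.mapping.init(cfg.file_path, db_url, create_tables=False)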
Example #3
def main(ini_file):
    global logger

    # Initializing logger
    logger = init_logger(logging)

    conf_parser = ConfigParser.ConfigParser({'here': os.getcwd()})
    logger.info('Reading galaxy.ini')
    conf_parser.read(ini_file)
    ini_config = dict()
    for key, value in conf_parser.items("app:main"):
        ini_config[key] = value
    ini_config = config.Configuration(**ini_config)
    logger.info('Reading ajax config file from galaxy.ini')
    #ajax_config_file = ini_config.get("ajax_dynamic_options_config_file", None)
    ajax_config_file = ini_config.get(
        "ajax_dynamic_options_config_file",
        "/galaxy-central/scripts/tools/ajax_dynamic_options.conf.yaml")
    if not ajax_config_file:
        raise ValueError("Need to specify ajax configuration in universe_wsgi")
    with open(ajax_config_file) as in_handle:
        ajax_config = yaml.safe_load(in_handle)
    db_con = ini_config.database_connection
    if not db_con:
        #db_con = "sqlite:///%s?isolation_level=IMMEDIATE" % ini_config.database
        db_con = "postgresql://*****:*****@localhost:5432/galaxy"
    app = SimpleApp(db_con, ini_config.file_path)

    top_level_type = create_default_requests(app, ajax_config["user"])
    update_existing_user(app, ajax_config["user"], "User Registration Form")
def testapi():
    host_url = 'http://localhost:8080'
    new_path = [os.path.join(os.getcwd(), "lib")]
    new_path.extend(sys.path[1:])  # remove scripts/ from the path
    sys.path = new_path
    from galaxy import config
    aconfig = config.Configuration()
    M_A_K = aconfig.master_api_key
    tooldeps = aconfig.tool_dependency_dir
    gi = GalaxyInstance(url=host_url, key=M_A_K)
def get_sa_session(ini_file):
    conf_parser = ConfigParser.ConfigParser({'here': os.getcwd()})
    conf_parser.read(ini_file)
    kwds = dict()
    for key, value in conf_parser.items("app:main"):
        kwds[key] = value
    ini_config = config.Configuration(**kwds)
    db_con = ini_config.database_connection
    if not db_con:
        db_con = "sqlite:///%s?isolation_level=IMMEDIATE" % ini_config.database
    model = galaxy.model.mapping.init(ini_config.file_path,
                                      db_con,
                                      engine_options={},
                                      create_tables=False)
    return model.context.current, ini_config
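
A short usage sketch of get_sa_session() as defined above; the queried model class (galaxy.model.User) is only an illustration:

import galaxy.model

sa_session, ini_config = get_sa_session("galaxy.ini")  # ini path is a placeholder
# The first return value is a SQLAlchemy session; any mapped class can be queried.
for user in sa_session.query(galaxy.model.User).limit(5):
    print(user.email)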
Example #6
    def __init__(self, **kwargs):
        if not log.handlers:
            # Paste didn't handle it, so we need a temporary basic log
            # configured.  The handler added here gets dumped and replaced with
            # an appropriately configured logger in configure_logging below.
            logging.basicConfig(level=logging.DEBUG)
        log.debug("python path is: %s", ", ".join(sys.path))
        self.name = 'galaxy'
        self.startup_timer = ExecutionTimer()
        self.new_installation = False
        # Read config file and check for errors
        self.config = config.Configuration(**kwargs)
        self.config.check()
        config.configure_logging(self.config)
        self.configure_fluent_log()
        # A lot of postfork initialization depends on the server name, ensure it is set immediately after forking before other postfork functions
        self.application_stack = application_stack_instance(app=self)
        self.application_stack.register_postfork_function(
            self.application_stack.set_postfork_server_name, self)
        self.config.reload_sanitize_whitelist(
            explicit='sanitize_whitelist_file' in kwargs)
        self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(
            self.config)
        # control_worker *can* be initialized with a queue, but here we don't
        # want to and we'll allow postfork to bind and start it.
        self.control_worker = GalaxyQueueWorker(self)

        self._configure_tool_shed_registry()
        self._configure_object_store(fsmon=True)
        # Setup the database engine and ORM
        config_file = kwargs.get('global_conf', {}).get('__file__', None)
        if config_file:
            log.debug('Using "galaxy.ini" config file: %s', config_file)
        check_migrate_tools = self.config.check_migrate_tools
        self._configure_models(
            check_migrate_databases=self.config.check_migrate_databases,
            check_migrate_tools=check_migrate_tools,
            config_file=config_file)

        # Manage installed tool shed repositories.
        self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager(
            self)

        self._configure_datatypes_registry(self.installed_repository_manager)
        galaxy.model.set_datatypes_registry(self.datatypes_registry)

        # Security helper
        self._configure_security()
        # Tag handler
        self.tag_handler = GalaxyTagHandler(self.model.context)
        self.dataset_collections_service = DatasetCollectionManager(self)
        self.history_manager = HistoryManager(self)
        self.dependency_resolvers_view = DependencyResolversView(self)
        self.test_data_resolver = test_data.TestDataResolver(
            file_dirs=self.config.tool_test_data_directories)
        self.library_folder_manager = FolderManager()
        self.library_manager = LibraryManager()
        self.dynamic_tool_manager = DynamicToolManager(self)

        # Tool Data Tables
        self._configure_tool_data_tables(from_shed_config=False)
        # Load dbkey / genome build manager
        self._configure_genome_builds(data_table_name="__dbkeys__",
                                      load_old_style=True)

        # Genomes
        self.genomes = Genomes(self)
        # Data providers registry.
        self.data_provider_registry = DataProviderRegistry()

        # Initialize job metrics manager, needs to be in place before
        # config so per-destination modifications can be made.
        self.job_metrics = job_metrics.JobMetrics(
            self.config.job_metrics_config_file, app=self)

        # Initialize error report plugins.
        self.error_reports = ErrorReports(self.config.error_report_file,
                                          app=self)

        # Initialize the job management configuration
        self.job_config = jobs.JobConfiguration(self)

        # Setup a Tool Cache
        self.tool_cache = ToolCache()
        self.tool_shed_repository_cache = ToolShedRepositoryCache(self)
        # Watch various config files for immediate reload
        self.watchers = ConfigWatchers(self)
        self._configure_toolbox()

        # Load Data Manager
        self.data_managers = DataManagers(self)
        # Load the update repository manager.
        self.update_repository_manager = update_repository_manager.UpdateRepositoryManager(
            self)
        # Load proprietary datatype converters and display applications.
        self.installed_repository_manager.load_proprietary_converters_and_display_applications()
        # Load datatype display applications defined in local datatypes_conf.xml
        self.datatypes_registry.load_display_applications(self)
        # Load datatype converters defined in local datatypes_conf.xml
        self.datatypes_registry.load_datatype_converters(self.toolbox)
        # Load external metadata tool
        self.datatypes_registry.load_external_metadata_tool(self.toolbox)
        # Load history import/export tools.
        load_lib_tools(self.toolbox)
        # visualizations registry: associates resources with visualizations, controls how to render
        self.visualizations_registry = VisualizationsRegistry(
            self,
            directories_setting=self.config.visualization_plugins_directory,
            template_cache_dir=self.config.template_cache)
        # Tours registry
        self.tour_registry = ToursRegistry(self.config.tour_config_dir)
        # Webhooks registry
        self.webhooks_registry = WebhooksRegistry(self.config.webhooks_dirs)
        # Load security policy.
        self.security_agent = self.model.security_agent
        self.host_security_agent = galaxy.model.security.HostAgent(
            model=self.security_agent.model,
            permitted_actions=self.security_agent.permitted_actions)
        # Load quota management.
        if self.config.enable_quotas:
            self.quota_agent = galaxy.quota.QuotaAgent(self.model)
        else:
            self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
        # Heartbeat for thread profiling
        self.heartbeat = None
        from galaxy import auth
        self.auth_manager = auth.AuthManager(self)
        # Start the heartbeat process if configured and available (wait until
        # postfork if using uWSGI)
        if self.config.use_heartbeat:
            if heartbeat.Heartbeat:
                self.heartbeat = heartbeat.Heartbeat(
                    self.config,
                    period=self.config.heartbeat_interval,
                    fname=self.config.heartbeat_log)
                self.heartbeat.daemon = True
                self.application_stack.register_postfork_function(
                    self.heartbeat.start)

        if self.config.enable_oidc:
            from galaxy.authnz import managers
            self.authnz_manager = managers.AuthnzManager(
                self, self.config.oidc_config,
                self.config.oidc_backends_config)

        self.sentry_client = None
        if self.config.sentry_dsn:

            def postfork_sentry_client():
                import raven
                self.sentry_client = raven.Client(
                    self.config.sentry_dsn,
                    transport=raven.transport.HTTPTransport)

            self.application_stack.register_postfork_function(
                postfork_sentry_client)

        # Transfer manager client
        if self.config.get_bool('enable_beta_job_managers', False):
            from galaxy.jobs import transfer_manager
            self.transfer_manager = transfer_manager.TransferManager(self)
        # Start the job manager
        from galaxy.jobs import manager
        self.job_manager = manager.JobManager(self)
        self.application_stack.register_postfork_function(
            self.job_manager.start)
        self.proxy_manager = ProxyManager(self.config)

        from galaxy.workflow import scheduling_manager
        # Must be initialized after job_config.
        self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager(
            self)

        # Must be initialized after any component that might make use of stack messaging is configured. Alternatively if
        # it becomes more commonly needed we could create a prefork function registration method like we do with
        # postfork functions.
        self.application_stack.init_late_prefork()

        self.containers = {}
        if self.config.enable_beta_containers_interface:
            self.containers = build_container_interfaces(
                self.config.containers_config_file,
                containers_conf=self.config.containers_conf)

        # Configure handling of signals
        handlers = {}
        if self.heartbeat:
            handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
        self._configure_signal_handlers(handlers)

        self.database_heartbeat = DatabaseHeartbeat(
            application_stack=self.application_stack)
        self.application_stack.register_postfork_function(
            self.database_heartbeat.start)

        # Start web stack message handling
        self.application_stack.register_postfork_function(
            self.application_stack.start)

        self.model.engine.dispose()

        # Inject url_for for components to more easily optionally depend
        # on url_for.
        self.url_for = url_for

        self.server_starttime = int(time.time())  # used for cachebusting
        log.info("Galaxy app startup finished %s" % self.startup_timer)
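
Much of the start-up above is deliberately deferred through register_postfork_function so it only runs inside each forked worker process. A hypothetical stand-in illustrating that mechanism (SimpleStack is an illustration, not Galaxy's implementation):

class SimpleStack(object):
    """Hypothetical stand-in for the application stack's postfork registry."""

    def __init__(self):
        self._postfork_functions = []

    def register_postfork_function(self, func, *args, **kwargs):
        # Remember work that must run once per worker, after the process forks.
        self._postfork_functions.append((func, args, kwargs))

    def run_postfork(self):
        # Called in each worker after forking; runs the deferred start-up work.
        for func, args, kwargs in self._postfork_functions:
            func(*args, **kwargs)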
Example #7
    def __init__(self, **kwargs):
        print >> sys.stderr, "python path is: " + ", ".join(sys.path)
        self.name = 'galaxy'
        self.new_installation = False
        # Read config file and check for errors
        self.config = config.Configuration(**kwargs)
        self.config.check()
        config.configure_logging(self.config)
        self.configure_fluent_log()
        self._amqp_internal_connection_obj = galaxy.queues.connection_from_config(
            self.config)
        self._configure_tool_shed_registry()
        self._configure_object_store(fsmon=True)
        # Setup the database engine and ORM
        config_file = kwargs.get('global_conf', {}).get('__file__', None)
        if config_file:
            log.debug('Using "galaxy.ini" config file: %s', config_file)
        check_migrate_tools = self.config.check_migrate_tools
        self._configure_models(check_migrate_databases=True,
                               check_migrate_tools=check_migrate_tools,
                               config_file=config_file)

        # Manage installed tool shed repositories.
        from tool_shed.galaxy_install import installed_repository_manager
        self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager(
            self)

        self._configure_datatypes_registry(self.installed_repository_manager)
        galaxy.model.set_datatypes_registry(self.datatypes_registry)

        # Security helper
        self._configure_security()
        # Tag handler
        self.tag_handler = GalaxyTagManager(self)
        # Dataset Collection Plugins
        self.dataset_collections_service = DatasetCollectionManager(self)

        # Tool Data Tables
        self._configure_tool_data_tables(from_shed_config=False)
        # Load dbkey / genome build manager
        self._configure_genome_builds(data_table_name="__dbkeys__",
                                      load_old_style=True)

        # Genomes
        self.genomes = Genomes(self)
        # Data providers registry.
        self.data_provider_registry = DataProviderRegistry()

        # Initialize job metrics manager, needs to be in place before
        # config so per-destination modifications can be made.
        self.job_metrics = job_metrics.JobMetrics(
            self.config.job_metrics_config_file, app=self)

        # Initialize the job management configuration
        self.job_config = jobs.JobConfiguration(self)

        self._configure_toolbox()

        # Load Data Manager
        self.data_managers = DataManagers(self)
        # Load the update repository manager.
        self.update_repository_manager = update_repository_manager.UpdateRepositoryManager(
            self)
        # Load proprietary datatype converters and display applications.
        self.installed_repository_manager.load_proprietary_converters_and_display_applications()
        # Load datatype display applications defined in local datatypes_conf.xml
        self.datatypes_registry.load_display_applications(self)
        # Load datatype converters defined in local datatypes_conf.xml
        self.datatypes_registry.load_datatype_converters(self.toolbox)
        # Load external metadata tool
        self.datatypes_registry.load_external_metadata_tool(self.toolbox)
        # Load history import/export tools.
        load_history_imp_exp_tools(self.toolbox)
        # visualizations registry: associates resources with visualizations, controls how to render
        self.visualizations_registry = VisualizationsRegistry(
            self,
            directories_setting=self.config.visualization_plugins_directory,
            template_cache_dir=self.config.template_cache)
        # Load security policy.
        self.security_agent = self.model.security_agent
        self.host_security_agent = galaxy.security.HostAgent(
            model=self.security_agent.model,
            permitted_actions=self.security_agent.permitted_actions)
        # Load quota management.
        if self.config.enable_quotas:
            self.quota_agent = galaxy.quota.QuotaAgent(self.model)
        else:
            self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
        # Heartbeat for thread profiling
        self.heartbeat = None
        # Container for OpenID authentication routines
        if self.config.enable_openid:
            from galaxy.web.framework import openid_manager
            self.openid_manager = openid_manager.OpenIDManager(
                self.config.openid_consumer_cache_path)
            self.openid_providers = OpenIDProviders.from_file(
                self.config.openid_config_file)
        else:
            self.openid_providers = OpenIDProviders()
        # Start the heartbeat process if configured and available
        if self.config.use_heartbeat:
            from galaxy.util import heartbeat
            if heartbeat.Heartbeat:
                self.heartbeat = heartbeat.Heartbeat(
                    fname=self.config.heartbeat_log)
                self.heartbeat.daemon = True
                self.heartbeat.start()
        # Transfer manager client
        if self.config.get_bool('enable_beta_job_managers', False):
            from galaxy.jobs import transfer_manager
            self.transfer_manager = transfer_manager.TransferManager(self)
        # Start the job manager
        from galaxy.jobs import manager
        self.job_manager = manager.JobManager(self)
        self.job_manager.start()
        # FIXME: These are exposed directly for backward compatibility
        self.job_queue = self.job_manager.job_queue
        self.job_stop_queue = self.job_manager.job_stop_queue
        self.proxy_manager = ProxyManager(self.config)
        # Initialize the external service types
        self.external_service_types = external_service_types.ExternalServiceTypesCollection(
            self.config.external_service_type_config_file,
            self.config.external_service_type_path, self)

        from galaxy.workflow import scheduling_manager
        # Must be initialized after job_config.
        self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager(
            self)

        self.model.engine.dispose()
        self.server_starttime = int(time.time())  # used for cachebusting
 def __init__( self, **kwargs ):
     print >> sys.stderr, "python path is: " + ", ".join( sys.path )
     self.new_installation = False
     # Read config file and check for errors
     self.config = config.Configuration( **kwargs )
     self.config.check()
     config.configure_logging( self.config )
     # Determine the database url
     if self.config.database_connection:
         db_url = self.config.database_connection
     else:
         db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
     # Initialize database / check for appropriate schema version.  # If this
     # is a new installation, we'll restrict the tool migration messaging.
     from galaxy.model.migrate.check import create_or_verify_database
     create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self )
     # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
     from galaxy.tool_shed.migrate.check import verify_tools
     verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
     # Object store manager
     self.object_store = build_object_store_from_config(self.config)
     # Setup the database engine and ORM
     from galaxy.model import mapping
     self.model = mapping.init( self.config.file_path,
                                db_url,
                                self.config.database_engine_options,
                                database_query_profiling_proxy = self.config.database_query_profiling_proxy,
                                object_store = self.object_store )
     # Set up the tool sheds registry
     if os.path.isfile( self.config.tool_sheds_config ):
         self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
     else:
         self.tool_shed_registry = None
     # Manage installed tool shed repositories.
     self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
     # Create an empty datatypes registry.
     self.datatypes_registry = galaxy.datatypes.registry.Registry()
     # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories.  We
     # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
     # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
     # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
     # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence.  If there is a conflict
     # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
     # This will also load proprietary datatype converters and display applications.
     self.installed_repository_manager.load_proprietary_datatypes()
     # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
     self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
     galaxy.model.set_datatypes_registry( self.datatypes_registry )
     # Security helper
     self.security = security.SecurityHelper( id_secret=self.config.id_secret )
     # Tag handler
     self.tag_handler = GalaxyTagHandler()
     # Tool data tables
     self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_table_config_path )
     # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
     tool_configs = self.config.tool_configs
     if self.config.migrated_tools_config not in tool_configs:
         tool_configs.append( self.config.migrated_tools_config )
     self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
     # Search support for tools
     self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
     # If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
     if self.config.get_bool( 'enable_tool_shed_check', False ):
         from tool_shed import update_manager
         self.update_manager = update_manager.UpdateManager( self )
     # Load datatype display applications defined in local datatypes_conf.xml
     self.datatypes_registry.load_display_applications()
     # Load datatype converters defined in local datatypes_conf.xml
     self.datatypes_registry.load_datatype_converters( self.toolbox )
     # Load external metadata tool
     self.datatypes_registry.load_external_metadata_tool( self.toolbox )
     # Load history import/export tools.
     load_history_imp_exp_tools( self.toolbox )
     # Load genome indexer tool.
     load_genome_index_tools( self.toolbox )
     # Load security policy.
     self.security_agent = self.model.security_agent
     self.host_security_agent = galaxy.security.HostAgent( model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions )
     # Load quota management.
     if self.config.enable_quotas:
         self.quota_agent = galaxy.quota.QuotaAgent( self.model )
     else:
         self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
     # Heartbeat and memdump for thread / heap profiling
     self.heartbeat = None
     self.memdump = None
     self.memory_usage = None
     # Container for OpenID authentication routines
     if self.config.enable_openid:
         from galaxy.web.framework import openid_manager
         self.openid_manager = openid_manager.OpenIDManager( self.config.openid_consumer_cache_path )
         self.openid_providers = OpenIDProviders.from_file( self.config.openid_config )
     else:
         self.openid_providers = OpenIDProviders()
     # Start the heartbeat process if configured and available
     if self.config.use_heartbeat:
         from galaxy.util import heartbeat
         if heartbeat.Heartbeat:
             self.heartbeat = heartbeat.Heartbeat( fname=self.config.heartbeat_log )
             self.heartbeat.start()
     # Enable the memdump signal catcher if configured and available
     if self.config.use_memdump:
         from galaxy.util import memdump
         if memdump.Memdump:
             self.memdump = memdump.Memdump()
     # Transfer manager client
     if self.config.get_bool( 'enable_beta_job_managers', False ):
         from jobs import transfer_manager
         self.transfer_manager = transfer_manager.TransferManager( self )
     # Start the job manager
     from jobs import manager
     self.job_manager = manager.JobManager( self )
     # FIXME: These are exposed directly for backward compatibility
     self.job_queue = self.job_manager.job_queue
     self.job_stop_queue = self.job_manager.job_stop_queue
     # Initialize the external service types
     self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
def __main__():
    file_path = sys.argv.pop(1)
    # this is also the job_working_directory now
    tool_job_working_directory = tmp_dir = sys.argv.pop(1)
    galaxy.model.Dataset.file_path = file_path
    galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir

    config_root = sys.argv.pop(1)
    config_file_name = sys.argv.pop(1)
    if not os.path.isabs(config_file_name):
        config_file_name = os.path.join(config_root, config_file_name)

    # Set up reference to object store
    # First, read in the main config file for Galaxy; this is required because
    # the object store configuration is stored there
    conf_dict = load_app_properties(ini_file=config_file_name)
    # config object is required by ObjectStore class so create it now
    universe_config = config.Configuration(**conf_dict)
    universe_config.ensure_tempdir()
    object_store = build_object_store_from_config(universe_config)
    galaxy.model.Dataset.object_store = object_store

    # Set up datatypes registry
    datatypes_config = sys.argv.pop(1)
    datatypes_registry = galaxy.datatypes.registry.Registry()
    datatypes_registry.load_datatypes(root_dir=config_root,
                                      config=datatypes_config)
    galaxy.model.set_datatypes_registry(datatypes_registry)

    job_metadata = sys.argv.pop(1)
    existing_job_metadata_dict = {}
    new_job_metadata_dict = {}
    if job_metadata != "None" and os.path.exists(job_metadata):
        for line in open(job_metadata, 'r'):
            try:
                line = stringify_dictionary_keys(json.loads(line))
                if line['type'] == 'dataset':
                    existing_job_metadata_dict[line['dataset_id']] = line
                elif line['type'] == 'new_primary_dataset':
                    new_job_metadata_dict[line['filename']] = line
            except:
                continue

    for filenames in sys.argv[1:]:
        fields = filenames.split(',')
        filename_in = fields.pop(0)
        filename_kwds = fields.pop(0)
        filename_out = fields.pop(0)
        filename_results_code = fields.pop(0)
        dataset_filename_override = fields.pop(0)
        # Need to be careful with the way that these parameters are populated from the filename splitting,
        # because if a job is running when the server is updated, any existing external metadata command-lines
        # will not have info about the newly added override_metadata file
        if fields:
            override_metadata = fields.pop(0)
        else:
            override_metadata = None
        set_meta_kwds = stringify_dictionary_keys(
            json.load(open(filename_kwds))
        )  # load kwds; need to ensure our keywords are not unicode
        try:
            dataset = cPickle.load(open(filename_in))  # load DatasetInstance
            if dataset_filename_override:
                dataset.dataset.external_filename = dataset_filename_override
            files_path = os.path.abspath(
                os.path.join(tool_job_working_directory,
                             "dataset_%s_files" % (dataset.dataset.id)))
            dataset.dataset.external_extra_files_path = files_path
            if dataset.dataset.id in existing_job_metadata_dict:
                dataset.extension = existing_job_metadata_dict[
                    dataset.dataset.id].get('ext', dataset.extension)
            # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles
            if override_metadata:
                override_metadata = json.load(open(override_metadata))
                for metadata_name, metadata_file_override in override_metadata:
                    if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value(
                            metadata_file_override):
                        metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON(
                            metadata_file_override)
                    setattr(dataset.metadata, metadata_name,
                            metadata_file_override)
            file_dict = existing_job_metadata_dict.get(dataset.dataset.id, {})
            set_meta_with_tool_provided(dataset, file_dict, set_meta_kwds)
            dataset.metadata.to_JSON_dict(
                filename_out)  # write out results of set_meta
            json.dump((True, 'Metadata has been set successfully'),
                      open(filename_results_code,
                           'wb+'))  # setting metadata has succeeded
        except Exception, e:
            json.dump((False, str(e)),
                      open(filename_results_code,
                           'wb+'))  # setting metadata has failed somehow
Example #10
 def __init__( self, **kwargs ):
     print >> sys.stderr, "python path is: " + ", ".join( sys.path )
     # Read config file and check for errors
     self.config = config.Configuration( **kwargs )
     self.config.check()
     config.configure_logging( self.config )
     # Set up datatypes registry
     self.datatypes_registry = galaxy.datatypes.registry.Registry( self.config.root, self.config.datatypes_config )
     galaxy.model.set_datatypes_registry( self.datatypes_registry )
     # Determine the database url
     if self.config.database_connection:
         db_url = self.config.database_connection
     else:
         db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
     # Initialize database / check for appropriate schema version
     from galaxy.model.migrate.check import create_or_verify_database
     create_or_verify_database( db_url, self.config.database_engine_options )
     # Setup the database engine and ORM
     from galaxy.model import mapping
     self.model = mapping.init( self.config.file_path,
                                db_url,
                                self.config.database_engine_options )
     # Security helper
     self.security = security.SecurityHelper( id_secret=self.config.id_secret )
     # Initialize the tools
     self.toolbox = tools.ToolBox( self.config.tool_config, self.config.tool_path, self )
     # Load datatype converters
     self.datatypes_registry.load_datatype_converters( self.toolbox )
     # Load external metadata tool
     self.datatypes_registry.load_external_metadata_tool( self.toolbox )
     # Load datatype indexers
     self.datatypes_registry.load_datatype_indexers( self.toolbox )
     # Load security policy
     self.security_agent = self.model.security_agent
     self.host_security_agent = galaxy.security.HostAgent( model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions )
     # Heartbeat and memdump for thread / heap profiling
     self.heartbeat = None
     self.memdump = None
     self.memory_usage = None
     # Start the heartbeat process if configured and available
     if self.config.use_heartbeat:
         from galaxy.util import heartbeat
         if heartbeat.Heartbeat:
             self.heartbeat = heartbeat.Heartbeat()
             self.heartbeat.start()
     # Enable the memdump signal catcher if configured and available
     if self.config.use_memdump:
         from galaxy.util import memdump
         if memdump.Memdump:
             self.memdump = memdump.Memdump()
     # Enable memory_usage logging if configured
     if self.config.log_memory_usage:
         from galaxy.util import memory_usage
         self.memory_usage = memory_usage
     # Start the job queue
     self.job_manager = jobs.JobManager( self )
     # FIXME: These are exposed directly for backward compatibility
     self.job_queue = self.job_manager.job_queue
     self.job_stop_queue = self.job_manager.job_stop_queue
     # Start the cloud manager
     self.cloud_manager = cloud.CloudManager( self )
Example #11
def __main__():
    file_path = sys.argv.pop( 1 )
    tmp_dir = sys.argv.pop( 1 )
    galaxy.model.Dataset.file_path = file_path
    galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir

    config_root = sys.argv.pop( 1 )
    config_file_name = sys.argv.pop( 1 )
    if not os.path.isabs( config_file_name ):
        config_file_name = os.path.join( config_root, config_file_name )
    
    # Set up reference to object store
    # First, read in the main config file for Galaxy; this is required because
    # the object store configuration is stored there
    conf = ConfigParser.ConfigParser()
    conf.read(config_file_name)
    conf_dict = {}
    for section in conf.sections():
        for option in conf.options(section):
            try:
                conf_dict[option] = conf.get(section, option)
            except ConfigParser.InterpolationMissingOptionError:
                # Because this is not called from Paste Script, %(here)s variable
                # is not initialized in the config file so skip those fields -
                # just need not to use any such fields for the object store conf...
                log.debug("Did not load option %s from %s" % (option, config_file_name))
    # config object is required by ObjectStore class so create it now
    universe_config = config.Configuration(**conf_dict)
    object_store = build_object_store_from_config(universe_config)
    galaxy.model.Dataset.object_store = object_store
    
    # Set up datatypes registry
    datatypes_config = sys.argv.pop( 1 )
    datatypes_registry = galaxy.datatypes.registry.Registry()
    datatypes_registry.load_datatypes( root_dir=config_root, config=datatypes_config )
    galaxy.model.set_datatypes_registry( datatypes_registry )

    job_metadata = sys.argv.pop( 1 )
    ext_override = dict()
    if job_metadata != "None" and os.path.exists( job_metadata ):
        for line in open( job_metadata, 'r' ):
            try:
                line = stringify_dictionary_keys( from_json_string( line ) )
                assert line['type'] == 'dataset'
                ext_override[line['dataset_id']] = line['ext']
            except:
                continue
    for filenames in sys.argv[1:]:
        fields = filenames.split( ',' )
        filename_in = fields.pop( 0 )
        filename_kwds = fields.pop( 0 )
        filename_out = fields.pop( 0 )
        filename_results_code = fields.pop( 0 )
        dataset_filename_override = fields.pop( 0 )
        # Need to be careful with the way that these parameters are populated from the filename splitting,
        # because if a job is running when the server is updated, any existing external metadata command-lines
        # will not have info about the newly added override_metadata file
        if fields:
            override_metadata = fields.pop( 0 )
        else:
            override_metadata = None
        try:
            dataset = cPickle.load( open( filename_in ) ) #load DatasetInstance
            if dataset_filename_override:
                dataset.dataset.external_filename = dataset_filename_override
            if ext_override.get( dataset.dataset.id, None ):
                dataset.extension = ext_override[ dataset.dataset.id ]
            #Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles
            if override_metadata:
                override_metadata = simplejson.load( open( override_metadata ) )
                for metadata_name, metadata_file_override in override_metadata:
                    if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ):
                        metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override )
                    setattr( dataset.metadata, metadata_name, metadata_file_override )
            kwds = stringify_dictionary_keys( simplejson.load( open( filename_kwds ) ) )#load kwds; need to ensure our keywords are not unicode
            dataset.datatype.set_meta( dataset, **kwds )
            dataset.metadata.to_JSON_dict( filename_out ) # write out results of set_meta
            simplejson.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) ) #setting metadata has succeeded
        except Exception, e:
            simplejson.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) #setting metadata has failed somehow
# NOTE: truncated in the source; the function name, signature, and session attribute are reconstructed assumptions.
def get_dependency_paths(app):
    dependency_paths = []
    for tool_dependency in app.sa_session.query(galaxy.model.tool_shed_install.ToolDependency).all():
        dependency_paths.append(tool_dependency.installation_directory(app))
    return dependency_paths


if __name__ == '__main__':
    description = 'Clean out or list the contents of any tool dependency directory under the provided '
    description += 'tool dependency path. Remove any non-empty directories found if the '
    description += '--delete command line argument is provided.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        '--basepath',
        dest='basepath',
        required=True,
        action='store',
        metavar='name',
        help='The base path where tool dependencies are installed.')
    parser.add_argument('--dburi',
                        dest='dburi',
                        required=True,
                        action='store',
                        metavar='dburi',
                        help='The database URI to connect to.')
    args = parser.parse_args()
    database_connection = args.dburi
    config_dict = dict(database_connection=database_connection,
                       tool_dependency_dir=args.basepath)
    config = galaxy_config.Configuration(**config_dict)
    app = CleanUpDependencyApplication(config)
    sys.exit(main(args, app))
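
As this example shows, config.Configuration can also be driven from a plain dictionary for one-off scripts instead of a full ini file. A minimal sketch (all values below are placeholders):

from galaxy import config as galaxy_config

cfg = galaxy_config.Configuration(
    database_connection="sqlite:///database/universe.sqlite",
    tool_dependency_dir="./tool_dependencies",
)
cfg.check()  # validates/creates configured directories, as in the examples above
print(cfg.tool_dependency_dir)
print(cfg.get_bool("enable_quotas", False))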
Example #13
    def __init__( self, **kwargs ):
        if not log.handlers:
            # Paste didn't handle it, so we need a temporary basic log
            # configured.  The handler added here gets dumped and replaced with
            # an appropriately configured logger in configure_logging below.
            logging.basicConfig(level=logging.DEBUG)
        log.debug( "python path is: %s", ", ".join( sys.path ) )
        self.name = 'galaxy'
        self.new_installation = False
        # Read config file and check for errors
        self.config = config.Configuration( **kwargs )
        self.config.check()
        config.configure_logging( self.config )
        self.configure_fluent_log()
        self.config.reload_sanitize_whitelist(explicit='sanitize_whitelist_file' in kwargs)
        self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(self.config)
        # control_worker *can* be initialized with a queue, but here we don't
        # want to and we'll allow postfork to bind and start it.
        self.control_worker = GalaxyQueueWorker(self)

        self._configure_tool_shed_registry()
        self._configure_object_store( fsmon=True )
        # Setup the database engine and ORM
        config_file = kwargs.get( 'global_conf', {} ).get( '__file__', None )
        if config_file:
            log.debug( 'Using "galaxy.ini" config file: %s', config_file )
        check_migrate_tools = self.config.check_migrate_tools
        self._configure_models( check_migrate_databases=True, check_migrate_tools=check_migrate_tools, config_file=config_file )

        # Manage installed tool shed repositories.
        from tool_shed.galaxy_install import installed_repository_manager
        self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager( self )

        self._configure_datatypes_registry( self.installed_repository_manager )
        galaxy.model.set_datatypes_registry( self.datatypes_registry )

        # Security helper
        self._configure_security()
        # Tag handler
        self.tag_handler = GalaxyTagManager( self )
        # Dataset Collection Plugins
        self.dataset_collections_service = DatasetCollectionManager(self)

        # Tool Data Tables
        self._configure_tool_data_tables( from_shed_config=False )
        # Load dbkey / genome build manager
        self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )

        # Genomes
        self.genomes = Genomes( self )
        # Data providers registry.
        self.data_provider_registry = DataProviderRegistry()

        # Initialize job metrics manager, needs to be in place before
        # config so per-destination modifications can be made.
        self.job_metrics = job_metrics.JobMetrics( self.config.job_metrics_config_file, app=self )

        # Initialize the job management configuration
        self.job_config = jobs.JobConfiguration(self)

        self._configure_toolbox()

        # Load Data Manager
        self.data_managers = DataManagers( self )
        # Load the update repository manager.
        self.update_repository_manager = update_repository_manager.UpdateRepositoryManager( self )
        # Load proprietary datatype converters and display applications.
        self.installed_repository_manager.load_proprietary_converters_and_display_applications()
        # Load datatype display applications defined in local datatypes_conf.xml
        self.datatypes_registry.load_display_applications( self )
        # Load datatype converters defined in local datatypes_conf.xml
        self.datatypes_registry.load_datatype_converters( self.toolbox )
        # Load external metadata tool
        self.datatypes_registry.load_external_metadata_tool( self.toolbox )
        # Load history import/export tools.
        load_lib_tools( self.toolbox )
        # visualizations registry: associates resources with visualizations, controls how to render
        self.visualizations_registry = VisualizationsRegistry(
            self,
            directories_setting=self.config.visualization_plugins_directory,
            template_cache_dir=self.config.template_cache )
        # Tours registry
        self.tour_registry = ToursRegistry(self.config.tour_config_dir)
        # Load security policy.
        self.security_agent = self.model.security_agent
        self.host_security_agent = galaxy.security.HostAgent(
            model=self.security_agent.model,
            permitted_actions=self.security_agent.permitted_actions )
        # Load quota management.
        if self.config.enable_quotas:
            self.quota_agent = galaxy.quota.QuotaAgent( self.model )
        else:
            self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
        # Heartbeat for thread profiling
        self.heartbeat = None
        # Container for OpenID authentication routines
        if self.config.enable_openid:
            from galaxy.web.framework import openid_manager
            self.openid_manager = openid_manager.OpenIDManager( self.config.openid_consumer_cache_path )
            self.openid_providers = OpenIDProviders.from_file( self.config.openid_config_file )
        else:
            self.openid_providers = OpenIDProviders()
        from galaxy import auth
        self.auth_manager = auth.AuthManager( self )
        # Start the heartbeat process if configured and available (wait until
        # postfork if using uWSGI)
        if self.config.use_heartbeat:
            if heartbeat.Heartbeat:
                self.heartbeat = heartbeat.Heartbeat(
                    self.config,
                    period=self.config.heartbeat_interval,
                    fname=self.config.heartbeat_log
                )
                self.heartbeat.daemon = True

                @postfork
                def _start():
                    self.heartbeat.start()
                if not config.process_is_uwsgi:
                    _start()
        if self.config.sentry_dsn:
            import raven
            self.sentry_client = raven.Client(self.config.sentry_dsn)
        else:
            self.sentry_client = None
        # Transfer manager client
        if self.config.get_bool( 'enable_beta_job_managers', False ):
            from galaxy.jobs import transfer_manager
            self.transfer_manager = transfer_manager.TransferManager( self )
        # Start the job manager
        from galaxy.jobs import manager
        self.job_manager = manager.JobManager( self )
        self.job_manager.start()
        # FIXME: These are exposed directly for backward compatibility
        self.job_queue = self.job_manager.job_queue
        self.job_stop_queue = self.job_manager.job_stop_queue
        self.proxy_manager = ProxyManager( self.config )
        # Initialize the external service types
        self.external_service_types = external_service_types.ExternalServiceTypesCollection(
            self.config.external_service_type_config_file,
            self.config.external_service_type_path, self )

        from galaxy.workflow import scheduling_manager
        # Must be initialized after job_config.
        self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager( self )

        # Configure handling of signals
        handlers = {}
        if self.heartbeat:
            handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
        self._configure_signal_handlers( handlers )

        self.model.engine.dispose()
        self.server_starttime = int(time.time())  # used for cachebusting