Example #1
0
    def __init__(self, **kwargs):
        """Build the Galaxy application configuration from ``kwargs``
        (typically the parsed ini file contents), after folding in
        environment-variable overrides, and resolve all configured paths
        relative to ``root_dir``.

        Environment variables prefixed with ``CONFIG_OVERRIDE_PREFIX``
        always replace the corresponding ``kwargs`` entry; those prefixed
        with ``CONFIG_DEFAULT_PREFIX`` are used only when the key is not
        already present in ``kwargs``.
        """
        # Fold environment overrides/defaults into kwargs before anything
        # else reads them, so every option below sees the effective value.
        for key in os.environ:
            if key.startswith(CONFIG_OVERRIDE_PREFIX):
                config_key = key[len(CONFIG_OVERRIDE_PREFIX):].lower()
                kwargs[config_key] = os.environ[key]
            elif key.startswith(CONFIG_DEFAULT_PREFIX):
                config_key = key[len(CONFIG_DEFAULT_PREFIX):].lower()
                if config_key not in kwargs:
                    kwargs[config_key] = os.environ[key]

        self.config_dict = kwargs
        self.root = kwargs.get('root_dir', '.')

        # Resolve paths of other config files
        self.__parse_config_file_options(kwargs)

        # Collect the umask and primary gid from the environment.
        # 0o77 is the portable spelling of the old Python-2-only literal 077.
        self.umask = os.umask(0o77)  # get the current umask
        os.umask(self.umask)  # can't get w/o set, so set it back
        self.gid = os.getgid(
        )  # if running under newgrp(1) we'll need to fix the group of data created on the cluster

        # Database related configuration
        self.database = resolve_path(
            kwargs.get("database_file", "database/universe.sqlite"), self.root)
        self.database_connection = kwargs.get("database_connection", False)
        self.database_engine_options = get_database_engine_options(kwargs)
        self.database_create_tables = string_as_bool(
            kwargs.get("database_create_tables", "True"))
        self.database_query_profiling_proxy = string_as_bool(
            kwargs.get("database_query_profiling_proxy", "False"))

        # Don't set this to true for production databases, but probably should
        # default to True for sqlite databases.
        self.database_auto_migrate = string_as_bool(
            kwargs.get("database_auto_migrate", "False"))

        # Install database related configuration (if different).
        self.install_database_connection = kwargs.get(
            "install_database_connection", None)
        self.install_database_engine_options = get_database_engine_options(
            kwargs, model_prefix="install_")

        # Where dataset files are stored
        self.file_path = resolve_path(
            kwargs.get("file_path", "database/files"), self.root)
        self.new_file_path = resolve_path(
            kwargs.get("new_file_path", "database/tmp"), self.root)
        # Route all temporary files through Galaxy's tmp directory.
        tempfile.tempdir = self.new_file_path
        self.openid_consumer_cache_path = resolve_path(
            kwargs.get("openid_consumer_cache_path",
                       "database/openid_consumer_cache"), self.root)
        self.cookie_path = kwargs.get("cookie_path", "/")
        # Galaxy OpenID settings
        self.enable_openid = string_as_bool(kwargs.get('enable_openid', False))
        self.enable_quotas = string_as_bool(kwargs.get('enable_quotas', False))
        self.enable_unique_workflow_defaults = string_as_bool(
            kwargs.get('enable_unique_workflow_defaults', False))
        self.tool_path = resolve_path(kwargs.get("tool_path", "tools"),
                                      self.root)
        # NOTE: tool_data_path is resolved against the current working
        # directory, not self.root, unlike most other paths here.
        self.tool_data_path = resolve_path(
            kwargs.get("tool_data_path", "tool-data"), os.getcwd())
        self.builds_file_path = resolve_path(
            kwargs.get(
                "builds_file_path",
                os.path.join(self.tool_data_path, 'shared', 'ucsc',
                             'builds.txt')), self.root)
        self.len_file_path = resolve_path(
            kwargs.get(
                "len_file_path",
                os.path.join(self.tool_data_path, 'shared', 'ucsc', 'chrom')),
            self.root)
        self.test_conf = resolve_path(kwargs.get("test_conf", ""), self.root)
        # File describing the integrated (merged) tool panel layout.
        self.integrated_tool_panel_config = resolve_path(
            kwargs.get('integrated_tool_panel_config',
                       'integrated_tool_panel.xml'), self.root)
        # Tool panel filtering (global and per-user variants).
        self.tool_filters = listify(kwargs.get("tool_filters", []),
                                    do_strip=True)
        self.tool_label_filters = listify(kwargs.get("tool_label_filters", []),
                                          do_strip=True)
        self.tool_section_filters = listify(kwargs.get("tool_section_filters",
                                                       []),
                                            do_strip=True)

        self.user_tool_filters = listify(kwargs.get("user_tool_filters", []),
                                         do_strip=True)
        self.user_label_filters = listify(kwargs.get("user_tool_label_filters",
                                                     []),
                                          do_strip=True)
        self.user_section_filters = listify(kwargs.get(
            "user_tool_section_filters", []),
                                            do_strip=True)

        # Check for tools defined in the above non-shed tool configs (i.e., tool_conf.xml) that have
        # been migrated from the Galaxy code distribution to the Tool Shed.
        self.check_migrate_tools = string_as_bool(
            kwargs.get('check_migrate_tools', True))
        self.shed_tool_data_path = kwargs.get("shed_tool_data_path", None)
        if self.shed_tool_data_path:
            self.shed_tool_data_path = resolve_path(self.shed_tool_data_path,
                                                    self.root)
        else:
            self.shed_tool_data_path = self.tool_data_path
        self.manage_dependency_relationships = string_as_bool(
            kwargs.get('manage_dependency_relationships', False))
        self.running_functional_tests = string_as_bool(
            kwargs.get('running_functional_tests', False))
        # Clamp hours_between_check to a sane range, falling back to the
        # 12-hour default on any malformed value.  The string->float
        # conversion lives inside the try so that junk config input (e.g.
        # "often") degrades to the default instead of crashing startup.
        self.hours_between_check = kwargs.get('hours_between_check', 12)
        try:
            if isinstance(self.hours_between_check, basestring):
                self.hours_between_check = float(self.hours_between_check)
            if isinstance(self.hours_between_check, int):
                if self.hours_between_check < 1 or self.hours_between_check > 24:
                    self.hours_between_check = 12
            elif isinstance(self.hours_between_check, float):
                # If we're running functional tests, the minimum hours between check should be reduced to 0.001, or 3.6 seconds.
                if self.running_functional_tests:
                    if self.hours_between_check < 0.001 or self.hours_between_check > 24.0:
                        self.hours_between_check = 12.0
                else:
                    if self.hours_between_check < 1.0 or self.hours_between_check > 24.0:
                        self.hours_between_check = 12.0
            else:
                self.hours_between_check = 12
        except (TypeError, ValueError):
            self.hours_between_check = 12
        # NOTE(review): raw kwarg — a config string "False" would be truthy
        # here; confirm whether this should go through string_as_bool.
        self.update_integrated_tool_panel = kwargs.get(
            "update_integrated_tool_panel", True)
        self.enable_data_manager_user_view = string_as_bool(
            kwargs.get("enable_data_manager_user_view", "False"))
        self.galaxy_data_manager_data_path = kwargs.get(
            'galaxy_data_manager_data_path', self.tool_data_path)
        self.tool_secret = kwargs.get("tool_secret", "")
        self.id_secret = kwargs.get("id_secret",
                                    "USING THE DEFAULT IS NOT SECURE!")
        self.retry_metadata_internally = string_as_bool(
            kwargs.get("retry_metadata_internally", "True"))
        # Remote-user (external auth proxy) settings.
        self.use_remote_user = string_as_bool(
            kwargs.get("use_remote_user", "False"))
        self.normalize_remote_user_email = string_as_bool(
            kwargs.get("normalize_remote_user_email", "False"))
        self.remote_user_maildomain = kwargs.get("remote_user_maildomain",
                                                 None)
        self.remote_user_header = kwargs.get("remote_user_header",
                                             'HTTP_REMOTE_USER')
        self.remote_user_logout_href = kwargs.get("remote_user_logout_href",
                                                  None)
        # User account policy.
        self.require_login = string_as_bool(
            kwargs.get("require_login", "False"))
        self.allow_user_creation = string_as_bool(
            kwargs.get("allow_user_creation", "True"))
        self.allow_user_deletion = string_as_bool(
            kwargs.get("allow_user_deletion", "False"))
        self.allow_user_dataset_purge = string_as_bool(
            kwargs.get("allow_user_dataset_purge", "False"))
        self.allow_user_impersonation = string_as_bool(
            kwargs.get("allow_user_impersonation", "False"))
        self.new_user_dataset_access_role_default_private = string_as_bool(
            kwargs.get("new_user_dataset_access_role_default_private",
                       "False"))
        self.collect_outputs_from = [
            x.strip() for x in kwargs.get(
                'collect_outputs_from',
                'new_file_path,job_working_directory').lower().split(',')
        ]
        self.template_path = resolve_path(
            kwargs.get("template_path", "templates"), self.root)
        self.template_cache = resolve_path(
            kwargs.get("template_cache_path", "database/compiled_templates"),
            self.root)
        # Job runner worker pools and cluster file staging.
        self.local_job_queue_workers = int(
            kwargs.get("local_job_queue_workers", "5"))
        self.cluster_job_queue_workers = int(
            kwargs.get("cluster_job_queue_workers", "3"))
        self.job_queue_cleanup_interval = int(
            kwargs.get("job_queue_cleanup_interval", "5"))
        self.cluster_files_directory = os.path.abspath(
            kwargs.get("cluster_files_directory", "database/pbs"))
        self.job_working_directory = resolve_path(
            kwargs.get("job_working_directory",
                       "database/job_working_directory"), self.root)
        self.cleanup_job = kwargs.get("cleanup_job", "always")
        self.outputs_to_working_directory = string_as_bool(
            kwargs.get('outputs_to_working_directory', False))
        self.output_size_limit = int(kwargs.get('output_size_limit', 0))
        self.retry_job_output_collection = int(
            kwargs.get('retry_job_output_collection', 0))
        # job_walltime is an "H:M:S" string; keep both the raw string and a
        # parsed timedelta.  Malformed values will raise ValueError here.
        self.job_walltime = kwargs.get('job_walltime', None)
        self.job_walltime_delta = None
        if self.job_walltime is not None:
            h, m, s = [int(v) for v in self.job_walltime.split(':')]
            self.job_walltime_delta = timedelta(0, s, 0, 0, m, h)
        self.admin_users = kwargs.get("admin_users", "")
        self.admin_users_list = [
            u.strip() for u in self.admin_users.split(',') if u
        ]
        self.reset_password_length = int(
            kwargs.get('reset_password_length', '15'))
        self.mailing_join_addr = kwargs.get('mailing_join_addr',
                                            '*****@*****.**')
        self.error_email_to = kwargs.get('error_email_to', None)
        self.activation_email = kwargs.get('activation_email', None)
        self.user_activation_on = string_as_bool(
            kwargs.get('user_activation_on', False))
        self.activation_grace_period = kwargs.get('activation_grace_period',
                                                  None)
        self.inactivity_box_content = kwargs.get('inactivity_box_content',
                                                 None)
        self.terms_url = kwargs.get('terms_url', None)
        self.instance_resource_url = kwargs.get('instance_resource_url', None)
        self.registration_warning_message = kwargs.get(
            'registration_warning_message', None)
        #  Get the disposable email domains blacklist file and its contents
        self.blacklist_location = kwargs.get('blacklist_file', None)
        self.blacklist_content = None
        if self.blacklist_location is not None:
            self.blacklist_file = resolve_path(self.blacklist_location,
                                               self.root)
            try:
                with open(self.blacklist_file) as blacklist:
                    self.blacklist_content = [
                        line.rstrip() for line in blacklist.readlines()
                    ]
            except IOError:
                # Best-effort: a missing blacklist file disables the check
                # rather than preventing startup.
                print(
                    "CONFIGURATION ERROR: Can't open supplied blacklist file from path: "
                    + str(self.blacklist_file))
        # Outgoing mail (SMTP) settings.
        self.smtp_server = kwargs.get('smtp_server', None)
        self.smtp_username = kwargs.get('smtp_username', None)
        self.smtp_password = kwargs.get('smtp_password', None)
        self.smtp_ssl = kwargs.get('smtp_ssl', None)
        # Deliberately the *string* 'None' by default; resolved to a real
        # boolean further below once server_names is known.
        self.track_jobs_in_database = kwargs.get('track_jobs_in_database',
                                                 'None')
        self.start_job_runners = listify(kwargs.get('start_job_runners', ''))
        self.expose_dataset_path = string_as_bool(
            kwargs.get('expose_dataset_path', 'False'))
        # External Service types used in sample tracking
        self.external_service_type_path = resolve_path(
            kwargs.get('external_service_type_path', 'external_service_types'),
            self.root)
        # Tasked job runner.
        self.use_tasked_jobs = string_as_bool(
            kwargs.get('use_tasked_jobs', False))
        self.local_task_queue_workers = int(
            kwargs.get("local_task_queue_workers", 2))
        # The transfer manager and deferred job queue
        self.enable_beta_job_managers = string_as_bool(
            kwargs.get('enable_beta_job_managers', 'False'))
        # Per-user Job concurrency limitations
        self.cache_user_job_count = string_as_bool(
            kwargs.get('cache_user_job_count', False))
        self.user_job_limit = int(kwargs.get('user_job_limit', 0))
        self.registered_user_job_limit = int(
            kwargs.get('registered_user_job_limit', self.user_job_limit))
        self.anonymous_user_job_limit = int(
            kwargs.get('anonymous_user_job_limit', self.user_job_limit))
        self.default_cluster_job_runner = kwargs.get(
            'default_cluster_job_runner', 'local:///')
        # PBS / DRMAA cluster integration.
        self.pbs_application_server = kwargs.get('pbs_application_server', "")
        self.pbs_dataset_server = kwargs.get('pbs_dataset_server', "")
        self.pbs_dataset_path = kwargs.get('pbs_dataset_path', "")
        self.pbs_stage_path = kwargs.get('pbs_stage_path', "")
        self.drmaa_external_runjob_script = kwargs.get(
            'drmaa_external_runjob_script', None)
        self.drmaa_external_killjob_script = kwargs.get(
            'drmaa_external_killjob_script', None)
        self.external_chown_script = kwargs.get('external_chown_script', None)
        self.environment_setup_file = kwargs.get('environment_setup_file',
                                                 None)
        # Debug / monitoring toggles.
        self.use_heartbeat = string_as_bool(
            kwargs.get('use_heartbeat', 'False'))
        self.use_memdump = string_as_bool(kwargs.get('use_memdump', 'False'))
        self.log_actions = string_as_bool(kwargs.get('log_actions', 'False'))
        self.log_events = string_as_bool(kwargs.get('log_events', 'False'))
        self.sanitize_all_html = string_as_bool(
            kwargs.get('sanitize_all_html', True))
        self.serve_xss_vulnerable_mimetypes = string_as_bool(
            kwargs.get('serve_xss_vulnerable_mimetypes', False))
        self.enable_old_display_applications = string_as_bool(
            kwargs.get("enable_old_display_applications", "True"))
        # UI / branding.
        self.brand = kwargs.get('brand', None)
        self.welcome_url = kwargs.get('welcome_url', '/static/welcome.html')
        # Configuration for the message box directly below the masthead.
        # NOTE(review): message_box_visible is a raw kwarg, so a config
        # string "False" would be truthy — confirm intended handling.
        self.message_box_visible = kwargs.get('message_box_visible', False)
        self.message_box_content = kwargs.get('message_box_content', None)
        self.message_box_class = kwargs.get('message_box_class', 'info')
        self.support_url = kwargs.get(
            'support_url', 'https://wiki.galaxyproject.org/Support')
        self.wiki_url = kwargs.get('wiki_url',
                                   'http://wiki.galaxyproject.org/')
        self.blog_url = kwargs.get('blog_url', None)
        self.screencasts_url = kwargs.get('screencasts_url', None)
        # Data library import directories.
        self.library_import_dir = kwargs.get('library_import_dir', None)
        self.user_library_import_dir = kwargs.get('user_library_import_dir',
                                                  None)
        # Searching data libraries
        self.enable_lucene_library_search = string_as_bool(
            kwargs.get('enable_lucene_library_search', False))
        self.enable_whoosh_library_search = string_as_bool(
            kwargs.get('enable_whoosh_library_search', False))
        self.whoosh_index_dir = resolve_path(
            kwargs.get("whoosh_index_dir", "database/whoosh_indexes"),
            self.root)
        # FTP upload integration.
        self.ftp_upload_dir = kwargs.get('ftp_upload_dir', None)
        self.ftp_upload_dir_identifier = kwargs.get(
            'ftp_upload_dir_identifier',
            'email')  # attribute on user - email, username, id, etc...
        self.ftp_upload_site = kwargs.get('ftp_upload_site', None)
        self.allow_library_path_paste = kwargs.get('allow_library_path_paste',
                                                   False)
        self.disable_library_comptypes = kwargs.get(
            'disable_library_comptypes', '').lower().split(',')
        # Location for tool dependencies.
        if 'tool_dependency_dir' in kwargs:
            self.tool_dependency_dir = resolve_path(
                kwargs.get("tool_dependency_dir"), self.root)
            # Setting the following flag to true will ultimately cause tool dependencies
            # to be located in the shell environment and used by the job that is executing
            # the tool.
            self.use_tool_dependencies = True
        else:
            self.tool_dependency_dir = None
            self.use_tool_dependencies = False
        # Configuration options for taking advantage of nginx features
        self.upstream_gzip = string_as_bool(kwargs.get('upstream_gzip', False))
        self.apache_xsendfile = string_as_bool(
            kwargs.get('apache_xsendfile', False))
        self.nginx_x_accel_redirect_base = kwargs.get(
            'nginx_x_accel_redirect_base', False)
        self.nginx_x_archive_files_base = kwargs.get(
            'nginx_x_archive_files_base', False)
        self.nginx_upload_store = kwargs.get('nginx_upload_store', False)
        self.nginx_upload_path = kwargs.get('nginx_upload_path', False)
        if self.nginx_upload_store:
            self.nginx_upload_store = os.path.abspath(self.nginx_upload_store)
        # Object store (dataset storage backend) configuration.
        self.object_store = kwargs.get('object_store', 'disk')
        self.object_store_check_old_style = string_as_bool(
            kwargs.get('object_store_check_old_style', False))
        self.object_store_cache_path = resolve_path(
            kwargs.get("object_store_cache_path",
                       "database/object_store_cache"), self.root)
        # Handle AWS-specific config options for backward compatibility
        if kwargs.get('aws_access_key', None) is not None:
            self.os_access_key = kwargs.get('aws_access_key', None)
            self.os_secret_key = kwargs.get('aws_secret_key', None)
            self.os_bucket_name = kwargs.get('s3_bucket', None)
            self.os_use_reduced_redundancy = kwargs.get(
                'use_reduced_redundancy', False)
        else:
            self.os_access_key = kwargs.get('os_access_key', None)
            self.os_secret_key = kwargs.get('os_secret_key', None)
            self.os_bucket_name = kwargs.get('os_bucket_name', None)
            self.os_use_reduced_redundancy = kwargs.get(
                'os_use_reduced_redundancy', False)
        self.os_host = kwargs.get('os_host', None)
        self.os_port = kwargs.get('os_port', None)
        self.os_is_secure = string_as_bool(kwargs.get('os_is_secure', True))
        self.os_conn_path = kwargs.get('os_conn_path', '/')
        self.object_store_cache_size = float(
            kwargs.get('object_store_cache_size', -1))
        self.distributed_object_store_config_file = kwargs.get(
            'distributed_object_store_config_file', None)
        if self.distributed_object_store_config_file is not None:
            self.distributed_object_store_config_file = resolve_path(
                self.distributed_object_store_config_file, self.root)
        self.irods_root_collection_path = kwargs.get(
            'irods_root_collection_path', None)
        self.irods_default_resource = kwargs.get('irods_default_resource',
                                                 None)
        # Parse global_conf and save the parser
        global_conf = kwargs.get('global_conf', None)
        global_conf_parser = ConfigParser.ConfigParser()
        self.config_file = None
        self.global_conf_parser = global_conf_parser
        if global_conf and "__file__" in global_conf:
            self.config_file = global_conf['__file__']
            global_conf_parser.read(global_conf['__file__'])
        # Heartbeat log file name override
        if global_conf is not None:
            self.heartbeat_log = global_conf.get('heartbeat_log',
                                                 'heartbeat.log')
        # Determine which 'server:' this is
        self.server_name = 'main'
        for arg in sys.argv:
            # Crummy, but PasteScript does not give you a way to determine this
            if arg.lower().startswith('--server-name='):
                self.server_name = arg.split('=', 1)[-1]
        # Store all configured server names
        self.server_names = []
        for section in global_conf_parser.sections():
            if section.startswith('server:'):
                self.server_names.append(section.replace('server:', '', 1))
        # Store advanced job management config
        self.job_manager = kwargs.get('job_manager', self.server_name).strip()
        self.job_handlers = [
            x.strip()
            for x in kwargs.get('job_handlers', self.server_name).split(',')
        ]
        self.default_job_handlers = [
            x.strip() for x in kwargs.get('default_job_handlers', ','.join(
                self.job_handlers)).split(',')
        ]
        # Use database for job running IPC unless this is a standalone server or explicitly set in the config
        if self.track_jobs_in_database == 'None':
            self.track_jobs_in_database = False
            if len(self.server_names) > 1:
                self.track_jobs_in_database = True
        else:
            self.track_jobs_in_database = string_as_bool(
                self.track_jobs_in_database)
        # Store per-tool runner configs
        self.tool_handlers = self.__read_tool_job_config(
            global_conf_parser, 'galaxy:tool_handlers', 'name')
        self.tool_runners = self.__read_tool_job_config(
            global_conf_parser, 'galaxy:tool_runners', 'url')
        # Cloud configuration options
        self.enable_cloud_launch = string_as_bool(
            kwargs.get('enable_cloud_launch', False))
        self.cloudlaunch_default_ami = kwargs.get('cloudlaunch_default_ami',
                                                  'ami-a7dbf6ce')
        # Galaxy messaging (AMQP) configuration options
        self.amqp = {}
        try:
            amqp_config = global_conf_parser.items("galaxy_amqp")
        except ConfigParser.NoSectionError:
            amqp_config = {}
        for k, v in amqp_config:
            self.amqp[k] = v
        # Galaxy internal control queue configuration.
        # If specified in universe, use it, otherwise we use whatever 'real'
        # database is specified.  Lastly, we create and use new sqlite database
        # (to minimize locking) as a final option.
        if 'amqp_internal_connection' in kwargs:
            self.amqp_internal_connection = kwargs.get(
                'amqp_internal_connection')
            # TODO Get extra amqp args as necessary for ssl
        elif 'database_connection' in kwargs:
            self.amqp_internal_connection = "sqlalchemy+" + self.database_connection
        else:
            self.amqp_internal_connection = "sqlalchemy+sqlite:///%s?isolation_level=IMMEDIATE" % resolve_path(
                "database/control.sqlite", self.root)
        # Biostar (community Q&A) integration.
        self.biostar_url = kwargs.get('biostar_url', None)
        self.biostar_key_name = kwargs.get('biostar_key_name', None)
        self.biostar_key = kwargs.get('biostar_key', None)
        self.biostar_enable_bug_reports = string_as_bool(
            kwargs.get('biostar_enable_bug_reports', True))
        self.biostar_never_authenticate = string_as_bool(
            kwargs.get('biostar_never_authenticate', False))
        self.pretty_datetime_format = expand_pretty_datetime_format(
            kwargs.get('pretty_datetime_format', '$locale (UTC)'))
        self.master_api_key = kwargs.get('master_api_key', None)
        if self.master_api_key == "changethis":  # default in sample config file
            raise Exception(
                "Insecure configuration, please change master_api_key to something other than default (changethis)"
            )

        # Experimental: This will not be enabled by default and will hide
        # nonproduction code.
        # The api_folders refers to whether the API exposes the /folders section.
        self.api_folders = string_as_bool(kwargs.get('api_folders', False))
        # This is for testing new library browsing capabilities.
        self.new_lib_browse = string_as_bool(
            kwargs.get('new_lib_browse', False))
        # Error logging with sentry
        self.sentry_dsn = kwargs.get('sentry_dsn', None)
        # Logging with fluentd
        self.fluent_log = string_as_bool(kwargs.get('fluent_log', False))
        self.fluent_host = kwargs.get('fluent_host', 'localhost')
        self.fluent_port = int(kwargs.get('fluent_port', 24224))
        # directory where the visualization/registry searches for plugins
        self.visualization_plugins_directory = kwargs.get(
            'visualization_plugins_directory', 'config/plugins/visualizations')
        # Default chunk size for chunkable datatypes -- 64k
        self.display_chunk_size = int(kwargs.get('display_chunk_size', 65536))

        # Citation cache storage.
        # NOTE(review): these use self.resolve_path while every other path
        # above uses the module-level resolve_path(..., self.root) — confirm
        # the class actually defines a resolve_path method.
        self.citation_cache_type = kwargs.get("citation_cache_type", "file")
        self.citation_cache_data_dir = self.resolve_path(
            kwargs.get("citation_cache_data_dir", "database/citations/data"))
        self.citation_cache_lock_dir = self.resolve_path(
            kwargs.get("citation_cache_lock_dir", "database/citations/locks"))
Example #2
0
    def __init__(self, **kwargs):
        self.config_dict = kwargs
        self.root = kwargs.get('root_dir', '.')

        # Resolve paths of other config files
        self.__parse_config_file_options(kwargs)

        # Collect the umask and primary gid from the environment
        self.umask = os.umask(077)  # get the current umask
        os.umask(self.umask)  # can't get w/o set, so set it back
        self.gid = os.getgid(
        )  # if running under newgrp(1) we'll need to fix the group of data created on the cluster
        self.version_major = VERSION_MAJOR
        self.version = VERSION
        # Database related configuration
        self.database = resolve_path(
            kwargs.get("database_file", "database/community.sqlite"),
            self.root)
        self.database_connection = kwargs.get("database_connection", False)
        self.database_engine_options = get_database_engine_options(kwargs)
        self.database_create_tables = string_as_bool(
            kwargs.get("database_create_tables", "True"))
        # Repository and Tool search API
        self.toolshed_search_on = string_as_bool(
            kwargs.get("toolshed_search_on", True))
        self.whoosh_index_dir = kwargs.get("whoosh_index_dir",
                                           'database/toolshed_whoosh_indexes')
        self.repo_name_boost = kwargs.get("repo_name_boost", 0.9)
        self.repo_description_boost = kwargs.get("repo_description_boost", 0.6)
        self.repo_long_description_boost = kwargs.get(
            "repo_long_description_boost", 0.5)
        self.repo_homepage_url_boost = kwargs.get("repo_homepage_url_boost",
                                                  0.3)
        self.repo_remote_repository_url_boost = kwargs.get(
            "repo_remote_repository_url_boost", 0.2)
        self.repo_owner_username_boost = kwargs.get(
            "repo_owner_username_boost", 0.3)
        self.tool_name_boost = kwargs.get("tool_name_boost", 1.2)
        self.tool_description_boost = kwargs.get("tool_description_boost", 0.6)
        self.tool_help_boost = kwargs.get("tool_help_boost", 0.4)
        self.tool_repo_owner_username = kwargs.get("tool_repo_owner_username",
                                                   0.3)
        # Analytics
        self.ga_code = kwargs.get("ga_code", None)
        self.session_duration = int(kwargs.get('session_duration', 0))
        # Where dataset files are stored
        self.file_path = resolve_path(
            kwargs.get("file_path", "database/community_files"), self.root)
        self.new_file_path = resolve_path(
            kwargs.get("new_file_path", "database/tmp"), self.root)
        self.cookie_path = kwargs.get("cookie_path", "/")
        self.enable_quotas = string_as_bool(kwargs.get('enable_quotas', False))
        self.id_secret = kwargs.get("id_secret",
                                    "USING THE DEFAULT IS NOT SECURE!")
        # Tool stuff
        self.tool_path = resolve_path(kwargs.get("tool_path", "tools"),
                                      self.root)
        self.tool_secret = kwargs.get("tool_secret", "")
        self.tool_data_path = resolve_path(
            kwargs.get("tool_data_path", "shed-tool-data"), os.getcwd())
        self.tool_data_table_config_path = None
        self.integrated_tool_panel_config = resolve_path(
            kwargs.get('integrated_tool_panel_config',
                       'integrated_tool_panel.xml'), self.root)
        self.builds_file_path = resolve_path(
            kwargs.get(
                "builds_file_path",
                os.path.join(self.tool_data_path, 'shared', 'ucsc',
                             'builds.txt')), self.root)
        self.len_file_path = resolve_path(
            kwargs.get(
                "len_file_path",
                os.path.join(self.tool_data_path, 'shared', 'ucsc', 'chrom')),
            self.root)
        self.ftp_upload_dir = kwargs.get('ftp_upload_dir', None)
        # Install and test framework for testing tools contained in repositories.
        self.display_legacy_test_results = string_as_bool(
            kwargs.get('display_legacy_test_results', True))
        self.num_tool_test_results_saved = kwargs.get(
            'num_tool_test_results_saved', 5)
        self.update_integrated_tool_panel = False
        # Galaxy flavor Docker Image
        self.enable_galaxy_flavor_docker_image = string_as_bool(
            kwargs.get("enable_galaxy_flavor_docker_image", "False"))
        self.use_remote_user = string_as_bool(
            kwargs.get("use_remote_user", "False"))
        self.user_activation_on = kwargs.get('user_activation_on', None)
        self.activation_grace_period = kwargs.get('activation_grace_period',
                                                  None)
        self.inactivity_box_content = kwargs.get('inactivity_box_content',
                                                 None)
        self.registration_warning_message = kwargs.get(
            'registration_warning_message', None)
        self.terms_url = kwargs.get('terms_url', None)
        self.blacklist_location = kwargs.get('blacklist_file', None)
        self.blacklist_content = None
        self.remote_user_maildomain = kwargs.get("remote_user_maildomain",
                                                 None)
        self.remote_user_header = kwargs.get("remote_user_header",
                                             'HTTP_REMOTE_USER')
        self.remote_user_logout_href = kwargs.get("remote_user_logout_href",
                                                  None)
        self.remote_user_secret = kwargs.get("remote_user_secret", None)
        self.require_login = string_as_bool(
            kwargs.get("require_login", "False"))
        self.allow_user_creation = string_as_bool(
            kwargs.get("allow_user_creation", "True"))
        self.allow_user_deletion = string_as_bool(
            kwargs.get("allow_user_deletion", "False"))
        self.enable_openid = string_as_bool(kwargs.get('enable_openid', False))
        self.template_path = resolve_path(
            kwargs.get("template_path", "templates"), self.root)
        self.template_cache = resolve_path(
            kwargs.get("template_cache_path",
                       "database/compiled_templates/community"), self.root)
        self.admin_users = kwargs.get("admin_users", "")
        self.admin_users_list = [
            u.strip() for u in self.admin_users.split(',') if u
        ]
        self.mailing_join_addr = kwargs.get('mailing_join_addr',
                                            "*****@*****.**")
        self.error_email_to = kwargs.get('error_email_to', None)
        self.smtp_server = kwargs.get('smtp_server', None)
        self.smtp_username = kwargs.get('smtp_username', None)
        self.smtp_password = kwargs.get('smtp_password', None)
        self.smtp_ssl = kwargs.get('smtp_ssl', None)
        self.start_job_runners = kwargs.get('start_job_runners', None)
        self.email_from = kwargs.get('email_from', None)
        self.nginx_upload_path = kwargs.get('nginx_upload_path', False)
        self.log_actions = string_as_bool(kwargs.get('log_actions', 'False'))
        self.brand = kwargs.get('brand', None)
        self.pretty_datetime_format = expand_pretty_datetime_format(
            kwargs.get('pretty_datetime_format', '$locale (UTC)'))
        # Configuration for the message box directly below the masthead.
        self.message_box_visible = kwargs.get('message_box_visible', False)
        self.message_box_content = kwargs.get('message_box_content', None)
        self.message_box_class = kwargs.get('message_box_class', 'info')
        self.support_url = kwargs.get(
            'support_url', 'https://wiki.galaxyproject.org/Support')
        self.wiki_url = kwargs.get('wiki_url',
                                   'https://wiki.galaxyproject.org/')
        self.blog_url = kwargs.get('blog_url', None)
        self.biostar_url = kwargs.get('biostar_url', None)
        self.screencasts_url = kwargs.get('screencasts_url', None)
        self.log_events = False
        self.cloud_controller_instance = False
        self.server_name = ''
        self.job_manager = ''
        self.default_job_handlers = []
        self.default_cluster_job_runner = 'local:///'
        self.job_handlers = []
        self.tool_handlers = []
        self.tool_runners = []
        # Error logging with sentry
        self.sentry_dsn = kwargs.get('sentry_dsn', None)
        # Where the tool shed hgweb.config file is stored - the default is the Galaxy installation directory.
        self.hgweb_config_dir = resolve_path(
            kwargs.get('hgweb_config_dir', ''), self.root)
        self.disable_push = string_as_bool(kwargs.get("disable_push", "True"))
        # Proxy features
        self.apache_xsendfile = kwargs.get('apache_xsendfile', False)
        self.nginx_x_accel_redirect_base = kwargs.get(
            'nginx_x_accel_redirect_base', False)
        self.drmaa_external_runjob_script = kwargs.get(
            'drmaa_external_runjob_script', None)
        # Parse global_conf and save the parser
        global_conf = kwargs.get('global_conf', None)
        global_conf_parser = ConfigParser.ConfigParser()
        self.global_conf_parser = global_conf_parser
        if global_conf and "__file__" in global_conf:
            global_conf_parser.read(global_conf['__file__'])
        self.running_functional_tests = string_as_bool(
            kwargs.get('running_functional_tests', False))
        self.citation_cache_type = kwargs.get("citation_cache_type", "file")
        self.citation_cache_data_dir = resolve_path(
            kwargs.get("citation_cache_data_dir",
                       "database/tool_shed_citations/data"), self.root)
        self.citation_cache_lock_dir = resolve_path(
            kwargs.get("citation_cache_lock_dir",
                       "database/tool_shed_citations/locks"), self.root)
    def __init__( self, **kwargs ):
        """Initialize application configuration from parsed config-file options.

        ``kwargs`` is the flat dict of option name -> raw string/values read
        from the config file (plus anything the caller injected).  Each option
        is normalized here (booleans via ``string_as_bool``, paths via
        ``resolve_path`` relative to ``root_dir``) and stored as an instance
        attribute.  NOTE: assignment order matters — several later defaults
        (e.g. ``builds_file_path``) are derived from earlier attributes such
        as ``tool_data_path``.
        """
        self.config_dict = kwargs
        self.root = kwargs.get( 'root_dir', '.' )

        # Resolve paths of other config files
        self.__parse_config_file_options( kwargs )

        # Collect the umask and primary gid from the environment
        self.umask = os.umask( 077 )  # get the current umask
        os.umask( self.umask )  # can't get w/o set, so set it back
        self.gid = os.getgid()  # if running under newgrp(1) we'll need to fix the group of data created on the cluster

        # Database related configuration
        self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
        self.database_connection = kwargs.get( "database_connection", False )
        self.database_engine_options = get_database_engine_options( kwargs )
        self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
        self.database_query_profiling_proxy = string_as_bool( kwargs.get( "database_query_profiling_proxy", "False" ) )

        # Don't set this to true for production databases, but probably should
        # default to True for sqlite databases.
        self.database_auto_migrate = string_as_bool( kwargs.get( "database_auto_migrate", "False" ) )

        # Install database related configuration (if different).
        self.install_database_connection = kwargs.get( "install_database_connection", None )
        self.install_database_engine_options = get_database_engine_options( kwargs, model_prefix="install_" )

        # Where dataset files are stored
        self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
        self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
        # NOTE: this mutates module-level state — every tempfile created by
        # this process from here on lands in new_file_path.
        tempfile.tempdir = self.new_file_path
        self.openid_consumer_cache_path = resolve_path( kwargs.get( "openid_consumer_cache_path", "database/openid_consumer_cache" ), self.root )
        self.cookie_path = kwargs.get( "cookie_path", "/" )
        # Galaxy OpenID settings
        self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
        self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
        self.enable_unique_workflow_defaults = string_as_bool( kwargs.get( 'enable_unique_workflow_defaults', False ) )
        self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
        # tool_data_path is resolved against the CWD, not self.root, unlike
        # most other paths here.
        self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
        self.builds_file_path = resolve_path( kwargs.get( "builds_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'builds.txt') ), self.root )
        self.len_file_path = resolve_path( kwargs.get( "len_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'chrom') ), self.root )
        self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
        # The value of migrated_tools_config is the file reserved for containing only those tools that have been eliminated from the distribution
        # and moved to the tool shed.
        self.integrated_tool_panel_config = resolve_path( kwargs.get( 'integrated_tool_panel_config', 'integrated_tool_panel.xml' ), self.root )
        self.tool_filters = listify( kwargs.get( "tool_filters", [] ), do_strip=True )
        self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ), do_strip=True )
        self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ), do_strip=True )

        self.user_tool_filters = listify( kwargs.get( "user_tool_filters", [] ), do_strip=True )
        self.user_label_filters = listify( kwargs.get( "user_tool_label_filters", [] ), do_strip=True )
        self.user_section_filters = listify( kwargs.get( "user_tool_section_filters", [] ), do_strip=True )

        # Check for tools defined in the above non-shed tool configs (i.e., tool_conf.xml) tht have
        # been migrated from the Galaxy code distribution to the Tool Shed.
        self.check_migrate_tools = string_as_bool( kwargs.get( 'check_migrate_tools', True ) )
        self.shed_tool_data_path = kwargs.get( "shed_tool_data_path", None )
        if self.shed_tool_data_path:
            self.shed_tool_data_path = resolve_path( self.shed_tool_data_path, self.root )
        else:
            # Fall back to the (already resolved) tool_data_path.
            self.shed_tool_data_path = self.tool_data_path
        self.manage_dependency_relationships = string_as_bool( kwargs.get( 'manage_dependency_relationships', False ) )
        self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
        # Clamp hours_between_check to a sane range; out-of-range or
        # unparseable values fall back to 12 hours.  Under functional tests a
        # much smaller minimum (0.001 h = 3.6 s) is allowed.
        self.hours_between_check = kwargs.get( 'hours_between_check', 12 )
        if isinstance( self.hours_between_check, basestring ):
            self.hours_between_check = float( self.hours_between_check )
        try:
            if isinstance( self.hours_between_check, int ):
                if self.hours_between_check < 1 or self.hours_between_check > 24:
                    self.hours_between_check = 12
            elif isinstance( self.hours_between_check, float ):
                # If we're running functional tests, the minimum hours between check should be reduced to 0.001, or 3.6 seconds.
                if self.running_functional_tests:
                    if self.hours_between_check < 0.001 or self.hours_between_check > 24.0:
                        self.hours_between_check = 12.0
                else:
                    if self.hours_between_check < 1.0 or self.hours_between_check > 24.0:
                        self.hours_between_check = 12.0
            else:
                self.hours_between_check = 12
        except:  # NOTE(review): bare except silently swallows everything and falls back to 12 — should at least be `except Exception`
            self.hours_between_check = 12
        self.update_integrated_tool_panel = kwargs.get( "update_integrated_tool_panel", True )
        self.enable_data_manager_user_view = string_as_bool( kwargs.get( "enable_data_manager_user_view", "False" ) )
        self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_path',  self.tool_data_path )
        self.tool_secret = kwargs.get( "tool_secret", "" )
        self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
        self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
        self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
        self.normalize_remote_user_email = string_as_bool( kwargs.get( "normalize_remote_user_email", "False" ) )
        self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
        self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
        self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
        self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
        self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
        self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
        self.allow_user_dataset_purge = string_as_bool( kwargs.get( "allow_user_dataset_purge", "False" ) )
        self.allow_user_impersonation = string_as_bool( kwargs.get( "allow_user_impersonation", "False" ) )
        self.new_user_dataset_access_role_default_private = string_as_bool( kwargs.get( "new_user_dataset_access_role_default_private", "False" ) )
        self.collect_outputs_from = [ x.strip() for x in kwargs.get( 'collect_outputs_from', 'new_file_path,job_working_directory' ).lower().split(',') ]
        self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
        self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates" ), self.root )
        self.local_job_queue_workers = int( kwargs.get( "local_job_queue_workers", "5" ) )
        self.cluster_job_queue_workers = int( kwargs.get( "cluster_job_queue_workers", "3" ) )
        self.job_queue_cleanup_interval = int( kwargs.get("job_queue_cleanup_interval", "5") )
        self.cluster_files_directory = os.path.abspath( kwargs.get( "cluster_files_directory", "database/pbs" ) )
        self.job_working_directory = resolve_path( kwargs.get( "job_working_directory", "database/job_working_directory" ), self.root )
        self.cleanup_job = kwargs.get( "cleanup_job", "always" )
        self.outputs_to_working_directory = string_as_bool( kwargs.get( 'outputs_to_working_directory', False ) )
        self.output_size_limit = int( kwargs.get( 'output_size_limit', 0 ) )
        self.retry_job_output_collection = int( kwargs.get( 'retry_job_output_collection', 0 ) )
        # job_walltime is "H:M:S"; raises ValueError on malformed input.
        self.job_walltime = kwargs.get( 'job_walltime', None )
        self.job_walltime_delta = None
        if self.job_walltime is not None:
            h, m, s = [ int( v ) for v in self.job_walltime.split( ':' ) ]
            self.job_walltime_delta = timedelta( 0, s, 0, 0, m, h )
        # admin_users is a comma-separated list; empty entries are dropped.
        self.admin_users = kwargs.get( "admin_users", "" )
        self.admin_users_list = [u.strip() for u in self.admin_users.split(',') if u]
        self.reset_password_length = int( kwargs.get('reset_password_length', '15') )
        self.mailing_join_addr = kwargs.get('mailing_join_addr', '*****@*****.**')
        self.error_email_to = kwargs.get( 'error_email_to', None )
        self.activation_email = kwargs.get( 'activation_email', None )
        self.user_activation_on = string_as_bool( kwargs.get( 'user_activation_on', False ) )
        self.activation_grace_period = kwargs.get( 'activation_grace_period', None )
        self.inactivity_box_content = kwargs.get( 'inactivity_box_content', None )
        self.terms_url = kwargs.get( 'terms_url', None )
        self.instance_resource_url = kwargs.get( 'instance_resource_url', None )
        self.registration_warning_message = kwargs.get( 'registration_warning_message', None )
        #  Get the disposable email domains blacklist file and its contents
        self.blacklist_location = kwargs.get( 'blacklist_file', None )
        self.blacklist_content = None
        if self.blacklist_location is not None:
            # NOTE(review): re-reads 'blacklist_file' from kwargs although
            # self.blacklist_location already holds the same value.
            self.blacklist_file = resolve_path( kwargs.get( 'blacklist_file', None ), self.root )
            try:
                with open( self.blacklist_file ) as blacklist:
                    self.blacklist_content = [ line.rstrip() for line in blacklist.readlines() ]
            except IOError:
                    print ( "CONFIGURATION ERROR: Can't open supplied blacklist file from path: " + str( self.blacklist_file ) )
        self.smtp_server = kwargs.get( 'smtp_server', None )
        self.smtp_username = kwargs.get( 'smtp_username', None )
        self.smtp_password = kwargs.get( 'smtp_password', None )
        self.smtp_ssl = kwargs.get( 'smtp_ssl', None )
        # The string 'None' is a sentinel meaning "auto-detect"; it is
        # resolved to a real boolean further below once server_names is known.
        self.track_jobs_in_database = kwargs.get( 'track_jobs_in_database', 'None' )
        self.start_job_runners = listify(kwargs.get( 'start_job_runners', '' ))
        self.expose_dataset_path = string_as_bool( kwargs.get( 'expose_dataset_path', 'False' ) )
        # External Service types used in sample tracking
        self.external_service_type_path = resolve_path( kwargs.get( 'external_service_type_path', 'external_service_types' ), self.root )
        # Tasked job runner.
        self.use_tasked_jobs = string_as_bool( kwargs.get( 'use_tasked_jobs', False ) )
        self.local_task_queue_workers = int(kwargs.get("local_task_queue_workers", 2))
        # The transfer manager and deferred job queue
        self.enable_beta_job_managers = string_as_bool( kwargs.get( 'enable_beta_job_managers', 'False' ) )
        # Per-user Job concurrency limitations
        self.cache_user_job_count = string_as_bool( kwargs.get( 'cache_user_job_count', False ) )
        self.user_job_limit = int( kwargs.get( 'user_job_limit', 0 ) )
        # The registered/anonymous limits default to the general user limit.
        self.registered_user_job_limit = int( kwargs.get( 'registered_user_job_limit', self.user_job_limit ) )
        self.anonymous_user_job_limit = int( kwargs.get( 'anonymous_user_job_limit', self.user_job_limit ) )
        self.default_cluster_job_runner = kwargs.get( 'default_cluster_job_runner', 'local:///' )
        self.pbs_application_server = kwargs.get('pbs_application_server', "" )
        self.pbs_dataset_server = kwargs.get('pbs_dataset_server', "" )
        self.pbs_dataset_path = kwargs.get('pbs_dataset_path', "" )
        self.pbs_stage_path = kwargs.get('pbs_stage_path', "" )
        self.drmaa_external_runjob_script = kwargs.get('drmaa_external_runjob_script', None )
        self.drmaa_external_killjob_script = kwargs.get('drmaa_external_killjob_script', None)
        self.external_chown_script = kwargs.get('external_chown_script', None)
        self.environment_setup_file = kwargs.get( 'environment_setup_file', None )
        self.use_heartbeat = string_as_bool( kwargs.get( 'use_heartbeat', 'False' ) )
        self.use_memdump = string_as_bool( kwargs.get( 'use_memdump', 'False' ) )
        self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
        self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) )
        self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
        self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
        self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
        self.brand = kwargs.get( 'brand', None )
        self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
        # Configuration for the message box directly below the masthead.
        self.message_box_visible = kwargs.get( 'message_box_visible', False )
        self.message_box_content = kwargs.get( 'message_box_content', None )
        self.message_box_class = kwargs.get( 'message_box_class', 'info' )
        self.support_url = kwargs.get( 'support_url', 'https://wiki.galaxyproject.org/Support' )
        self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.galaxyproject.org/' )
        self.blog_url = kwargs.get( 'blog_url', None )
        self.screencasts_url = kwargs.get( 'screencasts_url', None )
        self.library_import_dir = kwargs.get( 'library_import_dir', None )
        self.user_library_import_dir = kwargs.get( 'user_library_import_dir', None )
        # Searching data libraries
        self.enable_lucene_library_search = string_as_bool( kwargs.get( 'enable_lucene_library_search', False ) )
        self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
        self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
        self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
        self.ftp_upload_dir_identifier = kwargs.get( 'ftp_upload_dir_identifier', 'email' )  # attribute on user - email, username, id, etc...
        self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
        self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
        self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
        # Location for tool dependencies.
        if 'tool_dependency_dir' in kwargs:
            self.tool_dependency_dir = resolve_path( kwargs.get( "tool_dependency_dir" ), self.root )
            # Setting the following flag to true will ultimately cause tool dependencies
            # to be located in the shell environment and used by the job that is executing
            # the tool.
            self.use_tool_dependencies = True
        else:
            self.tool_dependency_dir = None
            self.use_tool_dependencies = False
        # Configuration options for taking advantage of nginx features
        self.upstream_gzip = string_as_bool( kwargs.get( 'upstream_gzip', False ) )
        self.apache_xsendfile = string_as_bool( kwargs.get( 'apache_xsendfile', False ) )
        self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
        self.nginx_x_archive_files_base = kwargs.get( 'nginx_x_archive_files_base', False )
        self.nginx_upload_store = kwargs.get( 'nginx_upload_store', False )
        self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
        if self.nginx_upload_store:
            self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
        self.object_store = kwargs.get( 'object_store', 'disk' )
        self.object_store_check_old_style = string_as_bool( kwargs.get( 'object_store_check_old_style', False ) )
        self.object_store_cache_path = resolve_path( kwargs.get( "object_store_cache_path", "database/object_store_cache" ), self.root )
        # Handle AWS-specific config options for backward compatibility
        if kwargs.get( 'aws_access_key', None) is not None:
            self.os_access_key = kwargs.get( 'aws_access_key', None )
            self.os_secret_key = kwargs.get( 'aws_secret_key', None )
            self.os_bucket_name = kwargs.get( 's3_bucket', None )
            self.os_use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
        else:
            self.os_access_key = kwargs.get( 'os_access_key', None )
            self.os_secret_key = kwargs.get( 'os_secret_key', None )
            self.os_bucket_name = kwargs.get( 'os_bucket_name', None )
            self.os_use_reduced_redundancy = kwargs.get( 'os_use_reduced_redundancy', False )
        self.os_host = kwargs.get( 'os_host', None )
        self.os_port = kwargs.get( 'os_port', None )
        self.os_is_secure = string_as_bool( kwargs.get( 'os_is_secure', True ) )
        self.os_conn_path = kwargs.get( 'os_conn_path', '/' )
        self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
        self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
        if self.distributed_object_store_config_file is not None:
            self.distributed_object_store_config_file = resolve_path( self.distributed_object_store_config_file, self.root )
        self.irods_root_collection_path = kwargs.get( 'irods_root_collection_path', None )
        self.irods_default_resource = kwargs.get( 'irods_default_resource', None )
        # Parse global_conf and save the parser
        global_conf = kwargs.get( 'global_conf', None )
        global_conf_parser = ConfigParser.ConfigParser()
        self.config_file = None
        self.global_conf_parser = global_conf_parser
        if global_conf and "__file__" in global_conf:
            self.config_file = global_conf['__file__']
            global_conf_parser.read(global_conf['__file__'])
        # Heartbeat log file name override
        if global_conf is not None:
            self.heartbeat_log = global_conf.get( 'heartbeat_log', 'heartbeat.log' )
        # Determine which 'server:' this is
        self.server_name = 'main'
        for arg in sys.argv:
            # Crummy, but PasteScript does not give you a way to determine this
            if arg.lower().startswith('--server-name='):
                self.server_name = arg.split('=', 1)[-1]
        # Store all configured server names
        self.server_names = []
        for section in global_conf_parser.sections():
            if section.startswith('server:'):
                self.server_names.append(section.replace('server:', '', 1))
        # Store advanced job management config
        self.job_manager = kwargs.get('job_manager', self.server_name).strip()
        self.job_handlers = [ x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',') ]
        self.default_job_handlers = [ x.strip() for x in kwargs.get('default_job_handlers', ','.join( self.job_handlers ) ).split(',') ]
        # Use database for job running IPC unless this is a standalone server or explicitly set in the config
        if self.track_jobs_in_database == 'None':
            self.track_jobs_in_database = False
            if len(self.server_names) > 1:
                self.track_jobs_in_database = True
        else:
            self.track_jobs_in_database = string_as_bool( self.track_jobs_in_database )
        # Store per-tool runner configs
        self.tool_handlers = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_handlers', 'name' )
        self.tool_runners = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_runners', 'url' )
        # Cloud configuration options
        self.enable_cloud_launch = string_as_bool( kwargs.get( 'enable_cloud_launch', False ) )
        self.cloudlaunch_default_ami = kwargs.get( 'cloudlaunch_default_ami', 'ami-a7dbf6ce' )
        # Galaxy messaging (AMQP) configuration options
        self.amqp = {}
        try:
            amqp_config = global_conf_parser.items("galaxy_amqp")
        except ConfigParser.NoSectionError:
            amqp_config = {}
        for k, v in amqp_config:
            self.amqp[k] = v
        # Galaxy internal control queue configuration.
        # If specified in universe, use it, otherwise we use whatever 'real'
        # database is specified.  Lastly, we create and use new sqlite database
        # (to minimize locking) as a final option.
        if 'amqp_internal_connection' in kwargs:
            self.amqp_internal_connection = kwargs.get('amqp_internal_connection')
            # TODO Get extra amqp args as necessary for ssl
        elif 'database_connection' in kwargs:
            self.amqp_internal_connection = "sqlalchemy+"+self.database_connection
        else:
            self.amqp_internal_connection = "sqlalchemy+sqlite:///%s?isolation_level=IMMEDIATE" % resolve_path( "database/control.sqlite", self.root )
        self.biostar_url = kwargs.get( 'biostar_url', None )
        self.biostar_key_name = kwargs.get( 'biostar_key_name', None )
        self.biostar_key = kwargs.get( 'biostar_key', None )
        self.biostar_enable_bug_reports = string_as_bool( kwargs.get( 'biostar_enable_bug_reports', True ) )
        self.biostar_never_authenticate = string_as_bool( kwargs.get( 'biostar_never_authenticate', False ) )
        self.pretty_datetime_format = expand_pretty_datetime_format( kwargs.get( 'pretty_datetime_format', '$locale (UTC)' ) )
        # Refuse to start with the sample config file's well-known key.
        self.master_api_key = kwargs.get( 'master_api_key', None )
        if self.master_api_key == "changethis":  # default in sample config file
            raise Exception("Insecure configuration, please change master_api_key to something other than default (changethis)")

        # Experimental: This will not be enabled by default and will hide
        # nonproduction code.
        # The api_folders refers to whether the API exposes the /folders section.
        self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
        # This is for testing new library browsing capabilities.
        self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
        # Error logging with sentry
        self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
        # Logging with fluentd
        self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
        self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
        self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
        # directory where the visualization/registry searches for plugins
        self.visualization_plugins_directory = kwargs.get(
            'visualization_plugins_directory', 'config/plugins/visualizations' )
        # Default chunk size for chunkable datatypes -- 64k
        self.display_chunk_size = int( kwargs.get( 'display_chunk_size', 65536) )

        # Citation caching — uses self.resolve_path (a method) here, unlike the
        # module-level resolve_path used everywhere above.
        self.citation_cache_type = kwargs.get( "citation_cache_type", "file" )
        self.citation_cache_data_dir = self.resolve_path( kwargs.get( "citation_cache_data_dir", "database/citations/data" ) )
        self.citation_cache_lock_dir = self.resolve_path( kwargs.get( "citation_cache_lock_dir", "database/citations/locks" ) )
# Example #4
    def __init__( self, **kwargs ):
        self.config_dict = kwargs
        self.root = kwargs.get( 'root_dir', '.' )

        # Resolve paths of other config files
        self.__parse_config_file_options( kwargs )

        # Collect the umask and primary gid from the environment
        self.umask = os.umask( 077 )  # get the current umask
        os.umask( self.umask )  # can't get w/o set, so set it back
        self.gid = os.getgid()  # if running under newgrp(1) we'll need to fix the group of data created on the cluster
        self.version_major = VERSION_MAJOR
        self.version = VERSION
        # Database related configuration
        self.database = resolve_path( kwargs.get( "database_file", "database/community.sqlite" ), self.root )
        self.database_connection = kwargs.get( "database_connection", False )
        self.database_engine_options = get_database_engine_options( kwargs )
        self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
        # Repository and Tool search API
        self.toolshed_search_on = string_as_bool( kwargs.get( "toolshed_search_on", True ) )
        # --- Whoosh search configuration -------------------------------------
        # Directory holding the Whoosh full-text indexes for repository search.
        self.whoosh_index_dir = kwargs.get( "whoosh_index_dir", 'database/toolshed_whoosh_indexes' )
        # Relative field weights applied when scoring search hits.
        # NOTE(review): values read from a config file arrive as str, not float
        # (kwargs.get only falls back to the float default) — presumably the
        # search code coerces them; verify at the point of use.
        self.repo_name_boost = kwargs.get( "repo_name_boost", 0.9 )
        self.repo_description_boost = kwargs.get( "repo_description_boost", 0.6 )
        self.repo_long_description_boost = kwargs.get( "repo_long_description_boost", 0.5 )
        self.repo_homepage_url_boost = kwargs.get( "repo_homepage_url_boost", 0.3 )
        self.repo_remote_repository_url_boost = kwargs.get( "repo_remote_repository_url_boost", 0.2 )
        self.repo_owner_username_boost = kwargs.get( "repo_owner_username_boost", 0.3 )
        self.tool_name_boost = kwargs.get( "tool_name_boost", 1.2 )
        self.tool_description_boost = kwargs.get( "tool_description_boost", 0.6 )
        self.tool_help_boost = kwargs.get( "tool_help_boost", 0.4 )
        # NOTE(review): every sibling boost attribute ends in "_boost"; this one
        # does not — looks like a typo for "tool_repo_owner_username_boost".
        # Left as-is because renaming the attribute/option key would break any
        # existing callers and config files; confirm before changing.
        self.tool_repo_owner_username = kwargs.get( "tool_repo_owner_username", 0.3 )
        # Analytics
        # Google Analytics tracking code, if any.
        self.ga_code = kwargs.get( "ga_code", None )
        # Session lifetime in minutes; 0 disables expiration (int-coerced here).
        self.session_duration = int(kwargs.get( 'session_duration', 0 ))
        # Where dataset files are stored
        self.file_path = resolve_path( kwargs.get( "file_path", "database/community_files" ), self.root )
        # Scratch/temporary upload area.
        self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
        # Path scope for the session cookie.
        self.cookie_path = kwargs.get( "cookie_path", "/" )
        self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
        # Secret used to encode/obfuscate database ids in URLs; the default is
        # deliberately alarming so deployments override it.
        self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
        # Tool stuff
        self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
        self.tool_secret = kwargs.get( "tool_secret", "" )
        # NOTE: resolved against os.getcwd(), not self.root, unlike the other paths.
        self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "shed-tool-data" ), os.getcwd() )
        # Not used by the tool shed; kept for interface parity with Galaxy config.
        self.tool_data_table_config_path = None
        self.integrated_tool_panel_config = resolve_path( kwargs.get( 'integrated_tool_panel_config', 'integrated_tool_panel.xml' ), self.root )
        # Genome build metadata files; defaults live under tool_data_path (set above).
        self.builds_file_path = resolve_path( kwargs.get( "builds_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'builds.txt') ), self.root )
        self.len_file_path = resolve_path( kwargs.get( "len_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'chrom') ), self.root )
        self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
        self.update_integrated_tool_panel = False
        # Galaxy flavor Docker Image
        self.enable_galaxy_flavor_docker_image = string_as_bool( kwargs.get( "enable_galaxy_flavor_docker_image", "False" ) )
        # --- Authentication / user management --------------------------------
        # Trust an upstream proxy to supply the authenticated user.
        self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
        self.user_activation_on = None
        self.registration_warning_message = kwargs.get( 'registration_warning_message', None )
        self.terms_url = kwargs.get( 'terms_url', None )
        # Optional file of blacklisted registration domains; content is loaded later.
        self.blacklist_location = kwargs.get( 'blacklist_file', None )
        self.blacklist_content = None
        self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
        self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
        self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
        self.remote_user_secret = kwargs.get( "remote_user_secret", None )
        self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
        self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
        self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
        self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
        # --- Templates and admin ----------------------------------------------
        self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
        self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/community" ), self.root )
        # Comma-separated admin emails; parsed into a cleaned list immediately below.
        self.admin_users = kwargs.get( "admin_users", "" )
        self.admin_users_list = [u.strip() for u in self.admin_users.split(',') if u]
        # --- Email / SMTP ------------------------------------------------------
        self.mailing_join_addr = kwargs.get('mailing_join_addr', "*****@*****.**")
        self.error_email_to = kwargs.get( 'error_email_to', None )
        self.smtp_server = kwargs.get( 'smtp_server', None )
        self.smtp_username = kwargs.get( 'smtp_username', None )
        self.smtp_password = kwargs.get( 'smtp_password', None )
        self.smtp_ssl = kwargs.get( 'smtp_ssl', None )
        self.start_job_runners = kwargs.get( 'start_job_runners', None )
        self.email_from = kwargs.get( 'email_from', None )
        # Base path when uploads are proxied through nginx; False disables it.
        self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
        self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
        # --- UI branding and external links -----------------------------------
        self.brand = kwargs.get( 'brand', None )
        self.pretty_datetime_format = expand_pretty_datetime_format( kwargs.get( 'pretty_datetime_format', '$locale (UTC)' ) )
        # Configuration for the message box directly below the masthead.
        # NOTE(review): unlike the other boolean options this is not passed
        # through string_as_bool, so a config-file value of "False" would be a
        # truthy string — confirm whether that is intentional.
        self.message_box_visible = kwargs.get( 'message_box_visible', False )
        self.message_box_content = kwargs.get( 'message_box_content', None )
        self.message_box_class = kwargs.get( 'message_box_class', 'info' )
        self.support_url = kwargs.get( 'support_url', 'https://wiki.galaxyproject.org/Support' )
        self.wiki_url = kwargs.get( 'wiki_url', 'https://wiki.galaxyproject.org/' )
        self.blog_url = kwargs.get( 'blog_url', None )
        self.biostar_url = kwargs.get( 'biostar_url', None )
        self.screencasts_url = kwargs.get( 'screencasts_url', None )
        # --- Fixed (non-configurable) job/runtime placeholders ----------------
        # These mirror Galaxy's config interface; the tool shed does not run jobs.
        self.log_events = False
        self.cloud_controller_instance = False
        self.server_name = ''
        self.job_manager = ''
        self.default_job_handlers = []
        self.default_cluster_job_runner = 'local:///'
        self.job_handlers = []
        self.tool_handlers = []
        self.tool_runners = []
        # Error logging with sentry
        self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
        # Where the tool shed hgweb.config file is stored - the default is the Galaxy installation directory.
        self.hgweb_config_dir = resolve_path( kwargs.get( 'hgweb_config_dir', '' ), self.root )
        # Disallow `hg push` to hosted repositories by default.
        self.disable_push = string_as_bool( kwargs.get( "disable_push", "True" ) )
        # Proxy features
        self.apache_xsendfile = kwargs.get( 'apache_xsendfile', False )
        self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
        self.drmaa_external_runjob_script = kwargs.get('drmaa_external_runjob_script', None )
        # Parse global_conf and save the parser
        # global_conf is supplied by Paste deploy; '__file__' points at the ini
        # actually loaded, which we re-read so sections beyond our own are visible.
        global_conf = kwargs.get( 'global_conf', None )
        global_conf_parser = ConfigParser.ConfigParser()
        self.global_conf_parser = global_conf_parser
        if global_conf and "__file__" in global_conf:
            global_conf_parser.read(global_conf['__file__'])
        self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
        # --- Citation cache ----------------------------------------------------
        self.citation_cache_type = kwargs.get( "citation_cache_type", "file" )
        self.citation_cache_data_dir = resolve_path( kwargs.get( "citation_cache_data_dir", "database/tool_shed_citations/data" ), self.root )
        self.citation_cache_lock_dir = resolve_path( kwargs.get( "citation_cache_lock_dir", "database/tool_shed_citations/locks" ), self.root )
        # Days before a password must be changed; 0 (a zero timedelta) disables expiry.
        self.password_expiration_period = timedelta(days=int(kwargs.get("password_expiration_period", 0)))