def __init__(self, config=None, **kwargs):
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagHandler(self.model.context)
    self.quota_agent = quota.DatabaseQuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(dynamic_params=None, destinations={})
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.tool_shed_registry = Bunch(tool_sheds={})
    self.genome_builds = GenomeBuilds(self)
    self.job_manager = NoopManager()
    self.application_stack = ApplicationStack()
    self.auth_manager = AuthManager(self)
    self.user_manager = UserManager(self)
    self.execution_timer_factory = Bunch(get_timer=StructuredExecutionTimer)

    def url_for(*args, **kwds):
        return "/mock/url"

    self.url_for = url_for
def init():
    options.config = os.path.abspath( options.config )
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )
    from galaxy import eggs
    import pkg_resources
    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config
    # lazy
    globals()['nice_size'] = __import__( 'galaxy.util', globals(), locals(), ( 'nice_size', ) ).nice_size
    config_parser = ConfigParser( dict( here=os.getcwd(),
                                        database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( os.path.basename( options.config ) )
    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value
    config = galaxy.config.Configuration( **config_dict )
    object_store = build_object_store_from_config( config )
    from galaxy.model import mapping
    return ( mapping.init( config.file_path, config.database_connection, create_tables=False, object_store=object_store ),
             object_store,
             config.database_connection.split(':')[0] )
def init(): options.config = os.path.abspath(options.config) if options.username == "all": options.username = None if options.email == "all": options.email = None config_parser = ConfigParser( dict(here=os.getcwd(), database_connection="sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE") ) config_parser.read(options.config) config_dict = {} for key, value in config_parser.items("app:main"): config_dict[key] = value config = galaxy.config.Configuration(**config_dict) object_store = build_object_store_from_config(config) from galaxy.model import mapping return ( mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store), object_store, config.database_connection.split(":")[0], )
def _init(self):
    self.objects_to_remove = set()
    log.info('Initializing object store for action %s', self.name)
    self.object_store = build_object_store_from_config(self._config)
    self._register_row_method(self.collect_removed_object_info)
    self._register_post_method(self.remove_objects)
    self._register_exit_method(self.object_store.shutdown)
def __init__( self, **kwargs ):
    self.config = MockAppConfig( **kwargs )
    self.security = self.config.security
    self.object_store = objectstore.build_object_store_from_config( self.config )
    self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store )
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
def init():
    app_properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**app_properties)
    object_store = build_object_store_from_config(config)
    model = galaxy.config.init_models_from_config(config, object_store=object_store)
    return model, object_store
def init():
    options.config = os.path.abspath( options.config )
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), '..', 'lib' ) )
    from galaxy import eggs
    import pkg_resources
    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config
    # lazy
    globals()['nice_size'] = __import__( 'galaxy.util', globals(), locals(), ( 'nice_size', ) ).nice_size
    config_parser = ConfigParser( dict( here=os.getcwd(),
                                        database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( options.config )
    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value
    config = galaxy.config.Configuration( **config_dict )
    object_store = build_object_store_from_config( config )
    from galaxy.model import mapping
    return ( mapping.init( config.file_path, config.database_connection, create_tables=False, object_store=object_store ),
             object_store,
             config.database_connection.split(':')[0] )
def init():
    options.config = os.path.abspath(options.config)
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    config_parser = ConfigParser(
        dict(here=os.getcwd(),
             database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config_parser.read(options.config)
    config_dict = {}
    for key, value in config_parser.items("app:main"):
        config_dict[key] = value
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)
    from galaxy.model import mapping
    return (mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
            object_store,
            config.database_connection.split(':')[0])
def init():
    app_properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**app_properties)
    object_store = build_object_store_from_config(config)
    model = init_models_from_config(config, object_store=object_store)
    return model, object_store
def _init(config, need_app=False):
    if config.startswith('/'):
        config_file = os.path.abspath(config)
    else:
        config_file = os.path.abspath(
            os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, config))
    properties = load_app_properties(ini_file=config_file)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning(
            "The database connection is empty. If you are using the default value, please uncomment that in your galaxy.ini"
        )
    if need_app:
        app = galaxy.app.UniverseApplication(global_conf={
            '__file__': config_file,
            'here': os.getcwd()
        })
    else:
        app = None
    return (mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
            object_store,
            config.database_connection.split(':')[0],
            config,
            app)
def main(argv):
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-k', '--secret-key', help='Key to convert pages with', default='')
    parser.add_argument('-d', '--dry-run', help='No changes, just test it.', action='store_true')
    populate_config_args(parser)
    args = parser.parse_args()
    properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**properties)
    secret = args.secret_key or config.id_secret
    security_helper = IdEncodingHelper(id_secret=secret)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        print("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.yml")
    model = galaxy.config.init_models_from_config(config, object_store=object_store)
    session = model.context.current
    pagerevs = session.query(model.PageRevision).all()
    mock_trans = Bunch(app=Bunch(security=security_helper), model=model, user_is_admin=lambda: True, sa_session=session)
    for p in pagerevs:
        try:
            processor = _PageContentProcessor(mock_trans, _placeholderRenderForSave)
            processor.feed(p.content)
            newcontent = unicodify(processor.output(), 'utf-8')
            if p.content != newcontent:
                if not args.dry_run:
                    p.content = unicodify(processor.output(), 'utf-8')
                    session.add(p)
                    session.flush()
                else:
                    print("Modifying revision %s." % p.id)
                    print(difflib.unified_diff(p.content, newcontent))
        except Exception:
            logging.exception("Error parsing page, rolling changes back and skipping revision %s. Please report this error." % p.id)
            session.rollback()
def __init__(self, config):
    self.object_store = build_object_store_from_config(config)
    # Setup the database engine and ORM
    self.model = galaxy.config.init_models_from_config(config, object_store=self.object_store)
    registry = Registry()
    registry.load_datatypes()
    galaxy.model.set_datatypes_registry(registry)
def init():
    options.config = os.path.abspath( options.config )
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )
    from galaxy import eggs
    import pkg_resources
    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config
    config_parser = ConfigParser( dict( here=os.getcwd(),
                                        database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( os.path.basename( options.config ) )
    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value
    config = galaxy.config.Configuration( **config_dict )
    object_store = build_object_store_from_config( config )
    from galaxy.model import mapping
    return mapping.init( config.file_path, config.database_connection, create_tables=False, object_store=object_store ), object_store
def main(argv):
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-k', '--secret-key', help='Key to convert pages with', default='')
    parser.add_argument('-d', '--dry-run', help='No changes, just test it.', action='store_true')
    populate_config_args(parser)
    args = parser.parse_args()
    properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**properties)
    secret = args.secret_key or config.id_secret
    security_helper = SecurityHelper(id_secret=secret)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        print("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.yml")
    model = galaxy.config.init_models_from_config(config, object_store=object_store)
    session = model.context.current
    pagerevs = session.query(model.PageRevision).all()
    mock_trans = Bunch(app=Bunch(security=security_helper), model=model, user_is_admin=lambda: True, sa_session=session)
    for p in pagerevs:
        try:
            processor = _PageContentProcessor(mock_trans, _placeholderRenderForSave)
            processor.feed(p.content)
            newcontent = unicodify(processor.output(), 'utf-8')
            if p.content != newcontent:
                if not args.dry_run:
                    p.content = unicodify(processor.output(), 'utf-8')
                    session.add(p)
                    session.flush()
                else:
                    print("Modifying revision %s." % p.id)
                    print(difflib.unified_diff(p.content, newcontent))
        except Exception:
            logging.exception("Error parsing page, rolling changes back and skipping revision %s. Please report this error." % p.id)
            session.rollback()
def _init(config, need_app=False):
    if config.startswith('/'):
        config_file = os.path.abspath(config)
    else:
        config_file = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, config))
    properties = load_app_properties(ini_file=config_file)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.ini")
    if need_app:
        app = galaxy.app.UniverseApplication(global_conf={'__file__': config_file, 'here': os.getcwd()})
    else:
        app = None
    return (
        mapping.init(
            config.file_path,
            config.database_connection,
            create_tables=False,
            object_store=object_store
        ),
        object_store,
        config.database_connection.split(':')[0],
        config,
        app
    )
def __init__(self, config):
    self.object_store = build_object_store_from_config(config)
    # Setup the database engine and ORM
    self.model = galaxy.config.init_models_from_config(
        config, object_store=self.object_store)
    registry = Registry()
    registry.load_datatypes()
    galaxy.model.set_datatypes_registry(registry)
def get_object_store(tool_job_working_directory):
    object_store_conf_path = os.path.join(tool_job_working_directory, "metadata", "object_store_conf.json")
    with open(object_store_conf_path) as f:
        config_dict = json.load(f)
    assert config_dict is not None
    object_store = build_object_store_from_config(None, config_dict=config_dict)
    Dataset.object_store = object_store
    return object_store
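# A minimal usage sketch for get_object_store() above, assuming the common
# disk-backed configuration. The JSON schema consumed via
# build_object_store_from_config(None, config_dict=...) varies across Galaxy
# versions; the directory layout and helper below are hypothetical, for
# illustration only.
import json
import os


def _write_example_object_store_conf(tool_job_working_directory):
    # Mirror the path get_object_store() reads from.
    metadata_dir = os.path.join(tool_job_working_directory, "metadata")
    os.makedirs(metadata_dir, exist_ok=True)
    config_dict = {
        "type": "disk",  # assumed: the simple on-disk backend
        "files_dir": os.path.join(tool_job_working_directory, "files"),
    }
    with open(os.path.join(metadata_dir, "object_store_conf.json"), "w") as f:
        json.dump(config_dict, f)

# _write_example_object_store_conf("/tmp/job_1")
# object_store = get_object_store("/tmp/job_1")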
def __init__(self, config):
    if config.database_connection is False:
        config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % config.database
    self.object_store = build_object_store_from_config(config)
    # Setup the database engine and ORM
    self.model = galaxy.model.mapping.init(config.file_path,
                                           config.database_connection,
                                           engine_options={},
                                           create_tables=False,
                                           object_store=self.object_store)
    registry = Registry()
    registry.load_datatypes()
    galaxy.model.set_datatypes_registry(registry)
def __init__( self, tools_migration_config ):
    install_dependencies = 'install_dependencies' in sys.argv
    galaxy_config_file = 'universe_wsgi.ini'
    if '-c' in sys.argv:
        pos = sys.argv.index( '-c' )
        sys.argv.pop( pos )
        galaxy_config_file = sys.argv.pop( pos )
    if not os.path.exists( galaxy_config_file ):
        print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % galaxy_config_file
        sys.exit( 1 )
    config_parser = ConfigParser.ConfigParser( { 'here': os.getcwd() } )
    config_parser.read( galaxy_config_file )
    galaxy_config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        galaxy_config_dict[ key ] = value
    self.config = galaxy.config.Configuration( **galaxy_config_dict )
    if not self.config.database_connection:
        self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    self.config.update_integrated_tool_panel = True
    self.object_store = build_object_store_from_config( self.config )
    # Setup the database engine and ORM
    self.model = galaxy.model.mapping.init( self.config.file_path,
                                            self.config.database_connection,
                                            engine_options={},
                                            create_tables=False,
                                            object_store=self.object_store )
    # Create an empty datatypes registry.
    self.datatypes_registry = galaxy.datatypes.registry.Registry()
    # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
    self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
    # Tool data tables
    self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path, self.config.tool_data_table_config_path )
    # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
    tool_configs = self.config.tool_configs
    if self.config.migrated_tools_config not in tool_configs:
        tool_configs.append( self.config.migrated_tools_config )
    self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
    # Search support for tools
    self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
    # Set up the tool sheds registry.
    if os.path.isfile( self.config.tool_sheds_config ):
        self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
    else:
        self.tool_shed_registry = None
    # Get the latest tool migration script number to send to the Install manager.
    latest_migration_script_number = int( tools_migration_config.split( '_' )[ 0 ] )
    # The value of migrated_tools_config is migrated_tools_conf.xml, and is reserved for containing only those tools that have been
    # eliminated from the distribution and moved to the tool shed. A side-effect of instantiating the InstallManager is the automatic
    # installation of all appropriate tool shed repositories.
    self.install_manager = install_manager.InstallManager( app=self,
                                                           latest_migration_script_number=latest_migration_script_number,
                                                           tool_shed_install_config=os.path.join( self.config.root, 'scripts', 'migrate_tools', tools_migration_config ),
                                                           migrated_tools_config=self.config.migrated_tools_config,
                                                           install_dependencies=install_dependencies )
def __init__( self, **kwargs ):
    self.config = MockAppConfig( **kwargs )
    self.security = self.config.security
    self.object_store = objectstore.build_object_store_from_config( self.config )
    self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store )
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagManager( self )
    self.quota_agent = quota.QuotaAgent( self.model )
def __init__(self, **kwargs):
    self.config = MockAppConfig(**kwargs)
    self.security = self.config.security
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
def init():
    if args.username == 'all':
        args.username = None
    if args.email == 'all':
        args.email = None
    app_properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**app_properties)
    object_store = build_object_store_from_config(config)
    engine = galaxy.config.get_database_url(config).split(":")[0]
    return galaxy.config.init_models_from_config(config, object_store=object_store), object_store, engine
def __init__( self, tools_migration_config ):
    galaxy_config_file = 'universe_wsgi.ini'
    if '-c' in sys.argv:
        pos = sys.argv.index( '-c' )
        sys.argv.pop( pos )
        galaxy_config_file = sys.argv.pop( pos )
    if not os.path.exists( galaxy_config_file ):
        print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % galaxy_config_file
        sys.exit( 1 )
    config_parser = ConfigParser.ConfigParser( { 'here': os.getcwd() } )
    config_parser.read( galaxy_config_file )
    galaxy_config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        galaxy_config_dict[ key ] = value
    self.config = galaxy.config.Configuration( **galaxy_config_dict )
    if self.config.database_connection is None:
        self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    self.config.update_integrated_tool_panel = True
    self.object_store = build_object_store_from_config( self.config )
    # Setup the database engine and ORM
    self.model = galaxy.model.mapping.init( self.config.file_path,
                                            self.config.database_connection,
                                            engine_options={},
                                            create_tables=False,
                                            object_store=self.object_store )
    # Create an empty datatypes registry.
    self.datatypes_registry = galaxy.datatypes.registry.Registry()
    # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
    self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
    # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
    tool_configs = self.config.tool_configs
    if self.config.migrated_tools_config not in tool_configs:
        tool_configs.append( self.config.migrated_tools_config )
    self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
    # Search support for tools
    self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
    # Set up the tool sheds registry.
    if os.path.isfile( self.config.tool_sheds_config ):
        self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
    else:
        self.tool_shed_registry = None
    # Get the latest tool migration script number to send to the Install manager.
    latest_migration_script_number = int( tools_migration_config.split( '_' )[ 0 ] )
    # The value of migrated_tools_config is migrated_tools_conf.xml, and is reserved for containing only those tools that have been
    # eliminated from the distribution and moved to the tool shed. A side-effect of instantiating the InstallManager is the automatic
    # installation of all appropriate tool shed repositories.
    self.install_manager = install_manager.InstallManager( app=self,
                                                           latest_migration_script_number=latest_migration_script_number,
                                                           tool_shed_install_config=os.path.join( self.config.root, 'scripts', 'migrate_tools', tools_migration_config ),
                                                           migrated_tools_config=self.config.migrated_tools_config )
def _init(args):
    properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.yml")
    model = galaxy.config.init_models_from_config(config, object_store=object_store)
    return (
        model,
        object_store,
        config,
    )
def __init__(self, config: GalaxyDataTestConfig = None, **kwd):
    config = config or GalaxyDataTestConfig(**kwd)
    self.config = config
    self.security = config.security
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.tag_handler = GalaxyTagHandler(self.model.context)
    self.init_datatypes()
def __setup_object_store(self, conf): if "object_store_config_file" not in conf: self.object_store = None return object_store_config = Bunch( object_store_config_file=conf['object_store_config_file'], file_path=conf.get("object_store_file_path", None), object_store_check_old_style=False, job_working_directory=conf.get("object_store_job_working_directory", None), new_file_path=conf.get("object_store_new_file_path", tempdir), umask=int(conf.get("object_store_umask", "0000")), ) self.object_store = build_object_store_from_config(object_store_config)
def _init(args):
    properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.yml")
    model = init_models_from_config(config, object_store=object_store)
    return (
        model,
        object_store,
        config,
    )
def __init__(self, config_str, clazz=None):
    self.temp_directory = mkdtemp()
    if config_str.startswith("<"):
        config_file = "store.xml"
    else:
        config_file = "store.yaml"
    self.write(config_str, config_file)
    config = MockConfig(self.temp_directory, config_file)
    if clazz is None:
        self.object_store = objectstore.build_object_store_from_config(config)
    elif config_file == "store.xml":
        self.object_store = clazz.from_xml(config, ElementTree.fromstring(config_str))
    else:
        self.object_store = clazz(config, yaml.safe_load(StringIO(config_str)))
def __init__(self, config_str=DISK_TEST_CONFIG, clazz=None, store_by="id"):
    self.temp_directory = mkdtemp()
    if config_str.startswith("<"):
        config_file = "store.xml"
    else:
        config_file = "store.yaml"
    self.write(config_str, config_file)
    config = MockConfig(self.temp_directory, config_file, store_by=store_by)
    if clazz is None:
        self.object_store = objectstore.build_object_store_from_config(config)
    elif config_file == "store.xml":
        self.object_store = clazz.from_xml(config, XML(config_str))
    else:
        self.object_store = clazz(config, yaml.safe_load(StringIO(config_str)))
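# A hedged usage sketch for the test helpers above (the owning class name,
# TestConfig, is an assumption). Both __init__ variants dispatch on the first
# character of config_str: strings starting with "<" are parsed as XML via
# from_xml(), anything else as YAML via yaml.safe_load(). The two hypothetical
# literals below would describe the same disk-backed store in either syntax.
EXAMPLE_XML_CONFIG = """<object_store type="disk">
    <files_dir path="/tmp/files"/>
</object_store>"""

EXAMPLE_YAML_CONFIG = """
type: disk
files_dir: /tmp/files
"""

# helper = TestConfig(EXAMPLE_YAML_CONFIG)  # hypothetical instantiation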
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]
    args = _arg_parser().parse_args(argv)
    object_store_config = Bunch(
        object_store_store_by="uuid",
        object_store_config_file=args.object_store_config,
        object_store_check_old_style=False,
        jobs_directory=None,
        new_file_path=None,
        umask=os.umask(0o77),
        gid=os.getgid(),
    )
    object_store = build_object_store_from_config(object_store_config)
    galaxy.model.Dataset.object_store = object_store
    galaxy.model.set_datatypes_registry(example_datatype_registry_for_sample())
    from galaxy.model import mapping
    mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=object_store)
    with open(args.objects) as f:
        targets = yaml.safe_load(f)
    if not isinstance(targets, list):
        targets = [targets]
    export_path = args.export
    export_type = args.export_type
    if export_type is None:
        export_type = "directory" if not export_path.endswith(".tgz") else "bag_archive"
    export_types = {
        "directory": store.DirectoryModelExportStore,
        "tar": store.TarModelExportStore,
        "bag_directory": store.BagDirectoryModelExportStore,
        "bag_archive": store.BagArchiveModelExportStore,
    }
    store_class = export_types[export_type]
    export_kwds = {
        "serialize_dataset_objects": True,
    }
    with store_class(export_path, **export_kwds) as export_store:
        for target in targets:
            persist_target_to_export_store(target, export_store, object_store, ".")
def _init(config):
    if config.startswith('/'):
        config = os.path.abspath(config)
    else:
        config = os.path.abspath(
            os.path.join(os.path.dirname(__file__), os.pardir, config))
    properties = load_app_properties(ini_file=config)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    return (mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
            object_store,
            config.database_connection.split(':')[0])
def init():
    options.config = os.path.abspath(options.config)
    config_parser = ConfigParser(dict(here=os.getcwd(),
                                      database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config_parser.read(options.config)
    config_dict = {}
    for key, value in config_parser.items("app:main"):
        config_dict[key] = value
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)
    return (mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
            object_store)
def __init__(self, config=None, **kwargs):
    super().__init__()
    self[BasicApp] = self
    self[MinimalManagerApp] = self
    self[StructuredApp] = self
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self[idencoding.IdEncodingHelper] = self.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store)
    self[SharedModelMapping] = self.model
    self[GalaxyModelMapping] = self.model
    self[scoped_session] = self.model.context
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagHandler(self.model.context)
    self[tags.GalaxyTagHandler] = self.tag_handler
    self.quota_agent = quota.DatabaseQuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(
        dynamic_params=None,
        destinations={},
        use_messaging=False,
        assign_handler=lambda *args, **kwargs: None
    )
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.tool_shed_registry = Bunch(tool_sheds={})
    self.genome_builds = GenomeBuilds(self)
    self.job_manager = NoopManager()
    self.application_stack = ApplicationStack()
    self.auth_manager = AuthManager(self.config)
    self.user_manager = UserManager(self)
    self.execution_timer_factory = Bunch(get_timer=StructuredExecutionTimer)
    self.is_job_handler = False
    rebind_container_to_task(self)

    def url_for(*args, **kwds):
        return "/mock/url"

    self.url_for = url_for
def __init__(self, config_str, clazz=None):
    self.temp_directory = mkdtemp()
    if config_str.startswith("<"):
        config_file = "store.xml"
    else:
        config_file = "store.yaml"
    self.write(config_str, config_file)
    config = MockConfig(self.temp_directory, config_file)
    if clazz is None:
        self.object_store = objectstore.build_object_store_from_config(
            config)
    elif config_file == "store.xml":
        self.object_store = clazz.from_xml(
            config, ElementTree.fromstring(config_str))
    else:
        self.object_store = clazz(config, yaml.safe_load(StringIO(config_str)))
def _init(config):
    if config.startswith('/'):
        config = os.path.abspath(config)
    else:
        config = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, config))
    properties = load_app_properties(ini_file=config)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    return (
        mapping.init(
            config.file_path,
            config.database_connection,
            create_tables=False,
            object_store=object_store
        ),
        object_store,
        config.database_connection.split(':')[0]
    )
def _init(args, need_app=False):
    properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.yml")
    if need_app:
        config_file = config_file_from_args(args)
        app = galaxy.app.UniverseApplication(global_conf={'__file__': config_file, 'here': os.getcwd()})
    else:
        app = None
    model = galaxy.config.init_models_from_config(config, object_store=object_store)
    return (
        model,
        object_store,
        config,
        app
    )
def __init__(self, config=None, **kwargs):
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagManager(self.model.context)
    self.quota_agent = quota.QuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(
        dynamic_params=None,
    )
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.genome_builds = GenomeBuilds(self)
    self.job_queue = NoopQueue()
def __setup_object_store(self, conf): if "object_store_config_file" not in conf and "object_store_config" not in conf: self.object_store = None return config_obj_kwds = dict( file_path=conf.get("object_store_file_path", None), object_store_check_old_style=False, job_working_directory=conf.get("object_store_job_working_directory", None), new_file_path=conf.get("object_store_new_file_path", tempdir), umask=int(conf.get("object_store_umask", "0000")), jobs_directory=None, ) config_dict = None if conf.get("object_store_config_file"): config_obj_kwds["object_store_config_file"] = conf['object_store_config_file'] else: config_dict = conf["object_store_config"] object_store_config = Bunch(**config_obj_kwds) self.object_store = build_object_store_from_config(object_store_config, config_dict=config_dict)
def __init__(self, config=None, **kwargs):
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagManager(self.model.context)
    self.quota_agent = quota.QuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(
        dynamic_params=None,
    )
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.genome_builds = GenomeBuilds(self)
    self.job_manager = Bunch(job_queue=NoopQueue())
    self.application_stack = ApplicationStack()
def _get_library_dataset_paths(args, kwargs):
    _config_logging(args)
    config = galaxy.config.Configuration(**kwargs)
    object_store = build_object_store_from_config(config)
    model = galaxy.model.mapping.init('/tmp/', kwargs.get('database_connection'), object_store=object_store)
    output = _open_output(args)
    last_library = None
    log.debug('Beginning library walk')
    for library, dataset in _walk_libraries(args, model):
        if library != last_library:
            log.info('Library: %s', library.name)
        filename = object_store.get_filename(dataset)
        files_dir = dataset.get_extra_files_path()
        if (args.exists and object_store.exists(dataset)) or not args.exists:
            output.write('%s\n' % _path(filename, args))
        elif args.exists:
            log.warning('Missing %s', filename)
        if files_dir and os.path.exists(files_dir):
            output.write('%s\n' % _path(files_dir, args))
        last_library = library
    output.close()
def _init(args, need_app=False):
    properties = app_properties_from_args(args)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning(
            "The database connection is empty. If you are using the default value, please uncomment that in your galaxy.yml"
        )
    if need_app:
        config_file = config_file_from_args(args)
        app = galaxy.app.UniverseApplication(global_conf={
            '__file__': config_file,
            'here': os.getcwd()
        })
    else:
        app = None
    model = galaxy.config.init_models_from_config(config, object_store=object_store)
    return (model, object_store, config, app)
def __init__(self, **kwargs):
    print >>sys.stderr, "python path is: " + ", ".join(sys.path)
    self.name = "galaxy"
    self.new_installation = False
    # Read config file and check for errors
    self.config = config.Configuration(**kwargs)
    self.config.check()
    config.configure_logging(self.config)
    self.configure_fluent_log()
    # Determine the database url
    if self.config.database_connection:
        db_url = self.config.database_connection
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    # Set up the tool sheds registry
    if os.path.isfile(self.config.tool_sheds_config):
        self.tool_shed_registry = tool_shed.tool_shed_registry.Registry(self.config.root, self.config.tool_sheds_config)
    else:
        self.tool_shed_registry = None
    # Initialize database / check for appropriate schema version.  If this
    # is a new installation, we'll restrict the tool migration messaging.
    from galaxy.model.migrate.check import create_or_verify_database
    create_or_verify_database(db_url, kwargs.get("global_conf", {}).get("__file__", None), self.config.database_engine_options, app=self)
    # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
    from tool_shed.galaxy_install.migrate.check import verify_tools
    verify_tools(self, db_url, kwargs.get("global_conf", {}).get("__file__", None), self.config.database_engine_options)
    # Object store manager
    self.object_store = build_object_store_from_config(self.config, fsmon=True)
    # Setup the database engine and ORM
    from galaxy.model import mapping
    self.model = mapping.init(
        self.config.file_path,
        db_url,
        self.config.database_engine_options,
        database_query_profiling_proxy=self.config.database_query_profiling_proxy,
        object_store=self.object_store,
        trace_logger=self.trace_logger,
        use_pbkdf2=self.config.get_bool("use_pbkdf2", True),
    )
    # Manage installed tool shed repositories.
    self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager(self)
    # Create an empty datatypes registry.
    self.datatypes_registry = galaxy.datatypes.registry.Registry()
    # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories.  We
    # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
    # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
    # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
    # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence.  If there is a conflict
    # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
    self.installed_repository_manager.load_proprietary_datatypes()
    # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
    self.datatypes_registry.load_datatypes(self.config.root, self.config.datatypes_config)
    galaxy.model.set_datatypes_registry(self.datatypes_registry)
    # Security helper
    self.security = security.SecurityHelper(id_secret=self.config.id_secret)
    # Tag handler
    self.tag_handler = GalaxyTagHandler()
    # Genomes
    self.genomes = Genomes(self)
    # Data providers registry.
    self.data_provider_registry = DataProviderRegistry()
    # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
    self.tool_data_tables = galaxy.tools.data.ToolDataTableManager(tool_data_path=self.config.tool_data_path,
                                                                   config_filename=self.config.tool_data_table_config_path)
    # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
    self.tool_data_tables.load_from_config_file(config_filename=self.config.shed_tool_data_table_config,
                                                tool_data_path=self.tool_data_tables.tool_data_path,
                                                from_shed_config=False)
    # Initialize the job management configuration
    self.job_config = jobs.JobConfiguration(self)
    # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
    tool_configs = self.config.tool_configs
    if self.config.migrated_tools_config not in tool_configs:
        tool_configs.append(self.config.migrated_tools_config)
    self.toolbox = tools.ToolBox(tool_configs, self.config.tool_path, self)
    # Search support for tools
    self.toolbox_search = galaxy.tools.search.ToolBoxSearch(self.toolbox)
    # Load Data Manager
    self.data_managers = DataManagers(self)
    # If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
    if self.config.get_bool("enable_tool_shed_check", False):
        from tool_shed.galaxy_install import update_manager
        self.update_manager = update_manager.UpdateManager(self)
    else:
        self.update_manager = None
    # Load proprietary datatype converters and display applications.
    self.installed_repository_manager.load_proprietary_converters_and_display_applications()
    # Load datatype display applications defined in local datatypes_conf.xml
    self.datatypes_registry.load_display_applications()
    # Load datatype converters defined in local datatypes_conf.xml
    self.datatypes_registry.load_datatype_converters(self.toolbox)
    # Load external metadata tool
    self.datatypes_registry.load_external_metadata_tool(self.toolbox)
    # Load history import/export tools.
    load_history_imp_exp_tools(self.toolbox)
    # Load genome indexer tool.
    load_genome_index_tools(self.toolbox)
    # visualizations registry: associates resources with visualizations, controls how to render
    self.visualizations_registry = None
    if self.config.visualizations_config_directory:
        self.visualizations_registry = VisualizationsRegistry(self.config.root, self.config.visualizations_config_directory)
    # Load security policy.
    self.security_agent = self.model.security_agent
    self.host_security_agent = galaxy.security.HostAgent(model=self.security_agent.model,
                                                         permitted_actions=self.security_agent.permitted_actions)
    # Load quota management.
    if self.config.enable_quotas:
        self.quota_agent = galaxy.quota.QuotaAgent(self.model)
    else:
        self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
    # Heartbeat and memdump for thread / heap profiling
    self.heartbeat = None
    self.memdump = None
    self.memory_usage = None
    # Container for OpenID authentication routines
    if self.config.enable_openid:
        from galaxy.web.framework import openid_manager
        self.openid_manager = openid_manager.OpenIDManager(self.config.openid_consumer_cache_path)
        self.openid_providers = OpenIDProviders.from_file(self.config.openid_config)
    else:
        self.openid_providers = OpenIDProviders()
    # Start the heartbeat process if configured and available
    if self.config.use_heartbeat:
        from galaxy.util import heartbeat
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat(fname=self.config.heartbeat_log)
            self.heartbeat.start()
    # Enable the memdump signal catcher if configured and available
    if self.config.use_memdump:
        from galaxy.util import memdump
        if memdump.Memdump:
            self.memdump = memdump.Memdump()
    # Transfer manager client
    if self.config.get_bool("enable_beta_job_managers", False):
        from galaxy.jobs import transfer_manager
        self.transfer_manager = transfer_manager.TransferManager(self)
    # Start the job manager
    from galaxy.jobs import manager
    self.job_manager = manager.JobManager(self)
    # FIXME: These are exposed directly for backward compatibility
    self.job_queue = self.job_manager.job_queue
    self.job_stop_queue = self.job_manager.job_stop_queue
    # Initialize the external service types
    self.external_service_types = external_service_types.ExternalServiceTypesCollection(self.config.external_service_type_config_file,
                                                                                        self.config.external_service_type_path, self)
def __init__(self, config_xml):
    self.temp_directory = mkdtemp()
    self.write(config_xml, "store.xml")
    config = MockConfig(self.temp_directory)
    self.object_store = objectstore.build_object_store_from_config(config)
def _configure_object_store(self, **kwds):
    from galaxy.objectstore import build_object_store_from_config
    self.object_store = build_object_store_from_config(self.config, **kwds)
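# Usage note for _configure_object_store above: extra keyword arguments pass
# straight through to build_object_store_from_config, e.g. the fsmon=True flag
# seen in the UniverseApplication snippet earlier in this listing.
# self._configure_object_store(fsmon=True)  # sketch; this call site is hypothetical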
def __init__( self, **kwargs ):
    print >> sys.stderr, "python path is: " + ", ".join( sys.path )
    self.new_installation = False
    # Read config file and check for errors
    self.config = config.Configuration( **kwargs )
    self.config.check()
    config.configure_logging( self.config )
    # Determine the database url
    if self.config.database_connection:
        db_url = self.config.database_connection
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    # Initialize database / check for appropriate schema version.  If this
    # is a new installation, we'll restrict the tool migration messaging.
    from galaxy.model.migrate.check import create_or_verify_database
    create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self )
    # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
    from galaxy.tool_shed.migrate.check import verify_tools
    verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
    # Object store manager
    self.object_store = build_object_store_from_config(self.config)
    # Setup the database engine and ORM
    from galaxy.model import mapping
    self.model = mapping.init( self.config.file_path,
                               db_url,
                               self.config.database_engine_options,
                               database_query_profiling_proxy=self.config.database_query_profiling_proxy,
                               object_store=self.object_store )
    # Set up the tool sheds registry
    if os.path.isfile( self.config.tool_sheds_config ):
        self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
    else:
        self.tool_shed_registry = None
    # Manage installed tool shed repositories.
    self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
    # Create an empty datatypes registry.
    self.datatypes_registry = galaxy.datatypes.registry.Registry()
    # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories.  We
    # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
    # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
    # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
    # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence.  If there is a conflict
    # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
    # This will also load proprietary datatype converters and display applications.
    self.installed_repository_manager.load_proprietary_datatypes()
    # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
    self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
    galaxy.model.set_datatypes_registry( self.datatypes_registry )
    # Security helper
    self.security = security.SecurityHelper( id_secret=self.config.id_secret )
    # Tag handler
    self.tag_handler = GalaxyTagHandler()
    # Tool data tables
    self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_table_config_path )
    # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
    tool_configs = self.config.tool_configs
    if self.config.migrated_tools_config not in tool_configs:
        tool_configs.append( self.config.migrated_tools_config )
    self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
    # Search support for tools
    self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
    # If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
    if self.config.get_bool( 'enable_tool_shed_check', False ):
        from tool_shed import update_manager
        self.update_manager = update_manager.UpdateManager( self )
    # Load datatype display applications defined in local datatypes_conf.xml
    self.datatypes_registry.load_display_applications()
    # Load datatype converters defined in local datatypes_conf.xml
    self.datatypes_registry.load_datatype_converters( self.toolbox )
    # Load external metadata tool
    self.datatypes_registry.load_external_metadata_tool( self.toolbox )
    # Load history import/export tools.
    load_history_imp_exp_tools( self.toolbox )
    # Load genome indexer tool.
    load_genome_index_tools( self.toolbox )
    # Load security policy.
    self.security_agent = self.model.security_agent
    self.host_security_agent = galaxy.security.HostAgent( model=self.security_agent.model,
                                                          permitted_actions=self.security_agent.permitted_actions )
    # Load quota management.
    if self.config.enable_quotas:
        self.quota_agent = galaxy.quota.QuotaAgent( self.model )
    else:
        self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
    # Heartbeat and memdump for thread / heap profiling
    self.heartbeat = None
    self.memdump = None
    self.memory_usage = None
    # Container for OpenID authentication routines
    if self.config.enable_openid:
        from galaxy.web.framework import openid_manager
        self.openid_manager = openid_manager.OpenIDManager( self.config.openid_consumer_cache_path )
        self.openid_providers = OpenIDProviders.from_file( self.config.openid_config )
    else:
        self.openid_providers = OpenIDProviders()
    # Start the heartbeat process if configured and available
    if self.config.use_heartbeat:
        from galaxy.util import heartbeat
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat( fname=self.config.heartbeat_log )
            self.heartbeat.start()
    # Enable the memdump signal catcher if configured and available
    if self.config.use_memdump:
        from galaxy.util import memdump
        if memdump.Memdump:
            self.memdump = memdump.Memdump()
    # Transfer manager client
    if self.config.get_bool( 'enable_beta_job_managers', False ):
        from jobs import transfer_manager
        self.transfer_manager = transfer_manager.TransferManager( self )
    # Start the job manager
    from jobs import manager
    self.job_manager = manager.JobManager( self )
    # FIXME: These are exposed directly for backward compatibility
    self.job_queue = self.job_manager.job_queue
    self.job_stop_queue = self.job_manager.job_stop_queue
    # Initialize the external service types
    self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file,
                                                                                         self.config.external_service_type_path, self )
def __main__():
    file_path = sys.argv.pop( 1 )
    tmp_dir = sys.argv.pop( 1 )
    galaxy.model.Dataset.file_path = file_path
    galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir
    config_root = sys.argv.pop( 1 )
    config_file_name = sys.argv.pop( 1 )
    if not os.path.isabs( config_file_name ):
        config_file_name = os.path.join( config_root, config_file_name )
    # Set up reference to object store
    # First, read in the main config file for Galaxy; this is required because
    # the object store configuration is stored there
    conf = ConfigParser.ConfigParser()
    conf.read(config_file_name)
    conf_dict = {}
    for section in conf.sections():
        for option in conf.options(section):
            try:
                conf_dict[option] = conf.get(section, option)
            except ConfigParser.InterpolationMissingOptionError:
                # Because this is not called from Paste Script, %(here)s variable
                # is not initialized in the config file so skip those fields -
                # just need not to use any such fields for the object store conf...
                log.debug("Did not load option %s from %s" % (option, config_file_name))
    # config object is required by ObjectStore class so create it now
    universe_config = config.Configuration(**conf_dict)
    object_store = build_object_store_from_config(universe_config)
    galaxy.model.Dataset.object_store = object_store
    # Set up datatypes registry
    datatypes_config = sys.argv.pop( 1 )
    datatypes_registry = galaxy.datatypes.registry.Registry()
    datatypes_registry.load_datatypes( root_dir=config_root, config=datatypes_config )
    galaxy.model.set_datatypes_registry( datatypes_registry )
    job_metadata = sys.argv.pop( 1 )
    ext_override = dict()
    if job_metadata != "None" and os.path.exists( job_metadata ):
        for line in open( job_metadata, 'r' ):
            try:
                line = stringify_dictionary_keys( from_json_string( line ) )
                assert line['type'] == 'dataset'
                ext_override[line['dataset_id']] = line['ext']
            except:
                continue
    for filenames in sys.argv[1:]:
        fields = filenames.split( ',' )
        filename_in = fields.pop( 0 )
        filename_kwds = fields.pop( 0 )
        filename_out = fields.pop( 0 )
        filename_results_code = fields.pop( 0 )
        dataset_filename_override = fields.pop( 0 )
        # Need to be careful with the way that these parameters are populated from the filename splitting,
        # because if a job is running when the server is updated, any existing external metadata command-lines
        # will not have info about the newly added override_metadata file
        if fields:
            override_metadata = fields.pop( 0 )
        else:
            override_metadata = None
        try:
            dataset = cPickle.load( open( filename_in ) )  # load DatasetInstance
            if dataset_filename_override:
                dataset.dataset.external_filename = dataset_filename_override
            if ext_override.get( dataset.dataset.id, None ):
                dataset.extension = ext_override[ dataset.dataset.id ]
            # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles
            if override_metadata:
                override_metadata = json.load( open( override_metadata ) )
                for metadata_name, metadata_file_override in override_metadata:
                    if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ):
                        metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override )
                    setattr( dataset.metadata, metadata_name, metadata_file_override )
            kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) )  # load kwds; need to ensure our keywords are not unicode
            dataset.datatype.set_meta( dataset, **kwds )
            dataset.metadata.to_JSON_dict( filename_out )  # write out results of set_meta
            json.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) )  # setting metadata has succeeded
        except Exception, e:
            json.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) )  # setting metadata has failed somehow
def __main__():
    file_path = sys.argv.pop(1)
    tool_job_working_directory = tmp_dir = sys.argv.pop(1)  # this is also the job_working_directory now
    galaxy.model.Dataset.file_path = file_path
    galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir
    config_root = sys.argv.pop(1)
    config_file_name = sys.argv.pop(1)
    if not os.path.isabs(config_file_name):
        config_file_name = os.path.join(config_root, config_file_name)
    # Set up reference to object store
    # First, read in the main config file for Galaxy; this is required because
    # the object store configuration is stored there
    conf_dict = load_app_properties(ini_file=config_file_name)
    # config object is required by ObjectStore class so create it now
    universe_config = config.Configuration(**conf_dict)
    universe_config.ensure_tempdir()
    object_store = build_object_store_from_config(universe_config)
    galaxy.model.Dataset.object_store = object_store
    # Set up datatypes registry
    datatypes_config = sys.argv.pop(1)
    datatypes_registry = galaxy.datatypes.registry.Registry()
    datatypes_registry.load_datatypes(root_dir=config_root, config=datatypes_config)
    galaxy.model.set_datatypes_registry(datatypes_registry)
    job_metadata = sys.argv.pop(1)
    existing_job_metadata_dict = {}
    new_job_metadata_dict = {}
    if job_metadata != "None" and os.path.exists(job_metadata):
        for line in open(job_metadata, 'r'):
            try:
                line = stringify_dictionary_keys(json.loads(line))
                if line['type'] == 'dataset':
                    existing_job_metadata_dict[line['dataset_id']] = line
                elif line['type'] == 'new_primary_dataset':
                    new_job_metadata_dict[line['filename']] = line
            except:
                continue
    for filenames in sys.argv[1:]:
        fields = filenames.split(',')
        filename_in = fields.pop(0)
        filename_kwds = fields.pop(0)
        filename_out = fields.pop(0)
        filename_results_code = fields.pop(0)
        dataset_filename_override = fields.pop(0)
        # Need to be careful with the way that these parameters are populated from the filename splitting,
        # because if a job is running when the server is updated, any existing external metadata command-lines
        # will not have info about the newly added override_metadata file
        if fields:
            override_metadata = fields.pop(0)
        else:
            override_metadata = None
        set_meta_kwds = stringify_dictionary_keys(json.load(open(filename_kwds)))  # load kwds; need to ensure our keywords are not unicode
        try:
            dataset = cPickle.load(open(filename_in))  # load DatasetInstance
            if dataset_filename_override:
                dataset.dataset.external_filename = dataset_filename_override
            files_path = os.path.abspath(os.path.join(tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id)))
            dataset.dataset.external_extra_files_path = files_path
            if dataset.dataset.id in existing_job_metadata_dict:
                dataset.extension = existing_job_metadata_dict[dataset.dataset.id].get('ext', dataset.extension)
            # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles
            if override_metadata:
                override_metadata = json.load(open(override_metadata))
                for metadata_name, metadata_file_override in override_metadata:
                    if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value(metadata_file_override):
                        metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON(metadata_file_override)
                    setattr(dataset.metadata, metadata_name, metadata_file_override)
            file_dict = existing_job_metadata_dict.get(dataset.dataset.id, {})
            set_meta_with_tool_provided(dataset, file_dict, set_meta_kwds)
            dataset.metadata.to_JSON_dict(filename_out)  # write out results of set_meta
            json.dump((True, 'Metadata has been set successfully'), open(filename_results_code, 'wb+'))  # setting metadata has succeeded
        except Exception, e:
            json.dump((False, str(e)), open(filename_results_code, 'wb+'))  # setting metadata has failed somehow
def test_pulsar_objectstore(self):
    # Define real object store used by Pulsar server.
    object_store_config_file = join(self.temp_directory, "object_store_conf.xml")
    with open(object_store_config_file, "w") as configf:
        config_template = Template("""<?xml version="1.0"?>
<object_store type="disk">
    <files_dir path="${temp_directory}"/>
    <extra_dir type="temp" path="${temp_directory}"/>
    <extra_dir type="job_work" path="${temp_directory}"/>
</object_store>
""")
        config_contents = config_template.safe_substitute(temp_directory=self.temp_directory)
        configf.write(config_contents)

    app_conf = dict(
        object_store_config_file=object_store_config_file,
        private_token="12345",
    )
    from .test_utils import test_pulsar_server
    with test_pulsar_server(app_conf=app_conf) as server:
        url = server.application_url
        # Define a proxy Pulsar object store.
        proxy_object_store_config_file = join(self.temp_directory, "proxy_object_store_conf.xml")
        with open(proxy_object_store_config_file, "w") as configf:
            config_template = Template("""<?xml version="1.0"?>
<object_store type="pulsar" url="$url" private_token="12345" transport="urllib">
    <!-- private_token is optional - see Pulsar documentation for more information. -->
    <!-- transport is optional, set to curl to use libcurl instead of urllib for communication with Pulsar. -->
</object_store>
""")
            contents = config_template.safe_substitute(url=url)
            configf.write(contents)

        config = Bunch(object_store_config_file=proxy_object_store_config_file)
        object_store = build_object_store_from_config(config=config)

        # Test no dataset with id 1 exists.
        absent_dataset = MockDataset(1)
        assert not object_store.exists(absent_dataset)

        # Write empty dataset 2 in second backend, ensure it is empty and exists.
        empty_dataset = MockDataset(2)
        self.__write(b"", "000/dataset_2.dat")
        assert object_store.exists(empty_dataset)
        assert object_store.empty(empty_dataset)

        # Write non-empty dataset in backend 1, test it is not empty & exists.
        hello_world_dataset = MockDataset(3)
        self.__write(b"Hello World!", "000/dataset_3.dat")
        assert object_store.exists(hello_world_dataset)
        assert not object_store.empty(hello_world_dataset)

        # Test get_data
        data = object_store.get_data(hello_world_dataset)
        assert data == "Hello World!"
        data = object_store.get_data(hello_world_dataset, start=1, count=6)
        assert data == "ello W"

        # Test size
        # Absent and empty datasets yield size of 0.
        assert object_store.size(absent_dataset) == 0
        assert object_store.size(empty_dataset) == 0
        # Otherwise non-zero.
        assert object_store.size(hello_world_dataset) > 0  # Should this always be the number of bytes?

        # Test percent used (to some degree)
        percent_store_used = object_store.get_store_usage_percent()
        assert percent_store_used > 0.0
        assert percent_store_used < 100.0

        # Test update_from_file
        output_dataset = MockDataset(4)
        output_real_path = join(self.temp_directory, "000", "dataset_4.dat")
        assert not exists(output_real_path)
        output_working_path = self.__write(b"NEW CONTENTS", "job_working_directory1/example_output")
        object_store.update_from_file(output_dataset, file_name=output_working_path, create=True)
        assert exists(output_real_path)

        # Test delete
        to_delete_dataset = MockDataset(5)
        to_delete_real_path = self.__write(b"content to be deleted!", "000/dataset_5.dat")
        assert object_store.exists(to_delete_dataset)
        assert object_store.delete(to_delete_dataset)
        assert not object_store.exists(to_delete_dataset)
        assert not exists(to_delete_real_path)

        # Test json content.
        complex_contents_dataset = MockDataset(6)
        complex_content = b'{"a":6}'
        self.__write(complex_content, "000/dataset_6.dat")
        assert object_store.exists(complex_contents_dataset)
        # The original line assigned the comparison result instead of asserting it.
        data = object_store.get_data(complex_contents_dataset)
        assert data == complex_content.decode("utf-8")
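The MockDataset fixture used throughout this test is defined elsewhere in the test module; a minimal sketch that would satisfy the calls made above might look like the following. The attribute set here is an assumption based on what disk-backed object stores typically read from a dataset object, not the real fixture:

class MockDataset(object):
    # Hypothetical stand-in for the fixture used above (assumed attributes).
    def __init__(self, id):
        self.id = id                 # stores resolve paths like 000/dataset_<id>.dat from this
        self.object_store_id = None  # None lets the store choose its default backend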
def __load_object_store(self):
    self.object_store = build_object_store_from_config(self.config)
def _configure_object_store(self, **kwds):
    from galaxy.objectstore import build_object_store_from_config
    self.object_store = build_object_store_from_config(self.config, **kwds)
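As the Bunch-based proxy config in the Pulsar test above suggests, build_object_store_from_config only needs a config object exposing the attributes it inspects. A minimal usage sketch, assuming a plain disk-store XML file at a placeholder path:

from galaxy.objectstore import build_object_store_from_config
from galaxy.util.bunch import Bunch

# Any attribute bag exposing object_store_config_file works here,
# mirroring the Bunch-based config in the Pulsar test above.
config = Bunch(object_store_config_file="/path/to/object_store_conf.xml")  # placeholder path
object_store = build_object_store_from_config(config)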
def __main__():
    file_path = sys.argv.pop(1)
    tool_job_working_directory = tmp_dir = sys.argv.pop(1)  # this is also the job_working_directory now
    galaxy.model.Dataset.file_path = file_path
    galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir
    config_root = sys.argv.pop(1)
    config_file_name = sys.argv.pop(1)
    if not os.path.isabs(config_file_name):
        config_file_name = os.path.join(config_root, config_file_name)

    # Set up reference to object store
    # First, read in the main config file for Galaxy; this is required because
    # the object store configuration is stored there
    conf_dict = load_app_properties(ini_file=config_file_name)
    # config object is required by ObjectStore class so create it now
    universe_config = config.Configuration(**conf_dict)
    universe_config.ensure_tempdir()
    object_store = build_object_store_from_config(universe_config)
    galaxy.model.Dataset.object_store = object_store

    # Set up datatypes registry
    datatypes_config = sys.argv.pop(1)
    datatypes_registry = galaxy.datatypes.registry.Registry()
    datatypes_registry.load_datatypes(root_dir=config_root, config=datatypes_config)
    galaxy.model.set_datatypes_registry(datatypes_registry)

    job_metadata = sys.argv.pop(1)
    existing_job_metadata_dict = {}
    new_job_metadata_dict = {}
    if job_metadata != "None" and os.path.exists(job_metadata):
        for line in open(job_metadata, "r"):
            try:
                line = stringify_dictionary_keys(json.loads(line))
                if line["type"] == "dataset":
                    existing_job_metadata_dict[line["dataset_id"]] = line
                elif line["type"] == "new_primary_dataset":
                    new_job_metadata_dict[line["filename"]] = line
            except Exception:
                continue

    for filenames in sys.argv[1:]:
        fields = filenames.split(",")
        filename_in = fields.pop(0)
        filename_kwds = fields.pop(0)
        filename_out = fields.pop(0)
        filename_results_code = fields.pop(0)
        dataset_filename_override = fields.pop(0)
        # Need to be careful with the way that these parameters are populated from the filename splitting,
        # because if a job is running when the server is updated, any existing external metadata command-lines
        # will not have info about the newly added override_metadata file
        if fields:
            override_metadata = fields.pop(0)
        else:
            override_metadata = None
        set_meta_kwds = stringify_dictionary_keys(
            json.load(open(filename_kwds))
        )  # load kwds; need to ensure our keywords are not unicode
        try:
            dataset = cPickle.load(open(filename_in))  # load DatasetInstance
            if dataset_filename_override:
                dataset.dataset.external_filename = dataset_filename_override
            files_path = os.path.abspath(
                os.path.join(tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id))
            )
            dataset.dataset.external_extra_files_path = files_path
            if dataset.dataset.id in existing_job_metadata_dict:
                dataset.extension = existing_job_metadata_dict[dataset.dataset.id].get("ext", dataset.extension)
            # Metadata FileParameter types may not be writable on a cluster node, and are therefore
            # temporarily substituted with MetadataTempFiles
            if override_metadata:
                override_metadata = json.load(open(override_metadata))
                for metadata_name, metadata_file_override in override_metadata:
                    if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value(metadata_file_override):
                        metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON(
                            metadata_file_override
                        )
                    setattr(dataset.metadata, metadata_name, metadata_file_override)
            file_dict = existing_job_metadata_dict.get(dataset.dataset.id, {})
            set_meta_with_tool_provided(dataset, file_dict, set_meta_kwds)
            dataset.metadata.to_JSON_dict(filename_out)  # write out results of set_meta
            json.dump(
                (True, "Metadata has been set successfully"), open(filename_results_code, "wb+")
            )  # setting metadata has succeeded
        except Exception as e:
            json.dump((False, str(e)), open(filename_results_code, "wb+"))  # setting metadata has failed somehow