Example #1
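Apparently from Galaxy application startup: the manager is initialized from the main tool data table config, then shed-contributed entries are merged in with load_from_config_file().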
    def _configure_tool_data_tables( self, from_shed_config ):
        from galaxy.tools.data import ToolDataTableManager

        # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
        self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
                                                      config_filename=self.config.tool_data_table_config_path )
        # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
        self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
                                                     tool_data_path=self.tool_data_tables.tool_data_path,
                                                     from_shed_config=from_shed_config )
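For context, a minimal sketch of how such a manager is typically consumed afterwards. The membership test, item lookup, and get_fields() reflect the Galaxy API as I understand it, and 'all_fasta' is only an illustrative table name:

    # Hedged sketch: assumes a configured Galaxy `app`; 'all_fasta' is a
    # commonly shipped table but is not guaranteed to exist in a deployment.
    tables = app.tool_data_tables
    if 'all_fasta' in tables:              # ToolDataTableManager supports membership tests
        table = tables['all_fasta']        # ...and item lookup by table name
        for fields in table.get_fields():  # rows parsed from the table's .loc file(s)
            print(fields)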
Example #2
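From Galaxy's GenomeTransferPlugin (a deferred-job plugin): a standalone manager is built only to parse the table config XML, and the returned tree is walked to map each table name to its .loc file path.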
 def __init__(self, app):
     super(GenomeTransferPlugin, self).__init__(app)
     self.app = app
     self.tool = app.toolbox.tools_by_id['__GENOME_INDEX__']
     self.sa_session = app.model.context.current
     # Fix for the excerpt: `locations` must exist before the loop below
     # populates it (the enclosing class defines it outside this snippet).
     self.locations = {}
     tdtman = ToolDataTableManager(app.config.tool_data_path)
     xmltree = tdtman.load_from_config_file(
         app.config.tool_data_table_config_path, app.config.tool_data_path)
     for node in xmltree:
         table = node.get('name')
         location = node.findall('file')[0].get('path')
         self.locations[table] = location
Example #3
File: app.py Project: mvdbeek/galaxy
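A later revision of Example #1: the manager now also receives the full config object as other_config_dict, and a missing shed config file is tolerated (via errno.ENOENT, relying on a module-level import errno) unless shed_tool_data_table_config was set explicitly.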
 def _configure_tool_data_tables(self, from_shed_config):
     # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
     self.tool_data_tables = ToolDataTableManager(
         tool_data_path=self.config.tool_data_path,
         config_filename=self.config.tool_data_table_config_path,
         other_config_dict=self.config)
     # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
     try:
         self.tool_data_tables.load_from_config_file(
             config_filename=self.config.shed_tool_data_table_config,
             tool_data_path=self.tool_data_tables.tool_data_path,
             from_shed_config=from_shed_config)
     except OSError as exc:
         # Missing shed_tool_data_table_config is okay if it's the default
         if exc.errno != errno.ENOENT or self.config.is_set(
                 'shed_tool_data_table_config'):
             raise
Example #4
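A validation helper that roots a throwaway manager in a temporary directory. Note that cleanup only runs if the with-body exits normally; Example #6 shows the same helper hardened with try/finally.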
 def from_app(app, work_dir=None):
     cleanup = False
     if not work_dir:
         work_dir = tempfile.mkdtemp()
         cleanup = True
     tool_data_tables = ToolDataTableManager(work_dir)
     with ValidationContext(app_name=app.name,
                            security=app.security,
                            model=app.model,
                            tool_data_path=work_dir,
                            shed_tool_data_path=work_dir,
                            tool_data_tables=tool_data_tables,
                            hgweb_config_manager=getattr(
                                app, 'hgweb_config_manager', None)) as app:
         yield app
     if cleanup:
         shutil.rmtree(work_dir, ignore_errors=True)
Example #5
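From a test fixture: a mock Galaxy application registers a real ToolDataTableManager alongside stubbed services in a dependency-injection container keyed by interface types.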
    def __init__(self, config=None, **kwargs):
        super().__init__()
        config = config or MockAppConfig(**kwargs)
        GalaxyDataTestApp.__init__(self, config=config, **kwargs)
        self[BasicSharedApp] = self
        self[MinimalManagerApp] = self
        self[StructuredApp] = self
        self[idencoding.IdEncodingHelper] = self.security
        self.name = kwargs.get('name', 'galaxy')
        self[SharedModelMapping] = self.model
        self[GalaxyModelMapping] = self.model
        self[galaxy_scoped_session] = self.model.context
        self.visualizations_registry = MockVisualizationsRegistry()
        self.tag_handler = tags.GalaxyTagHandler(self.model.context)
        self[tags.GalaxyTagHandler] = self.tag_handler
        self.quota_agent = quota.DatabaseQuotaAgent(self.model)
        self.job_config = Bunch(
            dynamic_params=None,
            destinations={},
            use_messaging=False,
            assign_handler=lambda *args, **kwargs: None
        )
        self.tool_data_tables = ToolDataTableManager(tool_data_path=self.config.tool_data_path)
        self.dataset_collections_service = None
        self.container_finder = NullContainerFinder()
        self._toolbox_lock = MockLock()
        self.tool_shed_registry = Bunch(tool_sheds={})
        self.genome_builds = GenomeBuilds(self)
        self.job_manager = NoopManager()
        self.application_stack = ApplicationStack()
        self.auth_manager = AuthManager(self.config)
        self.user_manager = UserManager(self)
        self.execution_timer_factory = Bunch(get_timer=StructuredExecutionTimer)
        self.file_sources = Bunch(to_dict=lambda *args, **kwargs: {})
        self.interactivetool_manager = Bunch(create_interactivetool=lambda *args, **kwargs: None)
        self.is_job_handler = False
        self.biotools_metadata_source = None
        rebind_container_to_task(self)

        def url_for(*args, **kwds):
            return "/mock/url"
        self.url_for = url_for
Example #6
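The helper from Example #4 in a later revision: cleanup is moved into a finally block so the temp directory is removed even when the with-body raises, and ValidationContext gains registry and biotools_metadata_source arguments.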
 def from_app(app, work_dir=None):
     cleanup = False
     if not work_dir:
         work_dir = tempfile.mkdtemp()
         cleanup = True
     tool_data_tables = ToolDataTableManager(work_dir)
     try:
         with ValidationContext(
             app_name=app.name,
             security=app.security,
             model=app.model,
             tool_data_path=work_dir,
             shed_tool_data_path=work_dir,
             tool_data_tables=tool_data_tables,
             registry=app.datatypes_registry,
             hgweb_config_manager=getattr(app, 'hgweb_config_manager', None),
             biotools_metadata_source=getattr(app, 'biotools_metadata_source', None),
         ) as app:
             yield app
     finally:
         if cleanup:
             shutil.rmtree(work_dir, ignore_errors=True)
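A hedged usage sketch for the helper above, assuming from_app is wrapped with @contextlib.contextmanager in the source (its yield suggests so); validate_tools is a hypothetical caller:

    # Hypothetical caller of the context manager above.
    with from_app(app) as validation_app:
        validate_tools(validation_app)  # any work needing the throwaway context
    # the temporary work_dir has been removed here (the finally block ran)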
Example #7
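The most involved example, from Galaxy's genome-indexing deferred job: the manager resolves .loc file locations, finished indexes are moved into place per indexer (2bit, bowtie, bowtie2, bwa, perm, picard, sam), the FASTA link is verified via _check_link, and the matching .loc lines are appended via _add_line.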
    def postprocessing( self, sa_session, app ):
        """ Finish the job, move the finished indexes to their final resting place,
            and update the .loc files where applicable. """
        gitd = sa_session.query( model.GenomeIndexToolData ).filter_by( job_id=self.job_id ).first()
        indexdirs = dict( bfast='bfast_index', bowtie='bowtie_index', bowtie2='bowtie2_index',
                          bwa='bwa_index', perm='perm_%s_index', picard='srma_index', sam='sam_index' )

        if gitd:
            fp = open( gitd.dataset.get_file_name(), 'r' )
            deferred = sa_session.query( model.DeferredJob ).filter_by( id=gitd.deferred_job_id ).first()
            try:
                logloc = simplejson.load( fp )
            except ValueError:
                deferred.state = app.model.DeferredJob.states.ERROR
                sa_session.add( deferred )
                sa_session.flush()
                log.debug( 'Indexing job failed, setting deferred job state to error.' )
                return False
            finally:
                fp.close()
            destination = None
            tdtman = ToolDataTableManager( app.config.tool_data_path )
            xmltree = tdtman.load_from_config_file( app.config.tool_data_table_config_path, app.config.tool_data_path )
            for node in xmltree:
                table = node.get('name')
                location = node.findall('file')[0].get('path')
                self.locations[table] = os.path.abspath( location )
            locbase = os.path.abspath( os.path.split( self.locations['all_fasta'] )[0] )
            params = deferred.params
            dbkey = params[ 'dbkey' ]
            basepath = os.path.join( os.path.abspath( app.config.genome_data_path ), dbkey )
            intname = params[ 'intname' ]
            indexer = gitd.indexer
            workingdir = os.path.abspath( gitd.dataset.extra_files_path )
            location = []
            indexdata = gitd.dataset.extra_files_path
            if indexer == '2bit':
                indexdata = os.path.join( workingdir, '%s.2bit' % dbkey )
                destination = os.path.join( basepath, 'seq', '%s.2bit' % dbkey )
                location.append( dict( line='\t'.join( [ 'seq', dbkey, destination ] ), file= os.path.join( locbase, 'alignseq.loc' ) ) )
            elif indexer == 'bowtie':
                self._ex_tar( workingdir, 'cs.tar' )
                destination = os.path.join( basepath, 'bowtie_index' )
                for var in [ 'nt', 'cs' ]:
                    for line in logloc[ var ]:
                        idx = line
                        if var == 'nt':
                            locfile = self.locations[ 'bowtie_indexes' ]
                            locdir = os.path.join( destination, idx )
                        else:
                            locfile = self.locations[ 'bowtie_indexes_color' ]
                            locdir = os.path.join( destination, var, idx )
                        location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
            elif indexer == 'bowtie2':
                destination = os.path.join( basepath, 'bowtie2_index' )
                for line in logloc[ 'nt' ]:
                    idx = line
                    locfile = self.locations[ 'bowtie2_indexes' ]
                    locdir = os.path.join( destination, idx )
                    location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
            elif indexer == 'bwa':
                self._ex_tar( workingdir, 'cs.tar' )
                destination = os.path.join( basepath, 'bwa_index' )
                for var in [ 'nt', 'cs' ]:
                    for line in logloc[ var ]:
                        idx = line
                        if var == 'nt':
                            locfile = self.locations[ 'bwa_indexes' ]
                            locdir = os.path.join( destination, idx )
                        else:
                            locfile = self.locations[ 'bwa_indexes_color' ]
                            locdir = os.path.join( destination, var, idx )
                        location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
            elif indexer == 'perm':
                self._ex_tar( workingdir, 'cs.tar' )
                destination = os.path.join( basepath, 'perm_index' )
                for var in [ 'nt', 'cs' ]:
                    for line in logloc[ var ]:
                        idx = line.pop()
                        if var == 'nt':
                            locfile = self.locations[ 'perm_base_indexes' ]
                            locdir = os.path.join( destination, idx )
                        else:
                            locfile = self.locations[ 'perm_color_indexes' ]
                            locdir = os.path.join( destination, var, idx )
                        line.append( locdir )
                        location.append( dict( line='\t'.join( line ), file=locfile ) )
            elif indexer == 'picard':
                destination = os.path.join( basepath, 'srma_index' )
                for var in [ 'nt' ]:
                    for line in logloc[ var ]:
                        idx = line
                        locfile = self.locations[ 'picard_indexes' ]
                        locdir = os.path.join( destination, idx )
                        location.append( dict( line='\t'.join( [ dbkey, dbkey, intname, locdir ] ), file=locfile ) )
            elif indexer == 'sam':
                destination = os.path.join( basepath, 'sam_index' )
                for var in [ 'nt' ]:
                    for line in logloc[ var ]:
                        locfile = self.locations[ 'sam_fa_indexes' ]
                        locdir = os.path.join( destination, line )
                        location.append( dict( line='\t'.join( [ 'index', dbkey, locdir ] ), file=locfile ) )
            
            if destination is not None and os.path.exists( os.path.split( destination )[0] ) and not os.path.exists( destination ):
                log.debug( 'Moving %s to %s' % ( indexdata, destination ) )
                shutil.move( indexdata, destination )
                if indexer not in [ '2bit' ]:
                    genome = '%s.fa' % dbkey
                    target = os.path.join( destination, genome )
                    fasta = os.path.abspath( os.path.join( basepath, 'seq', genome ) )
                    self._check_link( fasta, target )
                    if os.path.exists( os.path.join( destination, 'cs' ) ):
                        target = os.path.join( destination, 'cs', genome )
                        fasta = os.path.abspath( os.path.join( basepath, 'seq', genome ) )
                        self._check_link( fasta, target )
            for line in location:
                self._add_line( line[ 'file' ], line[ 'line' ] )
            deferred.state = app.model.DeferredJob.states.OK
            sa_session.add( deferred )
            sa_session.flush()
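Taken together, the examples show one consistent contract: ToolDataTableManager is constructed with a tool_data_path (plus an optional config_filename), and load_from_config_file() both registers the tables it parses and returns the XML elements, which is why Examples #2 and #7 can walk the tree and read each <file path="..."/> entry directly.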