Example #1
def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
    # Get the 000x_tools.xml file associated with the current migrate_tools version number.
    tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
    # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
    migrated_tool_configs_dict = odict()
    tree, error_message = xml_util.parse_xml( tools_xml_file_path )
    if tree is None:
        return False, odict()
    root = tree.getroot()
    tool_shed = root.get( 'name' )
    tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
    # The default behavior is that the tool shed is down.
    tool_shed_accessible = False
    missing_tool_configs_dict = odict()
    if tool_shed_url:
        for elem in root:
            if elem.tag == 'repository':
                tool_dependencies = []
                tool_dependencies_dict = {}
                repository_name = elem.get( 'name' )
                changeset_revision = elem.get( 'changeset_revision' )
                url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
                ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
                try:
                    text = tool_shed_get( app, tool_shed_url, url )
                    tool_shed_accessible = True
                except Exception, e:
                    # Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
                    tool_shed_accessible = False
                    print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
                if tool_shed_accessible:
                    if text:
                        tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                        for dependency_key, requirements_dict in tool_dependencies_dict.items():
                            tool_dependency_name = requirements_dict[ 'name' ]
                            tool_dependency_version = requirements_dict[ 'version' ]
                            tool_dependency_type = requirements_dict[ 'type' ]
                            tool_dependency_readme = requirements_dict.get( 'readme', '' )
                            tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
                    for tool_elem in elem.findall( 'tool' ):
                        migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
        if tool_shed_accessible:
            # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
            for tool_panel_config in tool_panel_configs:
                tree, error_message = xml_util.parse_xml( tool_panel_config )
                if tree:
                    root = tree.getroot()
                    for elem in root:
                        if elem.tag == 'tool':
                            missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
                        elif elem.tag == 'section':
                            for section_elem in elem:
                                if section_elem.tag == 'tool':
                                    missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
    return tool_shed_accessible, missing_tool_configs_dict
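# A minimal, self-contained sketch of the <repository>/<tool> mapping pattern that
# check_for_missing_tools relies on, assuming xml_util.parse_xml is a thin wrapper around
# xml.etree.ElementTree.  The inline XML and the dependency tuple below are hypothetical
# stand-ins for a real 000x_tools.xml file and a decoded get_tool_dependencies response.
import xml.etree.ElementTree as ET
from collections import OrderedDict

SAMPLE_TOOLS_XML = """
<toolshed name="toolshed.g2.bx.psu.edu">
    <repository name="emboss_datatypes" changeset_revision="a89163f31369">
        <tool id="antigenic" version="5.0.0" file="emboss_antigenic.xml" />
    </repository>
</toolshed>
"""

def map_tool_files_to_dependencies( tools_xml, dependencies ):
    # Map each migrated tool config file name to the dependency tuples of its repository.
    migrated_tool_configs = OrderedDict()
    root = ET.fromstring( tools_xml )
    for repository_elem in root.findall( 'repository' ):
        for tool_elem in repository_elem.findall( 'tool' ):
            migrated_tool_configs[ tool_elem.get( 'file' ) ] = dependencies
    return migrated_tool_configs

# map_tool_files_to_dependencies( SAMPLE_TOOLS_XML, [ ( 'emboss', '5.0.0', 'package', '' ) ] )
# -> OrderedDict([('emboss_antigenic.xml', [('emboss', '5.0.0', 'package', '')])])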
def get_archives_from_manifest( manifest_file_path ):
    """
    Return the list of archive names defined in the capsule manifest.  This method will also validate the manifest by ensuring all
    <repository> tag sets contain a valid <archive> sub-element.
    """
    archives = []
    error_message = ''
    manifest_tree, error_message = xml_util.parse_xml( manifest_file_path )
    if error_message:
        return archives, error_message
    manifest_root = manifest_tree.getroot()
    for elem in manifest_root:
        # <repository name="package_lapack_3_4" type="tool_dependency_definition" username="******">
        if elem.tag != 'repository':
            error_message = 'All level one sub-elements in the manifest.xml file must be <repository> tag sets.  '
            error_message += 'The tag <b><%s></b> is invalid.' % str( elem.tag )
            return [], error_message
        archive_file_name = None
        for repository_elem in elem:
            if repository_elem.tag == 'archive':
                # <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive>
                archive_file_name = repository_elem.text
                break
        if archive_file_name is None:
            error_message = 'The %s tag set is missing a required <archive> sub-element.' % str( elem.tag )
            return [], error_message
        archives.append( archive_file_name )
    return archives, error_message
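# A standalone sketch of the manifest validation rule enforced by get_archives_from_manifest:
# every top-level element must be a <repository> tag set containing an <archive> sub-element.
# It uses xml.etree.ElementTree directly on a hypothetical inline manifest instead of
# xml_util.parse_xml and a file on disk.
import xml.etree.ElementTree as ET

SAMPLE_MANIFEST = """
<repositories>
    <repository name="package_lapack_3_4" type="tool_dependency_definition" username="owner">
        <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive>
    </repository>
</repositories>
"""

def archives_from_manifest_text( manifest_text ):
    archives = []
    root = ET.fromstring( manifest_text )
    for elem in root:
        if elem.tag != 'repository':
            return [], 'All level one sub-elements must be <repository> tag sets; found <%s>.' % elem.tag
        archive_elem = elem.find( 'archive' )
        if archive_elem is None:
            return [], 'The <%s> tag set is missing a required <archive> sub-element.' % elem.tag
        archives.append( archive_elem.text )
    return archives, ''

# archives_from_manifest_text( SAMPLE_MANIFEST ) -> (['package_lapack_3_4-9e7a45ad3522.tar.gz'], '')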
 def handle_tag_attributes( self, config ):
     """
     Populate or unpopulate the toolshed and changeset_revision attributes of a
     <repository> tag.  Populating will occur when a dependency definition file
     is being uploaded to the repository, while unpopulating will occur when the
     repository is being exported.
     """
     # Make sure we're looking at a valid repository_dependencies.xml file.
     tree, error_message = xml_util.parse_xml( config )
     if tree is None:
         return False, None, error_message
     root = tree.getroot()
     root_altered = False
     new_root = copy.deepcopy( root )
     for index, elem in enumerate( root ):
         if elem.tag == 'repository':
             # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
             altered, new_elem, error_message = self.handle_elem( elem )
             if error_message:
                 error_message = 'The %s file contains an invalid <repository> tag.  %s' % ( self.file_name, error_message )
                 return False, None, error_message
             if altered:
                 if not root_altered:
                     root_altered = True
                 new_root[ index ] = new_elem
     return root_altered, new_root, error_message
 def filter_and_persist_proprietary_tool_panel_configs( self, tool_configs_to_filter ):
     """Eliminate all entries in all non-shed-related tool panel configs for all tool config file names in the received tool_configs_to_filter."""
     for proprietary_tool_conf in self.proprietary_tool_confs:
         persist_required = False
         tree, error_message = xml_util.parse_xml( proprietary_tool_conf )
         if tree:
             root = tree.getroot()
             # Iterate over a copy because elements are removed from the tree during the loop.
             for elem in list( root ):
                 if elem.tag == 'tool':
                     # Tools outside of sections.
                     file_path = elem.get( 'file', None )
                     if file_path:
                         if file_path in tool_configs_to_filter:
                             root.remove( elem )
                             persist_required = True
                 elif elem.tag == 'section':
                     # Tools contained in a section.
                     # Iterate over a copy here as well since tool elements may be removed from the section.
                     for section_elem in list( elem ):
                         if section_elem.tag == 'tool':
                             file_path = section_elem.get( 'file', None )
                             if file_path:
                                 if file_path in tool_configs_to_filter:
                                     elem.remove( section_elem )
                                     persist_required = True
         if persist_required:
             fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-fapptpc"  )
             tmp_filename = fh.name
             fh.close()
             fh = open( tmp_filename, 'wb' )
             tree.write( tmp_filename, encoding='utf-8', xml_declaration=True )
             fh.close()
             shutil.move( tmp_filename, os.path.abspath( proprietary_tool_conf ) )
             os.chmod( proprietary_tool_conf, 0644 )
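# A self-contained sketch of the "filter then persist" pattern used by
# filter_and_persist_proprietary_tool_panel_configs: remove matching <tool> entries from an
# ElementTree and write the result to a temporary file that is then moved over the original.
# The config path and tool file names are hypothetical, and xml.etree.ElementTree stands in
# for xml_util.parse_xml.
import os
import shutil
import tempfile
import xml.etree.ElementTree as ET

def filter_tools_and_persist( config_path, tool_files_to_remove ):
    tree = ET.parse( config_path )
    root = tree.getroot()
    removed = False
    # Iterate over copies so that removing elements does not skip the following sibling.
    for elem in list( root ):
        if elem.tag == 'tool' and elem.get( 'file' ) in tool_files_to_remove:
            root.remove( elem )
            removed = True
        elif elem.tag == 'section':
            for section_elem in list( elem ):
                if section_elem.tag == 'tool' and section_elem.get( 'file' ) in tool_files_to_remove:
                    elem.remove( section_elem )
                    removed = True
    if removed:
        # Write to a temporary file first, then move it into place and restore permissions.
        fd, tmp_filename = tempfile.mkstemp( prefix='tmp-toolshed-filter' )
        os.close( fd )
        tree.write( tmp_filename, encoding='utf-8', xml_declaration=True )
        shutil.move( tmp_filename, os.path.abspath( config_path ) )
        os.chmod( config_path, 0o644 )
    return removed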
Example #5
 def add_to_shed_tool_config( self, shed_tool_conf_dict, elem_list ):
     """
     A tool shed repository is being installed so change the shed_tool_conf file.  Parse the
     config file to generate the entire list of config_elems instead of using the in-memory list
     since it will be a subset of the entire list if one or more repositories have been deactivated.
     """
     if not elem_list:
         # We may have an empty elem_list in case a data manager is being installed.
         # In that case we don't want to wait for a toolbox reload that will never happen.
         return
     shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
     tool_path = shed_tool_conf_dict[ 'tool_path' ]
     config_elems = []
     tree, error_message = xml_util.parse_xml( shed_tool_conf )
     if tree:
         root = tree.getroot()
         for elem in root:
             config_elems.append( elem )
         # Add the new elements to the in-memory list of config_elems.
         for elem_entry in elem_list:
             config_elems.append( elem_entry )
         # Persist the altered shed_tool_config file.
         toolbox = self.app.toolbox
         self.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
         self.app.wait_for_toolbox_reload(toolbox)
 def __init__( self, root_dir=None, config=None ):
     self.tool_sheds = odict()
     self.tool_sheds_auth = odict()
     if root_dir and config:
         # Parse tool_sheds_conf.xml
         tree, error_message = xml_util.parse_xml( config )
         if tree is None:
             log.warning( "Unable to load references to tool sheds defined in file %s" % str( config ) )
         else:
             root = tree.getroot()
             log.debug( 'Loading references to tool sheds from %s' % config )
             for elem in root.findall( 'tool_shed' ):
                 try:
                     name = elem.get( 'name', None )
                     url = elem.get( 'url', None )
                     username = elem.get( 'user', None )
                     password = elem.get( 'pass', None )
                     if name and url:
                         self.tool_sheds[ name ] = url
                         self.tool_sheds_auth[ name ] = None
                         log.debug( 'Loaded reference to tool shed: %s' % name )
                     if name and url and username and password:
                         pass_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
                         pass_mgr.add_password( None, url, username, password )
                         self.tool_sheds_auth[ name ] = pass_mgr
                 except Exception, e:
                     log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
def handle_repository_dependencies_definition( trans, repository_dependencies_config, unpopulate=False ):
    """
    Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag.  Populating will occur when a
    dependency definition file is being uploaded to the repository, while unpopulating will occur when the repository is being
    exported.
    """
    altered = False
    # Make sure we're looking at a valid repository_dependencies.xml file.
    tree, error_message = xml_util.parse_xml( repository_dependencies_config )
    if tree is None:
        return False, None, error_message
    root = tree.getroot()
    if root.tag == 'repositories':
        for index, elem in enumerate( root ):
            if elem.tag == 'repository':
                # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
                revised, elem, error_message = handle_repository_dependency_elem( trans, elem, unpopulate=unpopulate )
                if error_message:
                    error_message = 'The repository_dependencies.xml file contains an invalid <repository> tag.  %s' % error_message
                    return False, None, error_message
                if revised:
                    root[ index ] = elem
                    if not altered:
                        altered = True
        return altered, root, error_message
    return False, None, error_message
 def load_tool_from_tmp_config( self, repo, repository_id, ctx, ctx_file, work_dir ):
     tool = None
     message = ''
     tmp_tool_config = hg_util.get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
     if tmp_tool_config:
         tool_element, error_message = xml_util.parse_xml( tmp_tool_config )
         if tool_element is None:
             return tool, message
         # Look for external files required by the tool config.
         tmp_code_files = []
         external_paths = Tool.get_externally_referenced_paths( tmp_tool_config )
         for path in external_paths:
             tmp_code_file_name = hg_util.copy_file_from_manifest( repo, ctx, path, work_dir )
             if tmp_code_file_name:
                 tmp_code_files.append( tmp_code_file_name )
         tool, valid, message = self.load_tool_from_config( repository_id, tmp_tool_config )
         for tmp_code_file in tmp_code_files:
             try:
                 os.unlink( tmp_code_file )
             except:
                 pass
         try:
             os.unlink( tmp_tool_config )
         except:
             pass
     return tool, message
def create_user( app ):
    user_info_config = os.path.abspath( os.path.join( app.config.root, 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) )
    email = None
    password = None
    username = None
    tree, error_message = xml_util.parse_xml( user_info_config )
    if tree is None:
        print "The XML file ", user_info_config, " seems to be invalid, using defaults."
        email = '*****@*****.**'
        password = '******'
        username = '******'
    else:
        root = tree.getroot()
        for elem in root:
            if elem.tag == 'email':
                email = elem.text
            elif elem.tag == 'password':
                password = elem.text
            elif elem.tag == 'username':
                username = elem.text
    if email and password and username:
        invalid_message = validate( email, password, username )
        if invalid_message:
            print invalid_message
        else:
            user = app.model.User( email=email )
            user.set_password_cleartext( password )
            user.username = username
            app.sa_session.add( user )
            app.sa_session.flush()
            app.model.security_agent.create_private_user_role( user )
            return user
    else:
        print "Missing required values for email: ", email, ", password: ", password, ", username: ", username
    return None
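# A minimal standalone sketch of the user_info.xml convention read by create_user above (and
# by admin_user_info later in this listing): pick up <email>, <password> and <username>
# children, falling back to supplied defaults when the document cannot be parsed.  The sample
# input and defaults are placeholders, not values used by the real bootstrap script.
import xml.etree.ElementTree as ET

def read_user_info( user_info_text, defaults ):
    info = dict( defaults )
    try:
        root = ET.fromstring( user_info_text )
    except ET.ParseError:
        return info
    for elem in root:
        if elem.tag in ( 'email', 'password', 'username' ):
            info[ elem.tag ] = elem.text
    return info

# read_user_info( '<user><email>admin@example.org</email></user>',
#                 dict( email='', password='', username='admin' ) )
# -> {'email': 'admin@example.org', 'password': '', 'username': 'admin'}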
 def remove_from_shed_tool_config( self, shed_tool_conf_dict, guids_to_remove ):
     """
     A tool shed repository is being uninstalled so change the shed_tool_conf file.
     Parse the config file to generate the entire list of config_elems instead of
     using the in-memory list since it will be a subset of the entire list if one
     or more repositories have been deactivated.
     """
     shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
     tool_path = shed_tool_conf_dict[ 'tool_path' ]
     config_elems = []
     tree, error_message = xml_util.parse_xml( shed_tool_conf )
     if tree:
         root = tree.getroot()
         for elem in root:
             config_elems.append( elem )
         config_elems_to_remove = []
         for config_elem in config_elems:
             if config_elem.tag == 'section':
                 tool_elems_to_remove = []
                 for tool_elem in config_elem:
                     if tool_elem.get( 'guid' ) in guids_to_remove:
                         tool_elems_to_remove.append( tool_elem )
                 for tool_elem in tool_elems_to_remove:
                     # Remove all of the appropriate tool sub-elements from the section element.
                     config_elem.remove( tool_elem )
                 if len( config_elem ) < 1:
                     # Keep a list of all empty section elements so they can be removed.
                     config_elems_to_remove.append( config_elem )
             elif config_elem.tag == 'tool':
                 if config_elem.get( 'guid' ) in guids_to_remove:
                     config_elems_to_remove.append( config_elem )
         for config_elem in config_elems_to_remove:
             config_elems.remove( config_elem )
         # Persist the altered in-memory version of the tool config.
         self.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
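# A self-contained sketch of the uninstall pattern in remove_from_shed_tool_config: drop every
# <tool> whose guid is in the removal set, then drop any <section> left empty.  The inline
# document and guids are hypothetical.
import xml.etree.ElementTree as ET

SAMPLE_SHED_CONF = """
<toolbox tool_path="../shed_tools">
    <section id="filters" name="Filters" version="">
        <tool file="repo/tool1.xml" guid="toolshed/repos/owner/repo/tool1/1.0" />
    </section>
    <tool file="repo/tool2.xml" guid="toolshed/repos/owner/repo/tool2/1.0" />
</toolbox>
"""

def remove_tools_by_guid( shed_tool_conf_text, guids_to_remove ):
    root = ET.fromstring( shed_tool_conf_text )
    for config_elem in list( root ):
        if config_elem.tag == 'section':
            for tool_elem in list( config_elem ):
                if tool_elem.get( 'guid' ) in guids_to_remove:
                    config_elem.remove( tool_elem )
            if len( config_elem ) < 1:
                # Remove sections left empty after their tools were removed.
                root.remove( config_elem )
        elif config_elem.tag == 'tool' and config_elem.get( 'guid' ) in guids_to_remove:
            root.remove( config_elem )
    return root

# remove_tools_by_guid( SAMPLE_SHED_CONF, { 'toolshed/repos/owner/repo/tool1/1.0' } ) leaves
# only the top-level tool2 entry; the now-empty Filters section is removed as well.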
 def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ):
     """
     Parse each config in self.proprietary_tool_confs (the default is tool_conf.xml) and generate a list of Elements that are
     either ToolSection elements or Tool elements.  These will be used to generate new entries in the migrated_tools_conf.xml
     file for the installed tools.
     """
     tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
     # Parse the XML and load the file attributes for later checking against the integrated elements from self.proprietary_tool_confs.
     migrated_tool_configs = []
     tree, error_message = xml_util.parse_xml( tools_xml_file_path )
     if tree is None:
         return []
     root = tree.getroot()
     for elem in root:
         if elem.tag == 'repository':
             for tool_elem in elem:
                 migrated_tool_configs.append( tool_elem.get( 'file' ) )
     # Parse each file in self.proprietary_tool_confs and generate the integrated list of tool panel Elements that contain them.
     tool_panel_elems = []
     for proprietary_tool_conf in self.proprietary_tool_confs:
         tree, error_message = xml_util.parse_xml( proprietary_tool_conf )
         if tree is None:
             return []
         root = tree.getroot()
         for elem in root:
             if elem.tag == 'tool':
                 # Tools outside of sections.
                 file_path = elem.get( 'file', None )
                 if file_path:
                     name = suc.strip_path( file_path )
                     if name in migrated_tool_configs:
                         if elem not in tool_panel_elems:
                             tool_panel_elems.append( elem )
             elif elem.tag == 'section':
                 # Tools contained in a section.
                 for section_elem in elem:
                     if section_elem.tag == 'tool':
                         file_path = section_elem.get( 'file', None )
                         if file_path:
                             name = suc.strip_path( file_path )
                             if name in migrated_tool_configs:
                                 # Append the section, not the tool.
                                 if elem not in tool_panel_elems:
                                     tool_panel_elems.append( elem )
     return tool_panel_elems
 def generate_tool_panel_dict_from_shed_tool_conf_entries( self, repository ):
     """
     Keep track of the section in the tool panel in which this repository's
     tools will be contained by parsing the shed_tool_conf in which the
     repository's tools are defined and storing the tool panel definition
     of each tool in the repository. This method is called only when the
     repository is being deactivated or un-installed and allows for
     activation or re-installation using the original layout.
     """
     tool_panel_dict = {}
     shed_tool_conf, tool_path, relative_install_dir = \
         suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
     metadata = repository.metadata
     # Create a dictionary of tool guid and tool config file name for each tool in the repository.
     guids_and_configs = {}
     if 'tools' in metadata:
         for tool_dict in metadata[ 'tools' ]:
             guid = tool_dict[ 'guid' ]
             tool_config = tool_dict[ 'tool_config' ]
             file_name = basic_util.strip_path( tool_config )
             guids_and_configs[ guid ] = file_name
     # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
     tree, error_message = xml_util.parse_xml( shed_tool_conf )
     if tree is None:
         return tool_panel_dict
     root = tree.getroot()
     for elem in root:
         if elem.tag == 'tool':
             guid = elem.get( 'guid' )
             if guid in guids_and_configs:
                 # The tool is displayed in the tool panel outside of any tool sections.
                 tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
                 if guid in tool_panel_dict:
                     tool_panel_dict[ guid ].append( tool_section_dict )
                 else:
                     tool_panel_dict[ guid ] = [ tool_section_dict ]
         elif elem.tag == 'section':
             section_id = elem.get( 'id' ) or ''
             section_name = elem.get( 'name' ) or ''
             section_version = elem.get( 'version' ) or ''
             for section_elem in elem:
                 if section_elem.tag == 'tool':
                     guid = section_elem.get( 'guid' )
                     if guid in guids_and_configs:
                         # The tool is displayed in the tool panel inside the current tool section.
                         tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
                                                   id=section_id,
                                                   name=section_name,
                                                   version=section_version )
                         if guid in tool_panel_dict:
                             tool_panel_dict[ guid ].append( tool_section_dict )
                         else:
                             tool_panel_dict[ guid ] = [ tool_section_dict ]
     return tool_panel_dict
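# A condensed, runnable sketch of the mapping built by
# generate_tool_panel_dict_from_shed_tool_conf_entries: for a shed_tool_conf-style document,
# record the section (or lack of one) in which each tool guid appears.  The inline XML and
# guids are hypothetical.
import xml.etree.ElementTree as ET

SAMPLE_SHED_TOOL_CONF = """
<toolbox tool_path="../shed_tools">
    <tool file="repo/tool1.xml" guid="toolshed/repos/owner/repo/tool1/1.0" />
    <section id="filters" name="Filters" version="">
        <tool file="repo/tool2.xml" guid="toolshed/repos/owner/repo/tool2/1.0" />
    </section>
</toolbox>
"""

def panel_locations_by_guid( shed_tool_conf_text ):
    root = ET.fromstring( shed_tool_conf_text )
    locations = {}
    for elem in root:
        if elem.tag == 'tool':
            # Tools outside of any section get empty section coordinates.
            locations.setdefault( elem.get( 'guid' ), [] ).append( dict( id='', name='', version='' ) )
        elif elem.tag == 'section':
            section_dict = dict( id=elem.get( 'id' ) or '',
                                 name=elem.get( 'name' ) or '',
                                 version=elem.get( 'version' ) or '' )
            for section_elem in elem.findall( 'tool' ):
                locations.setdefault( section_elem.get( 'guid' ), [] ).append( section_dict )
    return locations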
 def install_tool_data_tables( self, tool_shed_repository, tool_index_sample_files ):
     TOOL_DATA_TABLE_FILE_NAME = 'tool_data_table_conf.xml'
     TOOL_DATA_TABLE_FILE_SAMPLE_NAME = '%s.sample' % ( TOOL_DATA_TABLE_FILE_NAME )
     SAMPLE_SUFFIX = '.sample'
     SAMPLE_SUFFIX_OFFSET = -len( SAMPLE_SUFFIX )
     tool_path, relative_target_dir = tool_shed_repository.get_tool_relative_path( self.app )
     # This is where index files will reside on a per repo/installed version basis.
     target_dir = os.path.join( self.app.config.shed_tool_data_path, relative_target_dir )
     if not os.path.exists( target_dir ):
         os.makedirs( target_dir )
     for sample_file in tool_index_sample_files:
          path, filename = os.path.split( sample_file )
         target_filename = filename
         if target_filename.endswith( SAMPLE_SUFFIX ):
             target_filename = target_filename[ : SAMPLE_SUFFIX_OFFSET ]
         source_file = os.path.join( tool_path, sample_file )
         # We're not currently uninstalling index files, do not overwrite existing files.
         target_path_filename = os.path.join( target_dir, target_filename )
         if not os.path.exists( target_path_filename ) or target_filename == TOOL_DATA_TABLE_FILE_NAME:
             shutil.copy2( source_file, target_path_filename )
         else:
             log.debug( "Did not copy sample file '%s' to install directory '%s' because file already exists.", filename, target_dir )
         # For provenance and to simplify introspection, let's keep the original data table sample file around.
         if filename == TOOL_DATA_TABLE_FILE_SAMPLE_NAME:
             shutil.copy2( source_file, os.path.join( target_dir, filename ) )
     tool_data_table_conf_filename = os.path.join( target_dir, TOOL_DATA_TABLE_FILE_NAME )
     elems = []
     if os.path.exists( tool_data_table_conf_filename ):
         tree, error_message = xml_util.parse_xml( tool_data_table_conf_filename )
         if tree:
             for elem in tree.getroot():
                 # Append individual table elems or other elems, but not the enclosing <tables> elem itself.
                 if elem.tag == 'tables':
                     for table_elem in elem:
                         elems.append( table_elem )
                 else:
                     elems.append( elem )
     else:
         log.debug( "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.",
                    tool_data_table_conf_filename, TOOL_DATA_TABLE_FILE_SAMPLE_NAME )
     for elem in elems:
         if elem.tag == 'table':
             for file_elem in elem.findall( 'file' ):
                 path = file_elem.get( 'path', None )
                 if path:
                     file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[1] ) ) )
             # Store repository info in the table tag set for trace-ability.
             repo_elem = self.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=elem )
     if elems:
         # Remove old data_table
         os.unlink( tool_data_table_conf_filename )
         # Persist new data_table content.
         self.app.tool_data_tables.to_xml_file( tool_data_table_conf_filename, elems )
     return tool_data_table_conf_filename, elems
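# A small standalone sketch of the path rewriting performed for tool_data_table_conf.xml
# entries in install_tool_data_tables: each <file path="..."> is re-pointed at the
# per-repository target directory while keeping only the original base file name.  The inline
# table definition and target directory are hypothetical.
import os
import xml.etree.ElementTree as ET

SAMPLE_DATA_TABLE = """
<tables>
    <table name="bwa_indexes" comment_char="#">
        <columns>value, dbkey, name, path</columns>
        <file path="tool-data/bwa_index.loc" />
    </table>
</tables>
"""

def repoint_data_table_files( data_table_text, target_dir ):
    root = ET.fromstring( data_table_text )
    for table_elem in root.findall( 'table' ):
        for file_elem in table_elem.findall( 'file' ):
            path = file_elem.get( 'path' )
            if path:
                file_elem.set( 'path', os.path.normpath( os.path.join( target_dir, os.path.split( path )[ 1 ] ) ) )
    return root

# repoint_data_table_files( SAMPLE_DATA_TABLE, '/galaxy/shed_tool_data/repo' ) rewrites the
# path attribute to /galaxy/shed_tool_data/repo/bwa_index.loc.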
def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
    """
    Install and build tool dependencies defined in the tool_dependencies_config.  This config's tag sets can currently refer to installation
    methods in Galaxy's tool_dependencies module.  In the future, proprietary fabric scripts contained in the repository will be supported.
    Future enhancements to handling tool dependencies may provide installation processes in addition to fabric based processes.  The dependencies
    will be installed in:
    ~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
    """
    sa_session = app.model.context.current
    installed_tool_dependencies = []
    # Parse the tool_dependencies.xml config.
    tree, error_message = xml_util.parse_xml( tool_dependencies_config )
    if tree is None:
        return installed_tool_dependencies
    root = tree.getroot()
    fabric_version_checked = False
    for elem in root:
        if elem.tag == 'package':
            # Only install the tool_dependency if it is not already installed.
            package_name = elem.get( 'name', None )
            package_version = elem.get( 'version', None )
            if package_name and package_version:
                for tool_dependency in tool_dependencies:
                    if tool_dependency.name==package_name and tool_dependency.version==package_version:
                        break
                if tool_dependency.can_install:
                    try:
                        tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies )
                    except Exception, e:
                        error_message = "Error installing tool dependency %s version %s: %s" % ( str( package_name ), str( package_version ), str( e ) )
                        log.debug( error_message )
                        if tool_dependency:
                            tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
                            tool_dependency.error_message = error_message
                            sa_session.add( tool_dependency )
                            sa_session.flush()
                    if tool_dependency and tool_dependency.status in [ app.model.ToolDependency.installation_status.INSTALLED,
                                                                       app.model.ToolDependency.installation_status.ERROR ]:
                        installed_tool_dependencies.append( tool_dependency )
        elif elem.tag == 'set_environment':
            try:
                tool_dependency = set_environment( app, elem, tool_shed_repository )
            except Exception, e:
                error_message = "Error setting environment for tool dependency: %s" % str( e )
                log.debug( error_message )
                if tool_dependency:
                    tool_dependency.status = app.model.ToolDependency.installation_status.ERROR
                    tool_dependency.error_message = error_message
                    sa_session.add( tool_dependency )
                    sa_session.flush()
def get_non_shed_tool_panel_configs(app):
    """Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml."""
    config_filenames = []
    for config_filename in app.config.tool_configs:
        # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
        # <toolbox tool_path="../shed_tools">
        tree, error_message = xml_util.parse_xml(config_filename)
        if tree is None:
            continue
        root = tree.getroot()
        tool_path = root.get('tool_path', None)
        if tool_path is None:
            config_filenames.append(config_filename)
    return config_filenames
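# A tiny standalone check mirroring the rule used by get_non_shed_tool_panel_configs: a tool
# panel config is treated as shed-related when its root <toolbox> tag carries a tool_path
# attribute.  The inline documents are hypothetical examples of the two cases.
import xml.etree.ElementTree as ET

def is_shed_related( toolbox_xml_text ):
    root = ET.fromstring( toolbox_xml_text )
    return root.get( 'tool_path' ) is not None

# is_shed_related( '<toolbox tool_path="../shed_tools"></toolbox>' ) -> True
# is_shed_related( '<toolbox></toolbox>' ) -> False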
 def handle_tag_attributes( self, tool_dependencies_config ):
     """
     Populate or unpopulate the toolshed and changeset_revision attributes of each <repository>
     tag defined within a tool_dependencies.xml file.
     """
     rdah = RepositoryDependencyAttributeHandler( self.app, self.unpopulate )
     tah = tag_attribute_handler.TagAttributeHandler( self.app, rdah, self.unpopulate )
     altered = False
     error_message = ''
     # Make sure we're looking at a valid tool_dependencies.xml file.
     tree, error_message = xml_util.parse_xml( tool_dependencies_config )
     if tree is None:
         return False, None, error_message
     root = tree.getroot()
     altered, new_root, error_message = tah.process_config( root, skip_actions_tags=False )
     return altered, new_root, error_message
def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
    """
    Create or update a ToolDependency for each entry in tool_dependencies_config.  This method is called when
    installing a new tool_shed_repository.
    """
    tool_dependency_objects = []
    shed_config_dict = tool_shed_repository.get_shed_config_dict( app )
    if shed_config_dict.get( 'tool_path' ):
        relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
    # Get the tool_dependencies.xml file from the repository.
    tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
    tree, error_message = xml_util.parse_xml( tool_dependencies_config )
    if tree is None:
        return tool_dependency_objects
    root = tree.getroot()
    fabric_version_checked = False
    for elem in root:
        tool_dependency_type = elem.tag
        if tool_dependency_type == 'package':
            name = elem.get( 'name', None )
            version = elem.get( 'version', None )
            if name and version:
                status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED
                tool_dependency = create_or_update_tool_dependency( app,
                                                                    tool_shed_repository,
                                                                    name=name,
                                                                    version=version,
                                                                    type=tool_dependency_type,
                                                                    status=status,
                                                                    set_status=set_status )
                tool_dependency_objects.append( tool_dependency )
        elif tool_dependency_type == 'set_environment':
            for env_elem in elem:
                # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
                name = env_elem.get( 'name', None )
                action = env_elem.get( 'action', None )
                if name and action:
                    status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED
                    tool_dependency = create_or_update_tool_dependency( app,
                                                                        tool_shed_repository,
                                                                        name=name,
                                                                        version=None,
                                                                        type=tool_dependency_type,
                                                                        status=status,
                                                                        set_status=set_status )
                    tool_dependency_objects.append( tool_dependency )
    return tool_dependency_objects
 def get_repository_install_dir( self, tool_shed_repository ):
     for tool_config in self.tool_configs:
         tree, error_message = xml_util.parse_xml( tool_config )
         if tree is None:
             return None
         root = tree.getroot()
         tool_path = root.get( 'tool_path', None )
         if tool_path:
             ts = suc.clean_tool_shed_url( tool_shed_repository.tool_shed )
             relative_path = os.path.join( tool_path,
                                           ts,
                                           'repos',
                                           str( tool_shed_repository.owner ),
                                           str( tool_shed_repository.name ),
                                           str( tool_shed_repository.installed_changeset_revision ) )
             if os.path.exists( relative_path ):
                 return relative_path
     return None
def admin_user_info( config_parser ):
    user_info_config = os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) )
    tree, error_message = xml_util.parse_xml( user_info_config )
    if tree is None:
        print "The XML file ", user_info_config, " seems to be invalid, using defaults."
        email = '*****@*****.**'
        password = '******'
        username = '******'
    else:
        root = tree.getroot()
        for elem in root:
            if elem.tag == 'email':
                email = elem.text
            elif elem.tag == 'password':
                password = elem.text
            elif elem.tag == 'username':
                username = elem.text
    print '%s__SEP__%s__SEP__%s' % ( username, email, password )
    return 0
def get_repository_info_from_manifest( manifest_file_path ):
    """
    Parse the capsule manifest and return a list of dictionaries containing information about each exported repository
    archive contained within the capsule.
    """
    repository_info_dicts = []
    manifest_tree, error_message = xml_util.parse_xml( manifest_file_path )
    if error_message:
        return repository_info_dicts, error_message
    manifest_root = manifest_tree.getroot()
    for elem in manifest_root:
        # <repository name="package_lapack_3_4" type="tool_dependency_definition" username="******">
        if elem.tag != 'repository':
            error_message = 'All level one sub-elements in the manifest.xml file must be <repository> tag sets.  '
            error_message += 'The tag <b><%s></b> is invalid.' % str( elem.tag )
            return [], error_message
        name = elem.get( 'name', None )
        owner = elem.get( 'username', None )
        type = elem.get( 'type', None )
        if name is None or owner is None or type is None:
            error_message = 'Missing required name, type, owner attributes from the tag %s' % str( elem.tag )
            return [], error_message
        repository_info_dict = dict( name=name, owner=owner, type=type )
        for repository_elem in elem:
            if repository_elem.tag == 'archive':
                # <archive>package_lapack_3_4-9e7a45ad3522.tar.gz</archive>
                archive_file_name = repository_elem.text
                repository_info_dict[ 'archive_file_name' ] = archive_file_name
                items = archive_file_name.split( '-' )
                # Remove the '.tar.gz' suffix to recover the changeset revision; rstrip( '.tar.gz' )
                # would strip trailing characters from the set '.targz' rather than the literal suffix.
                changeset_revision = items[ 1 ].replace( '.tar.gz', '' )
                repository_info_dict[ 'changeset_revision' ] = changeset_revision
            elif repository_elem.tag == 'categories':
                category_names = []
                for category_elem in repository_elem:
                    if category_elem.tag == 'category':
                        category_names.append( category_elem.text )
                repository_info_dict[ 'category_names' ] = category_names
            elif repository_elem.tag == 'description':
                repository_info_dict[ 'description' ] = repository_elem.text
            elif repository_elem.tag == 'long_description':
                repository_info_dict[ 'long_description' ] = repository_elem.text
        repository_info_dicts.append( repository_info_dict )
    return repository_info_dicts, error_message
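# A standalone sketch of the archive naming convention that get_repository_info_from_manifest
# relies on: exported archives appear to be named <repository_name>-<changeset_revision>.tar.gz,
# so the revision is the text between the final '-' and the '.tar.gz' suffix.  The sample name
# is hypothetical.
def changeset_revision_from_archive_name( archive_file_name ):
    base = archive_file_name
    if base.endswith( '.tar.gz' ):
        base = base[ : -len( '.tar.gz' ) ]
    return base.rsplit( '-', 1 )[ -1 ]

# changeset_revision_from_archive_name( 'package_lapack_3_4-9e7a45ad3522.tar.gz' ) -> '9e7a45ad3522'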
 def add_to_shed_tool_config( self, shed_tool_conf_dict, elem_list ):
     """
     A tool shed repository is being installed so change the shed_tool_conf file.  Parse the
     config file to generate the entire list of config_elems instead of using the in-memory list
     since it will be a subset of the entire list if one or more repositories have been deactivated.
     """
     shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
     tool_path = shed_tool_conf_dict[ 'tool_path' ]
     config_elems = []
     tree, error_message = xml_util.parse_xml( shed_tool_conf )
     if tree:
         root = tree.getroot()
         for elem in root:
             config_elems.append( elem )
         # Add the elements to the in-memory list of config_elems.
         for elem_entry in elem_list:
             config_elems.append( elem_entry )
         # Persist the altered shed_tool_config file.
         self.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
 def update_in_shed_tool_config( self ):
     """
     A tool shed repository is being updated so change the shed_tool_conf file.  Parse the config
     file to generate the entire list of config_elems instead of using the in-memory list.
     """
     shed_conf_dict = self.repository.get_shed_config_dict( self.app )
     shed_tool_conf = shed_conf_dict[ 'config_filename' ]
     tool_path = shed_conf_dict[ 'tool_path' ]
     self.tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( self.repository )
     repository_tools_tups = self.get_repository_tools_tups()
     clone_url = common_util.generate_clone_url_for_installed_repository( self.app, self.repository )
     tool_shed = self.tool_shed_from_repository_clone_url()
     owner = self.repository.owner
     if not owner:
         cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( clone_url )
         owner = repository_util.get_repository_owner( cleaned_repository_clone_url )
     guid_to_tool_elem_dict = {}
     for tool_config_filename, guid, tool in repository_tools_tups:
         guid_to_tool_elem_dict[ guid ] = self.tpm.generate_tool_elem( tool_shed,
                                                                       self.repository.name,
                                                                       self.repository.changeset_revision,
                                                                       self.repository.owner or '',
                                                                       tool_config_filename,
                                                                       tool,
                                                                       None )
     config_elems = []
     tree, error_message = xml_util.parse_xml( shed_tool_conf )
     if tree:
         root = tree.getroot()
         for elem in root:
             if elem.tag == 'section':
                 for i, tool_elem in enumerate( elem ):
                     guid = tool_elem.attrib.get( 'guid' )
                     if guid in guid_to_tool_elem_dict:
                         elem[i] = guid_to_tool_elem_dict[ guid ]
             elif elem.tag == 'tool':
                 guid = elem.attrib.get( 'guid' )
                 if guid in guid_to_tool_elem_dict:
                     elem = guid_to_tool_elem_dict[ guid ]
             config_elems.append( elem )
         self.tpm.config_elems_to_xml_file( config_elems, shed_tool_conf, tool_path )
Example #23
def admin_user_info(config_parser):
    user_info_config = os.path.abspath(
        os.path.join(os.getcwd(), "lib/tool_shed/scripts/bootstrap_tool_shed", "user_info.xml")
    )
    tree, error_message = xml_util.parse_xml(user_info_config)
    if tree is None:
        print "The XML file ", user_info_config, " seems to be invalid, using defaults."
        email = "*****@*****.**"
        password = "******"
        username = "******"
    else:
        root = tree.getroot()
        for elem in root:
            if elem.tag == "email":
                email = elem.text
            elif elem.tag == "password":
                password = elem.text
            elif elem.tag == "username":
                username = elem.text
    print "%s__SEP__%s__SEP__%s" % (username, email, password)
    return 0
def get_export_info_dict( export_info_file_path ):
    """Parse the export_info.xml file contained within the capsule and return a dictionary containing its entries."""
    export_info_dict = {}
    export_info_tree, error_message = xml_util.parse_xml( export_info_file_path )
    if export_info_tree is None:
        return export_info_dict
    export_info_root = export_info_tree.getroot()
    for elem in export_info_root:
        if elem.tag == 'export_time':
            export_info_dict[ 'export_time' ] = elem.text
        elif elem.tag == 'tool_shed':
            export_info_dict[ 'tool_shed' ] = elem.text
        elif elem.tag == 'repository_name':
            export_info_dict[ 'repository_name' ] = elem.text
        elif elem.tag == 'repository_owner':
            export_info_dict[ 'repository_owner' ] = elem.text
        elif elem.tag == 'changeset_revision':
            export_info_dict[ 'changeset_revision' ] = elem.text
        elif elem.tag == 'export_repository_dependencies':
            if util.asbool( elem.text ):
                export_info_dict[ 'export_repository_dependencies' ] = 'Yes'
            else:
                export_info_dict[ 'export_repository_dependencies' ] = 'No'
    return export_info_dict
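# A compact standalone sketch of the tag-to-key extraction in get_export_info_dict (without the
# Yes/No conversion for export_repository_dependencies), shown for illustration with a
# hypothetical export_info.xml fragment.
import xml.etree.ElementTree as ET

SAMPLE_EXPORT_INFO = """
<export_info>
    <export_time>2014-01-01 00:00:00</export_time>
    <tool_shed>toolshed.g2.bx.psu.edu</tool_shed>
    <repository_name>package_lapack_3_4</repository_name>
    <repository_owner>owner</repository_owner>
    <changeset_revision>9e7a45ad3522</changeset_revision>
    <export_repository_dependencies>True</export_repository_dependencies>
</export_info>
"""

def export_info_to_dict( export_info_text ):
    wanted_tags = ( 'export_time', 'tool_shed', 'repository_name', 'repository_owner',
                    'changeset_revision', 'export_repository_dependencies' )
    root = ET.fromstring( export_info_text )
    return dict( ( elem.tag, elem.text ) for elem in root if elem.tag in wanted_tags )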
Example #25
 def remove_from_data_manager( self, repository ):
     metadata_dict = repository.metadata
     if metadata_dict and 'data_manager' in metadata_dict:
         shed_data_manager_conf_filename = self.app.config.shed_data_manager_config_file
         tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
         if tree:
             root = tree.getroot()
             assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
             guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
             load_old_data_managers_by_guid = {}
             data_manager_config_has_changes = False
             config_elems = []
             for elem in root:
                 # Match Data Manager elements by guid and installed_changeset_revision
                 elem_matches_removed_data_manager = False
                 if elem.tag == 'data_manager':
                     guid = elem.get( 'guid', None )
                     if guid in guids:
                         tool_elem = elem.find( 'tool' )
                         if tool_elem is not None:
                             installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
                             if installed_changeset_revision_elem is not None:
                                 if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
                                     elem_matches_removed_data_manager = True
                                 else:
                                     # This is a different version, which had been previously overridden
                                     load_old_data_managers_by_guid[ guid ] = elem
                 if elem_matches_removed_data_manager:
                     data_manager_config_has_changes = True
                 else:
                     config_elems.append( elem )
             # Remove data managers from in memory
             self.app.data_managers.remove_manager( guids )
             # Load other versions of any now uninstalled data managers, if any
             for elem in load_old_data_managers_by_guid.itervalues():
                 self.app.data_managers.load_manager_from_elem( elem )
             # Persist the altered shed_data_manager_config file.
             if data_manager_config_has_changes:
                 self.data_manager_config_elems_to_xml_file( config_elems, shed_data_manager_conf_filename  )
Example #26
def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, required_repository, package_name, package_version, tool_dependencies_config ):
    """
    Populate an actions dictionary that can be sent to fabric_util.install_and_build_package.  This method handles the scenario where a tool_dependencies.xml
    file defines a complex repository dependency.  In this case, the tool dependency package will be installed in a separate repository and the tool dependency
    defined for the dependent repository will use an environment_variable setting defined in its env.sh file to locate the required package.  This method
    basically does what the install_via_fabric method does, but restricts its activity to the <action type="set_environment"> tag set within the required
    repository's tool_dependencies.xml file.
    """
    sa_session = app.model.context.current
    if not os.path.exists( dependent_install_dir ):
        os.makedirs( dependent_install_dir )
    actions_dict = dict( install_dir=dependent_install_dir )
    if package_name:
        actions_dict[ 'package_name' ] = package_name
    tool_dependency = None
    action_dict = {}
    if tool_dependencies_config:
        required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config )
        if required_td_tree:
            required_td_root = required_td_tree.getroot()
            for required_td_elem in required_td_root:
                # Find the appropriate package name and version.
                if required_td_elem.tag == 'package':
                    # <package name="bwa" version="0.5.9">
                    required_td_package_name = required_td_elem.get( 'name', None )
                    required_td_package_version = required_td_elem.get( 'version', None )
                    if required_td_package_name==package_name and required_td_package_version==package_version:
                        tool_dependency, actions = handle_set_environment_entry_for_package( app=app,
                                                                                             install_dir=required_install_dir,
                                                                                             tool_shed_repository=tool_shed_repository,
                                                                                             package_name=package_name,
                                                                                             package_version=package_version,
                                                                                             elem=required_td_elem,
                                                                                             required_repository=required_repository )
                        if actions:
                            actions_dict[ 'actions' ] = actions
                        break
    return tool_dependency, actions_dict
def validate_capsule( trans, **kwd ):
    """Inspect the uploaded capsule's manifest and its contained files to ensure it is a valid repository capsule."""
    capsule_dict = {}
    capsule_dict.update( kwd )
    encoded_file_path = capsule_dict.get( 'encoded_file_path', '' )
    file_path = encoding_util.tool_shed_decode( encoded_file_path )
    # The capsule must contain a valid XML file named export_info.xml.
    export_info_file_path = os.path.join( file_path, 'export_info.xml' )
    export_info_tree, error_message = xml_util.parse_xml( export_info_file_path )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    # The capsule must contain a valid XML file named manifest.xml.
    manifest_file_path = os.path.join( file_path, 'manifest.xml' )
    # Validate the capsule manifest by inspecting name, owner, changeset_revision and type information contained within
    # each <repository> tag set.
    repository_info_dicts, error_message = get_repository_info_from_manifest( manifest_file_path )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    # Validate the capsule manifest by ensuring all <repository> tag sets contain a valid <archive> sub-element.
    archives, error_message = get_archives_from_manifest( manifest_file_path )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    # Validate the capsule manifest by ensuring each defined archive file name exists within the capsule.
    error_message = verify_archives_in_capsule( file_path, archives )
    if error_message:
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict
    capsule_dict[ 'status' ] = 'ok'
    return capsule_dict
    def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config,
                  install_dependencies ):
        """
        Check tool settings in tool_shed_install_config and install all repositories
        that are not already installed.  The tool panel configuration file is the received
        migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
        """
        self.app = app
        self.toolbox = self.app.toolbox
        self.migrated_tools_config = migrated_tools_config
        # Initialize the ToolPanelManager.
        self.tpm = tool_panel_manager.ToolPanelManager( self.app )
        # If install_dependencies is True but tool_dependency_dir is not set, do not attempt
        # to install but print informative error message.
        if install_dependencies and app.config.tool_dependency_dir is None:
            message = 'You are attempting to install tool dependencies but do not have a value '
            message += 'for "tool_dependency_dir" set in your galaxy.ini file.  Set this '
            message += 'location value to the path where you want tool dependencies installed and '
            message += 'rerun the migration script.'
            raise Exception( message )
        # Get the local non-shed related tool panel configs (there can be more than one, and the
        # default name is tool_conf.xml).
        self.proprietary_tool_confs = self.non_shed_tool_panel_configs
        self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
        # Set the location where the repositories will be installed by retrieving the tool_path
        # setting from migrated_tools_config.
        tree, error_message = xml_util.parse_xml( migrated_tools_config )
        if tree is None:
            print error_message
        else:
            root = tree.getroot()
            self.tool_path = root.get( 'tool_path' )
            print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
            # Parse tool_shed_install_config to check each of the tools.
            self.tool_shed_install_config = tool_shed_install_config
            tree, error_message = xml_util.parse_xml( tool_shed_install_config )
            if tree is None:
                print error_message
            else:
                root = tree.getroot()
                defined_tool_shed_url = root.get( 'name' )
                self.tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, defined_tool_shed_url )
                self.tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed_url )
                self.repository_owner = common_util.REPOSITORY_OWNER
                index, self.shed_config_dict = self.tpm.get_shed_tool_conf_dict( self.migrated_tools_config )
                # Since tool migration scripts can be executed any number of times, we need to
                # make sure the appropriate tools are defined in tool_conf.xml.  If no tools
                # associated with the migration stage are defined, no repositories will be installed
                # on disk.  The default behavior is that the tool shed is down.
                tool_shed_accessible = False
                tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
                if tool_panel_configs:
                    # The missing_tool_configs_dict contents are something like:
                    # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
                    tool_shed_accessible, missing_tool_configs_dict = \
                        common_util.check_for_missing_tools( app,
                                                             tool_panel_configs,
                                                             latest_migration_script_number )
                else:
                    # It doesn't matter if the tool shed is accessible since there are no migrated
                    # tools defined in the local Galaxy instance, but we have to set the value of
                    # tool_shed_accessible to True so that the value of migrate_tools.version can
                    # be correctly set in the database.
                    tool_shed_accessible = True
                    missing_tool_configs_dict = odict()
                if tool_shed_accessible:
                    if len( self.proprietary_tool_confs ) == 1:
                        plural = ''
                        file_names = self.proprietary_tool_confs[ 0 ]
                    else:
                        plural = 's'
                        file_names = ', '.join( self.proprietary_tool_confs )
                    if missing_tool_configs_dict:
                        for proprietary_tool_conf in self.proprietary_tool_confs:
                            # Create a backup of the tool configuration in the un-migrated state.
                            shutil.copy( proprietary_tool_conf, '%s-pre-stage-%04d' % ( proprietary_tool_conf,
                                                                                        latest_migration_script_number ) )
                        for repository_elem in root:
                            # Make sure we have a valid repository tag.
                            if self.__is_valid_repository_tag( repository_elem ):
                                # Get all repository dependencies for the repository defined by the
                                # current repository_elem.  Repository dependency definitions contained
                                # in tool shed repositories with migrated tools must never define a
                                # relationship to a repository dependency that contains a tool.  The
                                # repository dependency can only contain items that are not loaded into
                                # the Galaxy tool panel (e.g., tool dependency definitions, custom datatypes,
                                # etc).  This restriction must be followed down the entire dependency hierarchy.
                                name = repository_elem.get( 'name' )
                                changeset_revision = repository_elem.get( 'changeset_revision' )
                                tool_shed_accessible, repository_dependencies_dict = \
                                    common_util.get_repository_dependencies( app,
                                                                             self.tool_shed_url,
                                                                             name,
                                                                             self.repository_owner,
                                                                             changeset_revision )
                                # Make sure all repository dependency records exist (as tool_shed_repository
                                # table rows) in the Galaxy database.
                                created_tool_shed_repositories = \
                                    self.create_or_update_tool_shed_repository_records( name,
                                                                                        changeset_revision,
                                                                                        repository_dependencies_dict )
                                # Order the repositories for proper installation.  This process is similar to the
                                # process used when installing tool shed repositories, but does not handle managing
                                # tool panel sections and other components since repository dependency definitions
                                # contained in tool shed repositories with migrated tools must never define a relationship
                                # to a repository dependency that contains a tool.
                                ordered_tool_shed_repositories = \
                                    self.order_repositories_for_installation( created_tool_shed_repositories,
                                                                              repository_dependencies_dict )

                                for tool_shed_repository in ordered_tool_shed_repositories:
                                    is_repository_dependency = self.__is_repository_dependency( name,
                                                                                                changeset_revision,
                                                                                                tool_shed_repository )
                                    self.install_repository( repository_elem,
                                                             tool_shed_repository,
                                                             install_dependencies,
                                                             is_repository_dependency=is_repository_dependency )
                    else:
                        message = "\nNo tools associated with migration stage %s are defined in your " % \
                            str( latest_migration_script_number )
                        message += "file%s named %s,\nso no repositories will be installed on disk.\n" % \
                            ( plural, file_names )
                        print message
                else:
                    message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % \
                        str( latest_migration_script_number )
                    message += "Try again later.\n"
                    print message
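Before any repositories are installed, the constructor above copies each proprietary tool panel config aside, using the migration stage number to build the backup file name. A tiny illustrative sketch of that naming pattern, with invented values:

# Invented values, only to illustrate the '%s-pre-stage-%04d' pattern used above.
proprietary_tool_conf = 'tool_conf.xml'
latest_migration_script_number = 9

backup_name = '%s-pre-stage-%04d' % (proprietary_tool_conf, latest_migration_script_number)
print(backup_name)  # tool_conf.xml-pre-stage-0009
# shutil.copy(proprietary_tool_conf, backup_name) would then write the backup.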
Example #29
0
def alter_config_and_load_prorietary_datatypes(app,
                                               datatypes_config,
                                               relative_install_dir,
                                               deactivate=False,
                                               override=True):
    """
    Parse a proprietary datatypes config (a datatypes_conf.xml file included in an installed tool shed repository) and
    add information to appropriate element attributes that will enable proprietary datatype class modules, datatypes converters
    and display applications to be discovered and properly imported by the datatypes registry.  The value of override will
    be False when a tool shed repository is being installed.  Since installation is occurring after the datatypes registry
    has been initialized, the registry's contents cannot be overridden by conflicting data types.
    """
    tree, error_message = xml_util.parse_xml(datatypes_config)
    if tree is None:
        return None, None
    datatypes_config_root = tree.getroot()
    registration = datatypes_config_root.find('registration')
    if registration is None:
        # We have valid XML, but not a valid proprietary datatypes definition.
        return None, None
    sniffers = datatypes_config_root.find('sniffers')
    converter_path, display_path = get_converter_and_display_paths(
        registration, relative_install_dir)
    if converter_path:
        # Path to datatype converters
        registration.attrib['proprietary_converter_path'] = converter_path
    if display_path:
        # Path to datatype display applications
        registration.attrib['proprietary_display_path'] = display_path
    relative_path_to_datatype_file_name = None
    datatype_files = datatypes_config_root.find('datatype_files')
    datatype_class_modules = []
    if datatype_files is not None:
        # The <datatype_files> tag set contains any number of <datatype_file> tags.
        # <datatype_files>
        #    <datatype_file name="gmap.py"/>
        #    <datatype_file name="metagenomics.py"/>
        # </datatype_files>
        # We'll add attributes to the datatype tag sets so that the modules can be properly imported by the datatypes registry.
        for elem in datatype_files.findall('datatype_file'):
            datatype_file_name = elem.get('name', None)
            if datatype_file_name:
                # Find the file in the installed repository.
                for root, dirs, files in os.walk(relative_install_dir):
                    if root.find('.hg') < 0:
                        for name in files:
                            if name == datatype_file_name:
                                datatype_class_modules.append(
                                    os.path.join(root, name))
                                break
                break
        if datatype_class_modules:
            for relative_path_to_datatype_file_name in datatype_class_modules:
                datatype_file_name_path, datatype_file_name = os.path.split(
                    relative_path_to_datatype_file_name)
                for elem in registration.findall('datatype'):
                    # Handle 'type' attribute which should be something like one of the following:
                    # type="gmap:GmapDB"
                    # type="galaxy.datatypes.gmap:GmapDB"
                    dtype = elem.get('type', None)
                    if dtype:
                        fields = dtype.split(':')
                        proprietary_datatype_module = fields[0]
                        if proprietary_datatype_module.find('.') >= 0:
                            # Handle the case where datatype_module is "galaxy.datatypes.gmap".
                            proprietary_datatype_module = proprietary_datatype_module.split(
                                '.')[-1]
                        # The value of proprietary_path must be an absolute path due to job_working_directory.
                        elem.attrib['proprietary_path'] = os.path.abspath(
                            datatype_file_name_path)
                        elem.attrib[
                            'proprietary_datatype_module'] = proprietary_datatype_module
    # Temporarily persist the proprietary datatypes configuration file so it can be loaded into the datatypes registry.
    fd, proprietary_datatypes_config = tempfile.mkstemp(
        prefix="tmp-toolshed-acalpd")
    os.write(fd, '<?xml version="1.0"?>\n')
    os.write(fd, '<datatypes>\n')
    os.write(fd, '%s' % xml_util.xml_to_string(registration))
    if sniffers is not None:
        os.write(fd, '%s' % xml_util.xml_to_string(sniffers))
    os.write(fd, '</datatypes>\n')
    os.close(fd)
    os.chmod(proprietary_datatypes_config, 0644)
    # Load proprietary datatypes
    app.datatypes_registry.load_datatypes(root_dir=app.config.root,
                                          config=proprietary_datatypes_config,
                                          deactivate=deactivate,
                                          override=override)
    if deactivate:
        # Reload the upload tool to eliminate deactivated datatype extensions from the file_type select list.
        tool_util.reload_upload_tools(app)
    else:
        append_to_datatypes_registry_upload_file_formats(app, registration)
        tool_util.reload_upload_tools(app)
    if datatype_files is not None:
        try:
            os.unlink(proprietary_datatypes_config)
        except:
            pass
    return converter_path, display_path
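The temporary file written above is simply the parsed <registration> (and optional <sniffers>) section wrapped in a <datatypes> root before it is handed to the datatypes registry. A minimal standard-library sketch of that wrapping step; the registration content below is invented:

import os
import tempfile
import xml.etree.ElementTree as ET

# Invented stand-in for the <registration> element parsed from a datatypes_conf.xml file.
registration = ET.fromstring(
    '<registration><datatype extension="gmapdb" type="gmap:GmapDB"/></registration>')

fd, tmp_config = tempfile.mkstemp(prefix="tmp-toolshed-example")
with os.fdopen(fd, 'w') as handle:
    handle.write('<?xml version="1.0"?>\n<datatypes>\n')
    handle.write(ET.tostring(registration).decode('utf-8'))
    handle.write('\n</datatypes>\n')
print(open(tmp_config).read())
os.unlink(tmp_config)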
Example #30
0
 def alter_config_and_load_prorietary_datatypes(self,
                                                datatypes_config,
                                                relative_install_dir,
                                                deactivate=False,
                                                override=True):
     """
     Parse a custom datatypes config (a datatypes_conf.xml file included in an installed
     tool shed repository) and add information to appropriate element attributes that will
     enable custom datatype class modules, datatypes converters and display applications
     to be discovered and properly imported by the datatypes registry.  The value of override
     will be False when a tool shed repository is being installed.  Since installation is
     occurring after the datatypes registry has been initialized, the registry's contents
     cannot be overridden by conflicting data types.
     """
     tree, error_message = xml_util.parse_xml(datatypes_config)
     if tree is None:
         return None, None
     datatypes_config_root = tree.getroot()
     registration = datatypes_config_root.find('registration')
     if registration is None:
         # We have valid XML, but not a valid custom datatypes definition.
         return None, None
     converter_path, display_path = self.get_converter_and_display_paths(
         registration, relative_install_dir)
     if converter_path:
         # Path to datatype converters
         registration.attrib['proprietary_converter_path'] = converter_path
     if display_path:
         # Path to datatype display applications
         registration.attrib['proprietary_display_path'] = display_path
     relative_path_to_datatype_file_name = None
     datatype_files = datatypes_config_root.find('datatype_files')
     datatype_class_modules = []
     if datatype_files is not None:
         # The <datatype_files> tag set contains any number of <datatype_file> tags.
         # <datatype_files>
         #    <datatype_file name="gmap.py"/>
         #    <datatype_file name="metagenomics.py"/>
         # </datatype_files>
         # We'll add attributes to the datatype tag sets so that the modules can be properly imported
         # by the datatypes registry.
         for elem in datatype_files.findall('datatype_file'):
             datatype_file_name = elem.get('name', None)
             if datatype_file_name:
                 # Find the file in the installed repository.
                 for root, dirs, files in os.walk(relative_install_dir):
                     if root.find('.hg') < 0:
                         for name in files:
                             if name == datatype_file_name:
                                 datatype_class_modules.append(
                                     os.path.join(root, name))
                                 break
                 break
         if datatype_class_modules:
             for relative_path_to_datatype_file_name in datatype_class_modules:
                 datatype_file_name_path, datatype_file_name = os.path.split(
                     relative_path_to_datatype_file_name)
                 for elem in registration.findall('datatype'):
                     # Handle 'type' attribute which should be something like one of the following:
                     # type="gmap:GmapDB"
                     # type="galaxy.datatypes.gmap:GmapDB"
                     dtype = elem.get('type', None)
                     if dtype:
                         fields = dtype.split(':')
                         proprietary_datatype_module = fields[0]
                         if proprietary_datatype_module.find('.') >= 0:
                             # Handle the case where datatype_module is "galaxy.datatypes.gmap".
                             proprietary_datatype_module = proprietary_datatype_module.split(
                                 '.')[-1]
                         # The value of proprietary_path must be an absolute path due to job_working_directory.
                         elem.attrib['proprietary_path'] = os.path.abspath(
                             datatype_file_name_path)
                         elem.attrib[
                             'proprietary_datatype_module'] = proprietary_datatype_module
     # Load custom datatypes
     self.app.datatypes_registry.load_datatypes(
         root_dir=self.app.config.root,
         config=datatypes_config_root,
         deactivate=deactivate,
         override=override)
     return converter_path, display_path
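Both versions above derive the proprietary datatype module name from the 'type' attribute by splitting on ':' and keeping only the last dotted component of the module part. A small self-contained sketch of that derivation, using the attribute values shown in the comments:

def proprietary_module_name(dtype):
    # Both "gmap:GmapDB" and "galaxy.datatypes.gmap:GmapDB" should yield "gmap".
    module_part = dtype.split(':')[0]
    if '.' in module_part:
        module_part = module_part.split('.')[-1]
    return module_part

assert proprietary_module_name('gmap:GmapDB') == 'gmap'
assert proprietary_module_name('galaxy.datatypes.gmap:GmapDB') == 'gmap'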
Example #31
0
 def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
     """
     Check tool settings in tool_shed_install_config and install all repositories that are not already installed.  The tool
     panel configuration file is the received migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
     """
     self.app = app
     self.toolbox = self.app.toolbox
     self.migrated_tools_config = migrated_tools_config
     # If install_dependencies is True but tool_dependency_dir is not set, do not attempt to install but print informative error message.
     if install_dependencies and app.config.tool_dependency_dir is None:
         message = 'You are attempting to install tool dependencies but do not have a value for "tool_dependency_dir" set in your universe_wsgi.ini '
         message += 'file.  Set this location value to the path where you want tool dependencies installed and rerun the migration script.'
         raise Exception( message )
     # Get the local non-shed related tool panel configs (there can be more than one, and the default name is tool_conf.xml).
     self.proprietary_tool_confs = self.non_shed_tool_panel_configs
     self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
     # Set the location where the repositories will be installed by retrieving the tool_path setting from migrated_tools_config.
     tree, error_message = xml_util.parse_xml( migrated_tools_config )
     if tree is None:
         print error_message
     else:
         root = tree.getroot()
         self.tool_path = root.get( 'tool_path' )
         print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
         # Parse tool_shed_install_config to check each of the tools.
         self.tool_shed_install_config = tool_shed_install_config
         tree, error_message = xml_util.parse_xml( tool_shed_install_config )
         if tree is None:
             print error_message
         else:
             root = tree.getroot()
             self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
             self.repository_owner = common_util.REPOSITORY_OWNER
             index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
             # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
             # tool_conf.xml.  If no tools associated with the migration stage are defined, no repositories will be installed on disk.
             # The default behavior is that the tool shed is down.
             tool_shed_accessible = False
             tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
             if tool_panel_configs:
                 # The missing_tool_configs_dict contents are something like:
                 # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
                 tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
             else:
                 # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
                 # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in 
                 # the database.
                 tool_shed_accessible = True
                 missing_tool_configs_dict = odict()
             if tool_shed_accessible:
                 if len( self.proprietary_tool_confs ) == 1:
                     plural = ''
                     file_names = self.proprietary_tool_confs[ 0 ]
                 else:
                     plural = 's'
                     file_names = ', '.join( self.proprietary_tool_confs )
                 if missing_tool_configs_dict:
                     for repository_elem in root:
                         self.install_repository( repository_elem, install_dependencies )
                 else:
                     message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
                     message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
                     print message
             else:
                 message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
                 message += "Try again later.\n"
                 print message
Example #32
0
 def install_data_managers( self, shed_data_manager_conf_filename, metadata_dict, shed_config_dict,
                            relative_install_dir, repository, repository_tools_tups ):
     rval = []
     if 'data_manager' in metadata_dict:
         tpm = tool_panel_manager.ToolPanelManager( self.app )
         repository_tools_by_guid = {}
         for tool_tup in repository_tools_tups:
             repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
         # Load existing data managers.
         tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
         if tree is None:
             return rval
         config_elems = [ elem for elem in tree.getroot() ]
         repo_data_manager_conf_filename = metadata_dict['data_manager'].get( 'config_filename', None )
         if repo_data_manager_conf_filename is None:
             log.debug( "No data_manager_conf.xml file has been defined." )
             return rval
         data_manager_config_has_changes = False
         relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
         repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
         tree, error_message = xml_util.parse_xml( repo_data_manager_conf_filename )
         if tree is None:
             return rval
         root = tree.getroot()
         for elem in root:
             if elem.tag == 'data_manager':
                 data_manager_id = elem.get( 'id', None )
                 if data_manager_id is None:
                     log.error( "A data manager was defined that does not have an id and will not be installed:\n%s" % \
                                 xml_util.xml_to_string( elem ) )
                     continue
                 data_manager_dict = metadata_dict['data_manager'].get( 'data_managers', {} ).get( data_manager_id, None )
                 if data_manager_dict is None:
                     log.error( "Data manager metadata is not defined properly for '%s'." % ( data_manager_id ) )
                     continue
                 guid = data_manager_dict.get( 'guid', None )
                 if guid is None:
                     log.error( "Data manager guid '%s' is not set in metadata for '%s'." % ( guid, data_manager_id ) )
                     continue
                 elem.set( 'guid', guid )
                 tool_guid = data_manager_dict.get( 'tool_guid', None )
                 if tool_guid is None:
                     log.error( "Data manager tool guid '%s' is not set in metadata for '%s'." % ( tool_guid, data_manager_id ) )
                     continue
                 tool_dict = repository_tools_by_guid.get( tool_guid, None )
                 if tool_dict is None:
                     log.error( "Data manager tool guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" % \
                                 ( tool_guid, data_manager_id ) )
                     continue
                 tool = tool_dict.get( 'tool', None )
                 if tool is None:
                     log.error( "Data manager tool with guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" % \
                                ( tool_guid, data_manager_id ) )
                     continue
                 tool_config_filename = tool_dict.get( 'tool_config_filename', None )
                 if tool_config_filename is None:
                     log.error( "Data manager metadata is missing 'tool_config_file' for '%s'." % ( data_manager_id ) )
                     continue
                 elem.set( 'shed_conf_file', shed_config_dict['config_filename'] )
                 if elem.get( 'tool_file', None ) is not None:
                     del elem.attrib[ 'tool_file' ] #remove old tool_file info
                 tool_elem = tpm.generate_tool_elem( repository.tool_shed,
                                                     repository.name,
                                                     repository.installed_changeset_revision,
                                                     repository.owner,
                                                     tool_config_filename,
                                                     tool,
                                                     None )
                 elem.insert( 0, tool_elem )
                 data_manager = \
                     self.app.data_managers.load_manager_from_elem( elem,
                                                                    tool_path=shed_config_dict.get( 'tool_path', '' ),
                                                                    replace_existing=True )
                 if data_manager:
                     rval.append( data_manager )
             else:
                 log.warning( "Encountered unexpected element '%s':\n%s" % ( elem.tag, xml_util.xml_to_string( elem ) ) )
             config_elems.append( elem )
             data_manager_config_has_changes = True
         # Persist the altered shed_data_manager_config file.
         if data_manager_config_has_changes:
             self.data_manager_config_elems_to_xml_file( config_elems, shed_data_manager_conf_filename  )
     return rval
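As a rough, invented illustration of what install_data_managers does to each valid <data_manager> element: it sets the guid and shed_conf_file attributes, drops any stale tool_file attribute, and inserts a <tool> child at position 0. A standard-library sketch (all names, paths and guids below are made up):

import xml.etree.ElementTree as ET

elem = ET.fromstring('<data_manager id="example_dm" tool_file="old_tool.xml"/>')
guid = 'toolshed.example.org/repos/some_owner/some_repo/example_dm/1.0.0'  # invented
shed_conf_file = './config/shed_tool_conf.xml'  # invented

elem.set('guid', guid)
elem.set('shed_conf_file', shed_conf_file)
if elem.get('tool_file') is not None:
    del elem.attrib['tool_file']  # remove old tool_file info, as in the code above
tool_elem = ET.Element('tool', {'file': 'data_manager/example_tool.xml', 'guid': guid})
elem.insert(0, tool_elem)
print(ET.tostring(elem))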
Example #33
0
 def install_data_managers( self, shed_data_manager_conf_filename, metadata_dict, shed_config_dict,
                            relative_install_dir, repository, repository_tools_tups ):
     rval = []
     if 'data_manager' in metadata_dict:
         tpm = tool_panel_manager.ToolPanelManager( self.app )
         repository_tools_by_guid = {}
         for tool_tup in repository_tools_tups:
             repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
         # Load existing data managers.
         tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
         if tree is None:
             return rval
         config_elems = [ elem for elem in tree.getroot() ]
         repo_data_manager_conf_filename = metadata_dict['data_manager'].get( 'config_filename', None )
         if repo_data_manager_conf_filename is None:
             log.debug( "No data_manager_conf.xml file has been defined." )
             return rval
         data_manager_config_has_changes = False
         relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
         repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
         tree, error_message = xml_util.parse_xml( repo_data_manager_conf_filename )
         if tree is None:
             return rval
         root = tree.getroot()
         for elem in root:
             if elem.tag == 'data_manager':
                 data_manager_id = elem.get( 'id', None )
                 if data_manager_id is None:
                     log.error( "A data manager was defined that does not have an id and will not be installed:\n%s" %
                                xml_util.xml_to_string( elem ) )
                     continue
                 data_manager_dict = metadata_dict['data_manager'].get( 'data_managers', {} ).get( data_manager_id, None )
                 if data_manager_dict is None:
                     log.error( "Data manager metadata is not defined properly for '%s'." % ( data_manager_id ) )
                     continue
                 guid = data_manager_dict.get( 'guid', None )
                 if guid is None:
                     log.error( "Data manager guid '%s' is not set in metadata for '%s'." % ( guid, data_manager_id ) )
                     continue
                 elem.set( 'guid', guid )
                 tool_guid = data_manager_dict.get( 'tool_guid', None )
                 if tool_guid is None:
                     log.error( "Data manager tool guid '%s' is not set in metadata for '%s'." % ( tool_guid, data_manager_id ) )
                     continue
                 tool_dict = repository_tools_by_guid.get( tool_guid, None )
                 if tool_dict is None:
                     log.error( "Data manager tool guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" %
                                ( tool_guid, data_manager_id ) )
                     continue
                 tool = tool_dict.get( 'tool', None )
                 if tool is None:
                     log.error( "Data manager tool with guid '%s' could not be found for '%s'. Perhaps the tool is invalid?" %
                                ( tool_guid, data_manager_id ) )
                     continue
                 tool_config_filename = tool_dict.get( 'tool_config_filename', None )
                 if tool_config_filename is None:
                     log.error( "Data manager metadata is missing 'tool_config_file' for '%s'." % ( data_manager_id ) )
                     continue
                 elem.set( 'shed_conf_file', shed_config_dict['config_filename'] )
                 if elem.get( 'tool_file', None ) is not None:
                     del elem.attrib[ 'tool_file' ]  # remove old tool_file info
                 tool_elem = tpm.generate_tool_elem( repository.tool_shed,
                                                     repository.name,
                                                     repository.installed_changeset_revision,
                                                     repository.owner,
                                                     tool_config_filename,
                                                     tool,
                                                     None )
                 elem.insert( 0, tool_elem )
                 data_manager = \
                     self.app.data_managers.load_manager_from_elem( elem,
                                                                    tool_path=shed_config_dict.get( 'tool_path', '' ),
                                                                    replace_existing=True )
                 if data_manager:
                     rval.append( data_manager )
             else:
                 log.warning( "Encountered unexpected element '%s':\n%s" % ( elem.tag, xml_util.xml_to_string( elem ) ) )
             config_elems.append( elem )
             data_manager_config_has_changes = True
         # Persist the altered shed_data_manager_config file.
         if data_manager_config_has_changes:
             self.data_manager_config_elems_to_xml_file( config_elems, shed_data_manager_conf_filename  )
     return rval
Example #34
0
 def create_tool_dependency_with_initialized_env_sh_file( self, dependent_install_dir, tool_shed_repository,
                                                          required_repository, package_name, package_version,
                                                          tool_dependencies_config ):
     """
     Create or get a tool_dependency record that is defined by the received package_name and package_version.
     An env.sh file will be created for the tool_dependency in the received dependent_install_dir.
     """
     # The received required_repository refers to a tool_shed_repository record that is defined as a complex
     # repository dependency for this tool_dependency.  The required_repository may or may not be currently
     # installed (it doesn't matter).  If it is installed, it is associated with a tool_dependency that has
     # an env.sh file that this new tool_dependency must be able to locate and "source".  If it is not installed,
     # we can still determine where that env.sh file will be, so we'll initialize this new tool_dependency's env.sh
     # file in either case.  If the required repository ends up with an installation error, this new tool
     # dependency will still be fine because its containing repository will be defined as missing dependencies.
     tool_dependencies = []
     if not os.path.exists( dependent_install_dir ):
         os.makedirs( dependent_install_dir )
     required_tool_dependency_env_file_path = None
     if tool_dependencies_config:
         required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config )
         if required_td_tree:
             required_td_root = required_td_tree.getroot()
             for required_td_elem in required_td_root:
                 # Find the appropriate package name and version.
                 if required_td_elem.tag == 'package':
                     # <package name="bwa" version="0.5.9">
                     required_td_package_name = required_td_elem.get( 'name', None )
                     required_td_package_version = required_td_elem.get( 'version', None )
                      # Check the database to see if we have a record for the required tool dependency (we may not, which is ok).  If we
                     # find a record, we need to see if it is in an error state and if so handle it appropriately.
                     required_tool_dependency = \
                         tool_dependency_util.get_tool_dependency_by_name_version_type_repository( self.app,
                                                                                                   required_repository,
                                                                                                   required_td_package_name,
                                                                                                   required_td_package_version,
                                                                                                   'package' )
                     if required_td_package_name == package_name and required_td_package_version == package_version:
                         # Get or create a database tool_dependency record with which the installed package on disk will be associated.
                         tool_dependency = \
                             tool_dependency_util.create_or_update_tool_dependency( app=self.app,
                                                                                    tool_shed_repository=tool_shed_repository,
                                                                                    name=package_name,
                                                                                    version=package_version,
                                                                                    type='package',
                                                                                    status=self.app.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
                                                                                    set_status=True )
                         # Create an env.sh file for the tool_dependency whose first line will source the env.sh file located in
                         # the path defined by required_tool_dependency_env_file_path.  It doesn't matter if the required env.sh
                          # file currently exists.
                         required_tool_dependency_env_file_path = \
                             self.get_required_repository_package_env_sh_path( package_name,
                                                                               package_version,
                                                                               required_repository )
                         env_file_builder = EnvFileBuilder( tool_dependency.installation_directory( self.app ) )
                         env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path )
                         return_code = env_file_builder.return_code
                         if return_code:
                             error_message = 'Error defining env.sh file for package %s, return_code: %s' % \
                                 ( str( package_name ), str( return_code ) )
                             tool_dependency = \
                                 tool_dependency_util.set_tool_dependency_attributes(self.app,
                                                                                     tool_dependency=tool_dependency,
                                                                                     status=self.app.install_model.ToolDependency.installation_status.ERROR,
                                                                                     error_message=error_message)
                         elif required_tool_dependency is not None and required_tool_dependency.in_error_state:
                             error_message = "This tool dependency's required tool dependency %s version %s has status %s." % \
                                 ( str( required_tool_dependency.name ), str( required_tool_dependency.version ), str( required_tool_dependency.status ) )
                             tool_dependency = \
                                 tool_dependency_util.set_tool_dependency_attributes(self.app,
                                                                                     tool_dependency=tool_dependency,
                                                                                     status=self.app.install_model.ToolDependency.installation_status.ERROR,
                                                                                     error_message=error_message)
                         else:
                             tool_dependency = \
                                 tool_dependency_util.set_tool_dependency_attributes( self.app,
                                                                                      tool_dependency=tool_dependency,
                                                                                      status=self.app.install_model.ToolDependency.installation_status.INSTALLED )
                         tool_dependencies.append( tool_dependency )
     return tool_dependencies
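The comments above describe the env.sh initialization: the new tool dependency's env.sh begins by sourcing the env.sh that belongs to the required repository's package, whether or not that file exists yet. A minimal sketch of that idea with made-up paths; the exact line format is an assumption here, not a copy of Galaxy's EnvFileBuilder:

import os

# Invented locations, for illustration only.
dependent_install_dir = '/tmp/example_deps/bwa/0.5.9'
required_env_sh = '/tmp/example_deps/package_bwa_0_5_9/env.sh'

if not os.path.exists(dependent_install_dir):
    os.makedirs(dependent_install_dir)
env_sh_path = os.path.join(dependent_install_dir, 'env.sh')
with open(env_sh_path, 'w') as env_sh:
    # Source the required dependency's env.sh if it is present (assumed guard).
    env_sh.write('if [ -f %s ] ; then . %s ; fi\n' % (required_env_sh, required_env_sh))
print(open(env_sh_path).read())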
Example #35
0
def check_for_missing_tools(app, tool_panel_configs,
                            latest_tool_migration_script_number):
    # Get the 000x_tools.xml file associated with the current migrate_tools version number.
    tools_xml_file_path = os.path.abspath(
        os.path.join('scripts', 'migrate_tools',
                     '%04d_tools.xml' % latest_tool_migration_script_number))
    # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
    migrated_tool_configs_dict = odict()
    tree, error_message = xml_util.parse_xml(tools_xml_file_path)
    if tree is None:
        return False, odict()
    root = tree.getroot()
    tool_shed = root.get('name')
    tool_shed_url = get_tool_shed_url_from_tool_shed_registry(app, tool_shed)
    # The default behavior is that the tool shed is down.
    tool_shed_accessible = False
    missing_tool_configs_dict = odict()
    if tool_shed_url:
        for elem in root:
            if elem.tag == 'repository':
                repository_dependencies = []
                all_tool_dependencies = []
                repository_name = elem.get('name')
                changeset_revision = elem.get('changeset_revision')
                tool_shed_accessible, repository_dependencies_dict = get_repository_dependencies(
                    app, tool_shed_url, repository_name, REPOSITORY_OWNER,
                    changeset_revision)
                if tool_shed_accessible:
                    # Accumulate all tool dependencies defined for repository dependencies for display to the user.
                    for rd_key, rd_tups in repository_dependencies_dict.items():
                        if rd_key in ['root_key', 'description']:
                            continue
                        for rd_tup in rd_tups:
                            tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
                                parse_repository_dependency_tuple( rd_tup )
                        tool_shed_accessible, tool_dependencies = get_tool_dependencies(
                            app, tool_shed_url, name, owner,
                            changeset_revision)
                        all_tool_dependencies = accumulate_tool_dependencies(
                            tool_shed_accessible, tool_dependencies,
                            all_tool_dependencies)
                    tool_shed_accessible, tool_dependencies = get_tool_dependencies(
                        app, tool_shed_url, repository_name, REPOSITORY_OWNER,
                        changeset_revision)
                    all_tool_dependencies = accumulate_tool_dependencies(
                        tool_shed_accessible, tool_dependencies,
                        all_tool_dependencies)
                    for tool_elem in elem.findall('tool'):
                        tool_config_file_name = tool_elem.get('file')
                        if tool_config_file_name:
                            # We currently do nothing with repository dependencies except install them (we do not display repositories that will be
                            # installed to the user).  However, we'll store them in the following dictionary in case we choose to display them in the
                            # future.
                            dependencies_dict = dict(
                                tool_dependencies=all_tool_dependencies,
                                repository_dependencies=repository_dependencies
                            )
                            migrated_tool_configs_dict[
                                tool_config_file_name] = dependencies_dict
                else:
                    break
        if tool_shed_accessible:
            # Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
            for tool_panel_config in tool_panel_configs:
                tree, error_message = xml_util.parse_xml(tool_panel_config)
                if tree:
                    root = tree.getroot()
                    for elem in root:
                        if elem.tag == 'tool':
                            missing_tool_configs_dict = check_tool_tag_set(
                                elem, migrated_tool_configs_dict,
                                missing_tool_configs_dict)
                        elif elem.tag == 'section':
                            for section_elem in elem:
                                if section_elem.tag == 'tool':
                                    missing_tool_configs_dict = check_tool_tag_set(
                                        section_elem,
                                        migrated_tool_configs_dict,
                                        missing_tool_configs_dict)
    else:
        exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file.  ' % (
            tool_shed, app.config.tool_sheds_config)
        exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
        raise Exception(exception_msg)
    return tool_shed_accessible, missing_tool_configs_dict
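For context, this function walks a scripts/migrate_tools/000x_tools.xml file whose root names the tool shed and whose <repository> children list the migrated tool config files. The fragment below is an invented approximation of that layout, inferred from the parsing code above (only the emboss_antigenic.xml file name comes from this document):

import xml.etree.ElementTree as ET

sample = '''<toolshed name="toolshed.g2.bx.psu.edu">
    <repository name="emboss_datatypes" changeset_revision="0123456789ab">
        <tool id="example_tool_id" version="5.0.0" file="emboss_antigenic.xml"/>
    </repository>
</toolshed>'''

root = ET.fromstring(sample)
print('tool shed: %s' % root.get('name'))
for elem in root:
    if elem.tag == 'repository':
        print('repository: %s at %s' % (elem.get('name'), elem.get('changeset_revision')))
        for tool_elem in elem.findall('tool'):
            print('  migrated tool config: %s' % tool_elem.get('file'))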
Example #36
0
 def install_tool_data_tables(self, tool_shed_repository,
                              tool_index_sample_files):
     TOOL_DATA_TABLE_FILE_NAME = 'tool_data_table_conf.xml'
     TOOL_DATA_TABLE_FILE_SAMPLE_NAME = '%s.sample' % (
         TOOL_DATA_TABLE_FILE_NAME)
     SAMPLE_SUFFIX = '.sample'
     SAMPLE_SUFFIX_OFFSET = -len(SAMPLE_SUFFIX)
     target_dir, tool_path, relative_target_dir = self.get_target_install_dir(
         tool_shed_repository)
     for sample_file in tool_index_sample_files:
         path, filename = os.path.split(sample_file)
         target_filename = filename
         if target_filename.endswith(SAMPLE_SUFFIX):
             target_filename = target_filename[:SAMPLE_SUFFIX_OFFSET]
         source_file = os.path.join(tool_path, sample_file)
         # We're not currently uninstalling index files, do not overwrite existing files.
         target_path_filename = os.path.join(target_dir, target_filename)
          if not os.path.exists(target_path_filename) or target_filename == TOOL_DATA_TABLE_FILE_NAME:
             shutil.copy2(source_file, target_path_filename)
         else:
             log.debug(
                 "Did not copy sample file '%s' to install directory '%s' because file already exists.",
                 filename, target_dir)
         # For provenance and to simplify introspection, let's keep the original data table sample file around.
         if filename == TOOL_DATA_TABLE_FILE_SAMPLE_NAME:
             shutil.copy2(source_file, os.path.join(target_dir, filename))
     tool_data_table_conf_filename = os.path.join(
         target_dir, TOOL_DATA_TABLE_FILE_NAME)
     elems = []
     if os.path.exists(tool_data_table_conf_filename):
         tree, error_message = xml_util.parse_xml(
             tool_data_table_conf_filename)
         if tree:
             for elem in tree.getroot():
                  # Append individual table elems or other elems, but not the wrapping tables elem.
                  if elem.tag == 'tables':
                      for table_elem in elem:
                          elems.append(table_elem)
                 else:
                     elems.append(elem)
     else:
         log.debug(
             "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.",
             tool_data_table_conf_filename,
             TOOL_DATA_TABLE_FILE_SAMPLE_NAME)
     for elem in elems:
         if elem.tag == 'table':
             for file_elem in elem.findall('file'):
                 path = file_elem.get('path', None)
                 if path:
                     file_elem.set(
                         'path',
                         os.path.normpath(
                             os.path.join(target_dir,
                                          os.path.split(path)[1])))
              # Store repository info in the table tag set for traceability.
             self.generate_repository_info_elem_from_repository(
                 tool_shed_repository, parent_elem=elem)
     if elems:
         # Remove old data_table
         os.unlink(tool_data_table_conf_filename)
         # Persist new data_table content.
         self.app.tool_data_tables.to_xml_file(
             tool_data_table_conf_filename, elems)
     return tool_data_table_conf_filename, elems
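Two details above are easy to miss: sample files lose their '.sample' suffix when copied, and every <file path="..."> inside a <table> is rewritten to point at the repository's target directory. A stand-alone sketch of both steps with invented paths:

import os
import xml.etree.ElementTree as ET

SAMPLE_SUFFIX = '.sample'

def strip_sample_suffix(filename):
    # tool_data_table_conf.xml.sample -> tool_data_table_conf.xml
    if filename.endswith(SAMPLE_SUFFIX):
        return filename[:-len(SAMPLE_SUFFIX)]
    return filename

target_dir = '/tmp/example_tool_data'  # invented install location
table_elem = ET.fromstring(
    '<table name="bwa_indexes"><file path="tool-data/bwa_index.loc"/></table>')
for file_elem in table_elem.findall('file'):
    path = file_elem.get('path')
    if path:
        file_elem.set('path', os.path.normpath(os.path.join(target_dir, os.path.split(path)[1])))

print(strip_sample_suffix('tool_data_table_conf.xml.sample'))
print(ET.tostring(table_elem))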
Example #37
0
    def __init__(self, app, latest_migration_script_number,
                 tool_shed_install_config, migrated_tools_config,
                 install_dependencies):
        """
        Check tool settings in tool_shed_install_config and install all repositories
        that are not already installed.  The tool panel configuration file is the received
        migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
        """
        self.app = app
        self.toolbox = self.app.toolbox
        self.migrated_tools_config = migrated_tools_config
        # Initialize the ToolPanelManager.
        self.tpm = tool_panel_manager.ToolPanelManager(self.app)
        # If install_dependencies is True but tool_dependency_dir is not set, do not attempt
        # to install but print informative error message.
        if install_dependencies and app.config.tool_dependency_dir is None:
            message = 'You are attempting to install tool dependencies but do not have a value '
            message += 'for "tool_dependency_dir" set in your galaxy.ini file.  Set this '
            message += 'location value to the path where you want tool dependencies installed and '
            message += 'rerun the migration script.'
            raise Exception(message)
        # Get the local non-shed related tool panel configs (there can be more than one, and the
        # default name is tool_conf.xml).
        self.proprietary_tool_confs = self.non_shed_tool_panel_configs
        self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems(
            latest_migration_script_number)
        # Set the location where the repositories will be installed by retrieving the tool_path
        # setting from migrated_tools_config.
        tree, error_message = xml_util.parse_xml(migrated_tools_config)
        if tree is None:
            log.error(error_message)
        else:
            root = tree.getroot()
            self.tool_path = root.get('tool_path')
            log.debug(
                "Repositories will be installed into configured tool_path location %s",
                str(self.tool_path))
            # Parse tool_shed_install_config to check each of the tools.
            self.tool_shed_install_config = tool_shed_install_config
            tree, error_message = xml_util.parse_xml(tool_shed_install_config)
            if tree is None:
                log.error(error_message)
            else:
                root = tree.getroot()
                defined_tool_shed_url = root.get('name')
                self.tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(
                    self.app, defined_tool_shed_url)
                self.tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url(
                    self.tool_shed_url)
                self.repository_owner = common_util.REPOSITORY_OWNER
                self.shed_config_dict = self.tpm.get_shed_tool_conf_dict(
                    self.migrated_tools_config)
                # Since tool migration scripts can be executed any number of times, we need to
                # make sure the appropriate tools are defined in tool_conf.xml.  If no tools
                # associated with the migration stage are defined, no repositories will be installed
                # on disk.  The default behavior is that the tool shed is down.
                tool_shed_accessible = False
                tool_panel_configs = common_util.get_non_shed_tool_panel_configs(
                    app)
                if tool_panel_configs:
                    # The missing_tool_configs_dict contents are something like:
                    # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
                    tool_shed_accessible, missing_tool_configs_dict = \
                        common_util.check_for_missing_tools(app,
                                                            tool_panel_configs,
                                                            latest_migration_script_number)
                else:
                    # It doesn't matter if the tool shed is accessible since there are no migrated
                    # tools defined in the local Galaxy instance, but we have to set the value of
                    # tool_shed_accessible to True so that the value of migrate_tools.version can
                    # be correctly set in the database.
                    tool_shed_accessible = True
                    missing_tool_configs_dict = odict()
                if tool_shed_accessible:
                    if len(self.proprietary_tool_confs) == 1:
                        plural = ''
                        file_names = self.proprietary_tool_confs[0]
                    else:
                        plural = 's'
                        file_names = ', '.join(self.proprietary_tool_confs)
                    if missing_tool_configs_dict:
                        for proprietary_tool_conf in self.proprietary_tool_confs:
                            # Create a backup of the tool configuration in the un-migrated state.
                            shutil.copy(
                                proprietary_tool_conf, '%s-pre-stage-%04d' %
                                (proprietary_tool_conf,
                                 latest_migration_script_number))
                        for repository_elem in root:
                            # Make sure we have a valid repository tag.
                            if self.__is_valid_repository_tag(repository_elem):
                                # Get all repository dependencies for the repository defined by the
                                # current repository_elem.  Repository dependency definitions contained
                                # in tool shed repositories with migrated tools must never define a
                                # relationship to a repository dependency that contains a tool.  The
                                # repository dependency can only contain items that are not loaded into
                                # the Galaxy tool panel (e.g., tool dependency definitions, custom datatypes,
                                # etc).  This restriction must be followed down the entire dependency hierarchy.
                                name = repository_elem.get('name')
                                changeset_revision = repository_elem.get(
                                    'changeset_revision')
                                tool_shed_accessible, repository_dependencies_dict = \
                                    common_util.get_repository_dependencies(app,
                                                                            self.tool_shed_url,
                                                                            name,
                                                                            self.repository_owner,
                                                                            changeset_revision)
                                # Make sure all repository dependency records exist (as tool_shed_repository
                                # table rows) in the Galaxy database.
                                created_tool_shed_repositories = \
                                    self.create_or_update_tool_shed_repository_records(name,
                                                                                       changeset_revision,
                                                                                       repository_dependencies_dict)
                                # Order the repositories for proper installation.  This process is similar to the
                                # process used when installing tool shed repositories, but does not handle managing
                                # tool panel sections and other components since repository dependency definitions
                                # contained in tool shed repositories with migrated tools must never define a relationship
                                # to a repository dependency that contains a tool.
                                ordered_tool_shed_repositories = \
                                    self.order_repositories_for_installation(created_tool_shed_repositories,
                                                                             repository_dependencies_dict)

                                for tool_shed_repository in ordered_tool_shed_repositories:
                                    is_repository_dependency = self.__is_repository_dependency(
                                        name, changeset_revision,
                                        tool_shed_repository)
                                    self.install_repository(
                                        repository_elem,
                                        tool_shed_repository,
                                        install_dependencies,
                                        is_repository_dependency=is_repository_dependency)
                    else:
                        message = "\nNo tools associated with migration stage %s are defined in your " % \
                            str(latest_migration_script_number)
                        message += "file%s named %s,\nso no repositories will be installed on disk.\n" % \
                            (plural, file_names)
                        log.info(message)
                else:
                    message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % \
                        str(latest_migration_script_number)
                    message += "Try again later.\n"
                    log.error(message)
Example #38
0
def handle_tool_dependencies_definition(trans, tool_dependencies_config):
    altered = False
    # Make sure we're looking at a valid tool_dependencies.xml file.
    tree, error_message = xml_util.parse_xml(tool_dependencies_config)
    if tree is None:
        return False, None
    root = tree.getroot()
    if root.tag == 'tool_dependency':
        for root_index, root_elem in enumerate(root):
            # <package name="eigen" version="2.0.17">
            if root_elem.tag == 'package':
                package_altered = False
                for package_index, package_elem in enumerate(root_elem):
                    if package_elem.tag == 'repository':
                        # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
                        populated, repository_elem, error_message = handle_repository_dependency_elem(
                            trans, package_elem)
                        if error_message:
                            exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag.  %s' % error_message
                            raise Exception(exception_message)
                        if populated:
                            root_elem[package_index] = repository_elem
                            package_altered = True
                            if not altered:
                                altered = True
                    elif package_elem.tag == 'install':
                        # <install version="1.0">
                        for actions_index, actions_elem in enumerate(
                                package_elem):
                            for action_index, action_elem in enumerate(
                                    actions_elem):
                                action_type = action_elem.get('type')
                                if action_type == 'set_environment_for_install':
                                    # <action type="set_environment_for_install">
                                    #     <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
                                    #        <package name="eigen" version="2.0.17" />
                                    #     </repository>
                                    # </action>
                                    for repo_index, repo_elem in enumerate(
                                            action_elem):
                                        populated, repository_elem, error_message = handle_repository_dependency_elem(
                                            trans, repo_elem)
                                        if error_message:
                                            exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag.  %s' % error_message
                                            raise Exception(exception_message)
                                        if populated:
                                            action_elem[
                                                repo_index] = repository_elem
                                            package_altered = True
                                            if not altered:
                                                altered = True
                                    if package_altered:
                                        actions_elem[
                                            action_index] = action_elem
                            if package_altered:
                                package_elem[actions_index] = actions_elem
                        if package_altered:
                            root_elem[package_index] = package_elem
                if package_altered:
                    root[root_index] = root_elem
        return altered, root
    return False, None
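To make the nesting that handle_tool_dependencies_definition walks concrete, the sketch below parses an invented tool_dependencies.xml fragment (modeled on the comments above) and locates the <repository> tags the function would populate, both directly under <package> and under set_environment_for_install actions:

import xml.etree.ElementTree as ET

sample = '''<tool_dependency>
    <package name="eigen" version="2.0.17">
        <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True"/>
        <install version="1.0">
            <actions>
                <action type="set_environment_for_install">
                    <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
                        <package name="eigen" version="2.0.17"/>
                    </repository>
                </action>
            </actions>
        </install>
    </package>
</tool_dependency>'''

root = ET.fromstring(sample)
for package_elem in root.findall('package'):
    for repo_elem in package_elem.findall('repository'):
        print('package-level repository: %s (owner %s)' % (repo_elem.get('name'), repo_elem.get('owner')))
    for action_elem in package_elem.findall('install/actions/action'):
        if action_elem.get('type') == 'set_environment_for_install':
            for repo_elem in action_elem.findall('repository'):
                print('action-level repository: %s (owner %s)' % (repo_elem.get('name'), repo_elem.get('owner')))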