Example #1
0
 def upload( self, trans, **kwd ):
     message = kwd.get( 'message', ''  )
     status = kwd.get( 'status', 'done' )
     commit_message = kwd.get( 'commit_message', 'Uploaded'  )
     category_ids = util.listify( kwd.get( 'category_id', '' ) )
     categories = suc.get_categories( trans )
     repository_id = kwd.get( 'repository_id', '' )
     repository = suc.get_repository_in_tool_shed( trans, repository_id )
     repo_dir = repository.repo_path( trans.app )
     repo = hg.repository( suc.get_configured_ui(), repo_dir )
     uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
     remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
     uploaded_file = None
     upload_point = commit_util.get_upload_point( repository, **kwd )
     tip = repository.tip( trans.app )
     file_data = kwd.get( 'file_data', '' )
     url = kwd.get( 'url', '' )
     # Part of the upload process is sending email notification to those that have registered to
     # receive them.  One scenario occurs when the first change set is produced for the repository.
     # See the suc.handle_email_alerts() method for the definition of the scenarios.
     new_repo_alert = repository.is_new( trans.app )
     uploaded_directory = None
     if kwd.get( 'upload_button', False ):
         if file_data == '' and url == '':
             message = 'No files were entered on the upload form.'
             status = 'error'
             uploaded_file = None
         elif url and url.startswith( 'hg' ):
             # Use mercurial clone to fetch repository, contents will then be copied over.
             uploaded_directory = tempfile.mkdtemp()
             repo_url = 'http%s' % url[ len( 'hg' ): ]
             repo_url = repo_url.encode( 'ascii', 'replace' )
             try:
                 commands.clone( suc.get_configured_ui(), repo_url, uploaded_directory )
             except Exception, e:
                 message = 'Error uploading via mercurial clone: %s' % suc.to_html_string( str( e ) )
                 status = 'error'
                 suc.remove_dir( uploaded_directory )
                 uploaded_directory = None
         elif url:
             valid_url = True
             try:
                 stream = urllib.urlopen( url )
             except Exception, e:
                 valid_url = False
                 message = 'Error uploading file via http: %s' % str( e )
                 status = 'error'
                 uploaded_file = None
             if valid_url:
                 fd, uploaded_file_name = tempfile.mkstemp()
                 uploaded_file = open( uploaded_file_name, 'wb' )
                 while 1:
                     chunk = stream.read( util.CHUNK_SIZE )
                     if not chunk:
                         break
                     uploaded_file.write( chunk )
                 uploaded_file.flush()
                 uploaded_file_filename = url.split( '/' )[ -1 ]
                 isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
            if converter_path or display_path:
                # Create a dictionary of tool shed repository related information.
                repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
                                                                                                  name=tool_shed_repository.name,
                                                                                                  owner=self.repository_owner,
                                                                                                  installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
                                                                                                  tool_dicts=metadata_dict.get( 'tools', [] ),
                                                                                                  converter_path=converter_path,
                                                                                                  display_path=display_path )
            if converter_path:
                # Load proprietary datatype converters
                self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
            if display_path:
                # Load proprietary datatype display applications
                self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
            suc.remove_dir( work_dir )

    def install_repository( self, repository_elem, tool_shed_repository, install_dependencies, is_repository_dependency=False ):
        """Install a single repository, loading contained tools into the tool panel."""
        # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
        relative_clone_dir = os.path.join( tool_shed_repository.tool_shed,
                                           'repos',
                                           tool_shed_repository.owner,
                                           tool_shed_repository.name,
                                           tool_shed_repository.installed_changeset_revision )
        clone_dir = os.path.join( self.tool_path, relative_clone_dir )
        cloned_ok = self.__iscloned( clone_dir )
        is_installed = False
        # Any of the following states should count as installed in this context.
        if tool_shed_repository.status in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED,
                                            self.app.install_model.ToolShedRepository.installation_status.ERROR,
def export_repository( trans, tool_shed_url, repository_id, repository_name, changeset_revision, file_type, export_repository_dependencies, api=False ):
    """
    Export a tool shed repository (and optionally its repository dependencies) as a
    compressed tar archive containing one archive per repository, an export_info.xml
    file describing the export, and a manifest.xml file that preserves import order.

    :param trans: the web transaction (provides security and database context)
    :param tool_shed_url: base URL of the tool shed performing the export
    :param repository_id: encoded id of the repository being exported
    :param repository_name: name of the repository (unused in this span — TODO confirm callers rely on it)
    :param changeset_revision: the changeset revision of the repository to export
    :param file_type: compression type passed to tarfile ("gz", "bz2", ...)
    :param export_repository_dependencies: if True, also export all defined repository dependencies
    :param api: True when invoked via the API; recorded in export_info.xml

    NOTE(review): this span acquires a threading.Lock but no release, close of the
    archive, or return statement is visible here — the function appears truncated
    in this view; confirm the missing finally/return against the full file.
    """
    repository = suc.get_repository_in_tool_shed( trans, repository_id )
    # Build the archive filename in a temporary directory (use_tmp_archive_dir=True).
    repositories_archive_filename = generate_repository_archive_filename( tool_shed_url,
                                                                          str( repository.name ),
                                                                          str( repository.user.username ),
                                                                          changeset_revision,
                                                                          file_type,
                                                                          export_repository_dependencies=export_repository_dependencies,
                                                                          use_tmp_archive_dir=True )
    if export_repository_dependencies:
        # Gather every dependent repository and order them so imports succeed
        # (dependencies must be imported before the repositories that need them).
        repo_info_dicts = get_repo_info_dicts( trans, tool_shed_url, repository_id, changeset_revision )
        repository_ids = get_repository_ids( trans, repo_info_dicts )
        ordered_repository_ids, ordered_repositories, ordered_changeset_revisions = order_components_for_import( trans, repository_ids, repo_info_dicts )
    else:
        # Exporting a single repository: the "ordered" lists contain at most one entry.
        ordered_repository_ids = []
        ordered_repositories = []
        ordered_changeset_revisions = []
        if repository:
            repository_metadata = suc.get_current_repository_metadata_for_changeset_revision( trans, repository, changeset_revision )
            if repository_metadata:
                ordered_repository_ids = [ repository_id ]
                ordered_repositories = [ repository ]
                ordered_changeset_revisions = [ repository_metadata.changeset_revision ]
    repositories_archive = None
    error_messages = ''
    # Serialize concurrent exports; blocking acquire.
    lock = threading.Lock()
    lock.acquire( True )
    try:
        repositories_archive = tarfile.open( repositories_archive_filename, "w:%s" % file_type )
        exported_repository_registry = ExportedRepositoryRegistry()
        for index, repository_id in enumerate( ordered_repository_ids ):
            work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-export-er" )
            ordered_repository = ordered_repositories[ index ]
            ordered_changeset_revision = ordered_changeset_revisions[ index ]
            repository_archive, error_message = generate_repository_archive( trans,
                                                                             work_dir,
                                                                             tool_shed_url,
                                                                             ordered_repository,
                                                                             ordered_changeset_revision,
                                                                             file_type )
            if error_message:
                # Accumulate errors but keep processing the remaining repositories.
                error_messages = '%s  %s' % ( error_messages, error_message )
            else:
                # Add the per-repository archive to the outer archive and record an
                # XML element describing it for the manifest.
                archive_name = str( os.path.basename( repository_archive.name ) )
                repositories_archive.add( repository_archive.name, arcname=archive_name )
                attributes, sub_elements = get_repository_attributes_and_sub_elements( ordered_repository, archive_name )
                elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements )
                exported_repository_registry.exported_repository_elems.append( elem )
            suc.remove_dir( work_dir )
        # Keep information about the export in a file name export_info.xml in the archive.
        sub_elements = generate_export_elem( tool_shed_url, repository, changeset_revision, export_repository_dependencies, api )
        export_elem = xml_util.create_element( 'export_info', attributes=None, sub_elements=sub_elements )
        tmp_export_info = xml_util.create_and_write_tmp_file( export_elem, use_indent=True )
        repositories_archive.add( tmp_export_info, arcname='export_info.xml' )
        # Write the manifest, which must preserve the order in which the repositories should be imported.
        exported_repository_root = xml_util.create_element( 'repositories' )
        for exported_repository_elem in exported_repository_registry.exported_repository_elems:
            exported_repository_root.append( exported_repository_elem )
        tmp_manifest = xml_util.create_and_write_tmp_file( exported_repository_root, use_indent=True )
        repositories_archive.add( tmp_manifest, arcname='manifest.xml' )
    except Exception, e:
        log.exception( str( e ) )
def sync_database_with_file_system( app, tool_shed_repository, tool_dependency_name, tool_dependency_version, tool_dependency_install_dir,
                                    tool_dependency_type='package' ):
    """
    Synchronize a tool dependency's database record with the state of its installation
    directory on disk.

    The installation directory defined by the received tool_dependency_install_dir exists, so check for the presence
    of fabric_util.INSTALLATION_LOG.  If the file exists, we'll assume the tool dependency is installed, but not
    necessarily successfully (it could be in an error state on disk).  However, we can justifiably assume here that no
    matter the state, an associated database record will exist.

    Returns a tuple ( tool_dependency, can_install_tool_dependency ) where
    can_install_tool_dependency is True only when the stale installation directory was
    removed and the dependency should be (re)installed.
    """
    # This method should be reached very rarely.  It implies that either the Galaxy environment became corrupted (i.e.,
    # the database records for installed tool dependencies is not synchronized with tool dependencies on disk) or the Tool
    # Shed's install and test framework is running.
    sa_session = app.install_model.context.current
    can_install_tool_dependency = False
    tool_dependency = get_tool_dependency_by_name_version_type_repository( app,
                                                                           tool_shed_repository,
                                                                           tool_dependency_name,
                                                                           tool_dependency_version,
                                                                           tool_dependency_type )
    if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLING:
        # The tool dependency is in an Installing state, so we don't want to do anything to it.  If the tool
        # dependency is being installed by someone else, we don't want to interfere with that.  This assumes
        # the installation by "someone else" is not hung in an Installing state, which is a weakness if that
        # "someone else" never repaired it.
        log.debug( 'Skipping installation of tool dependency %s version %s because it has a status of %s' % \
            ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency.status ) ) )
    else:
        # We have a pre-existing installation directory on the file system, but our associated database record is
        # in a state that allowed us to arrive here - see the comment in common_install_util.handle_tool_dependencies().
        # At this point, we'll inspect the installation directory to see if we have a "proper installation" and
        # if so, synchronize the database record rather than reinstalling the dependency if we're "running_functional_tests".
        # If we're not "running_functional_tests", we'll set the tool dependency's installation status to ERROR.
        tool_dependency_installation_directory_contents = os.listdir( tool_dependency_install_dir )
        if fabric_util.INSTALLATION_LOG in tool_dependency_installation_directory_contents:
            # Since this tool dependency's installation directory contains an installation log, we consider it to be
            # installed.  In some cases the record may be missing from the database due to some activity outside of
            # the control of the Tool Shed.  Since a new record was created for it and we don't know the state of the
            # files on disk, we will set it to an error state (unless we are running Tool Shed functional tests - see
            # below).
            log.debug( 'Skipping installation of tool dependency %s version %s because it is installed in %s' % \
                ( str( tool_dependency.name ), str( tool_dependency.version ), str( tool_dependency_install_dir ) ) )
            if app.config.running_functional_tests:
                # If we are running functional tests, the state will be set to Installed because previously compiled
                # tool dependencies are not deleted by default, from the "install and test" framework.
                tool_dependency.status = app.install_model.ToolDependency.installation_status.INSTALLED
            else:
                error_message = 'The installation directory for this tool dependency had contents, but the database had no record. '
                error_message += 'The installation log may show this tool dependency to be correctly installed, but due to the '
                error_message += 'missing database record, it is automatically set to Error.'
                tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR
                tool_dependency.error_message = error_message
        else:
            # The directory exists but the installation log is missing: the install never
            # completed, so clear the directory and flag the dependency for re-installation.
            error_message = '\nInstallation path %s for tool dependency %s version %s exists, but the expected file %s' % \
                ( str( tool_dependency_install_dir ),
                  str( tool_dependency_name ),
                  str( tool_dependency_version ),
                  str( fabric_util.INSTALLATION_LOG ) )
            error_message += ' is missing.  This indicates an installation error so the tool dependency is being'
            error_message += ' prepared for re-installation.'
            # Use the module logger rather than a bare print so the message reaches the
            # server log like the other diagnostics in this function.
            log.error( error_message )
            tool_dependency.status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED
            suc.remove_dir( tool_dependency_install_dir )
            can_install_tool_dependency = True
        # Persist the synchronized status (and any error message) to the database.
        sa_session.add( tool_dependency )
        sa_session.flush()
    return tool_dependency, can_install_tool_dependency
 def upload( self, trans, **kwd ):
     message = kwd.get( 'message', ''  )
     status = kwd.get( 'status', 'done' )
     commit_message = kwd.get( 'commit_message', 'Uploaded'  )
     category_ids = util.listify( kwd.get( 'category_id', '' ) )
     categories = suc.get_categories( trans )
     repository_id = kwd.get( 'repository_id', '' )
     repository = suc.get_repository_in_tool_shed( trans, repository_id )
     repo_dir = repository.repo_path( trans.app )
     repo = hg.repository( suc.get_configured_ui(), repo_dir )
     uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
     remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
     uploaded_file = None
     upload_point = commit_util.get_upload_point( repository, **kwd )
     tip = repository.tip( trans.app )
     file_data = kwd.get( 'file_data', '' )
     url = kwd.get( 'url', '' )
     # Part of the upload process is sending email notification to those that have registered to
     # receive them.  One scenario occurs when the first change set is produced for the repository.
     # See the suc.handle_email_alerts() method for the definition of the scenarios.
     new_repo_alert = repository.is_new( trans.app )
     uploaded_directory = None
     if kwd.get( 'upload_button', False ):
         if file_data == '' and url == '':
             message = 'No files were entered on the upload form.'
             status = 'error'
             uploaded_file = None
         elif url and url.startswith( 'hg' ):
             # Use mercurial clone to fetch repository, contents will then be copied over.
             uploaded_directory = tempfile.mkdtemp()
             repo_url = 'http%s' % url[ len( 'hg' ): ]
             repo_url = repo_url.encode( 'ascii', 'replace' )
             commands.clone( suc.get_configured_ui(), repo_url, uploaded_directory )
         elif url:
             valid_url = True
             try:
                 stream = urllib.urlopen( url )
             except Exception, e:
                 valid_url = False
                 message = 'Error uploading file via http: %s' % str( e )
                 status = 'error'
                 uploaded_file = None
             if valid_url:
                 fd, uploaded_file_name = tempfile.mkstemp()
                 uploaded_file = open( uploaded_file_name, 'wb' )
                 while 1:
                     chunk = stream.read( util.CHUNK_SIZE )
                     if not chunk:
                         break
                     uploaded_file.write( chunk )
                 uploaded_file.flush()
                 uploaded_file_filename = url.split( '/' )[ -1 ]
                 isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
         elif file_data not in ( '', None ):
             uploaded_file = file_data.file
             uploaded_file_name = uploaded_file.name
             uploaded_file_filename = os.path.split( file_data.filename )[ -1 ]
             isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
         if uploaded_file or uploaded_directory:
             ok = True
             isgzip = False
             isbz2 = False
             if uploaded_file:
                 if uncompress_file:
                     isgzip = checkers.is_gzip( uploaded_file_name )
                     if not isgzip:
                         isbz2 = checkers.is_bz2( uploaded_file_name )
                 if isempty:
                     tar = None
                     istar = False
                 else:
                     # Determine what we have - a single file or an archive
                     try:
                         if ( isgzip or isbz2 ) and uncompress_file:
                             # Open for reading with transparent compression.
                             tar = tarfile.open( uploaded_file_name, 'r:*' )
                         else:
                             tar = tarfile.open( uploaded_file_name )
                         istar = True
                     except tarfile.ReadError, e:
                         tar = None
                         istar = False
             else:
                 # Uploaded directory
                 istar = False
             if istar:
                 ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                     self.upload_tar( trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
             elif uploaded_directory:
                 ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                     self.upload_directory( trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
             else:
                 if ( isgzip or isbz2 ) and uncompress_file:
                     uploaded_file_filename = commit_util.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip=isgzip, isbz2=isbz2 )
                 if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and uploaded_file_filename != suc.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                     ok = False
                     message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named <b>tool_dependencies.xml</b>.'
                 if ok:
                     if upload_point is not None:
                         full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
                     else:
                         full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
                     # Move some version of the uploaded file to the load_point within the repository hierarchy.
                     if uploaded_file_filename in [ suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME ]:
                         # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
                         altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans,
                                                                                                                    uploaded_file_name,
                                                                                                                    unpopulate=False )
                         if error_message:
                             ok = False
                             message = error_message
                             status = 'error'
                         elif altered:
                             tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                             shutil.move( tmp_filename, full_path )
                         else:
                             shutil.move( uploaded_file_name, full_path )
                     elif uploaded_file_filename in [ suc.TOOL_DEPENDENCY_DEFINITION_FILENAME ]:
                         # Inspect the contents of the file to see if it defines a complex repository dependency definition whose changeset_revision values
                         # are missing and if so, set them appropriately.
                         altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
                         if error_message:
                             ok = False
                             message = error_message
                             status = 'error'
                         if ok:
                             if altered:
                                 tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                                 shutil.move( tmp_filename, full_path )
                             else:
                                 shutil.move( uploaded_file_name, full_path )
                     else:
                         shutil.move( uploaded_file_name, full_path )
                     if ok:
                         # See if any admin users have chosen to receive email alerts when a repository is updated.  If so, check every uploaded file to ensure
                         # content is appropriate.
                         check_contents = commit_util.check_file_contents_for_email_alerts( trans )
                         if check_contents and os.path.isfile( full_path ):
                             content_alert_str = commit_util.check_file_content_for_html_and_images( full_path )
                         else:
                             content_alert_str = ''
                         commands.add( repo.ui, repo, full_path )
                         # Convert from unicode to prevent "TypeError: array item must be char"
                         full_path = full_path.encode( 'ascii', 'replace' )
                         commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
                         if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
                             # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                             # to the in-memory trans.app.tool_data_tables dictionary.
                             error, error_message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, full_path )
                             if error:
                                 message = '%s<br/>%s' % ( message, error_message )
                         # See if the content of the change set was valid.
                         admin_only = len( repository.downloadable_revisions ) != 1
                         suc.handle_email_alerts( trans,
                                                  repository,
                                                  content_alert_str=content_alert_str,
                                                  new_repo_alert=new_repo_alert,
                                                  admin_only=admin_only )
             if ok:
                 # Update the repository files for browsing.
                 suc.update_repository( repo )
                 # Get the new repository tip.
                 if tip == repository.tip( trans.app ):
                     message = 'No changes to repository.  '
                     status = 'warning'
                 else:
                     if ( isgzip or isbz2 ) and uncompress_file:
                         uncompress_str = ' uncompressed and '
                     else:
                         uncompress_str = ' '
                     if uploaded_directory:
                         source_type = "repository"
                         source = url
                     else:
                         source_type = "file"
                         source = uploaded_file_filename
                     message = "The %s <b>%s</b> has been successfully%suploaded to the repository.  " % ( source_type, source, uncompress_str )
                     if istar and ( undesirable_dirs_removed or undesirable_files_removed ):
                         items_removed = undesirable_dirs_removed + undesirable_files_removed
                         message += "  %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive.  " % items_removed
                     if istar and remove_repo_files_not_in_tar and files_to_remove:
                         if upload_point is not None:
                             message += "  %d files were removed from the repository relative to the selected upload point '%s'.  " % ( len( files_to_remove ), upload_point )
                         else:
                             message += "  %d files were removed from the repository root.  " % len( files_to_remove )
                     kwd[ 'message' ] = message
                     metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
                 if repository.metadata_revisions:
                     # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
                     metadata_dict = repository.metadata_revisions[0].metadata
                 else:
                     metadata_dict = {}
                 if str( repository.type ) != rt_util.TOOL_DEPENDENCY_DEFINITION:
                     change_repository_type_message = tool_dependency_util.generate_message_for_repository_type_change( trans, repository )
                     if change_repository_type_message:
                         message += change_repository_type_message
                         status = 'warning'
                     else:
                         # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
                         # or some other problem.  Tool dependency definitions can define orphan tool dependencies (no relationship to any tools contained in the repository),
                         # so warning messages are important because orphans are always valid.  The repository owner must be warned in case they did not intend to define an
                         # orphan dependency, but simply provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
                         orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, repository, metadata_dict )
                         if orphan_message:
                             message += orphan_message
                             status = 'warning'
                 # Handle messaging for invalid tool dependencies.
                 invalid_tool_dependencies_message = tool_dependency_util.generate_message_for_invalid_tool_dependencies( metadata_dict )
                 if invalid_tool_dependencies_message:
                     message += invalid_tool_dependencies_message
                     status = 'error'
                 # Handle messaging for invalid repository dependencies.
                 invalid_repository_dependencies_message = repository_dependency_util.generate_message_for_invalid_repository_dependencies( metadata_dict )
                 if invalid_repository_dependencies_message:
                     message += invalid_repository_dependencies_message
                     status = 'error'
                 # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                 tool_util.reset_tool_data_tables( trans.app )
                 if uploaded_directory:
                     suc.remove_dir( uploaded_directory )
                 trans.response.send_redirect( web.url_for( controller='repository',
                                                            action='browse_repository',
                                                            id=repository_id,
                                                            commit_message='Deleted selected files',
                                                            message=message,
                                                            status=status ) )
             else:
                 if uploaded_directory:
                     suc.remove_dir( uploaded_directory )
                 status = 'error'
             # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
             tool_util.reset_tool_data_tables( trans.app )
Example #6
0
                         message += orphan_message
                         status = 'warning'
             # Handle messaging for invalid tool dependencies.
             invalid_tool_dependencies_message = tool_dependency_util.generate_message_for_invalid_tool_dependencies( metadata_dict )
             if invalid_tool_dependencies_message:
                 message += invalid_tool_dependencies_message
                 status = 'error'
             # Handle messaging for invalid repository dependencies.
             invalid_repository_dependencies_message = repository_dependency_util.generate_message_for_invalid_repository_dependencies( metadata_dict )
             if invalid_repository_dependencies_message:
                 message += invalid_repository_dependencies_message
                 status = 'error'
             # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
             tool_util.reset_tool_data_tables( trans.app )
             if uploaded_directory:
                 suc.remove_dir( uploaded_directory )
             trans.response.send_redirect( web.url_for( controller='repository',
                                                        action='browse_repository',
                                                        id=repository_id,
                                                        commit_message='Deleted selected files',
                                                        message=message,
                                                        status=status ) )
         else:
             if uploaded_directory:
                 suc.remove_dir( uploaded_directory )
             status = 'error'
         # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
         tool_util.reset_tool_data_tables( trans.app )
 selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
 return trans.fill_template( '/webapps/tool_shed/repository/upload.mako',
                             repository=repository,
        # The manifest.xml file has already been validated, so no error_message should be returned here.
        repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path )
        # Determine the status for each exported repository archive contained within the capsule.
        repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts )
        # Generate a list of repository name / import results message tuples for display after the capsule is imported.
        import_results_tups = []
        # Only create repositories that do not yet exist and that the current user is authorized to create.  The
        # status will be None for repositories that fall into the intersection of these 2 categories.
        for repository_status_info_dict in repository_status_info_dicts:
            # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
            repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
            repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
            import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans,
                                                                                                    repository_status_info_dict,
                                                                                                    import_results_tups )
        suc.remove_dir( file_path )
        # NOTE: the order of installation is defined in import_results_tups, but order will be lost when transferred to return_dict.
        return_dict = {}
        for import_results_tup in import_results_tups:
            name_owner, message = import_results_tup
            name, owner = name_owner
            key = 'Archive of repository "%s" owned by "%s"' % ( str( name ), str( owner ) )
            val = message.replace( '<b>', '"' ).replace( '</b>', '"' )
            return_dict[ key ] = val
        return return_dict

    @web.expose_api_anonymous
    def index( self, trans, deleted=False, **kwd ):
        """
        GET /api/repositories
        Displays a collection (list) of repositories.