def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies ):
    """Generate the metadata for the installed tool shed repository, among other things."""
    tool_panel_dict_for_display = odict()
    if self.tool_path:
        repo_install_dir = os.path.join( self.tool_path, relative_install_dir )
    else:
        repo_install_dir = relative_install_dir
    for tool_elem in repository_elem:
        # The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
        tool_config = tool_elem.get( 'file' )
        guid = self.get_guid( repository_clone_url, relative_install_dir, tool_config )
        # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
        is_displayed, tool_sections = self.get_containing_tool_sections( tool_config )
        if is_displayed:
            tool_panel_dict_for_tool_config = shed_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
            for k, v in tool_panel_dict_for_tool_config.items():
                tool_panel_dict_for_display[ k ] = v
        else:
            print 'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).' \
                % ( guid, tool_config, ", ".join( self.proprietary_tool_confs or [] ) )
    metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=self.app,
                                                                                     repository=tool_shed_repository,
                                                                                     changeset_revision=tool_shed_repository.changeset_revision,
                                                                                     repository_clone_url=repository_clone_url,
                                                                                     shed_config_dict=self.shed_config_dict,
                                                                                     relative_install_dir=relative_install_dir,
                                                                                     repository_files_dir=None,
                                                                                     resetting_all_metadata_on_repository=False,
                                                                                     updating_installed_repository=False,
                                                                                     persist=True )
    tool_shed_repository.metadata = metadata_dict
    self.app.sa_session.add( tool_shed_repository )
    self.app.sa_session.flush()
    if 'tool_dependencies' in metadata_dict:
        # All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
        tool_dependencies = shed_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
    else:
        tool_dependencies = None
    if 'tools' in metadata_dict:
        sample_files = metadata_dict.get( 'sample_files', [] )
        sample_files = [ str( s ) for s in sample_files ]
        tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
        shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
        sample_files_copied = [ s for s in tool_index_sample_files ]
        repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict )
        if repository_tools_tups:
            # Handle missing data table entries for tool parameters that are dynamically generated select lists.
            repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
            # Handle missing index files for tool parameters that are dynamically generated select lists.
            repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups, sample_files_copied )
            # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
            shed_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
            if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
                # Install tool dependencies.
                shed_util.update_tool_shed_repository_status( self.app,
                                                              tool_shed_repository,
                                                              self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
                # Get the tool_dependencies.xml file from disk.
                tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
                installed_tool_dependencies = shed_util.handle_tool_dependencies( app=self.app,
                                                                                  tool_shed_repository=tool_shed_repository,
                                                                                  tool_dependencies_config=tool_dependencies_config,
                                                                                  tool_dependencies=tool_dependencies )
                for installed_tool_dependency in installed_tool_dependencies:
                    if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR:
                        print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':'
                        print installed_tool_dependency.error_message, '\n\n'
            shed_util.add_to_tool_panel( self.app,
                                         tool_shed_repository.name,
                                         repository_clone_url,
                                         tool_shed_repository.installed_changeset_revision,
                                         repository_tools_tups,
                                         self.repository_owner,
                                         self.migrated_tools_config,
                                         tool_panel_dict=tool_panel_dict_for_display,
                                         new_install=True )
    if 'datatypes' in metadata_dict:
        tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
        if not tool_shed_repository.includes_datatypes:
            tool_shed_repository.includes_datatypes = True
        self.app.sa_session.add( tool_shed_repository )
        self.app.sa_session.flush()
        work_dir = tempfile.mkdtemp()
        datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
        # Load proprietary data types required by tools.  The value of override is not important here since the Galaxy server will be started
        # after this installation completes.
        converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False )  # repo_install_dir was relative_install_dir
        if converter_path or display_path:
            # Create a dictionary of tool shed repository related information.
            repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
                                                                                          name=tool_shed_repository.name,
                                                                                          owner=self.repository_owner,
                                                                                          installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
                                                                                          tool_dicts=metadata_dict.get( 'tools', [] ),
                                                                                          converter_path=converter_path,
                                                                                          display_path=display_path )
        if converter_path:
            # Load proprietary datatype converters
            self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
        if display_path:
            # Load proprietary datatype display applications
            self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
        try:
            shutil.rmtree( work_dir )
        except:
            pass
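
# A hedged, illustrative sketch of the per-tool loop above: how a <tool/> entry in a
# migration config (e.g. migrated_tools_conf.xml) maps to a tool panel guid.  The guid
# layout shown ( <tool shed host>/repos/<owner>/<name>/<tool id>/<version> ) is only
# assumed here; build_guid() is a hypothetical stand-in for self.get_guid(), which in
# the real InstallManager derives the id and version by parsing the tool config file.
from xml.etree import ElementTree

def build_guid( tool_shed_host, owner, repository_name, tool_id, tool_version ):
    # Hypothetical helper -- not part of shed_util or this InstallManager.
    return '%s/repos/%s/%s/%s/%s' % ( tool_shed_host, owner, repository_name, tool_id, tool_version )

example_repository_elem = ElementTree.fromstring( '''
<repository name="emboss_5" changeset_revision="0123456789ab">
    <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
</repository>''' )

for example_tool_elem in example_repository_elem.findall( 'tool' ):
    example_guid = build_guid( 'toolshed.g2.bx.psu.edu',
                               'devteam',
                               example_repository_elem.get( 'name' ),
                               example_tool_elem.get( 'id' ),
                               example_tool_elem.get( 'version' ) )
    print example_tool_elem.get( 'file' ), '->', example_guid
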
def upload( self, trans, **kwd ):
    params = util.Params( kwd )
    message = util.restore_text( params.get( 'message', '' ) )
    status = params.get( 'status', 'done' )
    commit_message = util.restore_text( params.get( 'commit_message', 'Uploaded' ) )
    category_ids = util.listify( params.get( 'category_id', '' ) )
    categories = suc.get_categories( trans )
    repository_id = params.get( 'repository_id', '' )
    repository = suc.get_repository_in_tool_shed( trans, repository_id )
    repo_dir = repository.repo_path( trans.app )
    repo = hg.repository( suc.get_configured_ui(), repo_dir )
    uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
    remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
    uploaded_file = None
    upload_point = self.__get_upload_point( repository, **kwd )
    tip = repository.tip( trans.app )
    file_data = params.get( 'file_data', '' )
    url = params.get( 'url', '' )
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new( trans.app )
    uploaded_directory = None
    if params.get( 'upload_button', False ):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith( 'hg' ):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            repo_url = 'http%s' % url[ len( 'hg' ): ]
            repo_url = repo_url.encode( 'ascii', 'replace' )
            commands.clone( suc.get_configured_ui(), repo_url, uploaded_directory )
        elif url:
            valid_url = True
            try:
                stream = urllib.urlopen( url )
            except Exception, e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str( e )
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open( uploaded_file_name, 'wb' )
                while 1:
                    chunk = stream.read( CHUNK_SIZE )
                    if not chunk:
                        break
                    uploaded_file.write( chunk )
                uploaded_file.flush()
                uploaded_file_filename = url.split( '/' )[ -1 ]
                isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
        elif file_data not in ( '', None ):
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = os.path.split( file_data.filename )[ -1 ]
            isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
        if uploaded_file or uploaded_directory:
            ok = True
            isgzip = False
            isbz2 = False
            if uploaded_file:
                if uncompress_file:
                    isgzip = checkers.is_gzip( uploaded_file_name )
                    if not isgzip:
                        isbz2 = checkers.is_bz2( uploaded_file_name )
                if isempty:
                    tar = None
                    istar = False
                else:
                    # Determine what we have - a single file or an archive
                    try:
                        if ( isgzip or isbz2 ) and uncompress_file:
                            # Open for reading with transparent compression.
                            tar = tarfile.open( uploaded_file_name, 'r:*' )
                        else:
                            tar = tarfile.open( uploaded_file_name )
                        istar = True
                    except tarfile.ReadError, e:
                        tar = None
                        istar = False
            else:
                # Uploaded directory
                istar = False
            if istar:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_tar( trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
            elif uploaded_directory:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_directory( trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
            else:
                if ( isgzip or isbz2 ) and uncompress_file:
                    uploaded_file_filename = self.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
                if upload_point is not None:
                    full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
                else:
                    full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
                # Move the uploaded file to the load_point within the repository hierarchy.
                shutil.move( uploaded_file_name, full_path )
                # See if any admin users have chosen to receive email alerts when a repository is
                # updated.  If so, check every uploaded file to ensure content is appropriate.
                check_contents = suc.check_file_contents( trans )
                if check_contents and os.path.isfile( full_path ):
                    content_alert_str = self.__check_file_content( full_path )
                else:
                    content_alert_str = ''
                commands.add( repo.ui, repo, full_path )
                # Convert from unicode to prevent "TypeError: array item must be char"
                full_path = full_path.encode( 'ascii', 'replace' )
                commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
                if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
                    # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                    # to the in-memory trans.app.tool_data_tables dictionary.
                    error, error_message = suc.handle_sample_tool_data_table_conf_file( trans.app, full_path )
                    if error:
                        message = '%s<br/>%s' % ( message, error_message )
                # See if the content of the change set was valid.
                admin_only = len( repository.downloadable_revisions ) != 1
                suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
            if ok:
                # Update the repository files for browsing.
                suc.update_repository( repo )
                # Get the new repository tip.
                if tip == repository.tip( trans.app ):
                    message = 'No changes to repository. '
                    status = 'warning'
                else:
                    if ( isgzip or isbz2 ) and uncompress_file:
                        uncompress_str = ' uncompressed and '
                    else:
                        uncompress_str = ' '
                    if uploaded_directory:
                        source_type = "repository"
                        source = url
                    else:
                        source_type = "file"
                        source = uploaded_file_filename
                    message = "The %s '%s' has been successfully%suploaded to the repository. " % ( source_type, source, uncompress_str )
                    if istar and ( undesirable_dirs_removed or undesirable_files_removed ):
                        items_removed = undesirable_dirs_removed + undesirable_files_removed
                        message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive. " % items_removed
                    if istar and remove_repo_files_not_in_tar and files_to_remove:
                        if upload_point is not None:
                            message += " %d files were removed from the repository relative to the selected upload point '%s'. " % ( len( files_to_remove ), upload_point )
                        else:
                            message += " %d files were removed from the repository root. " % len( files_to_remove )
                    kwd[ 'message' ] = message
                    suc.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
                # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
                # or some other problem.
                if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
                    if repository.metadata_revisions:
                        # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
                        metadata_dict = repository.metadata_revisions[ 0 ].metadata
                    else:
                        metadata_dict = {}
                    if suc.has_orphan_tool_dependencies_in_tool_shed( metadata_dict ):
                        message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file", '
                        message += 'so one or more of the defined tool dependencies are considered orphans within this repository.'
                        status = 'warning'
                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                suc.reset_tool_data_tables( trans.app )
                trans.response.send_redirect( web.url_for( controller='repository',
                                                           action='browse_repository',
                                                           id=repository_id,
                                                           commit_message='Deleted selected files',
                                                           message=message,
                                                           status=status ) )
            else:
                status = 'error'
                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                suc.reset_tool_data_tables( trans.app )