def load_tool_from_changeset_revision(self, repository_id, changeset_revision, tool_config_filename):
    """
    Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value
    of tool_config_filename.  The value of changeset_revision is a valid (downloadable)
    changeset revision.  The tool config will be located in the repository manifest
    between the received valid changeset revision and the first changeset revision in
    the repository, searching backwards.

    :param repository_id: encoded id of the tool shed repository
    :param changeset_revision: a valid (downloadable) changeset revision
    :param tool_config_filename: base name of the tool config file (e.g., filtering.xml)
    :returns: 3-tuple of (repository, loaded tool or None, error message string)
    """
    original_tool_data_path = self.app.config.tool_data_path
    repository = repository_util.get_repository_in_tool_shed(self.app, repository_id)
    repo_files_dir = repository.repo_path(self.app)
    repo = hg_util.get_repo_for_repository(self.app, repository=None, repo_path=repo_files_dir, create=False)
    message = ''
    tool = None
    tool_config_filepath = repository_util.get_absolute_path_to_file_in_repository(repo_files_dir, tool_config_filename)
    work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-ltfcr")
    # Use try/finally so that the temporary work dir is always removed and the
    # application-wide tool_data_path is always restored, even when one of the
    # tool-loading helpers raises.  Without this, an exception would leave the
    # app pointing at a deleted temporary directory for all subsequent requests.
    try:
        can_use_disk_file = self.can_use_tool_config_disk_file(repository, repo, tool_config_filepath, changeset_revision)
        if can_use_disk_file:
            # Resolve sample data files against the temporary work dir while loading.
            self.app.config.tool_data_path = work_dir
            tool, valid, message, sample_files = \
                self.handle_sample_files_and_load_tool_from_disk(
                    repo_files_dir,
                    repository_id,
                    tool_config_filepath,
                    work_dir
                )
            if tool is not None:
                invalid_files_and_errors_tups = \
                    self.check_tool_input_params(
                        repo_files_dir,
                        tool_config_filename,
                        tool,
                        sample_files
                    )
                if invalid_files_and_errors_tups:
                    message2 = tool_util.generate_message_for_invalid_tools(
                        self.app,
                        invalid_files_and_errors_tups,
                        repository,
                        metadata_dict=None,
                        as_html=True,
                        displaying_invalid_tool=True)
                    message = self.concat_messages(message, message2)
        else:
            # The config on disk does not match the requested changeset revision, so
            # extract the tool config from the repository manifest into the work dir.
            tool, message, sample_files = \
                self.handle_sample_files_and_load_tool_from_tmp_config(
                    repo,
                    repository_id,
                    changeset_revision,
                    tool_config_filename,
                    work_dir
                )
    finally:
        basic_util.remove_dir(work_dir)
        self.app.config.tool_data_path = original_tool_data_path
    # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
    self.tdtm.reset_tool_data_tables()
    return repository, tool, message
def upload( self, trans, **kwd ): message = escape( kwd.get( 'message', '' ) ) status = kwd.get( 'status', 'done' ) commit_message = escape( kwd.get( 'commit_message', 'Uploaded' ) ) category_ids = util.listify( kwd.get( 'category_id', '' ) ) categories = suc.get_categories( trans.app ) repository_id = kwd.get( 'repository_id', '' ) repository = suc.get_repository_in_tool_shed( trans.app, repository_id ) repo_dir = repository.repo_path( trans.app ) repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False ) uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) ) remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) ) uploaded_file = None upload_point = commit_util.get_upload_point( repository, **kwd ) tip = repository.tip( trans.app ) file_data = kwd.get( 'file_data', '' ) url = kwd.get( 'url', '' ) # Part of the upload process is sending email notification to those that have registered to # receive them. One scenario occurs when the first change set is produced for the repository. # See the suc.handle_email_alerts() method for the definition of the scenarios. new_repo_alert = repository.is_new( trans.app ) uploaded_directory = None if kwd.get( 'upload_button', False ): if file_data == '' and url == '': message = 'No files were entered on the upload form.' status = 'error' uploaded_file = None elif url and url.startswith( 'hg' ): # Use mercurial clone to fetch repository, contents will then be copied over. 
uploaded_directory = tempfile.mkdtemp() repo_url = 'http%s' % url[ len( 'hg' ): ] repo_url = repo_url.encode( 'ascii', 'replace' ) try: commands.clone( hg_util.get_configured_ui(), repo_url, uploaded_directory ) except Exception, e: message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string( str( e ) ) status = 'error' basic_util.remove_dir( uploaded_directory ) uploaded_directory = None elif url: valid_url = True try: stream = urllib.urlopen( url ) except Exception, e: valid_url = False message = 'Error uploading file via http: %s' % str( e ) status = 'error' uploaded_file = None if valid_url: fd, uploaded_file_name = tempfile.mkstemp() uploaded_file = open( uploaded_file_name, 'wb' ) while 1: chunk = stream.read( util.CHUNK_SIZE ) if not chunk: break uploaded_file.write( chunk ) uploaded_file.flush() uploaded_file_filename = url.split( '/' )[ -1 ] isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
def upload( self, trans, **kwd ): message = escape( kwd.get( 'message', '' ) ) status = kwd.get( 'status', 'done' ) commit_message = escape( kwd.get( 'commit_message', 'Uploaded' ) ) repository_id = kwd.get( 'repository_id', '' ) repository = suc.get_repository_in_tool_shed( trans.app, repository_id ) repo_dir = repository.repo_path( trans.app ) repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False ) uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) ) remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) ) uploaded_file = None upload_point = commit_util.get_upload_point( repository, **kwd ) tip = repository.tip( trans.app ) file_data = kwd.get( 'file_data', '' ) url = kwd.get( 'url', '' ) # Part of the upload process is sending email notification to those that have registered to # receive them. One scenario occurs when the first change set is produced for the repository. # See the suc.handle_email_alerts() method for the definition of the scenarios. new_repo_alert = repository.is_new( trans.app ) uploaded_directory = None if kwd.get( 'upload_button', False ): if file_data == '' and url == '': message = 'No files were entered on the upload form.' status = 'error' uploaded_file = None elif url and url.startswith( 'hg' ): # Use mercurial clone to fetch repository, contents will then be copied over. 
uploaded_directory = tempfile.mkdtemp() repo_url = 'http%s' % url[ len( 'hg' ): ] repo_url = repo_url.encode( 'ascii', 'replace' ) try: commands.clone( hg_util.get_configured_ui(), repo_url, uploaded_directory ) except Exception, e: message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string( str( e ) ) status = 'error' basic_util.remove_dir( uploaded_directory ) uploaded_directory = None elif url: valid_url = True try: stream = urllib.urlopen( url ) except Exception, e: valid_url = False message = 'Error uploading file via http: %s' % str( e ) status = 'error' uploaded_file = None if valid_url: fd, uploaded_file_name = tempfile.mkstemp() uploaded_file = open( uploaded_file_name, 'wb' ) while 1: chunk = stream.read( util.CHUNK_SIZE ) if not chunk: break uploaded_file.write( chunk ) uploaded_file.flush() uploaded_file_filename = url.split( '/' )[ -1 ] isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
def load_tool_from_changeset_revision( self, repository_id, changeset_revision, tool_config_filename ):
    """
    Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value
    of tool_config_filename.  The value of changeset_revision is a valid (downloadable)
    changeset revision.  The tool config will be located in the repository manifest
    between the received valid changeset revision and the first changeset revision in
    the repository, searching backwards.

    :param repository_id: encoded id of the tool shed repository
    :param changeset_revision: a valid (downloadable) changeset revision
    :param tool_config_filename: base name of the tool config file (e.g., filtering.xml)
    :returns: 3-tuple of (repository, loaded tool or None, error message string)
    """
    original_tool_data_path = self.app.config.tool_data_path
    repository = suc.get_repository_in_tool_shed( self.app, repository_id )
    repo_files_dir = repository.repo_path( self.app )
    repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_files_dir, create=False )
    message = ''
    tool = None
    tool_config_filepath = suc.get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
    work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ltfcr" )
    # Wrap the loading work in try/finally so the temporary work dir is always removed
    # and the application-wide tool_data_path is always restored, even when a helper
    # raises.  Previously an exception would both leak the temp dir and leave
    # self.app.config.tool_data_path pointing at a deleted directory.
    try:
        can_use_disk_file = self.can_use_tool_config_disk_file( repository, repo, tool_config_filepath, changeset_revision )
        if can_use_disk_file:
            # Resolve sample data files against the temporary work dir while loading.
            self.app.config.tool_data_path = work_dir
            tool, valid, message, sample_files = \
                self.handle_sample_files_and_load_tool_from_disk( repo_files_dir,
                                                                  repository_id,
                                                                  tool_config_filepath,
                                                                  work_dir )
            if tool is not None:
                invalid_files_and_errors_tups = \
                    self.check_tool_input_params( repo_files_dir,
                                                  tool_config_filename,
                                                  tool,
                                                  sample_files )
                if invalid_files_and_errors_tups:
                    message2 = tool_util.generate_message_for_invalid_tools( self.app,
                                                                             invalid_files_and_errors_tups,
                                                                             repository,
                                                                             metadata_dict=None,
                                                                             as_html=True,
                                                                             displaying_invalid_tool=True )
                    message = self.concat_messages( message, message2 )
        else:
            # The config on disk does not match the requested changeset revision, so
            # extract the tool config from the repository manifest into the work dir.
            tool, message, sample_files = \
                self.handle_sample_files_and_load_tool_from_tmp_config( repo,
                                                                        repository_id,
                                                                        changeset_revision,
                                                                        tool_config_filename,
                                                                        work_dir )
    finally:
        basic_util.remove_dir( work_dir )
        self.app.config.tool_data_path = original_tool_data_path
    # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
    self.tdtm.reset_tool_data_tables()
    return repository, tool, message
tool_dependency = \ tool_dependency_util.set_tool_dependency_attributes( self.app, tool_dependency=tool_dependency, status=self.app.install_model.ToolDependency.installation_status.UNINSTALLED ) # Install tool dependencies. irm.update_tool_shed_repository_status( repository, self.app.install_model.ToolShedRepository. installation_status.INSTALLING_TOOL_DEPENDENCIES) # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path(self.app)) itdm = install_manager.InstallToolDependencyManager(self.app) installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=repository, tool_dependencies_config=tool_dependencies_config, tool_dependencies=repository.tool_dependencies, from_tool_migration_manager=False) for installed_tool_dependency in installed_tool_dependencies: if installed_tool_dependency.status in [ self.app.install_model.ToolDependency. installation_status.ERROR ]: repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message) basic_util.remove_dir(work_dir) irm.update_tool_shed_repository_status( repository, self.app.install_model.ToolShedRepository. installation_status.INSTALLED) return repair_dict
def upload(self, trans, **kwd):
    """
    Handle an upload of new content to a tool shed repository.

    Content can arrive three ways: an hg clone URL (prefixed 'hg'), a plain http(s)
    URL that is downloaded to a temp file, or a browser-posted file (``file_data``).
    Archives (tar, optionally gzip/bz2 compressed) are expanded into the repository;
    single files are moved into place and committed.  On success a new changeset is
    committed, repository metadata is regenerated, and the user is redirected to the
    repository browse page; otherwise the upload form template is re-rendered with an
    error/warning message.
    """
    message = escape(kwd.get('message', ''))
    status = kwd.get('status', 'done')
    commit_message = escape(kwd.get('commit_message', 'Uploaded'))
    repository_id = kwd.get('repository_id', '')
    repository = repository_util.get_repository_in_tool_shed(
        trans.app, repository_id)
    repo_dir = repository.repo_path(trans.app)
    uncompress_file = util.string_as_bool(
        kwd.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(
        kwd.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = commit_util.get_upload_point(repository, **kwd)
    # Remember the pre-upload tip so we can detect whether the commit changed anything.
    tip = repository.tip()
    file_data = kwd.get('file_data', '')
    url = kwd.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new()
    uploaded_directory = None
    if kwd.get('upload_button', False):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            # Rewrite the scheme: 'hg...' -> 'http...' (covers hgs -> https as well).
            repo_url = 'http%s' % url[len('hg'):]
            cloned_ok, error_message = hg_util.clone_repository(
                repo_url, uploaded_directory)
            if not cloned_ok:
                message = 'Error uploading via mercurial clone: %s' % error_message
                status = 'error'
                basic_util.remove_dir(uploaded_directory)
                uploaded_directory = None
        elif url:
            valid_url = True
            try:
                # Stream the download so large files are not held in memory.
                stream = requests.get(url, stream=True)
            except Exception as e:
                valid_url = False
                message = 'Error uploading file via http: %s' % util.unicodify(
                    e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                # NOTE(review): the fd returned by mkstemp is never closed; the file is
                # re-opened by name instead — TODO confirm whether the descriptor leak matters here.
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                for chunk in stream.iter_content(
                        chunk_size=util.CHUNK_SIZE):
                    if chunk:
                        uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(
                    os.path.abspath(uploaded_file_name)) == 0
        elif file_data not in ('', None):
            # Browser-posted file upload.
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = os.path.split(file_data.filename)[-1]
            isempty = os.path.getsize(
                os.path.abspath(uploaded_file_name)) == 0
        if uploaded_file or uploaded_directory:
            rdah = attribute_handlers.RepositoryDependencyAttributeHandler(
                trans.app, unpopulate=False)
            tdah = attribute_handlers.ToolDependencyAttributeHandler(
                trans.app, unpopulate=False)
            stdtm = ShedToolDataTableManager(trans.app)
            ok = True
            isgzip = False
            isbz2 = False
            if uploaded_file:
                if uncompress_file:
                    isgzip = checkers.is_gzip(uploaded_file_name)
                    if not isgzip:
                        isbz2 = checkers.is_bz2(uploaded_file_name)
                if isempty:
                    tar = None
                    istar = False
                else:
                    # Determine what we have - a single file or an archive
                    try:
                        if (isgzip or isbz2) and uncompress_file:
                            # Open for reading with transparent compression.
                            tar = tarfile.open(uploaded_file_name, 'r:*')
                        else:
                            tar = tarfile.open(uploaded_file_name)
                        istar = True
                    except tarfile.ReadError:
                        # Not a tar archive; treat as a single file upload.
                        tar = None
                        istar = False
            else:
                # Uploaded directory
                istar = False
            if istar:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    repository_content_util.upload_tar(
                        trans,
                        rdah,
                        tdah,
                        repository,
                        tar,
                        uploaded_file,
                        upload_point,
                        remove_repo_files_not_in_tar,
                        commit_message,
                        new_repo_alert
                    )
            elif uploaded_directory:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_directory(trans,
                                          rdah,
                                          tdah,
                                          repository,
                                          uploaded_directory,
                                          upload_point,
                                          remove_repo_files_not_in_tar,
                                          commit_message,
                                          new_repo_alert)
            else:
                # Single-file upload path.
                if (isgzip or isbz2) and uncompress_file:
                    uploaded_file_filename = commit_util.uncompress(
                        repository,
                        uploaded_file_name,
                        uploaded_file_filename,
                        isgzip=isgzip,
                        isbz2=isbz2)
                # Definition-only repository types accept exactly one well-known file name.
                if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION and \
                        uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Repository suite definition</b> can only contain a single file named '
                    message += '<b>repository_dependencies.xml</b>.'
                elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and \
                        uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named '
                    message += '<b>tool_dependencies.xml</b>.'
                if ok:
                    if upload_point is not None:
                        full_path = os.path.abspath(
                            os.path.join(repo_dir, upload_point,
                                         uploaded_file_filename))
                    else:
                        full_path = os.path.abspath(
                            os.path.join(repo_dir, uploaded_file_filename))
                    # Move some version of the uploaded file to the load_point within the repository hierarchy.
                    if uploaded_file_filename in [
                            rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME
                    ]:
                        # Inspect the contents of the file to see if toolshed or changeset_revision attributes
                        # are missing and if so, set them appropriately.
                        altered, root_elem, error_message = rdah.handle_tag_attributes(
                            uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        elif altered:
                            tmp_filename = xml_util.create_and_write_tmp_file(
                                root_elem)
                            shutil.move(tmp_filename, full_path)
                        else:
                            shutil.move(uploaded_file_name, full_path)
                    elif uploaded_file_filename in [
                            rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME
                    ]:
                        # Inspect the contents of the file to see if changeset_revision values are
                        # missing and if so, set them appropriately.
                        altered, root_elem, error_message = tdah.handle_tag_attributes(
                            uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        if ok:
                            if altered:
                                tmp_filename = xml_util.create_and_write_tmp_file(
                                    root_elem)
                                shutil.move(tmp_filename, full_path)
                            else:
                                shutil.move(uploaded_file_name, full_path)
                    else:
                        shutil.move(uploaded_file_name, full_path)
                    if ok:
                        # See if any admin users have chosen to receive email alerts when a repository is updated.
                        # If so, check every uploaded file to ensure content is appropriate.
                        check_contents = commit_util.check_file_contents_for_email_alerts(
                            trans.app)
                        if check_contents and os.path.isfile(full_path):
                            content_alert_str = commit_util.check_file_content_for_html_and_images(
                                full_path)
                        else:
                            content_alert_str = ''
                        hg_util.add_changeset(repo_dir, full_path)
                        hg_util.commit_changeset(
                            repo_dir,
                            full_path_to_changeset=full_path,
                            username=trans.user.username,
                            message=commit_message)
                        if full_path.endswith(
                                'tool_data_table_conf.xml.sample'):
                            # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
                            # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
                            # dictionary.
                            error, error_message = stdtm.handle_sample_tool_data_table_conf_file(
                                full_path, persist=False)
                            if error:
                                message = '%s<br/>%s' % (message,
                                                         error_message)
                        # See if the content of the change set was valid.
                        admin_only = len(
                            repository.downloadable_revisions) != 1
                        suc.handle_email_alerts(
                            trans.app,
                            trans.request.host,
                            repository,
                            content_alert_str=content_alert_str,
                            new_repo_alert=new_repo_alert,
                            admin_only=admin_only)
            if ok:
                # Update the repository files for browsing.
                hg_util.update_repository(repo_dir)
                # Get the new repository tip.
                if tip == repository.tip():
                    message = 'No changes to repository. '
                    status = 'warning'
                else:
                    if (isgzip or isbz2) and uncompress_file:
                        uncompress_str = ' uncompressed and '
                    else:
                        uncompress_str = ' '
                    if uploaded_directory:
                        source_type = "repository"
                        source = url
                    else:
                        source_type = "file"
                        source = uploaded_file_filename
                    message = "The %s <b>%s</b> has been successfully%suploaded to the repository. " % \
                        (source_type, escape(source), uncompress_str)
                    if istar and (undesirable_dirs_removed or undesirable_files_removed):
                        items_removed = undesirable_dirs_removed + undesirable_files_removed
                        message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " % items_removed
                        message += "were removed from the archive. "
                    if istar and remove_repo_files_not_in_tar and files_to_remove:
                        if upload_point is not None:
                            message += " %d files were removed from the repository relative to the selected upload point '%s'. " % \
                                (len(files_to_remove), upload_point)
                        else:
                            message += " %d files were removed from the repository root. " % len(
                                files_to_remove)
                    # Regenerate repository metadata for the new tip.
                    rmm = repository_metadata_manager.RepositoryMetadataManager(
                        app=trans.app,
                        user=trans.user,
                        repository=repository)
                    status, error_message = \
                        rmm.set_repository_metadata_due_to_new_tip(trans.request.host,
                                                                   content_alert_str=content_alert_str,
                                                                   **kwd)
                    if error_message:
                        message = error_message
                    kwd['message'] = message
                if repository.metadata_revisions:
                    # A repository's metadata revisions are order descending by update_time, so the zeroth revision
                    # will be the tip just after an upload.
                    metadata_dict = repository.metadata_revisions[
                        0].metadata
                else:
                    metadata_dict = {}
                dd = dependency_display.DependencyDisplayer(trans.app)
                if str(repository.type) not in [
                        rt_util.REPOSITORY_SUITE_DEFINITION,
                        rt_util.TOOL_DEPENDENCY_DEFINITION
                ]:
                    change_repository_type_message = rt_util.generate_message_for_repository_type_change(
                        trans.app, repository)
                    if change_repository_type_message:
                        message += change_repository_type_message
                        status = 'warning'
                    else:
                        # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies
                        # weren't loaded due to a requirement tag mismatch or some other problem.  Tool dependency
                        # definitions can define orphan tool dependencies (no relationship to any tools contained in the
                        # repository), so warning messages are important because orphans are always valid.  The repository
                        # owner must be warned in case they did not intend to define an orphan dependency, but simply
                        # provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
                        orphan_message = dd.generate_message_for_orphan_tool_dependencies(
                            repository, metadata_dict)
                        if orphan_message:
                            message += orphan_message
                            status = 'warning'
                # Handle messaging for invalid tool dependencies.
                invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies(
                    metadata_dict)
                if invalid_tool_dependencies_message:
                    message += invalid_tool_dependencies_message
                    status = 'error'
                # Handle messaging for invalid repository dependencies.
                invalid_repository_dependencies_message = \
                    dd.generate_message_for_invalid_repository_dependencies(metadata_dict,
                                                                            error_from_tuple=True)
                if invalid_repository_dependencies_message:
                    message += invalid_repository_dependencies_message
                    status = 'error'
                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                stdtm.reset_tool_data_tables()
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                trans.response.send_redirect(
                    web.url_for(controller='repository',
                                action='browse_repository',
                                id=repository_id,
                                commit_message='Deleted selected files',
                                message=message,
                                status=status))
            else:
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                status = 'error'
            # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
            stdtm.reset_tool_data_tables()
    # Fall through to re-render the upload form (initial GET, or failed upload).
    return trans.fill_template(
        '/webapps/tool_shed/repository/upload.mako',
        repository=repository,
        changeset_revision=tip,
        url=url,
        commit_message=commit_message,
        uncompress_file=uncompress_file,
        remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
        message=message,
        status=status)
def handle_repository_contents(self,
                               tool_shed_repository,
                               repository_clone_url,
                               relative_install_dir,
                               repository_elem,
                               install_dependencies,
                               is_repository_dependency=False):
    """
    Generate the metadata for the installed tool shed repository, among other things.
    If the installed tool_shed_repository contains tools that are loaded into the Galaxy tool panel,
    this method will automatically eliminate all entries for each of the tools defined in the received
    repository_elem from all non-shed-related tool panel configuration files since the entries are
    automatically added to the reserved migrated_tools_conf.xml file as part of the migration process.
    """
    tool_configs_to_filter = []
    tool_panel_dict_for_display = odict()
    if self.tool_path:
        repo_install_dir = os.path.join(self.tool_path,
                                        relative_install_dir)
    else:
        repo_install_dir = relative_install_dir
    if not is_repository_dependency:
        for tool_elem in repository_elem:
            # The tool_elem looks something like this:
            # <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
            tool_config = tool_elem.get('file')
            guid = self.get_guid(repository_clone_url,
                                 relative_install_dir, tool_config)
            # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
            is_displayed, tool_sections = self.get_containing_tool_sections(
                tool_config)
            if is_displayed:
                tool_panel_dict_for_tool_config = \
                    self.tpm.generate_tool_panel_dict_for_tool_config(guid,
                                                                      tool_config,
                                                                      tool_sections=tool_sections)
                # The tool-panel_dict has the following structure.
                # {<Tool guid> : [{ tool_config : <tool_config_file>,
                #                   id: <ToolSection id>,
                #                   version : <ToolSection version>,
                #                   name : <TooSection name>}]}
                for k, v in tool_panel_dict_for_tool_config.items():
                    tool_panel_dict_for_display[k] = v
                    for tool_panel_dict in v:
                        # Keep track of tool config file names associated with entries that have been made to the
                        # migrated_tools_conf.xml file so they can be eliminated from all non-shed-related tool panel configs.
                        if tool_config not in tool_configs_to_filter:
                            tool_configs_to_filter.append(tool_config)
            else:
                log.error(
                    'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).'
                    % (guid, tool_config, ", ".join(
                        self.proprietary_tool_confs or [])))
        if tool_configs_to_filter:
            # Serialize rewriting of the shared tool panel config files.
            lock = threading.Lock()
            lock.acquire(True)
            try:
                self.filter_and_persist_proprietary_tool_panel_configs(
                    tool_configs_to_filter)
            except Exception:
                log.exception(
                    "Exception attempting to filter and persist non-shed-related tool panel configs"
                )
            finally:
                lock.release()
    # Generate and persist metadata for the changeset revision being installed.
    irmm = InstalledRepositoryMetadataManager(
        app=self.app,
        tpm=self.tpm,
        repository=tool_shed_repository,
        changeset_revision=tool_shed_repository.changeset_revision,
        repository_clone_url=repository_clone_url,
        shed_config_dict=self.shed_config_dict,
        relative_install_dir=relative_install_dir,
        repository_files_dir=None,
        resetting_all_metadata_on_repository=False,
        updating_installed_repository=False,
        persist=True)
    irmm.generate_metadata_for_changeset_revision()
    irmm_metadata_dict = irmm.get_metadata_dict()
    tool_shed_repository.metadata = irmm_metadata_dict
    self.app.install_model.context.add(tool_shed_repository)
    self.app.install_model.context.flush()
    has_tool_dependencies = self.__has_tool_dependencies(
        irmm_metadata_dict)
    if has_tool_dependencies:
        # All tool_dependency objects must be created before the tools are processed even if no
        # tool dependencies will be installed.
        tool_dependencies = tool_dependency_util.create_tool_dependency_objects(
            self.app,
            tool_shed_repository,
            relative_install_dir,
            set_status=True)
    else:
        tool_dependencies = None
    if 'tools' in irmm_metadata_dict:
        # Copy sample/index data files referenced by the repository's tools into place.
        tdtm = data_table_manager.ToolDataTableManager(self.app)
        sample_files = irmm_metadata_dict.get('sample_files', [])
        sample_files = [str(s) for s in sample_files]
        tool_index_sample_files = tdtm.get_tool_index_sample_files(
            sample_files)
        tool_util.copy_sample_files(self.app,
                                    tool_index_sample_files,
                                    tool_path=self.tool_path)
        sample_files_copied = [s for s in tool_index_sample_files]
        repository_tools_tups = irmm.get_repository_tools_tups()
        if repository_tools_tups:
            # Handle missing data table entries for tool parameters that are dynamically
            # generated select lists.
            repository_tools_tups = tdtm.handle_missing_data_table_entry(
                relative_install_dir, self.tool_path,
                repository_tools_tups)
            # Handle missing index files for tool parameters that are dynamically generated select lists.
            repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file(
                self.app, self.tool_path, sample_files,
                repository_tools_tups, sample_files_copied)
            # Copy remaining sample files included in the repository to the ~/tool-data
            # directory of the local Galaxy instance.
            tool_util.copy_sample_files(
                self.app,
                sample_files,
                tool_path=self.tool_path,
                sample_files_copied=sample_files_copied)
            if not is_repository_dependency:
                self.tpm.add_to_tool_panel(
                    tool_shed_repository.name,
                    repository_clone_url,
                    tool_shed_repository.installed_changeset_revision,
                    repository_tools_tups,
                    self.repository_owner,
                    self.migrated_tools_config,
                    tool_panel_dict=tool_panel_dict_for_display,
                    new_install=True)
    if install_dependencies and tool_dependencies and has_tool_dependencies:
        # Install tool dependencies.
        irm = install_manager.InstallRepositoryManager(self.app, self.tpm)
        itdm = install_manager.InstallToolDependencyManager(self.app)
        irm.update_tool_shed_repository_status(
            tool_shed_repository,
            self.app.install_model.ToolShedRepository.installation_status.
            INSTALLING_TOOL_DEPENDENCIES)
        # Get the tool_dependencies.xml file from disk.
        tool_dependencies_config = hg_util.get_config_from_disk(
            'tool_dependencies.xml', repo_install_dir)
        installed_tool_dependencies = itdm.install_specified_tool_dependencies(
            tool_shed_repository=tool_shed_repository,
            tool_dependencies_config=tool_dependencies_config,
            tool_dependencies=tool_dependencies,
            from_tool_migration_manager=True)
        for installed_tool_dependency in installed_tool_dependencies:
            if installed_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
                log.error(
                    'The ToolMigrationManager returned the following error while installing tool dependency %s: %s',
                    installed_tool_dependency.name,
                    installed_tool_dependency.error_message)
    if 'datatypes' in irmm_metadata_dict:
        # Repository ships custom datatypes: load them into the datatypes registry.
        cdl = custom_datatype_manager.CustomDatatypeLoader(self.app)
        tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
        if not tool_shed_repository.includes_datatypes:
            tool_shed_repository.includes_datatypes = True
        self.app.install_model.context.add(tool_shed_repository)
        self.app.install_model.context.flush()
        work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-hrc")
        datatypes_config = hg_util.get_config_from_disk(
            suc.DATATYPES_CONFIG_FILENAME, repo_install_dir)
        # Load proprietary data types required by tools.  The value of override is not
        # important here since the Galaxy server will be started after this installation
        # completes.
        converter_path, display_path = \
            cdl.alter_config_and_load_prorietary_datatypes(datatypes_config,
                                                           repo_install_dir,
                                                           override=False)
        if converter_path or display_path:
            # Create a dictionary of tool shed repository related information.
            repository_dict = \
                cdl.create_repository_dict_for_proprietary_datatypes(tool_shed=self.tool_shed_url,
                                                                     name=tool_shed_repository.name,
                                                                     owner=self.repository_owner,
                                                                     installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
                                                                     tool_dicts=irmm_metadata_dict.get('tools', []),
                                                                     converter_path=converter_path,
                                                                     display_path=display_path)
        if converter_path:
            # Load proprietary datatype converters
            self.app.datatypes_registry.load_datatype_converters(
                self.toolbox, installed_repository_dict=repository_dict)
        if display_path:
            # Load proprietary datatype display applications
            self.app.datatypes_registry.load_display_applications(
                self.app, installed_repository_dict=repository_dict)
        basic_util.remove_dir(work_dir)
def repair_tool_shed_repository(self, repository, repo_info_dict):
    """
    Bring an installed tool shed repository back to a working INSTALLED state.

    Three cases, keyed off repository.status:
    - DEACTIVATED: simply re-activate the repository.
    - any non-INSTALLED state: reset attributes to NEW and perform a full reinstall.
    - INSTALLED: reinstall only missing/errored tool dependencies.

    :returns: repair_dict mapping repository name to error information collected
        during the repair (empty dict when no errors occurred).
    """

    def add_repair_dict_entry(repository_name, error_message):
        # Accumulate error messages per repository name in the enclosing repair_dict.
        if repository_name in repair_dict:
            repair_dict[repository_name].append(error_message)
        else:
            repair_dict[repository_name] = [error_message]
        return repair_dict

    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(
        self.app, repository.tool_shed)
    metadata = repository.metadata
    # The repository.metadata contains dependency information that corresponds to the current changeset revision,
    # which may be different from what is stored in the database
    # If any of these repository-repository dependency associations is obsolete, clean_dependency_relationships removes them.
    suc.clean_dependency_relationships(self.app, metadata, repository,
                                      tool_shed_url)
    repair_dict = {}
    tpm = tool_panel_manager.ToolPanelManager(self.app)
    if repository.status in [
            self.app.install_model.ToolShedRepository.installation_status.
            DEACTIVATED
    ]:
        try:
            self.app.installed_repository_manager.activate_repository(
                repository)
        except Exception as e:
            error_message = "Error activating repository %s: %s" % (
                repository.name, str(e))
            log.debug(error_message)
            # NOTE(review): this stores a bare string while add_repair_dict_entry
            # stores lists of messages — callers see mixed value types; verify intended.
            repair_dict[repository.name] = error_message
    elif repository.status not in [
            self.app.install_model.ToolShedRepository.installation_status.
            INSTALLED
    ]:
        shed_tool_conf, tool_path, relative_install_dir = \
            suc.get_tool_panel_config_tool_path_install_dir(
                self.app,
                repository
            )
        # Reset the repository attributes to the New state for installation.
        if metadata:
            _, tool_panel_section_key = \
                tpm.handle_tool_panel_selection(
                    self.app.toolbox,
                    metadata,
                    no_changes_checked=True,
                    tool_panel_section_id=None,
                    new_tool_panel_section_label=None
                )
        else:
            # The tools will be loaded outside of any sections in the tool panel.
            tool_panel_section_key = None
        repository_util.set_repository_attributes(
            self.app,
            repository,
            status=self.app.install_model.ToolShedRepository.
            installation_status.NEW,
            error_message=None,
            deleted=False,
            uninstalled=False,
            remove_from_disk=True)
        irm = install_manager.InstallRepositoryManager(self.app, tpm)
        irm.install_tool_shed_repository(
            repository,
            repo_info_dict,
            tool_panel_section_key,
            shed_tool_conf,
            tool_path,
            install_tool_dependencies=True,
            install_resolver_dependencies=
            False,  # Assuming repairs are only necessary toolshed packages
            reinstalling=True)
        if repository.status in [
                self.app.install_model.ToolShedRepository.
                installation_status.ERROR
        ]:
            repair_dict = add_repair_dict_entry(repository.name,
                                                repository.error_message)
    else:
        irm = install_manager.InstallRepositoryManager(self.app, tpm)
        # We have an installed tool shed repository, so handle tool dependencies if necessary.
        if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata:
            work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-itdep")
            # Reset missing tool dependencies.
            for tool_dependency in repository.missing_tool_dependencies:
                if tool_dependency.status in [
                        self.app.install_model.ToolDependency.
                        installation_status.ERROR, self.app.install_model.
                        ToolDependency.installation_status.INSTALLING
                ]:
                    tool_dependency = \
                        tool_dependency_util.set_tool_dependency_attributes(
                            self.app,
                            tool_dependency=tool_dependency,
                            status=self.app.install_model.ToolDependency.installation_status.UNINSTALLED
                        )
            # Install tool dependencies.
            irm.update_tool_shed_repository_status(
                repository, self.app.install_model.ToolShedRepository.
                installation_status.INSTALLING_TOOL_DEPENDENCIES)
            # Get the tool_dependencies.xml file from the repository.
            tool_dependencies_config = hg_util.get_config_from_disk(
                'tool_dependencies.xml', repository.repo_path(self.app))
            itdm = install_manager.InstallToolDependencyManager(self.app)
            installed_tool_dependencies = itdm.install_specified_tool_dependencies(
                tool_shed_repository=repository,
                tool_dependencies_config=tool_dependencies_config,
                tool_dependencies=repository.tool_dependencies,
                from_tool_migration_manager=False)
            for installed_tool_dependency in installed_tool_dependencies:
                if installed_tool_dependency.status in [
                        self.app.install_model.ToolDependency.
                        installation_status.ERROR
                ]:
                    repair_dict = add_repair_dict_entry(
                        repository.name,
                        installed_tool_dependency.error_message)
            basic_util.remove_dir(work_dir)
        irm.update_tool_shed_repository_status(
            repository, self.app.install_model.ToolShedRepository.
            installation_status.INSTALLED)
    return repair_dict
def sync_database_with_file_system(self, app, tool_shed_repository, tool_dependency_name, tool_dependency_version,
                                   tool_dependency_install_dir, tool_dependency_type='package'):
    """
    Synchronize a tool dependency's database record with its on-disk installation directory.

    The installation directory defined by the received tool_dependency_install_dir exists, so check for
    the presence of INSTALLATION_LOG.  If the file exists, we'll assume the tool dependency is installed,
    but not necessarily successfully (it could be in an error state on disk).  However, we can justifiably
    assume here that no matter the state, an associated database record will exist.

    :param app: the Galaxy application (provides install_model and config)
    :param tool_shed_repository: the installed repository record the dependency belongs to
    :param tool_dependency_name: name of the tool dependency
    :param tool_dependency_version: version of the tool dependency
    :param tool_dependency_install_dir: existing installation directory to inspect
    :param tool_dependency_type: dependency type, defaults to 'package'
    :returns: 2-tuple ``(tool_dependency, can_install_tool_dependency)`` - the boolean is True only when
        the on-disk state indicated a failed/partial install, the directory was removed, and the
        dependency should be re-installed.
    """
    # This method should be reached very rarely.  It implies that either the Galaxy environment
    # became corrupted (i.e., the database records for installed tool dependencies is not synchronized
    # with tool dependencies on disk) or the Tool Shed's install and test framework is running.  The Tool
    # Shed's install and test framework installs repositories in 2 stages, those of type tool_dependency_definition
    # followed by those containing valid tools and tool functional test components.
    log.debug("Synchronizing the database with the file system...")
    try:
        log.debug("The value of app.config.running_functional_tests is: %s" %
                  str(app.config.running_functional_tests))
    except Exception:
        # Best-effort debug logging only; the config attribute may be absent.  Previously a bare
        # "except:" which also swallowed SystemExit/KeyboardInterrupt - narrowed to Exception.
        pass
    sa_session = app.install_model.context
    can_install_tool_dependency = False
    tool_dependency = \
        tool_dependency_util.get_tool_dependency_by_name_version_type_repository(app,
                                                                                 tool_shed_repository,
                                                                                 tool_dependency_name,
                                                                                 tool_dependency_version,
                                                                                 tool_dependency_type)
    if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLING:
        # The tool dependency is in an Installing state, so we don't want to do anything to it.  If the tool
        # dependency is being installed by someone else, we don't want to interfere with that.  This assumes
        # the installation by "someone else" is not hung in an Installing state, which is a weakness if that
        # "someone else" never repaired it.
        log.debug('Skipping installation of tool dependency %s version %s because it has a status of %s' %
                  (str(tool_dependency.name), str(tool_dependency.version), str(tool_dependency.status)))
    else:
        # We have a pre-existing installation directory on the file system, but our associated database record is
        # in a state that allowed us to arrive here.  At this point, we'll inspect the installation directory to
        # see if we have a "proper installation" and if so, synchronize the database record rather than reinstalling
        # the dependency if we're "running_functional_tests".  If we're not "running_functional_tests, we'll set
        # the tool dependency's installation status to ERROR.
        tool_dependency_installation_directory_contents = os.listdir(tool_dependency_install_dir)
        if basic_util.INSTALLATION_LOG in tool_dependency_installation_directory_contents:
            # Since this tool dependency's installation directory contains an installation log, we consider it to be
            # installed.  In some cases the record may be missing from the database due to some activity outside of
            # the control of the Tool Shed.  Since a new record was created for it and we don't know the state of the
            # files on disk, we will set it to an error state (unless we are running Tool Shed functional tests - see
            # below).
            log.debug('Skipping installation of tool dependency %s version %s because it is installed in %s' %
                      (str(tool_dependency.name), str(tool_dependency.version), str(tool_dependency_install_dir)))
            if app.config.running_functional_tests:
                # If we are running functional tests, the state will be set to Installed because previously compiled
                # tool dependencies are not deleted by default, from the "install and test" framework..
                tool_dependency.status = app.install_model.ToolDependency.installation_status.INSTALLED
            else:
                error_message = 'The installation directory for this tool dependency had contents but the database had no record. '
                error_message += 'The installation log may show this tool dependency to be correctly installed, but due to the '
                error_message += 'missing database record it is now being set to Error.'
                tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR
                tool_dependency.error_message = error_message
        else:
            # No installation log: the prior install never completed.  Remove the directory and flag
            # the dependency for re-installation.
            error_message = '\nInstallation path %s for tool dependency %s version %s exists, but the expected file %s' % \
                (str(tool_dependency_install_dir), str(tool_dependency_name),
                 str(tool_dependency_version), str(basic_util.INSTALLATION_LOG))
            error_message += ' is missing. This indicates an installation error so the tool dependency is being'
            error_message += ' prepared for re-installation.'
            log.error(error_message)
            tool_dependency.status = app.install_model.ToolDependency.installation_status.NEVER_INSTALLED
            basic_util.remove_dir(tool_dependency_install_dir)
            can_install_tool_dependency = True
        sa_session.add(tool_dependency)
        sa_session.flush()
    try:
        log.debug("Returning from sync_database_with_file_system with tool_dependency %s, can_install_tool_dependency %s." %
                  (str(tool_dependency.name), str(can_install_tool_dependency)))
    except Exception as e:
        log.debug(str(e))
    return tool_dependency, can_install_tool_dependency
def repair_tool_shed_repository( self, repository, repo_info_dict ):
    """
    Repair the received installed tool shed repository.

    Depending on the repository's current installation status this will: reactivate a
    deactivated repository, reinstall a repository that is not fully installed, or
    (for an installed repository) reinstall any missing tool dependencies.

    :param repository: the installed ToolShedRepository record to repair
    :param repo_info_dict: repository installation information passed through to the installer
    :returns: repair_dict mapping repository name to error message(s); empty when no errors occurred.
        NOTE(review): the activation-failure branch stores a plain string value while
        add_repair_dict_entry stores a list of strings - callers must tolerate both; confirm intended.
    """
    def add_repair_dict_entry( repository_name, error_message ):
        # Accumulate error messages per repository name (closure over repair_dict below).
        if repository_name in repair_dict:
            repair_dict[ repository_name ].append( error_message )
        else:
            repair_dict[ repository_name ] = [ error_message ]
        return repair_dict
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, repository.tool_shed )
    metadata = repository.metadata
    # The repository.metadata contains dependency information that corresponds to the current changeset revision,
    # which may be different from what is stored in the database
    # If any of these repository-repository dependency associations is obsolete, clean_dependency_relationships removes them.
    suc.clean_dependency_relationships(self.app, metadata, repository, tool_shed_url)
    repair_dict = {}
    tpm = tool_panel_manager.ToolPanelManager( self.app )
    if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
        # Deactivated repositories only need to be reactivated.
        try:
            self.app.installed_repository_manager.activate_repository( repository )
        except Exception as e:
            error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
            log.debug( error_message )
            repair_dict[ repository.name ] = error_message
    elif repository.status not in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
        # The repository is in some other non-installed state, so reset it to NEW and reinstall it.
        shed_tool_conf, tool_path, relative_install_dir = \
            suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
        # Reset the repository attributes to the New state for installation.
        if metadata:
            # Only the tool panel section key is needed here; the first return value is discarded.
            _, tool_panel_section_key = \
                tpm.handle_tool_panel_selection( self.app.toolbox,
                                                 metadata,
                                                 no_changes_checked=True,
                                                 tool_panel_section_id=None,
                                                 new_tool_panel_section_label=None )
        else:
            # The tools will be loaded outside of any sections in the tool panel.
            tool_panel_section_key = None
        repository_util.set_repository_attributes( self.app,
                                                   repository,
                                                   status=self.app.install_model.ToolShedRepository.installation_status.NEW,
                                                   error_message=None,
                                                   deleted=False,
                                                   uninstalled=False,
                                                   remove_from_disk=True )
        irm = install_manager.InstallRepositoryManager( self.app, tpm )
        irm.install_tool_shed_repository( repository,
                                          repo_info_dict,
                                          tool_panel_section_key,
                                          shed_tool_conf,
                                          tool_path,
                                          install_tool_dependencies=True,
                                          install_resolver_dependencies=False,  # Assuming repairs are only necessary toolshed packages
                                          reinstalling=True )
        if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.ERROR ]:
            repair_dict = add_repair_dict_entry( repository.name, repository.error_message )
    else:
        irm = install_manager.InstallRepositoryManager( self.app, tpm )
        # We have an installed tool shed repository, so handle tool dependencies if necessary.
        if repository.missing_tool_dependencies and metadata and 'tool_dependencies' in metadata:
            work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-itdep" )
            # Reset missing tool dependencies.
            for tool_dependency in repository.missing_tool_dependencies:
                if tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR,
                                               self.app.install_model.ToolDependency.installation_status.INSTALLING ]:
                    # Reset failed/stuck dependencies to UNINSTALLED so they can be re-installed below.
                    tool_dependency = \
                        tool_dependency_util.set_tool_dependency_attributes( self.app,
                                                                             tool_dependency=tool_dependency,
                                                                             status=self.app.install_model.ToolDependency.installation_status.UNINSTALLED )
            # Install tool dependencies.
            irm.update_tool_shed_repository_status( repository,
                                                    self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
            # Get the tool_dependencies.xml file from the repository.
            tool_dependencies_config = hg_util.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( self.app ) )
            itdm = install_manager.InstallToolDependencyManager( self.app )
            installed_tool_dependencies = itdm.install_specified_tool_dependencies( tool_shed_repository=repository,
                                                                                    tool_dependencies_config=tool_dependencies_config,
                                                                                    tool_dependencies=repository.tool_dependencies,
                                                                                    from_tool_migration_manager=False )
            # Record any dependencies that still ended up in an ERROR state.
            for installed_tool_dependency in installed_tool_dependencies:
                if installed_tool_dependency.status in [ self.app.install_model.ToolDependency.installation_status.ERROR ]:
                    repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message )
            basic_util.remove_dir( work_dir )
        irm.update_tool_shed_repository_status( repository,
                                                self.app.install_model.ToolShedRepository.installation_status.INSTALLED )
    return repair_dict
# Handle messaging for invalid tool dependencies. invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies( metadata_dict ) if invalid_tool_dependencies_message: message += invalid_tool_dependencies_message status = 'error' # Handle messaging for invalid repository dependencies. invalid_repository_dependencies_message = \ dd.generate_message_for_invalid_repository_dependencies( metadata_dict, error_from_tuple=True ) if invalid_repository_dependencies_message: message += invalid_repository_dependencies_message status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. tdtm.reset_tool_data_tables() if uploaded_directory: basic_util.remove_dir( uploaded_directory ) trans.response.send_redirect( web.url_for( controller='repository', action='browse_repository', id=repository_id, commit_message='Deleted selected files', message=message, status=status ) ) else: if uploaded_directory: basic_util.remove_dir( uploaded_directory ) status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. tdtm.reset_tool_data_tables() selected_categories = [ trans.security.decode_id( id ) for id in category_ids ] return trans.fill_template( '/webapps/tool_shed/repository/upload.mako', repository=repository,
def upload(self, trans, **kwd):
    """
    Handle a file, URL, or mercurial-clone upload to a Tool Shed repository.

    Accepts content as an uploaded file (``file_data``), a plain http(s) URL, or an
    ``hg``-prefixed URL (fetched via mercurial clone), optionally uncompressing archives,
    committing the result to the repository's mercurial repo, sending email alerts, and
    resetting repository metadata.  Renders the upload template (or redirects to the
    repository browse page on success).
    """
    message = escape(kwd.get('message', ''))
    status = kwd.get('status', 'done')
    commit_message = escape(kwd.get('commit_message', 'Uploaded'))
    repository_id = kwd.get('repository_id', '')
    repository = repository_util.get_repository_in_tool_shed(trans.app, repository_id)
    repo_dir = repository.repo_path(trans.app)
    repo = hg_util.get_repo_for_repository(trans.app, repository=None, repo_path=repo_dir, create=False)
    uncompress_file = util.string_as_bool(kwd.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(kwd.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = commit_util.get_upload_point(repository, **kwd)
    # Current tip, captured before the upload so we can detect whether anything changed.
    tip = repository.tip(trans.app)
    file_data = kwd.get('file_data', '')
    url = kwd.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new(trans.app)
    uploaded_directory = None
    if kwd.get('upload_button', False):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            # Rewrite e.g. "hg://..." / "hgs://..." to "http://..." / "https://...".
            repo_url = 'http%s' % url[len('hg'):]
            repo_url = repo_url.encode('ascii', 'replace')
            try:
                commands.clone(hg_util.get_configured_ui(), repo_url, uploaded_directory)
            except Exception as e:
                message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string(str(e))
                status = 'error'
                basic_util.remove_dir(uploaded_directory)
                uploaded_directory = None
        elif url:
            # Download the file at the given URL into a temp file, streaming in chunks.
            valid_url = True
            try:
                stream = requests.get(url, stream=True)
            except Exception as e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str(e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                for chunk in stream.iter_content(chunk_size=util.CHUNK_SIZE):
                    if chunk:
                        uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        elif file_data not in ('', None):
            # Browser form upload: file_data is a cgi-style field with .file and .filename.
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = os.path.split(file_data.filename)[-1]
            isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        if uploaded_file or uploaded_directory:
            rdah = attribute_handlers.RepositoryDependencyAttributeHandler(trans.app, unpopulate=False)
            tdah = attribute_handlers.ToolDependencyAttributeHandler(trans.app, unpopulate=False)
            stdtm = ShedToolDataTableManager(trans.app)
            ok = True
            isgzip = False
            isbz2 = False
            if uploaded_file:
                if uncompress_file:
                    isgzip = checkers.is_gzip(uploaded_file_name)
                    if not isgzip:
                        isbz2 = checkers.is_bz2(uploaded_file_name)
                if isempty:
                    tar = None
                    istar = False
                else:
                    # Determine what we have - a single file or an archive
                    try:
                        if (isgzip or isbz2) and uncompress_file:
                            # Open for reading with transparent compression.
                            tar = tarfile.open(uploaded_file_name, 'r:*')
                        else:
                            tar = tarfile.open(uploaded_file_name)
                        istar = True
                    except tarfile.ReadError as e:
                        tar = None
                        istar = False
            else:
                # Uploaded directory
                istar = False
            if istar:
                # Extract the archive into the repository.
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    repository_content_util.upload_tar(
                        trans,
                        rdah,
                        tdah,
                        repository,
                        tar,
                        uploaded_file,
                        upload_point,
                        remove_repo_files_not_in_tar,
                        commit_message,
                        new_repo_alert
                    )
            elif uploaded_directory:
                # Copy the cloned mercurial repository's contents into the repository.
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_directory(trans,
                                          rdah,
                                          tdah,
                                          repository,
                                          uploaded_directory,
                                          upload_point,
                                          remove_repo_files_not_in_tar,
                                          commit_message,
                                          new_repo_alert)
            else:
                # Single-file upload.
                if (isgzip or isbz2) and uncompress_file:
                    uploaded_file_filename = commit_util.uncompress(repository,
                                                                    uploaded_file_name,
                                                                    uploaded_file_filename,
                                                                    isgzip=isgzip,
                                                                    isbz2=isbz2)
                # Definition-only repository types accept exactly one specifically-named file.
                if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION and \
                        uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Repository suite definition</b> can only contain a single file named '
                    message += '<b>repository_dependencies.xml</b>.'
                elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and \
                        uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named '
                    message += '<b>tool_dependencies.xml</b>.'
                if ok:
                    if upload_point is not None:
                        full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename))
                    else:
                        full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename))
                    # Move some version of the uploaded file to the load_point within the repository hierarchy.
                    if uploaded_file_filename in [rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME]:
                        # Inspect the contents of the file to see if toolshed or changeset_revision attributes
                        # are missing and if so, set them appropriately.
                        altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        elif altered:
                            tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                            shutil.move(tmp_filename, full_path)
                        else:
                            shutil.move(uploaded_file_name, full_path)
                    elif uploaded_file_filename in [rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME]:
                        # Inspect the contents of the file to see if changeset_revision values are
                        # missing and if so, set them appropriately.
                        altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        if ok:
                            if altered:
                                tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                                shutil.move(tmp_filename, full_path)
                            else:
                                shutil.move(uploaded_file_name, full_path)
                    else:
                        shutil.move(uploaded_file_name, full_path)
                    if ok:
                        # See if any admin users have chosen to receive email alerts when a repository is updated.
                        # If so, check every uploaded file to ensure content is appropriate.
                        check_contents = commit_util.check_file_contents_for_email_alerts(trans.app)
                        if check_contents and os.path.isfile(full_path):
                            content_alert_str = commit_util.check_file_content_for_html_and_images(full_path)
                        else:
                            content_alert_str = ''
                        hg_util.add_changeset(repo.ui, repo, full_path)
                        # Convert from unicode to prevent "TypeError: array item must be char"
                        full_path = full_path.encode('ascii', 'replace')
                        # NOTE(review): after encode(), full_path is bytes under Python 3, so the
                        # str-argument endswith() test below would never match - this path appears to
                        # be Python 2-era code; confirm the target runtime.
                        hg_util.commit_changeset(repo.ui,
                                                 repo,
                                                 full_path_to_changeset=full_path,
                                                 username=trans.user.username,
                                                 message=commit_message)
                        if full_path.endswith('tool_data_table_conf.xml.sample'):
                            # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
                            # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
                            # dictionary.
                            error, error_message = stdtm.handle_sample_tool_data_table_conf_file(full_path, persist=False)
                            if error:
                                message = '%s<br/>%s' % (message, error_message)
                        # See if the content of the change set was valid.
                        admin_only = len(repository.downloadable_revisions) != 1
                        suc.handle_email_alerts(trans.app,
                                                trans.request.host,
                                                repository,
                                                content_alert_str=content_alert_str,
                                                new_repo_alert=new_repo_alert,
                                                admin_only=admin_only)
            if ok:
                # Update the repository files for browsing.
                hg_util.update_repository(repo)
                # Get the new repository tip.
                if tip == repository.tip(trans.app):
                    message = 'No changes to repository. '
                    status = 'warning'
                else:
                    # Build a success message describing what was uploaded and what was removed.
                    if (isgzip or isbz2) and uncompress_file:
                        uncompress_str = ' uncompressed and '
                    else:
                        uncompress_str = ' '
                    if uploaded_directory:
                        source_type = "repository"
                        source = url
                    else:
                        source_type = "file"
                        source = uploaded_file_filename
                    message = "The %s <b>%s</b> has been successfully%suploaded to the repository. " % \
                        (source_type, escape(source), uncompress_str)
                    if istar and (undesirable_dirs_removed or undesirable_files_removed):
                        items_removed = undesirable_dirs_removed + undesirable_files_removed
                        message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " % items_removed
                        message += "were removed from the archive. "
                    if istar and remove_repo_files_not_in_tar and files_to_remove:
                        if upload_point is not None:
                            message += " %d files were removed from the repository relative to the selected upload point '%s'. " % \
                                (len(files_to_remove), upload_point)
                        else:
                            message += " %d files were removed from the repository root. " % len(files_to_remove)
                    # The new tip requires repository metadata to be regenerated.
                    rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                                                user=trans.user,
                                                                                repository=repository)
                    status, error_message = \
                        rmm.set_repository_metadata_due_to_new_tip(trans.request.host,
                                                                   content_alert_str=content_alert_str,
                                                                   **kwd)
                    if error_message:
                        message = error_message
                    kwd['message'] = message
                if repository.metadata_revisions:
                    # A repository's metadata revisions are order descending by update_time, so the zeroth revision
                    # will be the tip just after an upload.
                    metadata_dict = repository.metadata_revisions[0].metadata
                else:
                    metadata_dict = {}
                dd = dependency_display.DependencyDisplayer(trans.app)
                if str(repository.type) not in [rt_util.REPOSITORY_SUITE_DEFINITION,
                                                rt_util.TOOL_DEPENDENCY_DEFINITION]:
                    change_repository_type_message = rt_util.generate_message_for_repository_type_change(trans.app,
                                                                                                         repository)
                    if change_repository_type_message:
                        message += change_repository_type_message
                        status = 'warning'
                    else:
                        # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies
                        # weren't loaded due to a requirement tag mismatch or some other problem.  Tool dependency
                        # definitions can define orphan tool dependencies (no relationship to any tools contained in the
                        # repository), so warning messages are important because orphans are always valid.  The repository
                        # owner must be warned in case they did not intend to define an orphan dependency, but simply
                        # provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
                        orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict)
                        if orphan_message:
                            message += orphan_message
                            status = 'warning'
                # Handle messaging for invalid tool dependencies.
                invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies(metadata_dict)
                if invalid_tool_dependencies_message:
                    message += invalid_tool_dependencies_message
                    status = 'error'
                # Handle messaging for invalid repository dependencies.
                invalid_repository_dependencies_message = \
                    dd.generate_message_for_invalid_repository_dependencies(metadata_dict,
                                                                            error_from_tuple=True)
                if invalid_repository_dependencies_message:
                    message += invalid_repository_dependencies_message
                    status = 'error'
                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                stdtm.reset_tool_data_tables()
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                trans.response.send_redirect(web.url_for(controller='repository',
                                                         action='browse_repository',
                                                         id=repository_id,
                                                         commit_message='Deleted selected files',
                                                         message=message,
                                                         status=status))
            else:
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                status = 'error'
            # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
            stdtm.reset_tool_data_tables()
    return trans.fill_template('/webapps/tool_shed/repository/upload.mako',
                               repository=repository,
                               changeset_revision=tip,
                               url=url,
                               commit_message=commit_message,
                               uncompress_file=uncompress_file,
                               remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
                               message=message,
                               status=status)
# Handle messaging for invalid tool dependencies. invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies( metadata_dict ) if invalid_tool_dependencies_message: message += invalid_tool_dependencies_message status = 'error' # Handle messaging for invalid repository dependencies. invalid_repository_dependencies_message = \ dd.generate_message_for_invalid_repository_dependencies( metadata_dict, error_from_tuple=True ) if invalid_repository_dependencies_message: message += invalid_repository_dependencies_message status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. tdtm.reset_tool_data_tables() if uploaded_directory: basic_util.remove_dir( uploaded_directory ) trans.response.send_redirect( web.url_for( controller='repository', action='browse_repository', id=repository_id, commit_message='Deleted selected files', message=message, status=status ) ) else: if uploaded_directory: basic_util.remove_dir( uploaded_directory ) status = 'error' # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. tdtm.reset_tool_data_tables() return trans.fill_template( '/webapps/tool_shed/repository/upload.mako', repository=repository, changeset_revision=tip,
def handle_repository_contents(self, tool_shed_repository, repository_clone_url, relative_install_dir,
                               repository_elem, install_dependencies, is_repository_dependency=False):
    """
    Generate the metadata for the installed tool shed repository, among other things.  If the installed
    tool_shed_repository contains tools that are loaded into the Galaxy tool panel, this method will
    automatically eliminate all entries for each of the tools defined in the received repository_elem
    from all non-shed-related tool panel configuration files since the entries are automatically added
    to the reserved migrated_tools_conf.xml file as part of the migration process.

    :param tool_shed_repository: the installed repository record being migrated
    :param repository_clone_url: clone URL used to derive tool guids and panel entries
    :param relative_install_dir: repository install dir relative to self.tool_path
    :param repository_elem: XML element listing the repository's <tool> entries
    :param install_dependencies: when True, install the repository's tool dependencies
    :param is_repository_dependency: when True, skip tool-panel handling (the repository
        is only being installed as a dependency of another repository)
    """
    tool_configs_to_filter = []
    tool_panel_dict_for_display = odict()
    if self.tool_path:
        repo_install_dir = os.path.join(self.tool_path, relative_install_dir)
    else:
        repo_install_dir = relative_install_dir
    if not is_repository_dependency:
        for tool_elem in repository_elem:
            # The tool_elem looks something like this:
            # <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
            tool_config = tool_elem.get('file')
            guid = self.get_guid(repository_clone_url, relative_install_dir, tool_config)
            # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
            is_displayed, tool_sections = self.get_containing_tool_sections(tool_config)
            if is_displayed:
                tool_panel_dict_for_tool_config = \
                    self.tpm.generate_tool_panel_dict_for_tool_config(guid,
                                                                      tool_config,
                                                                      tool_sections=tool_sections)
                # The tool-panel_dict has the following structure.
                # {<Tool guid> : [{ tool_config : <tool_config_file>,
                #                   id: <ToolSection id>,
                #                   version : <ToolSection version>,
                #                   name : <TooSection name>}]}
                for k, v in tool_panel_dict_for_tool_config.items():
                    tool_panel_dict_for_display[k] = v
                    for tool_panel_dict in v:
                        # Keep track of tool config file names associated with entries that have been made to the
                        # migrated_tools_conf.xml file so they can be eliminated from all non-shed-related tool panel configs.
                        if tool_config not in tool_configs_to_filter:
                            tool_configs_to_filter.append(tool_config)
            else:
                log.error('The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).'
                          % (guid, tool_config, ", ".join(self.proprietary_tool_confs or [])))
    if tool_configs_to_filter:
        # Serialize the rewrite of the non-shed tool panel config files.
        lock = threading.Lock()
        lock.acquire(True)
        try:
            self.filter_and_persist_proprietary_tool_panel_configs(tool_configs_to_filter)
        except Exception:
            log.exception("Exception attempting to filter and persist non-shed-related tool panel configs")
        finally:
            lock.release()
    # Generate and persist metadata for the repository's current changeset revision.
    irmm = InstalledRepositoryMetadataManager(app=self.app,
                                              tpm=self.tpm,
                                              repository=tool_shed_repository,
                                              changeset_revision=tool_shed_repository.changeset_revision,
                                              repository_clone_url=repository_clone_url,
                                              shed_config_dict=self.shed_config_dict,
                                              relative_install_dir=relative_install_dir,
                                              repository_files_dir=None,
                                              resetting_all_metadata_on_repository=False,
                                              updating_installed_repository=False,
                                              persist=True)
    irmm.generate_metadata_for_changeset_revision()
    irmm_metadata_dict = irmm.get_metadata_dict()
    tool_shed_repository.metadata = irmm_metadata_dict
    self.app.install_model.context.add(tool_shed_repository)
    self.app.install_model.context.flush()
    has_tool_dependencies = self.__has_tool_dependencies(irmm_metadata_dict)
    if has_tool_dependencies:
        # All tool_dependency objects must be created before the tools are processed even if no
        # tool dependencies will be installed.
        tool_dependencies = tool_dependency_util.create_tool_dependency_objects(self.app,
                                                                                tool_shed_repository,
                                                                                relative_install_dir,
                                                                                set_status=True)
    else:
        tool_dependencies = None
    if 'tools' in irmm_metadata_dict:
        tdtm = data_table_manager.ToolDataTableManager(self.app)
        sample_files = irmm_metadata_dict.get('sample_files', [])
        sample_files = [str(s) for s in sample_files]
        tool_index_sample_files = tdtm.get_tool_index_sample_files(sample_files)
        tool_util.copy_sample_files(self.app, tool_index_sample_files, tool_path=self.tool_path)
        sample_files_copied = [s for s in tool_index_sample_files]
        repository_tools_tups = irmm.get_repository_tools_tups()
        if repository_tools_tups:
            # Handle missing data table entries for tool parameters that are dynamically
            # generated select lists.
            repository_tools_tups = tdtm.handle_missing_data_table_entry(relative_install_dir,
                                                                         self.tool_path,
                                                                         repository_tools_tups)
            # Handle missing index files for tool parameters that are dynamically generated select lists.
            repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file(self.app,
                                                                                             self.tool_path,
                                                                                             sample_files,
                                                                                             repository_tools_tups,
                                                                                             sample_files_copied)
            # Copy remaining sample files included in the repository to the ~/tool-data
            # directory of the local Galaxy instance.
            tool_util.copy_sample_files(self.app,
                                        sample_files,
                                        tool_path=self.tool_path,
                                        sample_files_copied=sample_files_copied)
            # NOTE(review): the original source's indentation was lost; this add_to_tool_panel call
            # is assumed to be inside the repository_tools_tups branch - confirm against upstream.
            if not is_repository_dependency:
                self.tpm.add_to_tool_panel(tool_shed_repository.name,
                                           repository_clone_url,
                                           tool_shed_repository.installed_changeset_revision,
                                           repository_tools_tups,
                                           self.repository_owner,
                                           self.migrated_tools_config,
                                           tool_panel_dict=tool_panel_dict_for_display,
                                           new_install=True)
    if install_dependencies and tool_dependencies and has_tool_dependencies:
        # Install tool dependencies.
        irm = install_manager.InstallRepositoryManager(self.app, self.tpm)
        itdm = install_manager.InstallToolDependencyManager(self.app)
        irm.update_tool_shed_repository_status(tool_shed_repository,
                                               self.app.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES)
        # Get the tool_dependencies.xml file from disk.
        tool_dependencies_config = hg_util.get_config_from_disk('tool_dependencies.xml', repo_install_dir)
        installed_tool_dependencies = itdm.install_specified_tool_dependencies(tool_shed_repository=tool_shed_repository,
                                                                               tool_dependencies_config=tool_dependencies_config,
                                                                               tool_dependencies=tool_dependencies,
                                                                               from_tool_migration_manager=True)
        for installed_tool_dependency in installed_tool_dependencies:
            if installed_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
                log.error(
                    'The ToolMigrationManager returned the following error while installing tool dependency %s: %s',
                    installed_tool_dependency.name, installed_tool_dependency.error_message)
    if 'datatypes' in irmm_metadata_dict:
        cdl = custom_datatype_manager.CustomDatatypeLoader(self.app)
        tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
        if not tool_shed_repository.includes_datatypes:
            tool_shed_repository.includes_datatypes = True
        self.app.install_model.context.add(tool_shed_repository)
        self.app.install_model.context.flush()
        work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-hrc")
        datatypes_config = hg_util.get_config_from_disk(suc.DATATYPES_CONFIG_FILENAME, repo_install_dir)
        # Load proprietary data types required by tools.  The value of override is not
        # important here since the Galaxy server will be started after this installation
        # completes.
        converter_path, display_path = \
            cdl.alter_config_and_load_prorietary_datatypes(datatypes_config, repo_install_dir, override=False)
        if converter_path or display_path:
            # Create a dictionary of tool shed repository related information.
            repository_dict = \
                cdl.create_repository_dict_for_proprietary_datatypes(tool_shed=self.tool_shed_url,
                                                                     name=tool_shed_repository.name,
                                                                     owner=self.repository_owner,
                                                                     installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
                                                                     tool_dicts=irmm_metadata_dict.get('tools', []),
                                                                     converter_path=converter_path,
                                                                     display_path=display_path)
        if converter_path:
            # Load proprietary datatype converters
            self.app.datatypes_registry.load_datatype_converters(self.toolbox, installed_repository_dict=repository_dict)
        if display_path:
            # Load proprietary datatype display applications
            self.app.datatypes_registry.load_display_applications(self.app, installed_repository_dict=repository_dict)
        basic_util.remove_dir(work_dir)
class RepositoriesController(BaseAPIController):
    """RESTful controller for interactions with repositories in the Tool Shed."""

    @web.expose_api
    def add_repository_registry_entry(self, trans, payload, **kwd):
        """
        POST /api/repositories/add_repository_registry_entry
        Adds appropriate entries to the repository registry for the repository defined
        by the received name and owner.

        :param key: the user's API key

        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed containing the Repository
        :param name (required): the name of the Repository
        :param owner (required): the owner of the Repository
        """
        response_dict = {}
        # Only administrators may modify the repository registry.
        if not trans.user_is_admin():
            response_dict['status'] = 'error'
            response_dict['message'] = "You are not authorized to add entries to this Tool Shed's repository registry."
            return response_dict
        tool_shed_url = payload.get('tool_shed_url', '')
        if not tool_shed_url:
            raise HTTPBadRequest(detail="Missing required parameter 'tool_shed_url'.")
        tool_shed_url = tool_shed_url.rstrip('/')
        name = payload.get('name', '')
        if not name:
            raise HTTPBadRequest(detail="Missing required parameter 'name'.")
        owner = payload.get('owner', '')
        if not owner:
            raise HTTPBadRequest(detail="Missing required parameter 'owner'.")
        repository = suc.get_repository_by_name_and_owner(trans.app, name, owner)
        if repository is None:
            error_message = 'Cannot locate repository with name %s and owner %s,' % (str(name), str(owner))
            log.debug(error_message)
            response_dict['status'] = 'error'
            response_dict['message'] = error_message
            return response_dict
        # Update the repository registry.
        trans.app.repository_registry.add_entry(repository)
        response_dict['status'] = 'ok'
        response_dict['message'] = 'Entries for repository %s owned by %s have been added to the Tool Shed repository registry.' \
            % (name, owner)
        return response_dict

    @web.expose_api_anonymous
    def get_ordered_installable_revisions(self, trans, name, owner, **kwd):
        """
        GET /api/repositories/get_ordered_installable_revisions

        :param name: the name of the Repository
        :param owner: the owner of the Repository

        Returns the ordered list of changeset revision hash strings that are associated
        with installable revisions.  As in the changelog, the list is ordered oldest to newest.
        """
        # Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_column&owner=test
        if name and owner:
            # Get the repository information.
            repository = suc.get_repository_by_name_and_owner(trans.app, name, owner)
            if repository is None:
                error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
                error_message += "cannot locate repository %s owned by %s." % (str(name), str(owner))
                log.debug(error_message)
                return []
            repo = hg_util.get_repo_for_repository(trans.app,
                                                   repository=repository,
                                                   repo_path=None,
                                                   create=False)
            ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions(repository,
                                                                                         repo,
                                                                                         downloadable=True)
            return ordered_installable_revisions
        else:
            error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: "
            error_message += "invalid name %s or owner %s received." % (str(name), str(owner))
            log.debug(error_message)
            return []

    @web.expose_api_anonymous
    def get_repository_revision_install_info(self, trans, name, owner, changeset_revision, **kwd):
        """
        GET /api/repositories/get_repository_revision_install_info

        :param name: the name of the Repository
        :param owner: the owner of the Repository
        :param changeset_revision: the changeset_revision of the RepositoryMetadata object associated with the Repository

        Returns a list of the following dictionaries::

        - a dictionary defining the Repository.  For example:
            {
                "deleted": false,
                "deprecated": false,
                "description": "add_column hello",
                "id": "f9cad7b01a472135",
                "long_description": "add_column hello",
                "name": "add_column",
                "owner": "test",
                "private": false,
                "times_downloaded": 6,
                "url": "/api/repositories/f9cad7b01a472135",
                "user_id": "f9cad7b01a472135"
            }
        - a dictionary defining the Repository revision (RepositoryMetadata).  For example:
            {
                "changeset_revision": "3a08cc21466f",
                "downloadable": true,
                "has_repository_dependencies": false,
                "has_repository_dependencies_only_if_compiling_contained_td": false,
                "id": "f9cad7b01a472135",
                "includes_datatypes": false,
                "includes_tool_dependencies": false,
                "includes_tools": true,
                "includes_tools_for_display_in_tool_panel": true,
                "includes_workflows": false,
                "malicious": false,
                "repository_id": "f9cad7b01a472135",
                "url": "/api/repository_revisions/f9cad7b01a472135"
            }
        - a dictionary including the additional information required to install the repository.  For example:
            {
                "add_column": [
                    "add_column hello",
                    "http://test@localhost:9009/repos/test/add_column",
                    "3a08cc21466f",
                    "1",
                    "test",
                    {},
                    {}
                ]
            }
        """
        # Example URL:
        # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr>
        if name and owner and changeset_revision:
            # Get the repository information.
            repository = suc.get_repository_by_name_and_owner(trans.app, name, owner)
            if repository is None:
                log.debug('Cannot locate repository %s owned by %s' % (str(name), str(owner)))
                return {}, {}, {}
            encoded_repository_id = trans.security.encode_id(repository.id)
            repository_dict = repository.to_dict(view='element',
                                                 value_mapper=self.__get_value_mapper(trans))
            repository_dict['url'] = web.url_for(controller='repositories',
                                                 action='show',
                                                 id=encoded_repository_id)
            # Get the repository_metadata information.
            repository_metadata = suc.get_repository_metadata_by_changeset_revision(trans.app,
                                                                                    encoded_repository_id,
                                                                                    changeset_revision)
            if repository_metadata is None:
                # The changeset_revision column in the repository_metadata table has been updated with a new
                # value, so find the changeset_revision to which we need to update.
                repo = hg_util.get_repo_for_repository(trans.app,
                                                       repository=repository,
                                                       repo_path=None,
                                                       create=False)
                new_changeset_revision = suc.get_next_downloadable_changeset_revision(repository,
                                                                                      repo,
                                                                                      changeset_revision)
                repository_metadata = suc.get_repository_metadata_by_changeset_revision(trans.app,
                                                                                        encoded_repository_id,
                                                                                        new_changeset_revision)
                changeset_revision = new_changeset_revision
            if repository_metadata is not None:
                encoded_repository_metadata_id = trans.security.encode_id(repository_metadata.id)
                repository_metadata_dict = repository_metadata.to_dict(view='collection',
                                                                       value_mapper=self.__get_value_mapper(trans))
                repository_metadata_dict['url'] = web.url_for(controller='repository_revisions',
                                                              action='show',
                                                              id=encoded_repository_metadata_id)
                # Get the repo_info_dict for installing the repository.
                repo_info_dict, \
                    includes_tools, \
                    includes_tool_dependencies, \
                    includes_tools_for_display_in_tool_panel, \
                    has_repository_dependencies, \
                    has_repository_dependencies_only_if_compiling_contained_td = \
                    repository_util.get_repo_info_dict(trans.app,
                                                       trans.user,
                                                       encoded_repository_id,
                                                       changeset_revision)
                return repository_dict, repository_metadata_dict, repo_info_dict
            else:
                log.debug("Unable to locate repository_metadata record for repository id %s and changeset_revision %s"
                          % (str(repository.id), str(changeset_revision)))
                return repository_dict, {}, {}
        else:
            debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: "
            debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." \
                % (str(name), str(owner), str(changeset_revision))
            log.debug(debug_msg)
            return {}, {}, {}

    def __get_value_mapper(self, trans):
        # Map database ids in serialized dictionaries to their encoded (public) form.
        value_mapper = {'id': trans.security.encode_id,
                        'repository_id': trans.security.encode_id,
                        'user_id': trans.security.encode_id}
        return value_mapper

    @web.expose_api
    def import_capsule(self, trans, payload, **kwd):
        """
        POST /api/repositories/new/import_capsule
        Import a repository capsule into the Tool Shed.

        :param key: the user's API key

        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed into which the capsule should be imported.
        :param capsule_file_name (required): the name of the capsule file.
        """
        # Get the information about the capsule to be imported from the payload.
        tool_shed_url = payload.get('tool_shed_url', '')
        if not tool_shed_url:
            raise HTTPBadRequest(detail="Missing required parameter 'tool_shed_url'.")
        capsule_file_name = payload.get('capsule_file_name', '')
        if not capsule_file_name:
            raise HTTPBadRequest(detail="Missing required parameter 'capsule_file_name'.")
        capsule_file_path = os.path.abspath(capsule_file_name)
        capsule_dict = dict(error_message='',
                            encoded_file_path=None,
                            status='ok',
                            tar_archive=None,
                            uploaded_file=None,
                            capsule_file_name=None)
        # An empty capsule contains nothing importable, so bail out early.
        if os.path.getsize(os.path.abspath(capsule_file_name)) == 0:
            log.debug('Your capsule file %s is empty.' % str(capsule_file_name))
            return {}
        try:
            # Open for reading with transparent compression.
            tar_archive = tarfile.open(capsule_file_path, 'r:*')
        # Fixed: was Python 2-only "except tarfile.ReadError, e:" syntax, which is a
        # SyntaxError on Python 3; the rest of this file uses "except ... as e".
        except tarfile.ReadError as e:
            log.debug('Error opening capsule file %s: %s' % (str(capsule_file_name), str(e)))
            return {}
        irm = capsule_manager.ImportRepositoryManager(trans.app,
                                                      trans.request.host,
                                                      trans.user,
                                                      trans.user_is_admin())
        capsule_dict['tar_archive'] = tar_archive
        capsule_dict['capsule_file_name'] = capsule_file_name
        capsule_dict = irm.extract_capsule_files(**capsule_dict)
        capsule_dict = irm.validate_capsule(**capsule_dict)
        status = capsule_dict.get('status', 'error')
        if status == 'error':
            log.debug('The capsule contents are invalid and cannot be imported:<br/>%s'
                      % str(capsule_dict.get('error_message', '')))
            return {}
        encoded_file_path = capsule_dict.get('encoded_file_path', None)
        if encoded_file_path is None:
            log.debug('The capsule_dict %s is missing the required encoded_file_path entry.' % str(capsule_dict))
            return {}
        file_path = encoding_util.tool_shed_decode(encoded_file_path)
        export_info_file_path = os.path.join(file_path, 'export_info.xml')
        export_info_dict = irm.get_export_info_dict(export_info_file_path)
        manifest_file_path = os.path.join(file_path, 'manifest.xml')
        # The manifest.xml file has already been validated, so no error_message should be returned here.
        repository_info_dicts, error_message = irm.get_repository_info_from_manifest(manifest_file_path)
        # Determine the status for each exported repository archive contained within the capsule.
        repository_status_info_dicts = irm.get_repository_status_from_tool_shed(repository_info_dicts)
        # Generate a list of repository name / import results message tuples for display after the capsule is imported.
        import_results_tups = []
        # Only create repositories that do not yet exist and that the current user is authorized to create.  The
        # status will be None for repositories that fall into the intersection of these 2 categories.
        for repository_status_info_dict in repository_status_info_dicts:
            # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
            repository_status_info_dict['capsule_file_name'] = capsule_file_name
            repository_status_info_dict['encoded_file_path'] = encoded_file_path
            import_results_tups = irm.create_repository_and_import_archive(repository_status_info_dict,
                                                                           import_results_tups)
        irm.check_status_and_reset_downloadable(import_results_tups)
        basic_util.remove_dir(file_path)
        # NOTE: the order of installation is defined in import_results_tups, but order will be lost
        # when transferred to return_dict.
        return_dict = {}
        for import_results_tup in import_results_tups:
            ok, name_owner, message = import_results_tup
            name, owner = name_owner
            key = 'Archive of repository "%s" owned by "%s"' % (str(name), str(owner))
            val = message.replace('<b>', '"').replace('</b>', '"')
            return_dict[key] = val
        return return_dict
repository_dict = \ cdl.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed_url, name=tool_shed_repository.name, owner=self.repository_owner, installed_changeset_revision=tool_shed_repository.installed_changeset_revision, tool_dicts=irmm_metadata_dict.get( 'tools', [] ), converter_path=converter_path, display_path=display_path ) if converter_path: # Load proprietary datatype converters self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict ) if display_path: # Load proprietary datatype display applications self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict ) basic_util.remove_dir( work_dir ) def install_repository( self, repository_elem, tool_shed_repository, install_dependencies, is_repository_dependency=False ): """Install a single repository, loading contained tools into the tool panel.""" # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision> relative_clone_dir = os.path.join( tool_shed_repository.tool_shed, 'repos', tool_shed_repository.owner, tool_shed_repository.name, tool_shed_repository.installed_changeset_revision ) clone_dir = os.path.join( self.tool_path, relative_clone_dir ) cloned_ok = self.__iscloned( clone_dir ) is_installed = False # Any of the following states should count as installed in this context. if tool_shed_repository.status in [ self.app.install_model.ToolShedRepository.installation_status.INSTALLED, self.app.install_model.ToolShedRepository.installation_status.ERROR,
def import_capsule(self, trans, payload, **kwd):
    """
    POST /api/repositories/new/import_capsule
    Import a repository capsule into the Tool Shed.

    :param key: the user's API key

    The following parameters are included in the payload.
    :param tool_shed_url (required): the base URL of the Tool Shed into which the capsule should be imported.
    :param capsule_file_name (required): the name of the capsule file.
    """
    # Reject the request up front when either required payload entry is missing.
    tool_shed_url = payload.get("tool_shed_url", "")
    if not tool_shed_url:
        raise HTTPBadRequest(detail="Missing required parameter 'tool_shed_url'.")
    capsule_file_name = payload.get("capsule_file_name", "")
    if not capsule_file_name:
        raise HTTPBadRequest(detail="Missing required parameter 'capsule_file_name'.")
    capsule_file_path = os.path.abspath(capsule_file_name)
    # Seed the working state that the capsule-manager helpers expect to receive.
    capsule_dict = {
        "error_message": "",
        "encoded_file_path": None,
        "status": "ok",
        "tar_archive": None,
        "uploaded_file": None,
        "capsule_file_name": None,
    }
    # An empty file cannot be a capsule; log and give up.
    if os.path.getsize(os.path.abspath(capsule_file_name)) == 0:
        log.debug("Your capsule file %s is empty." % str(capsule_file_name))
        return {}
    try:
        # Open for reading with transparent compression.
        tar_archive = tarfile.open(capsule_file_path, "r:*")
    except tarfile.ReadError as e:
        log.debug("Error opening capsule file %s: %s" % (str(capsule_file_name), str(e)))
        return {}
    irm = capsule_manager.ImportRepositoryManager(self.app, trans.request.host, trans.user, trans.user_is_admin())
    capsule_dict["tar_archive"] = tar_archive
    capsule_dict["capsule_file_name"] = capsule_file_name
    capsule_dict = irm.extract_capsule_files(**capsule_dict)
    capsule_dict = irm.validate_capsule(**capsule_dict)
    if capsule_dict.get("status", "error") == "error":
        log.debug(
            "The capsule contents are invalid and cannot be imported:<br/>%s"
            % str(capsule_dict.get("error_message", ""))
        )
        return {}
    encoded_file_path = capsule_dict.get("encoded_file_path", None)
    if encoded_file_path is None:
        log.debug("The capsule_dict %s is missing the required encoded_file_path entry." % str(capsule_dict))
        return {}
    file_path = encoding_util.tool_shed_decode(encoded_file_path)
    manifest_file_path = os.path.join(file_path, "manifest.xml")
    # The manifest.xml file has already been validated, so no error_message should be returned here.
    repository_info_dicts, error_message = irm.get_repository_info_from_manifest(manifest_file_path)
    # Determine the status for each exported repository archive contained within the capsule.
    repository_status_info_dicts = irm.get_repository_status_from_tool_shed(repository_info_dicts)
    # Only create repositories that do not yet exist and that the current user is authorized to create.  The
    # status will be None for repositories that fall into the intersection of these 2 categories.
    import_results_tups = []
    for status_info in repository_status_info_dicts:
        # Record where this archive came from before importing it.
        status_info["capsule_file_name"] = capsule_file_name
        status_info["encoded_file_path"] = encoded_file_path
        import_results_tups = irm.create_repository_and_import_archive(status_info, import_results_tups)
    irm.check_status_and_reset_downloadable(import_results_tups)
    basic_util.remove_dir(file_path)
    # NOTE: the order of installation is defined in import_results_tups, but order will be lost
    # when transferred to return_dict.
    return_dict = {}
    for ok, (name, owner), message in import_results_tups:
        key = 'Archive of repository "%s" owned by "%s"' % (str(name), str(owner))
        return_dict[key] = message.replace("<b>", '"').replace("</b>", '"')
    return return_dict