Example #1
0
def exec_before_job(app, inp_data, out_data, param_dict, tool=None, **kwd):
    """
    Before-job hook for a data manager tool: look for any data tables that
    have not yet been defined for this data manager and dynamically add them
    to Galaxy's in-memory tool data table registry.

    :param app: the Galaxy application instance
    :param inp_data: input datasets (unused here)
    :param out_data: output datasets (unused here)
    :param param_dict: tool parameter dict; may carry 'data_table_entries'
    :param tool: the data manager tool being run, or None
    """
    # Work on a shallow copy so the caller's dict is not mutated.
    param_dict = dict(**param_dict)
    param_dict['data_table_entries'] = param_dict.get('data_table_entries', [])
    if not isinstance(param_dict['data_table_entries'], list):
        param_dict['data_table_entries'] = [param_dict['data_table_entries']]
    # get_data_table_entries() consumes a comma-separated string.
    param_dict['data_table_entries'] = ",".join(
        param_dict['data_table_entries'])
    tool_shed_repository = tool.tool_shed_repository if tool else None
    tdtm = None
    # BUG FIX: the original dereferenced ``tool`` unconditionally here even
    # though it is an optional argument guarded just above; mirror that guard.
    data_manager = app.data_managers.get_manager(
        tool.data_manager_id, None) if tool else None
    data_table_entries = get_data_table_entries(param_dict)
    data_tables = load_data_tables_from_url(
        data_table_class=app.tool_data_tables.__class__).get('data_tables')
    for data_table_name, entries in data_table_entries.items():
        # get data table managed by this data Manager
        has_data_table = app.tool_data_tables.get_tables().get(
            str(data_table_name))
        if has_data_table:
            has_data_table = bool(
                has_data_table.get_filename_for_source(data_manager, None))
        if not has_data_table:
            if tdtm is None:
                # Lazy import: only needed when a table must be created.
                from tool_shed.tools import data_table_manager
                tdtm = data_table_manager.ToolDataTableManager(app)
                target_dir, tool_path, relative_target_dir = tdtm.get_target_install_dir(
                    tool_shed_repository)
            # Dynamically add this data table
            log.debug(
                "Attempting to dynamically create a missing Tool Data Table named %s."
                % data_table_name)
            data_table = data_tables[data_table_name]
            repo_info = tdtm.generate_repository_info_elem_from_repository(
                tool_shed_repository, parent_elem=None)
            if repo_info is not None:
                repo_info = tostring(repo_info)
            # Write a temporary XML definition and load it.  try/finally
            # guarantees the NamedTemporaryFile is closed (and thus deleted)
            # even if writing or loading raises.
            tmp_file = tempfile.NamedTemporaryFile(mode="w")
            try:
                tmp_file.write(
                    get_new_xml_definition(app, data_table, data_manager,
                                           repo_info, target_dir))
                tmp_file.flush()
                app.tool_data_tables.add_new_entries_from_config_file(
                    tmp_file.name,
                    None,
                    app.config.shed_tool_data_table_config,
                    persist=True)
            finally:
                tmp_file.close()
Example #2
0
def exec_before_job(app, inp_data, out_data, param_dict, tool=None, **kwd):
    """
    Before-job hook for a data manager tool: for every selected data table,
    dynamically create its Tool Data Table definition if this data manager
    has not defined the table before.

    :param app: the Galaxy application instance
    :param inp_data: input datasets (unused here)
    :param out_data: output datasets (unused here)
    :param param_dict: tool parameter dict; may carry 'data_tables'
    :param tool: the data manager tool being run, or None
    """
    # Work on a shallow copy so the caller's dict is not mutated.
    param_dict = dict(**param_dict)
    data_tables_param = param_dict.get('data_tables', [])
    if not isinstance(data_tables_param, list):
        data_tables_param = [data_tables_param]
    tool_shed_repository = tool.tool_shed_repository if tool else None
    tdtm = None
    # BUG FIX: the original dereferenced ``tool`` unconditionally here even
    # though it is an optional argument guarded just above; mirror that guard.
    data_manager = app.data_managers.get_manager(
        tool.data_manager_id, None) if tool else None
    for data_table_param in data_tables_param:
        data_table_name = data_table_param.get('data_table_name')
        if not data_table_name:
            continue
        # the 'data_table_name' value in data_table_param is a SelectToolParameter,
        # to get the selected value we need to cast data_table_name to string
        data_table_name = str(data_table_name)
        # get data table managed by this data Manager
        data_table = app.tool_data_tables.get_tables().get(data_table_name)
        if not data_table:
            continue
        data_table_filename = data_table.get_filename_for_source(
            data_manager, None)
        if data_table_filename:
            # Already defined for this data manager; nothing to do.
            continue
        if tdtm is None:
            # Lazy import: only needed when a table must be created.
            from tool_shed.tools import data_table_manager
            tdtm = data_table_manager.ToolDataTableManager(app)
            target_dir, tool_path, relative_target_dir = tdtm.get_target_install_dir(
                tool_shed_repository)
        # Dynamically add this data table
        log.debug(
            "Attempting to dynamically create a missing Tool Data Table named %s."
            % data_table_name)
        repo_info = tdtm.generate_repository_info_elem_from_repository(
            tool_shed_repository, parent_elem=None)
        if repo_info is not None:
            repo_info = tostring(repo_info)
        # try/finally guarantees the NamedTemporaryFile is closed (and thus
        # deleted) even if writing or loading raises.
        tmp_file = tempfile.NamedTemporaryFile(mode="w")
        try:
            tmp_file.write(
                __get_new_xml_definition(app, data_table, data_manager,
                                         repo_info, target_dir))
            tmp_file.flush()
            app.tool_data_tables.add_new_entries_from_config_file(
                tmp_file.name,
                None,
                app.config.shed_tool_data_table_config,
                persist=True)
        finally:
            tmp_file.close()
Example #3
0
 def __init__(self, app):
     """Store the application reference and build the ToolDataTableManager
     this object uses to work with tool data tables."""
     self.app = app
     self.tdtm = data_table_manager.ToolDataTableManager(self.app)
Example #4
0
 def handle_repository_contents(self,
                                tool_shed_repository,
                                repository_clone_url,
                                relative_install_dir,
                                repository_elem,
                                install_dependencies,
                                is_repository_dependency=False):
     """
     Generate the metadata for the installed tool shed repository, among other things.  If the installed tool_shed_repository
     contains tools that are loaded into the Galaxy tool panel, this method will automatically eliminate all entries for each
     of the tools defined in the received repository_elem from all non-shed-related tool panel configuration files since the
     entries are automatically added to the reserved migrated_tools_conf.xml file as part of the migration process.

     :param tool_shed_repository: installed repository record being migrated
     :param repository_clone_url: URL the repository was cloned from (used to build tool guids)
     :param relative_install_dir: repository install dir relative to self.tool_path
     :param repository_elem: XML element whose children describe the repository's tools
     :param install_dependencies: if True, install the repository's tool dependencies
     :param is_repository_dependency: True when this repository is being handled
         as a dependency of another repository rather than directly
     """
     # Tool config file names whose entries must be removed from all
     # non-shed-related tool panel configs (their entries move to
     # migrated_tools_conf.xml).
     tool_configs_to_filter = []
     tool_panel_dict_for_display = odict()
     if self.tool_path:
         repo_install_dir = os.path.join(self.tool_path,
                                         relative_install_dir)
     else:
         repo_install_dir = relative_install_dir
     if not is_repository_dependency:
         for tool_elem in repository_elem:
             # The tool_elem looks something like this:
             # <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
             tool_config = tool_elem.get('file')
             guid = self.get_guid(repository_clone_url,
                                  relative_install_dir, tool_config)
             # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems.
             is_displayed, tool_sections = self.get_containing_tool_sections(
                 tool_config)
             if is_displayed:
                 tool_panel_dict_for_tool_config = \
                     self.tpm.generate_tool_panel_dict_for_tool_config(guid,
                                                                       tool_config,
                                                                       tool_sections=tool_sections)
                 # The tool-panel_dict has the following structure.
                 # {<Tool guid> : [{ tool_config : <tool_config_file>,
                 #                   id: <ToolSection id>,
                 #                   version : <ToolSection version>,
                 #                   name : <TooSection name>}]}
                 for k, v in tool_panel_dict_for_tool_config.items():
                     tool_panel_dict_for_display[k] = v
                     for tool_panel_dict in v:
                         # Keep track of tool config file names associated with entries that have been made to the
                         # migrated_tools_conf.xml file so they can be eliminated from all non-shed-related tool panel configs.
                         if tool_config not in tool_configs_to_filter:
                             tool_configs_to_filter.append(tool_config)
             else:
                 log.error(
                     'The tool "%s" (%s) has not been enabled because it is not defined in a proprietary tool config (%s).'
                     % (guid, tool_config, ", ".join(
                         self.proprietary_tool_confs or [])))
         if tool_configs_to_filter:
             # NOTE(review): this Lock is created fresh on every call, so it
             # cannot actually synchronize concurrent callers of this method;
             # confirm whether a shared/module-level lock was intended.
             lock = threading.Lock()
             lock.acquire(True)
             try:
                 self.filter_and_persist_proprietary_tool_panel_configs(
                     tool_configs_to_filter)
             except Exception:
                 log.exception(
                     "Exception attempting to filter and persist non-shed-related tool panel configs"
                 )
             finally:
                 lock.release()
     # Generate and persist metadata for this repository's changeset revision,
     # then store it on the repository record.
     irmm = InstalledRepositoryMetadataManager(
         app=self.app,
         tpm=self.tpm,
         repository=tool_shed_repository,
         changeset_revision=tool_shed_repository.changeset_revision,
         repository_clone_url=repository_clone_url,
         shed_config_dict=self.shed_config_dict,
         relative_install_dir=relative_install_dir,
         repository_files_dir=None,
         resetting_all_metadata_on_repository=False,
         updating_installed_repository=False,
         persist=True)
     irmm.generate_metadata_for_changeset_revision()
     irmm_metadata_dict = irmm.get_metadata_dict()
     tool_shed_repository.metadata = irmm_metadata_dict
     self.app.install_model.context.add(tool_shed_repository)
     self.app.install_model.context.flush()
     has_tool_dependencies = self.__has_tool_dependencies(
         irmm_metadata_dict)
     if has_tool_dependencies:
         # All tool_dependency objects must be created before the tools are processed even if no
         # tool dependencies will be installed.
         tool_dependencies = tool_dependency_util.create_tool_dependency_objects(
             self.app,
             tool_shed_repository,
             relative_install_dir,
             set_status=True)
     else:
         tool_dependencies = None
     if 'tools' in irmm_metadata_dict:
         # Copy the repository's tool index sample files and repair any
         # missing data table entries / index files its tools reference.
         tdtm = data_table_manager.ToolDataTableManager(self.app)
         sample_files = irmm_metadata_dict.get('sample_files', [])
         sample_files = [str(s) for s in sample_files]
         tool_index_sample_files = tdtm.get_tool_index_sample_files(
             sample_files)
         tool_util.copy_sample_files(self.app,
                                     tool_index_sample_files,
                                     tool_path=self.tool_path)
         sample_files_copied = [s for s in tool_index_sample_files]
         repository_tools_tups = irmm.get_repository_tools_tups()
         if repository_tools_tups:
             # Handle missing data table entries for tool parameters that are dynamically
             # generated select lists.
             repository_tools_tups = tdtm.handle_missing_data_table_entry(
                 relative_install_dir, self.tool_path,
                 repository_tools_tups)
             # Handle missing index files for tool parameters that are dynamically generated select lists.
             repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file(
                 self.app, self.tool_path, sample_files,
                 repository_tools_tups, sample_files_copied)
             # Copy remaining sample files included in the repository to the ~/tool-data
             # directory of the local Galaxy instance.
             tool_util.copy_sample_files(
                 self.app,
                 sample_files,
                 tool_path=self.tool_path,
                 sample_files_copied=sample_files_copied)
             if not is_repository_dependency:
                 self.tpm.add_to_tool_panel(
                     tool_shed_repository.name,
                     repository_clone_url,
                     tool_shed_repository.installed_changeset_revision,
                     repository_tools_tups,
                     self.repository_owner,
                     self.migrated_tools_config,
                     tool_panel_dict=tool_panel_dict_for_display,
                     new_install=True)
     if install_dependencies and tool_dependencies and has_tool_dependencies:
         # Install tool dependencies.
         irm = install_manager.InstallRepositoryManager(self.app, self.tpm)
         itdm = install_manager.InstallToolDependencyManager(self.app)
         irm.update_tool_shed_repository_status(
             tool_shed_repository,
             self.app.install_model.ToolShedRepository.installation_status.
             INSTALLING_TOOL_DEPENDENCIES)
         # Get the tool_dependencies.xml file from disk.
         tool_dependencies_config = hg_util.get_config_from_disk(
             'tool_dependencies.xml', repo_install_dir)
         installed_tool_dependencies = itdm.install_specified_tool_dependencies(
             tool_shed_repository=tool_shed_repository,
             tool_dependencies_config=tool_dependencies_config,
             tool_dependencies=tool_dependencies,
             from_tool_migration_manager=True)
         # Log (but do not abort on) individual dependency install failures.
         for installed_tool_dependency in installed_tool_dependencies:
             if installed_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
                 log.error(
                     'The ToolMigrationManager returned the following error while installing tool dependency %s: %s',
                     installed_tool_dependency.name,
                     installed_tool_dependency.error_message)
     if 'datatypes' in irmm_metadata_dict:
         # Load any proprietary datatypes the repository provides, along with
         # their converters and display applications.
         cdl = custom_datatype_manager.CustomDatatypeLoader(self.app)
         tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
         if not tool_shed_repository.includes_datatypes:
             tool_shed_repository.includes_datatypes = True
         self.app.install_model.context.add(tool_shed_repository)
         self.app.install_model.context.flush()
         work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-hrc")
         datatypes_config = hg_util.get_config_from_disk(
             suc.DATATYPES_CONFIG_FILENAME, repo_install_dir)
         # Load proprietary data types required by tools.  The value of override is not
         # important here since the Galaxy server will be started after this installation
         # completes.
         converter_path, display_path = \
             cdl.alter_config_and_load_prorietary_datatypes(datatypes_config,
                                                            repo_install_dir,
                                                            override=False)
         if converter_path or display_path:
             # Create a dictionary of tool shed repository related information.
             repository_dict = \
                 cdl.create_repository_dict_for_proprietary_datatypes(tool_shed=self.tool_shed_url,
                                                                      name=tool_shed_repository.name,
                                                                      owner=self.repository_owner,
                                                                      installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
                                                                      tool_dicts=irmm_metadata_dict.get('tools', []),
                                                                      converter_path=converter_path,
                                                                      display_path=display_path)
         if converter_path:
             # Load proprietary datatype converters
             self.app.datatypes_registry.load_datatype_converters(
                 self.toolbox, installed_repository_dict=repository_dict)
         if display_path:
             # Load proprietary datatype display applications
             self.app.datatypes_registry.load_display_applications(
                 self.app, installed_repository_dict=repository_dict)
         basic_util.remove_dir(work_dir)
Example #5
0
                     # The directory is not empty.
                     pass
 # See if any admin users have chosen to receive email alerts when a repository is updated.
 # If so, check every uploaded file to ensure content is appropriate.
 check_contents = check_file_contents_for_email_alerts(app)
 for filename_in_archive in filenames_in_archive:
     # Check file content to ensure it is appropriate.
     if check_contents and os.path.isfile(filename_in_archive):
         content_alert_str += check_file_content_for_html_and_images(
             filename_in_archive)
     hg_util.add_changeset(repo.ui, repo, filename_in_archive)
     if filename_in_archive.endswith('tool_data_table_conf.xml.sample'):
         # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
         # by parsing the file and adding new entries to the in-memory app.tool_data_tables
         # dictionary.
         tdtm = data_table_manager.ToolDataTableManager(app)
         error, message = tdtm.handle_sample_tool_data_table_conf_file(
             filename_in_archive, persist=False)
         if error:
             return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
 hg_util.commit_changeset(repo.ui,
                          repo,
                          full_path_to_changeset=full_path,
                          username=username,
                          message=commit_message)
 admin_only = len(repository.downloadable_revisions) != 1
 suc.handle_email_alerts(app,
                         host,
                         repository,
                         content_alert_str=content_alert_str,
                         new_repo_alert=new_repo_alert,
Example #6
0
 def upload(self, trans, **kwd):
     message = escape(kwd.get('message', ''))
     status = kwd.get('status', 'done')
     commit_message = escape(kwd.get('commit_message', 'Uploaded'))
     repository_id = kwd.get('repository_id', '')
     repository = repository_util.get_repository_in_tool_shed(trans.app, repository_id)
     repo_dir = repository.repo_path(trans.app)
     repo = hg_util.get_repo_for_repository(trans.app, repository=None, repo_path=repo_dir, create=False)
     uncompress_file = util.string_as_bool(kwd.get('uncompress_file', 'true'))
     remove_repo_files_not_in_tar = util.string_as_bool(kwd.get('remove_repo_files_not_in_tar', 'true'))
     uploaded_file = None
     upload_point = commit_util.get_upload_point(repository, **kwd)
     tip = repository.tip(trans.app)
     file_data = kwd.get('file_data', '')
     url = kwd.get('url', '')
     # Part of the upload process is sending email notification to those that have registered to
     # receive them.  One scenario occurs when the first change set is produced for the repository.
     # See the suc.handle_email_alerts() method for the definition of the scenarios.
     new_repo_alert = repository.is_new(trans.app)
     uploaded_directory = None
     if kwd.get('upload_button', False):
         if file_data == '' and url == '':
             message = 'No files were entered on the upload form.'
             status = 'error'
             uploaded_file = None
         elif url and url.startswith('hg'):
             # Use mercurial clone to fetch repository, contents will then be copied over.
             uploaded_directory = tempfile.mkdtemp()
             repo_url = 'http%s' % url[len('hg'):]
             repo_url = repo_url.encode('ascii', 'replace')
             try:
                 commands.clone(hg_util.get_configured_ui(), repo_url, uploaded_directory)
             except Exception as e:
                 message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string(str(e))
                 status = 'error'
                 basic_util.remove_dir(uploaded_directory)
                 uploaded_directory = None
         elif url:
             valid_url = True
             try:
                 stream = requests.get(url, stream=True)
             except Exception as e:
                 valid_url = False
                 message = 'Error uploading file via http: %s' % str(e)
                 status = 'error'
                 uploaded_file = None
             if valid_url:
                 fd, uploaded_file_name = tempfile.mkstemp()
                 uploaded_file = open(uploaded_file_name, 'wb')
                 for chunk in stream.iter_content(chunk_size=util.CHUNK_SIZE):
                     if chunk:
                         uploaded_file.write(chunk)
                 uploaded_file.flush()
                 uploaded_file_filename = url.split('/')[-1]
                 isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
         elif file_data not in ('', None):
             uploaded_file = file_data.file
             uploaded_file_name = uploaded_file.name
             uploaded_file_filename = os.path.split(file_data.filename)[-1]
             isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
         if uploaded_file or uploaded_directory:
             rdah = attribute_handlers.RepositoryDependencyAttributeHandler(trans.app, unpopulate=False)
             tdah = attribute_handlers.ToolDependencyAttributeHandler(trans.app, unpopulate=False)
             tdtm = data_table_manager.ToolDataTableManager(trans.app)
             ok = True
             isgzip = False
             isbz2 = False
             if uploaded_file:
                 if uncompress_file:
                     isgzip = checkers.is_gzip(uploaded_file_name)
                     if not isgzip:
                         isbz2 = checkers.is_bz2(uploaded_file_name)
                 if isempty:
                     tar = None
                     istar = False
                 else:
                     # Determine what we have - a single file or an archive
                     try:
                         if (isgzip or isbz2) and uncompress_file:
                             # Open for reading with transparent compression.
                             tar = tarfile.open(uploaded_file_name, 'r:*')
                         else:
                             tar = tarfile.open(uploaded_file_name)
                         istar = True
                     except tarfile.ReadError as e:
                         tar = None
                         istar = False
             else:
                 # Uploaded directory
                 istar = False
             if istar:
                 ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                     repository_content_util.upload_tar(
                         trans,
                         rdah,
                         tdah,
                         repository,
                         tar,
                         uploaded_file,
                         upload_point,
                         remove_repo_files_not_in_tar,
                         commit_message,
                         new_repo_alert
                     )
             elif uploaded_directory:
                 ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                     self.upload_directory(trans,
                                           rdah,
                                           tdah,
                                           repository,
                                           uploaded_directory,
                                           upload_point,
                                           remove_repo_files_not_in_tar,
                                           commit_message,
                                           new_repo_alert)
             else:
                 if (isgzip or isbz2) and uncompress_file:
                     uploaded_file_filename = commit_util.uncompress(repository,
                                                                     uploaded_file_name,
                                                                     uploaded_file_filename,
                                                                     isgzip=isgzip,
                                                                     isbz2=isbz2)
                 if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION and \
                         uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                     ok = False
                     message = 'Repositories of type <b>Repository suite definition</b> can only contain a single file named '
                     message += '<b>repository_dependencies.xml</b>.'
                 elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and \
                         uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                     ok = False
                     message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named '
                     message += '<b>tool_dependencies.xml</b>.'
                 if ok:
                     if upload_point is not None:
                         full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename))
                     else:
                         full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename))
                     # Move some version of the uploaded file to the load_point within the repository hierarchy.
                     if uploaded_file_filename in [rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME]:
                         # Inspect the contents of the file to see if toolshed or changeset_revision attributes
                         # are missing and if so, set them appropriately.
                         altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name)
                         if error_message:
                             ok = False
                             message = error_message
                             status = 'error'
                         elif altered:
                             tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                             shutil.move(tmp_filename, full_path)
                         else:
                             shutil.move(uploaded_file_name, full_path)
                     elif uploaded_file_filename in [rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME]:
                         # Inspect the contents of the file to see if changeset_revision values are
                         # missing and if so, set them appropriately.
                         altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name)
                         if error_message:
                             ok = False
                             message = error_message
                             status = 'error'
                         if ok:
                             if altered:
                                 tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                                 shutil.move(tmp_filename, full_path)
                             else:
                                 shutil.move(uploaded_file_name, full_path)
                     else:
                         shutil.move(uploaded_file_name, full_path)
                     if ok:
                         # See if any admin users have chosen to receive email alerts when a repository is updated.
                         # If so, check every uploaded file to ensure content is appropriate.
                         check_contents = commit_util.check_file_contents_for_email_alerts(trans.app)
                         if check_contents and os.path.isfile(full_path):
                             content_alert_str = commit_util.check_file_content_for_html_and_images(full_path)
                         else:
                             content_alert_str = ''
                         hg_util.add_changeset(repo.ui, repo, full_path)
                         # Convert from unicode to prevent "TypeError: array item must be char"
                         full_path = full_path.encode('ascii', 'replace')
                         hg_util.commit_changeset(repo.ui,
                                                  repo,
                                                  full_path_to_changeset=full_path,
                                                  username=trans.user.username,
                                                  message=commit_message)
                         if full_path.endswith('tool_data_table_conf.xml.sample'):
                             # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
                             # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
                             # dictionary.
                             error, error_message = tdtm.handle_sample_tool_data_table_conf_file(full_path, persist=False)
                             if error:
                                 message = '%s<br/>%s' % (message, error_message)
                         # See if the content of the change set was valid.
                         admin_only = len(repository.downloadable_revisions) != 1
                         suc.handle_email_alerts(trans.app,
                                                 trans.request.host,
                                                 repository,
                                                 content_alert_str=content_alert_str,
                                                 new_repo_alert=new_repo_alert,
                                                 admin_only=admin_only)
             if ok:
                 # Update the repository files for browsing.
                 hg_util.update_repository(repo)
                 # Get the new repository tip.
                 if tip == repository.tip(trans.app):
                     message = 'No changes to repository.  '
                     status = 'warning'
                 else:
                     if (isgzip or isbz2) and uncompress_file:
                         uncompress_str = ' uncompressed and '
                     else:
                         uncompress_str = ' '
                     if uploaded_directory:
                         source_type = "repository"
                         source = url
                     else:
                         source_type = "file"
                         source = uploaded_file_filename
                     message = "The %s <b>%s</b> has been successfully%suploaded to the repository.  " % \
                         (source_type, escape(source), uncompress_str)
                     if istar and (undesirable_dirs_removed or undesirable_files_removed):
                         items_removed = undesirable_dirs_removed + undesirable_files_removed
                         message += "  %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " % items_removed
                         message += "were removed from the archive.  "
                     if istar and remove_repo_files_not_in_tar and files_to_remove:
                         if upload_point is not None:
                             message += "  %d files were removed from the repository relative to the selected upload point '%s'.  " % \
                                 (len(files_to_remove), upload_point)
                         else:
                             message += "  %d files were removed from the repository root.  " % len(files_to_remove)
                     rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                                                 user=trans.user,
                                                                                 repository=repository)
                     status, error_message = \
                         rmm.set_repository_metadata_due_to_new_tip(trans.request.host,
                                                                    content_alert_str=content_alert_str,
                                                                    **kwd)
                     if error_message:
                         message = error_message
                     kwd['message'] = message
                 if repository.metadata_revisions:
                     # A repository's metadata revisions are order descending by update_time, so the zeroth revision
                     # will be the tip just after an upload.
                     metadata_dict = repository.metadata_revisions[0].metadata
                 else:
                     metadata_dict = {}
                 dd = dependency_display.DependencyDisplayer(trans.app)
                 if str(repository.type) not in [rt_util.REPOSITORY_SUITE_DEFINITION,
                                                 rt_util.TOOL_DEPENDENCY_DEFINITION]:
                     change_repository_type_message = rt_util.generate_message_for_repository_type_change(trans.app,
                                                                                                          repository)
                     if change_repository_type_message:
                         message += change_repository_type_message
                         status = 'warning'
                     else:
                         # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies
                         # weren't loaded due to a requirement tag mismatch or some other problem.  Tool dependency
                         # definitions can define orphan tool dependencies (no relationship to any tools contained in the
                         # repository), so warning messages are important because orphans are always valid.  The repository
                         # owner must be warned in case they did not intend to define an orphan dependency, but simply
                         # provided incorrect information (tool shed, name owner, changeset_revision) for the definition.
                         orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict)
                         if orphan_message:
                             message += orphan_message
                             status = 'warning'
                 # Handle messaging for invalid tool dependencies.
                 invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies(metadata_dict)
                 if invalid_tool_dependencies_message:
                     message += invalid_tool_dependencies_message
                     status = 'error'
                 # Handle messaging for invalid repository dependencies.
                 invalid_repository_dependencies_message = \
                     dd.generate_message_for_invalid_repository_dependencies(metadata_dict,
                                                                             error_from_tuple=True)
                 if invalid_repository_dependencies_message:
                     message += invalid_repository_dependencies_message
                     status = 'error'
                 # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                 tdtm.reset_tool_data_tables()
                 if uploaded_directory:
                     basic_util.remove_dir(uploaded_directory)
                 trans.response.send_redirect(web.url_for(controller='repository',
                                                          action='browse_repository',
                                                          id=repository_id,
                                                          commit_message='Deleted selected files',
                                                          message=message,
                                                          status=status))
             else:
                 if uploaded_directory:
                     basic_util.remove_dir(uploaded_directory)
                 status = 'error'
             # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
             tdtm.reset_tool_data_tables()
     return trans.fill_template('/webapps/tool_shed/repository/upload.mako',
                                repository=repository,
                                changeset_revision=tip,
                                url=url,
                                commit_message=commit_message,
                                uncompress_file=uncompress_file,
                                remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
                                message=message,
                                status=status)
Example #7
0
             chunk = stream.read( util.CHUNK_SIZE )
             if not chunk:
                 break
             uploaded_file.write( chunk )
         uploaded_file.flush()
         uploaded_file_filename = url.split( '/' )[ -1 ]
         isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
 elif file_data not in ( '', None ):
     uploaded_file = file_data.file
     uploaded_file_name = uploaded_file.name
     uploaded_file_filename = os.path.split( file_data.filename )[ -1 ]
     isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
 if uploaded_file or uploaded_directory:
     rdah = attribute_handlers.RepositoryDependencyAttributeHandler( trans.app, unpopulate=False )
     tdah = attribute_handlers.ToolDependencyAttributeHandler( trans.app, unpopulate=False )
     tdtm = data_table_manager.ToolDataTableManager( trans.app )
     ok = True
     isgzip = False
     isbz2 = False
     if uploaded_file:
         if uncompress_file:
             isgzip = checkers.is_gzip( uploaded_file_name )
             if not isgzip:
                 isbz2 = checkers.is_bz2( uploaded_file_name )
         if isempty:
             tar = None
             istar = False
         else:
             # Determine what we have - a single file or an archive
             try:
                 if ( isgzip or isbz2 ) and uncompress_file:
Example #8
0
def handle_directory_changes(app, host, username, repository, full_path,
                             filenames_in_archive,
                             remove_repo_files_not_in_tar, new_repo_alert,
                             commit_message, undesirable_dirs_removed,
                             undesirable_files_removed):
    """Sync an uploaded archive's contents into a repository working directory.

    Optionally removes repository files that are absent from the uploaded
    archive, strips undesirable files/directories from the walk counts,
    scans uploaded files for content that should trigger email alerts,
    then adds and commits the changes via mercurial and sends alert emails.

    Returns a 6-tuple:
    (ok, error_message, files_to_remove, content_alert_str,
     undesirable_dirs_removed, undesirable_files_removed)
    """
    repo = hg_util.get_repo_for_repository(app,
                                           repository=repository,
                                           repo_path=None,
                                           create=False)
    content_alert_str = ''
    files_to_remove = []
    # Make archive paths absolute so they compare correctly against os.walk results.
    filenames_in_archive = [
        os.path.join(full_path, name) for name in filenames_in_archive
    ]
    if remove_repo_files_not_in_tar and not repository.is_new(app):
        # We have a repository that is not new (it contains files), so discover those files that are in the
        # repository, but not in the uploaded archive.
        for root, dirs, files in os.walk(full_path):
            if root.find('.hg') < 0 and root.find('hgrc') < 0:
                for undesirable_dir in UNDESIRABLE_DIRS:
                    if undesirable_dir in dirs:
                        # Removing from dirs in-place also prevents os.walk from descending into it.
                        dirs.remove(undesirable_dir)
                        undesirable_dirs_removed += 1
                for undesirable_file in UNDESIRABLE_FILES:
                    if undesirable_file in files:
                        files.remove(undesirable_file)
                        undesirable_files_removed += 1
                for name in files:
                    full_name = os.path.join(root, name)
                    if full_name not in filenames_in_archive:
                        files_to_remove.append(full_name)
        for repo_file in files_to_remove:
            # Remove files in the repository (relative to the upload point) that are not in
            # the uploaded archive.
            try:
                hg_util.remove_file(repo.ui, repo, repo_file, force=True)
            except Exception as e:
                # Best-effort fallback: manipulate the mercurial dirstate and the
                # filesystem directly when the mercurial API call fails.
                log.debug(
                    "Error removing files using the mercurial API, so trying a different approach, the error was: %s",
                    e)
                relative_selected_file = repo_file.split(
                    'repo_%d' % repository.id)[1].lstrip('/')
                repo.dirstate.remove(relative_selected_file)
                repo.dirstate.write()
                absolute_selected_file = os.path.abspath(repo_file)
                if os.path.isdir(absolute_selected_file):
                    try:
                        os.rmdir(absolute_selected_file)
                    except OSError:
                        # The directory is not empty.
                        pass
                elif os.path.isfile(absolute_selected_file):
                    os.remove(absolute_selected_file)
                    parent_dir = os.path.split(absolute_selected_file)[0]
                    try:
                        os.rmdir(parent_dir)
                    except OSError:
                        # The directory is not empty.
                        pass
    # See if any admin users have chosen to receive email alerts when a repository is updated.
    # If so, check every uploaded file to ensure content is appropriate.
    check_contents = check_file_contents_for_email_alerts(app)
    for filename_in_archive in filenames_in_archive:
        # Check file content to ensure it is appropriate.
        if check_contents and os.path.isfile(filename_in_archive):
            content_alert_str += check_file_content_for_html_and_images(
                filename_in_archive)
        hg_util.add_changeset(repo.ui, repo, filename_in_archive)
        if filename_in_archive.endswith('tool_data_table_conf.xml.sample'):
            # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
            # by parsing the file and adding new entries to the in-memory app.tool_data_tables
            # dictionary.
            tdtm = data_table_manager.ToolDataTableManager(app)
            error, message = tdtm.handle_sample_tool_data_table_conf_file(
                filename_in_archive, persist=False)
            if error:
                return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
    hg_util.commit_changeset(repo.ui,
                             repo,
                             full_path_to_changeset=full_path,
                             username=username,
                             message=commit_message)
    # Exactly one downloadable revision means this commit created the first one,
    # so the alert goes beyond admin-only recipients.
    admin_only = len(repository.downloadable_revisions) != 1
    suc.handle_email_alerts(app,
                            host,
                            repository,
                            content_alert_str=content_alert_str,
                            new_repo_alert=new_repo_alert,
                            admin_only=admin_only)
    return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed