def handle_repository(trans, start_time, repository):
    results = dict(start_time=start_time, repository_status=[])
    try:
        rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                                    user=trans.user,
                                                                    repository=repository,
                                                                    resetting_all_metadata_on_repository=True,
                                                                    updating_installed_repository=False,
                                                                    persist=False)
        rmm.reset_all_metadata_on_repository_in_tool_shed()
        rmm_invalid_file_tups = rmm.get_invalid_file_tups()
        if rmm_invalid_file_tups:
            message = tool_util.generate_message_for_invalid_tools(trans.app,
                                                                   rmm_invalid_file_tups,
                                                                   repository,
                                                                   None,
                                                                   as_html=False)
        else:
            message = "Successfully reset metadata on repository %s owned by %s" % \
                (str(repository.name), str(repository.user.username))
    except Exception as e:
        message = "Error resetting metadata on repository %s owned by %s: %s" % \
            (str(repository.name), str(repository.user.username), str(e))

def repository_ids_for_setting_metadata(self, trans, my_writable=False, **kwd):
    """
    GET /api/repository_ids_for_setting_metadata

    Displays a collection (list) of repository ids ordered for setting metadata.

    :param key: the API key of the Tool Shed user.
    :param my_writable (optional):
        if the API key is associated with an admin user in the Tool Shed, setting this param value
        to True will restrict resetting metadata to only repositories that are writable by the user
        in addition to those repositories of type tool_dependency_definition.  This param is
        ignored if the current user is not an admin user, in which case this same restriction is
        automatic.
    """
    if trans.user_is_admin():
        my_writable = util.asbool(my_writable)
    else:
        my_writable = True
    handled_repository_ids = []
    repository_ids = []
    rmm = repository_metadata_manager.RepositoryMetadataManager(trans.app, trans.user)
    query = rmm.get_query_for_setting_metadata_on_repositories(my_writable=my_writable, order=False)
    # Make sure repositories of type tool_dependency_definition are first in the list.
    for repository in query:
        if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
            repository_ids.append(trans.security.encode_id(repository.id))
    # Now add all remaining repositories to the list.
    for repository in query:
        if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids:
            repository_ids.append(trans.security.encode_id(repository.id))
    return repository_ids

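# Hedged usage sketch (not part of the Tool Shed controller code above): one way a client might
# call the GET /api/repository_ids_for_setting_metadata endpoint with the `requests` library.
# The Tool Shed URL, API key, and helper name below are hypothetical placeholders, not values
# taken from this code.
import requests


def fetch_repository_ids_for_setting_metadata(tool_shed_url, api_key, my_writable=False):
    # The endpoint reads the API key from the 'key' parameter and an optional 'my_writable' flag.
    params = {'key': api_key, 'my_writable': str(my_writable).lower()}
    response = requests.get('%s/api/repository_ids_for_setting_metadata' % tool_shed_url,
                            params=params)
    response.raise_for_status()
    # The response body is a JSON list of encoded repository ids, with repositories of type
    # tool_dependency_definition listed first (see the controller method above).
    return response.json()


# Example with hypothetical values:
# fetch_repository_ids_for_setting_metadata('http://localhost:9009', 'YOUR_API_KEY', my_writable=True)
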
def handle_request(self, trans, **kwd):
    # The os command that results in this method being called will look something like:
    # hg clone http://[email protected]:9009/repos/test/convert_characters1
    hg_version = mercurial.__version__.version
    cmd = kwd.get('cmd', None)
    hgweb_config = trans.app.hgweb_config_manager.hgweb_config

    def make_web_app():
        hgwebapp = hgwebdir(hgweb_config)
        return hgwebapp

    wsgi_app = wsgiapplication(make_web_app)
    if hg_version >= '2.2.3' and cmd == 'pushkey':
        # When doing an "hg push" from the command line, the following commands, in order, will be
        # retrieved from environ, depending upon the mercurial version being used.  Mercurial
        # version 2.2.3 (see section 15.2, "Command changes", of the release notes at
        # http://mercurial.selenic.com/wiki/WhatsNew#Mercurial_2.2.3_.282012-07-01.29) added a new
        # feature: pushkey: add hooks for pushkey/listkeys.  We require version 2.2.3 since the
        # pushkey hook was added in that version.
        # If mercurial version >= '2.2.3': capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey
        path_info = kwd.get('path_info', None)
        if path_info:
            owner, name = path_info.split('/')
            repository = get_repository_by_name_and_owner(trans.app, name, owner)
            if repository:
                if hg_version >= '2.2.3':
                    # Update the repository on disk to the tip revision, because the web upload
                    # form uses the on-disk working directory.  If the repository is not updated
                    # on disk, pushing from the command line and then uploading via the web
                    # interface will result in a new head being created.
                    repo = hg.repository(ui.ui(), repository.repo_path(trans.app))
                    update_repository(repo, ctx_rev=None)
                    repository_clone_url = generate_clone_url_for_repository_in_tool_shed(trans.user, repository)
                    # Set metadata using the repository files on disk.
                    rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                                                user=trans.user,
                                                                                repository=repository,
                                                                                changeset_revision=repository.tip(trans.app),
                                                                                repository_clone_url=repository_clone_url,
                                                                                relative_install_dir=repository.repo_path(trans.app),
                                                                                repository_files_dir=None,
                                                                                resetting_all_metadata_on_repository=False,
                                                                                updating_installed_repository=False,
                                                                                persist=False)
                    error_message, status = rmm.set_repository_metadata(trans.request.host)
                    if status == 'ok' and error_message:
                        log.debug("Successfully reset metadata on repository %s owned by %s, but encountered problem: %s" %
                                  (str(repository.name), str(repository.user.username), error_message))
                    elif status != 'ok' and error_message:
                        log.debug("Error resetting metadata on repository %s owned by %s: %s" %
                                  (str(repository.name), str(repository.user.username), error_message))
    return wsgi_app

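# Hedged side note (not part of the middleware above): hg_version >= '2.2.3' is a lexical string
# comparison, so a release such as '2.10' would incorrectly compare as older than '2.2.3'.  Below
# is a minimal sketch of a numeric comparison; hg_version_at_least is a hypothetical helper, not a
# function used by the code above.
def hg_version_at_least(hg_version, required='2.2.3'):
    def as_tuple(version_string):
        # Turn '2.2.3' (or '2.10', or '2.2.3+local') into a tuple of ints for numeric comparison.
        parts = []
        for piece in version_string.split('+')[0].split('.'):
            digits = ''.join(ch for ch in piece if ch.isdigit())
            parts.append(int(digits) if digits else 0)
        return tuple(parts)
    return as_tuple(hg_version) >= as_tuple(required)


# Example: hg_version_at_least('2.10') is True, while the string comparison '2.10' >= '2.2.3' is False.
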
def reset_metadata_on_selected_repositories_in_tool_shed(self, trans, **kwd):
    rmm = repository_metadata_manager.RepositoryMetadataManager(trans.app, trans.user)
    if 'reset_metadata_on_selected_repositories_button' in kwd:
        message, status = rmm.reset_metadata_on_selected_repositories(**kwd)
    else:
        message = escape(util.restore_text(kwd.get('message', '')))
        status = kwd.get('status', 'done')
    repositories_select_field = rmm.build_repository_ids_select_field(name='repository_ids',
                                                                      multiple=True,
                                                                      display='checkboxes',
                                                                      my_writable=False)
    return trans.fill_template('/webapps/tool_shed/common/reset_metadata_on_selected_repositories.mako',
                               repositories_select_field=repositories_select_field,
                               message=message,
                               status=status)

def handle_repository(trans, start_time, repository):
    results = dict(start_time=start_time, repository_status=[])
    try:
        rmm = repository_metadata_manager.RepositoryMetadataManager(trans.app, trans.user)
        invalid_file_tups, metadata_dict = \
            rmm.reset_all_metadata_on_repository_in_tool_shed(trans.security.encode_id(repository.id))
        if invalid_file_tups:
            message = tool_util.generate_message_for_invalid_tools(trans.app,
                                                                   invalid_file_tups,
                                                                   repository,
                                                                   None,
                                                                   as_html=False)
        else:
            message = "Successfully reset metadata on repository %s owned by %s" % \
                (str(repository.name), str(repository.user.username))
    except Exception as e:
        message = "Error resetting metadata on repository %s owned by %s: %s" % \
            (str(repository.name), str(repository.user.username), str(e))

def upload(self, trans, **kwd):
    message = escape(kwd.get('message', ''))
    status = kwd.get('status', 'done')
    commit_message = escape(kwd.get('commit_message', 'Uploaded'))
    repository_id = kwd.get('repository_id', '')
    repository = repository_util.get_repository_in_tool_shed(trans.app, repository_id)
    repo_dir = repository.repo_path(trans.app)
    uncompress_file = util.string_as_bool(kwd.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(kwd.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = commit_util.get_upload_point(repository, **kwd)
    tip = repository.tip()
    file_data = kwd.get('file_data', '')
    url = kwd.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new()
    uploaded_directory = None
    if kwd.get('upload_button', False):
        if file_data == '' and url == '':
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            repo_url = 'http%s' % url[len('hg'):]
            cloned_ok, error_message = hg_util.clone_repository(repo_url, uploaded_directory)
            if not cloned_ok:
                message = 'Error uploading via mercurial clone: %s' % error_message
                status = 'error'
                basic_util.remove_dir(uploaded_directory)
                uploaded_directory = None
        elif url:
            valid_url = True
            try:
                stream = requests.get(url, stream=True)
            except Exception as e:
                valid_url = False
                message = 'Error uploading file via http: %s' % util.unicodify(e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                for chunk in stream.iter_content(chunk_size=util.CHUNK_SIZE):
                    if chunk:
                        uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        elif file_data not in ('', None):
            uploaded_file = file_data.file
            uploaded_file_name = uploaded_file.name
            uploaded_file_filename = os.path.split(file_data.filename)[-1]
            isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
        if uploaded_file or uploaded_directory:
            rdah = attribute_handlers.RepositoryDependencyAttributeHandler(trans.app, unpopulate=False)
            tdah = attribute_handlers.ToolDependencyAttributeHandler(trans.app, unpopulate=False)
            stdtm = ShedToolDataTableManager(trans.app)
            ok = True
            isgzip = False
            isbz2 = False
            if uploaded_file:
                if uncompress_file:
                    isgzip = checkers.is_gzip(uploaded_file_name)
                    if not isgzip:
                        isbz2 = checkers.is_bz2(uploaded_file_name)
                if isempty:
                    tar = None
                    istar = False
                else:
                    # Determine what we have - a single file or an archive
                    try:
                        if (isgzip or isbz2) and uncompress_file:
                            # Open for reading with transparent compression.
                            tar = tarfile.open(uploaded_file_name, 'r:*')
                        else:
                            tar = tarfile.open(uploaded_file_name)
                        istar = True
                    except tarfile.ReadError:
                        tar = None
                        istar = False
            else:
                # Uploaded directory
                istar = False
            if istar:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    repository_content_util.upload_tar(trans,
                                                       rdah,
                                                       tdah,
                                                       repository,
                                                       tar,
                                                       uploaded_file,
                                                       upload_point,
                                                       remove_repo_files_not_in_tar,
                                                       commit_message,
                                                       new_repo_alert)
            elif uploaded_directory:
                ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
                    self.upload_directory(trans,
                                          rdah,
                                          tdah,
                                          repository,
                                          uploaded_directory,
                                          upload_point,
                                          remove_repo_files_not_in_tar,
                                          commit_message,
                                          new_repo_alert)
            else:
                if (isgzip or isbz2) and uncompress_file:
                    uploaded_file_filename = commit_util.uncompress(repository,
                                                                    uploaded_file_name,
                                                                    uploaded_file_filename,
                                                                    isgzip=isgzip,
                                                                    isbz2=isbz2)
                if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION and \
                        uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Repository suite definition</b> can only contain a single file named '
                    message += '<b>repository_dependencies.xml</b>.'
                elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and \
                        uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                    ok = False
                    message = 'Repositories of type <b>Tool dependency definition</b> can only contain a single file named '
                    message += '<b>tool_dependencies.xml</b>.'
                if ok:
                    if upload_point is not None:
                        full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename))
                    else:
                        full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename))
                    # Move some version of the uploaded file to the load_point within the repository hierarchy.
                    if uploaded_file_filename in [rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME]:
                        # Inspect the contents of the file to see if toolshed or changeset_revision attributes
                        # are missing and if so, set them appropriately.
                        altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        elif altered:
                            tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                            shutil.move(tmp_filename, full_path)
                        else:
                            shutil.move(uploaded_file_name, full_path)
                    elif uploaded_file_filename in [rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME]:
                        # Inspect the contents of the file to see if changeset_revision values are
                        # missing and if so, set them appropriately.
                        altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name)
                        if error_message:
                            ok = False
                            message = error_message
                            status = 'error'
                        if ok:
                            if altered:
                                tmp_filename = xml_util.create_and_write_tmp_file(root_elem)
                                shutil.move(tmp_filename, full_path)
                            else:
                                shutil.move(uploaded_file_name, full_path)
                    else:
                        shutil.move(uploaded_file_name, full_path)
                    if ok:
                        # See if any admin users have chosen to receive email alerts when a repository is updated.
                        # If so, check every uploaded file to ensure content is appropriate.
                        check_contents = commit_util.check_file_contents_for_email_alerts(trans.app)
                        if check_contents and os.path.isfile(full_path):
                            content_alert_str = commit_util.check_file_content_for_html_and_images(full_path)
                        else:
                            content_alert_str = ''
                        hg_util.add_changeset(repo_dir, full_path)
                        hg_util.commit_changeset(repo_dir,
                                                 full_path_to_changeset=full_path,
                                                 username=trans.user.username,
                                                 message=commit_message)
                        if full_path.endswith('tool_data_table_conf.xml.sample'):
                            # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
                            # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
                            # dictionary.
                            error, error_message = stdtm.handle_sample_tool_data_table_conf_file(full_path, persist=False)
                            if error:
                                message = '%s<br/>%s' % (message, error_message)
                        # See if the content of the change set was valid.
                        admin_only = len(repository.downloadable_revisions) != 1
                        suc.handle_email_alerts(trans.app,
                                                trans.request.host,
                                                repository,
                                                content_alert_str=content_alert_str,
                                                new_repo_alert=new_repo_alert,
                                                admin_only=admin_only)
            if ok:
                # Update the repository files for browsing.
                hg_util.update_repository(repo_dir)
                # Get the new repository tip.
                if tip == repository.tip():
                    message = 'No changes to repository. '
                    status = 'warning'
                else:
                    if (isgzip or isbz2) and uncompress_file:
                        uncompress_str = ' uncompressed and '
                    else:
                        uncompress_str = ' '
                    if uploaded_directory:
                        source_type = "repository"
                        source = url
                    else:
                        source_type = "file"
                        source = uploaded_file_filename
                    message = "The %s <b>%s</b> has been successfully%suploaded to the repository. " % \
                        (source_type, escape(source), uncompress_str)
                    if istar and (undesirable_dirs_removed or undesirable_files_removed):
                        items_removed = undesirable_dirs_removed + undesirable_files_removed
                        message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " % items_removed
                        message += "were removed from the archive. "
                    if istar and remove_repo_files_not_in_tar and files_to_remove:
                        if upload_point is not None:
                            message += " %d files were removed from the repository relative to the selected upload point '%s'. " % \
                                (len(files_to_remove), upload_point)
                        else:
                            message += " %d files were removed from the repository root. " % len(files_to_remove)
                    rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                                                user=trans.user,
                                                                                repository=repository)
                    status, error_message = \
                        rmm.set_repository_metadata_due_to_new_tip(trans.request.host,
                                                                   content_alert_str=content_alert_str,
                                                                   **kwd)
                    if error_message:
                        message = error_message
                    kwd['message'] = message
                if repository.metadata_revisions:
                    # A repository's metadata revisions are ordered descending by update_time, so the zeroth revision
                    # will be the tip just after an upload.
                    metadata_dict = repository.metadata_revisions[0].metadata
                else:
                    metadata_dict = {}
                dd = dependency_display.DependencyDisplayer(trans.app)
                if str(repository.type) not in [rt_util.REPOSITORY_SUITE_DEFINITION,
                                                rt_util.TOOL_DEPENDENCY_DEFINITION]:
                    change_repository_type_message = rt_util.generate_message_for_repository_type_change(trans.app,
                                                                                                         repository)
                    if change_repository_type_message:
                        message += change_repository_type_message
                        status = 'warning'
                else:
                    # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies
                    # weren't loaded due to a requirement tag mismatch or some other problem.
                    # Tool dependency definitions can define orphan tool dependencies (no relationship to any tools
                    # contained in the repository), so warning messages are important because orphans are always valid.
                    # The repository owner must be warned in case they did not intend to define an orphan dependency,
                    # but simply provided incorrect information (tool shed, name owner, changeset_revision) for the
                    # definition.
                    orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict)
                    if orphan_message:
                        message += orphan_message
                        status = 'warning'
                # Handle messaging for invalid tool dependencies.
                invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies(metadata_dict)
                if invalid_tool_dependencies_message:
                    message += invalid_tool_dependencies_message
                    status = 'error'
                # Handle messaging for invalid repository dependencies.
                invalid_repository_dependencies_message = \
                    dd.generate_message_for_invalid_repository_dependencies(metadata_dict,
                                                                            error_from_tuple=True)
                if invalid_repository_dependencies_message:
                    message += invalid_repository_dependencies_message
                    status = 'error'
                # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
                stdtm.reset_tool_data_tables()
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                trans.response.send_redirect(web.url_for(controller='repository',
                                                         action='browse_repository',
                                                         id=repository_id,
                                                         commit_message='Deleted selected files',
                                                         message=message,
                                                         status=status))
            else:
                if uploaded_directory:
                    basic_util.remove_dir(uploaded_directory)
                status = 'error'
            # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
            stdtm.reset_tool_data_tables()
    return trans.fill_template('/webapps/tool_shed/repository/upload.mako',
                               repository=repository,
                               changeset_revision=tip,
                               url=url,
                               commit_message=commit_message,
                               uncompress_file=uncompress_file,
                               remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
                               message=message,
                               status=status)

def create_changeset_revision(self, trans, id, payload, **kwd):
    """
    POST /api/repositories/{encoded_repository_id}/changeset_revision

    Create a new tool shed repository commit - leaving PUT on parent resource open for updating
    meta-attributes of the repository (and Galaxy doesn't allow PUT multipart data anyway
    https://trello.com/c/CQwmCeG6).

    :param id: the encoded id of the Repository object

    The following parameters may be included in the payload.
    :param commit_message: hg commit message for update.
    """
    # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135
    rdah = attribute_handlers.RepositoryDependencyAttributeHandler(trans.app, unpopulate=False)
    tdah = attribute_handlers.ToolDependencyAttributeHandler(trans.app, unpopulate=False)
    repository = suc.get_repository_in_tool_shed(trans.app, id)
    repo_dir = repository.repo_path(trans.app)
    repo = hg_util.get_repo_for_repository(trans.app, repository=None, repo_path=repo_dir, create=False)
    upload_point = commit_util.get_upload_point(repository, **kwd)
    tip = repository.tip(trans.app)
    file_data = payload.get('file')
    # Code stolen from gx's upload_common.py
    if isinstance(file_data, FieldStorage):
        assert not isinstance(file_data.file, StringIO.StringIO)
        assert file_data.file.name != '<fdopen>'
        local_filename = util.mkstemp_ln(file_data.file.name, 'upload_file_data_')
        file_data.file.close()
        file_data = dict(filename=file_data.filename,
                         local_filename=local_filename)
    elif type(file_data) == dict and 'local_filename' not in file_data:
        raise Exception('Uploaded file was encoded in a way not understood.')
    commit_message = kwd.get('commit_message', 'Uploaded')
    uploaded_file = open(file_data['local_filename'], 'rb')
    uploaded_file_name = file_data['local_filename']
    isgzip = False
    isbz2 = False
    isgzip = checkers.is_gzip(uploaded_file_name)
    if not isgzip:
        isbz2 = checkers.is_bz2(uploaded_file_name)
    if (isgzip or isbz2):
        # Open for reading with transparent compression.
        tar = tarfile.open(uploaded_file_name, 'r:*')
    else:
        tar = tarfile.open(uploaded_file_name)
    new_repo_alert = False
    remove_repo_files_not_in_tar = True
    ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
        repository_content_util.upload_tar(trans,
                                           rdah,
                                           tdah,
                                           repository,
                                           tar,
                                           uploaded_file,
                                           upload_point,
                                           remove_repo_files_not_in_tar,
                                           commit_message,
                                           new_repo_alert)
    if ok:
        # Update the repository files for browsing.
        hg_util.update_repository(repo)
        # Get the new repository tip.
        if tip == repository.tip(trans.app):
            trans.response.status = 400
            message = 'No changes to repository.'
            ok = False
        else:
            rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                                        user=trans.user,
                                                                        repository=repository)
            status, error_message = \
                rmm.set_repository_metadata_due_to_new_tip(trans.request.host,
                                                           content_alert_str=content_alert_str,
                                                           **kwd)
            if error_message:
                ok = False
                trans.response.status = 500
                message = error_message
    else:
        trans.response.status = 500
    if not ok:
        return {"err_msg": message, "content_alert": content_alert_str}
    else:
        return {"message": message, "content_alert": content_alert_str}

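# Hedged usage sketch (not part of the API method above): one way a client might POST a new commit
# to /api/repositories/{encoded_repository_id}/changeset_revision using `requests`.  The Tool Shed
# URL, API key, tarball path, and helper name are hypothetical placeholders; commit_message is sent
# as a request parameter here on the assumption that it reaches the controller via kwd.
import requests


def create_changeset_revision_via_api(tool_shed_url, api_key, encoded_repository_id,
                                      tarball_path, commit_message='Uploaded'):
    url = '%s/api/repositories/%s/changeset_revision' % (tool_shed_url, encoded_repository_id)
    with open(tarball_path, 'rb') as fh:
        # The uploaded archive travels as the multipart 'file' field, matching payload.get('file').
        response = requests.post(url,
                                 params={'key': api_key, 'commit_message': commit_message},
                                 files={'file': fh})
    # On success the endpoint returns a dict with 'message' and 'content_alert' keys; a non-2xx
    # status signals failure and the body then carries 'err_msg' instead.
    return response.json()


# Example with hypothetical values:
# create_changeset_revision_via_api('http://localhost:9009', 'YOUR_API_KEY',
#                                   'f9cad7b01a472135', 'my_tool.tar.gz')
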
            results['unsuccessful_count'] += 1
        else:
            message = "Successfully reset metadata on repository %s owned by %s" % \
                (str(repository.name), str(repository.user.username))
            results['successful_count'] += 1
    except Exception as e:
        message = "Error resetting metadata on repository %s owned by %s: %s" % \
            (str(repository.name), str(repository.user.username), str(e))
        results['unsuccessful_count'] += 1
    status = '%s : %s' % (str(repository.name), message)
    results['repository_status'].append(status)
    return results

rmm = repository_metadata_manager.RepositoryMetadataManager(app=trans.app,
                                                            user=trans.user,
                                                            resetting_all_metadata_on_repository=True,
                                                            updating_installed_repository=False,
                                                            persist=False)
start_time = strftime("%Y-%m-%d %H:%M:%S")
results = dict(start_time=start_time,
               repository_status=[],
               successful_count=0,
               unsuccessful_count=0)
handled_repository_ids = []
encoded_ids_to_skip = payload.get('encoded_ids_to_skip', [])
skip_file = payload.get('skip_file', None)
if skip_file and os.path.exists(skip_file) and not encoded_ids_to_skip:
    # Load the list of encoded_ids_to_skip from the skip_file.
    # Contents of file must be 1 encoded repository id per line.
    lines = open(skip_file, 'rb').readlines()
    for line in lines:

source_type = "file" source = uploaded_file_filename message = "The %s <b>%s</b> has been successfully%suploaded to the repository. " % \ ( source_type, escape( source ), uncompress_str ) if istar and ( undesirable_dirs_removed or undesirable_files_removed ): items_removed = undesirable_dirs_removed + undesirable_files_removed message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " % items_removed message += "were removed from the archive. " if istar and remove_repo_files_not_in_tar and files_to_remove: if upload_point is not None: message += " %d files were removed from the repository relative to the selected upload point '%s'. " % \ ( len( files_to_remove ), upload_point ) else: message += " %d files were removed from the repository root. " % len( files_to_remove ) rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app, user=trans.user, repository=repository ) status, error_message = \ rmm.set_repository_metadata_due_to_new_tip( trans.request.host, content_alert_str=content_alert_str, **kwd ) if error_message: message = error_message kwd[ 'message' ] = message if repository.metadata_revisions: # A repository's metadata revisions are order descending by update_time, so the zeroth revision # will be the tip just after an upload. metadata_dict = repository.metadata_revisions[ 0 ].metadata else: metadata_dict = {} dd = dependency_display.DependencyDisplayer( trans.app )
                None, as_html=False)
            results['unsuccessful_count'] += 1
        else:
            message = "Successfully reset metadata on repository %s owned by %s" % \
                (str(repository.name), str(repository.user.username))
            results['successful_count'] += 1
    except Exception as e:
        message = "Error resetting metadata on repository %s owned by %s: %s" % \
            (str(repository.name), str(repository.user.username), str(e))
        results['unsuccessful_count'] += 1
    status = '%s : %s' % (str(repository.name), message)
    results['repository_status'].append(status)
    return results

rmm = repository_metadata_manager.RepositoryMetadataManager(trans.app, trans.user)
start_time = strftime("%Y-%m-%d %H:%M:%S")
results = dict(start_time=start_time,
               repository_status=[],
               successful_count=0,
               unsuccessful_count=0)
handled_repository_ids = []
encoded_ids_to_skip = payload.get('encoded_ids_to_skip', [])
skip_file = payload.get('skip_file', None)
if skip_file and os.path.exists(skip_file) and not encoded_ids_to_skip:
    # Load the list of encoded_ids_to_skip from the skip_file.
    # Contents of file must be 1 encoded repository id per line.
    lines = open(skip_file, 'rb').readlines()
    for line in lines:
        if line.startswith('#'):
            # Skip comments.