def run_proprietary_fabric_method( app, elem, proprietary_fabfile_path, install_dir, package_name=None, **kwd ):
    """
    TODO: Handle this using the fabric api.
    Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
    """
    if not os.path.exists( install_dir ):
        os.makedirs( install_dir )
    # Default value for env_dependency_path.
    env_dependency_path = install_dir
    method_name = elem.get( 'name', None )
    params_str = ''
    actions = []
    for param_elem in elem:
        param_name = param_elem.get( 'name' )
        if param_name:
            if param_name == 'actions':
                for action_elem in param_elem:
                    actions.append( action_elem.text.replace( '$INSTALL_DIR', install_dir ) )
                if actions:
                    params_str += 'actions=%s,' % encoding_util.tool_shed_encode( encoding_util.encoding_sep.join( actions ) )
            else:
                if param_elem.text:
                    param_value = encoding_util.tool_shed_encode( param_elem.text )
                    params_str += '%s=%s,' % ( param_name, param_value )
    if package_name:
        params_str += 'package_name=%s' % package_name
    else:
        params_str = params_str.rstrip( ',' )
    try:
        cmd = 'fab -f %s %s:%s' % ( proprietary_fabfile_path, method_name, params_str )
        returncode, message = run_subprocess( app, cmd )
    except Exception, e:
        return "Exception executing fabric script %s: %s. " % ( str( proprietary_fabfile_path ), str( e ) )
def run_proprietary_fabric_method(app, elem, proprietary_fabfile_path, install_dir, package_name=None, **kwd):
    """
    TODO: Handle this using the fabric api.
    Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method.
    """
    if not os.path.exists(install_dir):
        os.makedirs(install_dir)
    # Default value for env_dependency_path.
    env_dependency_path = install_dir
    method_name = elem.get('name', None)
    params_str = ''
    actions = []
    for param_elem in elem:
        param_name = param_elem.get('name')
        if param_name:
            if param_name == 'actions':
                for action_elem in param_elem:
                    actions.append(action_elem.text.replace('$INSTALL_DIR', install_dir))
                if actions:
                    params_str += 'actions=%s,' % encoding_util.tool_shed_encode(
                        encoding_util.encoding_sep.join(actions))
            else:
                if param_elem.text:
                    param_value = encoding_util.tool_shed_encode(param_elem.text)
                    params_str += '%s=%s,' % (param_name, param_value)
    if package_name:
        params_str += 'package_name=%s' % package_name
    else:
        params_str = params_str.rstrip(',')
    try:
        cmd = 'fab -f %s %s:%s' % (proprietary_fabfile_path, method_name, params_str)
        returncode, message = run_subprocess(app, cmd)
    except Exception, e:
        return "Exception executing fabric script %s: %s. " % (
            str(proprietary_fabfile_path), str(e))
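# A minimal illustrative sketch (not from Galaxy's test suite) of the <method>
# tag set shape that run_proprietary_fabric_method() above parses, and the
# rough `fab` command line it would build. The method name, params, and paths
# here are hypothetical.
import xml.etree.ElementTree as ET

method_elem = ET.fromstring("""
<method name="install_and_build">
    <param name="actions">
        <action>./configure --prefix=$INSTALL_DIR</action>
        <action>make &amp;&amp; make install</action>
    </param>
    <param name="version">1.0</param>
</method>
""")
# run_proprietary_fabric_method(app, method_elem, 'fabfile.py', '/deps/pkg/1.0')
# would expand $INSTALL_DIR in each action, tool_shed_encode the values, and
# execute roughly:
#   fab -f fabfile.py install_and_build:actions=<encoded>,version=<encoded>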
def repair_repository_revision( self, trans, payload, **kwd ):
    """
    POST /api/tool_shed_repositories/repair_repository_revision
    Repair a specified repository revision previously installed into Galaxy.

    :param key: the current Galaxy admin user's API key

    The following parameters are included in the payload.
    :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository was installed
    :param name (required): the name of the Repository
    :param owner (required): the owner of the Repository
    :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
    """
    api_key = kwd.get( 'key', None )
    # Get the information about the repository to be installed from the payload.
    tool_shed_url = payload.get( 'tool_shed_url', '' )
    if not tool_shed_url:
        raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
    name = payload.get( 'name', '' )
    if not name:
        raise HTTPBadRequest( detail="Missing required parameter 'name'." )
    owner = payload.get( 'owner', '' )
    if not owner:
        raise HTTPBadRequest( detail="Missing required parameter 'owner'." )
    changeset_revision = payload.get( 'changeset_revision', '' )
    if not changeset_revision:
        raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
    tool_shed_repositories = []
    tool_shed_repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app,
                                                                                               tool_shed_url,
                                                                                               name,
                                                                                               owner,
                                                                                               changeset_revision )
    repair_dict = repository_util.get_repair_dict( trans, tool_shed_repository )
    ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] )
    ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] )
    if ordered_tsr_ids and ordered_repo_info_dicts:
        repositories_for_repair = []
        for index, tsr_id in enumerate( ordered_tsr_ids ):
            repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
            repo_info_dict = ordered_repo_info_dicts[ index ]
            # TODO: handle errors in repair_dict.
            repair_dict = repository_util.repair_tool_shed_repository( trans,
                                                                       repository,
                                                                       encoding_util.tool_shed_encode( repo_info_dict ) )
            repository_dict = repository.get_api_value( value_mapper=default_tool_shed_repository_value_mapper( trans, repository ) )
            repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
                                                    action='show',
                                                    id=trans.security.encode_id( repository.id ) )
            if repair_dict:
                errors = repair_dict.get( repository.name, [] )
                repository_dict[ 'errors_attempting_repair' ] = ' '.join( errors )
            tool_shed_repositories.append( repository_dict )
    # Display the list of repaired repositories.
    return tool_shed_repositories
def repair_repository_revision(self, trans, payload, **kwd):
    """
    POST /api/tool_shed_repositories/repair_repository_revision
    Repair a specified repository revision previously installed into Galaxy.

    :param key: the current Galaxy admin user's API key

    The following parameters are included in the payload.
    :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository was installed
    :param name (required): the name of the Repository
    :param owner (required): the owner of the Repository
    :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
    """
    # Get the information about the repository to be installed from the payload.
    tool_shed_url, name, owner, changeset_revision = self.__parse_repository_from_payload(
        payload, include_changeset=True)
    tool_shed_repositories = []
    tool_shed_repository = repository_util.get_installed_repository(
        self.app,
        tool_shed=tool_shed_url,
        name=name,
        owner=owner,
        changeset_revision=changeset_revision)
    rrm = RepairRepositoryManager(self.app)
    repair_dict = rrm.get_repair_dict(tool_shed_repository)
    ordered_tsr_ids = repair_dict.get('ordered_tsr_ids', [])
    ordered_repo_info_dicts = repair_dict.get('ordered_repo_info_dicts', [])
    if ordered_tsr_ids and ordered_repo_info_dicts:
        for index, tsr_id in enumerate(ordered_tsr_ids):
            repository = trans.install_model.context.query(
                trans.install_model.ToolShedRepository).get(trans.security.decode_id(tsr_id))
            repo_info_dict = ordered_repo_info_dicts[index]
            # TODO: handle errors in repair_dict.
            repair_dict = rrm.repair_tool_shed_repository(
                repository, encoding_util.tool_shed_encode(repo_info_dict))
            repository_dict = repository.to_dict(
                value_mapper=self.__get_value_mapper(trans, repository))
            repository_dict['url'] = web.url_for(controller='tool_shed_repositories',
                                                 action='show',
                                                 id=trans.security.encode_id(repository.id))
            if repair_dict:
                errors = repair_dict.get(repository.name, [])
                repository_dict['errors_attempting_repair'] = ' '.join(errors)
            tool_shed_repositories.append(repository_dict)
    # Display the list of repaired repositories.
    return tool_shed_repositories
def repair_repository_revision( self, trans, payload, **kwd ):
    """
    POST /api/tool_shed_repositories/repair_repository_revision
    Repair a specified repository revision previously installed into Galaxy.

    :param key: the current Galaxy admin user's API key

    The following parameters are included in the payload.
    :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository was installed
    :param name (required): the name of the Repository
    :param owner (required): the owner of the Repository
    :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
    """
    # Get the information about the repository to be installed from the payload.
    tool_shed_url, name, owner, changeset_revision = self.__parse_repository_from_payload( payload, include_changeset=True )
    tool_shed_repositories = []
    tool_shed_repository = repository_util.get_installed_repository( self.app,
                                                                     tool_shed=tool_shed_url,
                                                                     name=name,
                                                                     owner=owner,
                                                                     changeset_revision=changeset_revision )
    rrm = RepairRepositoryManager( self.app )
    repair_dict = rrm.get_repair_dict( tool_shed_repository )
    ordered_tsr_ids = repair_dict.get( 'ordered_tsr_ids', [] )
    ordered_repo_info_dicts = repair_dict.get( 'ordered_repo_info_dicts', [] )
    if ordered_tsr_ids and ordered_repo_info_dicts:
        for index, tsr_id in enumerate( ordered_tsr_ids ):
            repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
            repo_info_dict = ordered_repo_info_dicts[ index ]
            # TODO: handle errors in repair_dict.
            repair_dict = rrm.repair_tool_shed_repository( repository,
                                                           encoding_util.tool_shed_encode( repo_info_dict ) )
            repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans, repository ) )
            repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
                                                    action='show',
                                                    id=trans.security.encode_id( repository.id ) )
            if repair_dict:
                errors = repair_dict.get( repository.name, [] )
                repository_dict[ 'errors_attempting_repair' ] = ' '.join( errors )
            tool_shed_repositories.append( repository_dict )
    # Display the list of repaired repositories.
    return tool_shed_repositories
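# Minimal usage sketch for the repair endpoint documented above, assuming a
# Galaxy server at localhost:8080 and an admin API key; the repository name,
# owner, and changeset revision values are hypothetical placeholders.
import json
from urllib.request import Request, urlopen

payload = {
    'tool_shed_url': 'https://toolshed.g2.bx.psu.edu',
    'name': 'package_samtools_0_1_19',
    'owner': 'iuc',
    'changeset_revision': '96622894842c',
}
url = 'http://localhost:8080/api/tool_shed_repositories/repair_repository_revision?key=<admin-api-key>'
request = Request(url,
                  data=json.dumps(payload).encode('utf-8'),
                  headers={'Content-Type': 'application/json'})
repaired = json.loads(urlopen(request).read())
for repository_dict in repaired:
    # Each entry carries a 'url' plus any 'errors_attempting_repair' text.
    print(repository_dict['url'], repository_dict.get('errors_attempting_repair', ''))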
def extract_capsule_files( trans, **kwd ):
    """Extract the uploaded capsule archive into a temporary location for inspection, validation and potential import."""
    return_dict = {}
    tar_archive = kwd.get( 'tar_archive', None )
    capsule_file_name = kwd.get( 'capsule_file_name', None )
    if tar_archive is not None and capsule_file_name is not None:
        return_dict.update( kwd )
        extract_directory_path = tempfile.mkdtemp( prefix="tmp-capsule-ecf" )
        if capsule_file_name.endswith( '.tar.gz' ):
            extract_directory_name = capsule_file_name.replace( '.tar.gz', '' )
        elif capsule_file_name.endswith( '.tar' ):
            extract_directory_name = capsule_file_name.replace( '.tar', '' )
        else:
            extract_directory_name = capsule_file_name
        file_path = os.path.join( extract_directory_path, extract_directory_name )
        return_dict[ 'encoded_file_path' ] = encoding_util.tool_shed_encode( file_path )
        tar_archive.extractall( path=file_path )
        try:
            tar_archive.close()
        except Exception, e:
            log.exception( "Cannot close tar_archive: %s" % str( e ) )
        del return_dict[ 'tar_archive' ]
    return return_dict
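# Usage sketch for extract_capsule_files() above (assumes the function and its
# encoding_util/log dependencies are in scope; trans is unused by the code
# shown, so None is passed here, and the file names are placeholders).
import os
import tarfile
import tempfile

# Build a tiny capsule archive to extract.
work = tempfile.mkdtemp()
with open(os.path.join(work, 'manifest.xml'), 'w') as f:
    f.write('<repositories/>')
capsule_path = os.path.join(work, 'my_capsule.tar.gz')
with tarfile.open(capsule_path, 'w:gz') as tar:
    tar.add(os.path.join(work, 'manifest.xml'), arcname='manifest.xml')

tar_archive = tarfile.open(capsule_path, mode='r:gz')
result = extract_capsule_files(None,
                               tar_archive=tar_archive,
                               capsule_file_name='my_capsule.tar.gz')
# result['encoded_file_path'] points (encoded) at the extracted directory.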
def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
    them to the list.  All repository_dependency entries in each of the received repo_info_dicts include all
    required repositories, so only one pass through this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
        # shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( repo_info_dict )
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tup )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        repository_components_tuple = container_util.get_components_from_key( key )
                        components_list = suc.extract_components_from_tuple( repository_components_tuple )
                        # Skip listing a repository dependency if it is required only to compile a tool dependency
                        # defined for the dependent repository since in this case, the repository dependency is really
                        # a dependency of the dependent repository's contained tool dependency, and only if that
                        # tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release.
                        prior_installation_required = 'False'
                        only_if_compiling_contained_td = 'False'
                        if len( components_list ) == 4:
                            prior_installation_required = 'False'
                            only_if_compiling_contained_td = 'False'
                        elif len( components_list ) == 5:
                            prior_installation_required = components_list[ 4 ]
                            only_if_compiling_contained_td = 'False'
                        if not asbool( only_if_compiling_contained_td ):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append( components_list )
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[ 5 ]
                            except:
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency
                            # defined for the dependent repository (see above comment).
                            if not asbool( only_if_compiling_contained_td ):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append( components_list )
                else:
                    # We have a single repository with no dependencies.
                    components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                    required_repository_tups.append( components_list )
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [ str( item ) for item in required_repository_tup ]
                encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
            encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
            encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
            if suc.is_tool_shed_client( self.app ):
                # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
            url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
            # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
            url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
            request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
            response = urllib2.urlopen( request ).read()
            if response:
                try:
                    required_repo_info_dict = json.loads( response )
                except Exception, e:
                    log.exception( e )
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                            required_repo_info_dicts.append( decoded_dict )
                    else:
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[ k ] = v
                        else:
                            if v and not all_required_repo_info_dict[ k ]:
                                all_required_repo_info_dict[ k ] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                        # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                        # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                        # lists of discovered repository dependencies, but these lists will be empty in the
                        # required_repo_info_dict since dependency discovery has not yet been performed for these
                        # dictionaries.
                        required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
                        all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
                        if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                            all_repo_info_dicts.append( required_repo_info_dict )
    all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
    return all_required_repo_info_dict
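# Sketch of the wire format the loop above builds: each repository tuple's
# fields are joined with encoding_util.encoding_sep and the encoded tuples are
# joined with encoding_util.encoding_sep2 before tool_shed_encode() wraps the
# result. The separator values and repository details shown here are
# illustrative stand-ins, not the authoritative encoding_util constants.
ENCODING_SEP = '__esep__'     # joins the fields of one repository tuple
ENCODING_SEP2 = '__esepii__'  # joins the encoded tuples together

required_repository_tups = [
    ['https://toolshed.example.org', 'repo_a', 'owner_a', 'abc123', 'False'],
    ['https://toolshed.example.org', 'repo_b', 'owner_b', 'def456', 'True'],
]
encoded_tups = [ENCODING_SEP.join(str(item) for item in tup) for tup in required_repository_tups]
encoded_str = ENCODING_SEP2.join(encoded_tups)
# encoding_util.tool_shed_encode(encoded_str) would then wrap this string in
# the Tool Shed's encoding before it is POSTed as the 'encoded_str' parameter.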
        export_elem = xml_util.create_element( 'export_info', attributes=None, sub_elements=sub_elements )
        tmp_export_info = xml_util.create_and_write_tmp_file( export_elem, use_indent=True )
        repositories_archive.add( tmp_export_info, arcname='export_info.xml' )
        # Write the manifest, which must preserve the order in which the repositories should be imported.
        exported_repository_root = xml_util.create_element( 'repositories' )
        for exported_repository_elem in exported_repository_registry.exported_repository_elems:
            exported_repository_root.append( exported_repository_elem )
        tmp_manifest = xml_util.create_and_write_tmp_file( exported_repository_root, use_indent=True )
        repositories_archive.add( tmp_manifest, arcname='manifest.xml' )
    except Exception, e:
        log.exception( str( e ) )
    finally:
        lock.release()
    repositories_archive.close()
    if api:
        encoded_repositories_archive_name = encoding_util.tool_shed_encode( repositories_archive_filename )
        download_url = suc.url_join( web.url_for( '/', qualified=True ),
                                     'repository/export_via_api?encoded_repositories_archive_name=%s' % encoded_repositories_archive_name )
        return dict( download_url=download_url, error_messages=error_messages )
    return repositories_archive, error_messages

def generate_repository_archive( trans, work_dir, tool_shed_url, repository, changeset_revision, file_type ):
    file_type_str = suc.get_file_type_str( changeset_revision, file_type )
    file_name = '%s-%s' % ( repository.name, file_type_str )
    return_code, error_message = archive_repository_revision( trans, ui, repository, work_dir, changeset_revision )
    if return_code:
        return None, error_message
    repository_archive_name = os.path.join( work_dir, file_name )
    # Create a compressed tar archive that will contain only valid files and possibly altered dependency definition files.
    repository_archive = tarfile.open( repository_archive_name, "w:%s" % file_type )
    for root, dirs, files in os.walk( work_dir ):
def get_required_repo_info_dicts( trans, tool_shed_url, repo_info_dicts ):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
    them to the list.  All repository_dependencies entries in each of the received repo_info_dicts include all
    required repositories, so only one pass through this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( repo_info_dict )
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tup )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        try:
                            toolshed, name, owner, changeset_revision, prior_installation_required = \
                                container_util.get_components_from_key( key )
                            components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
                        except ValueError:
                            # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required
                            # to False in the caller.
                            toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
                            components_list = [ toolshed, name, owner, changeset_revision ]
                        if components_list not in required_repository_tups:
                            required_repository_tups.append( components_list )
                        for components_list in val:
                            if components_list not in required_repository_tups:
                                required_repository_tups.append( components_list )
                else:
                    # We have a single repository with no dependencies.
                    components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision, 'False' ]
                    required_repository_tups.append( components_list )
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [ str( item ) for item in required_repository_tup ]
                encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
            encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
            encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
            url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
            request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
            response = urllib2.urlopen( request ).read()
            if response:
                try:
                    required_repo_info_dict = json.from_json_string( response )
                except Exception, e:
                    log.exception( e )
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                            required_repo_info_dicts.append( decoded_dict )
                    else:
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[ k ] = v
                        else:
                            if v and not all_required_repo_info_dict[ k ]:
                                all_required_repo_info_dict[ k ] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        if required_repo_info_dict not in all_repo_info_dicts:
                            all_repo_info_dicts.append( required_repo_info_dict )
    all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
    return all_required_repo_info_dict
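# The backward-compatibility branch in the version above relies on tuple
# unpacking raising ValueError when the component count is wrong; a minimal
# standalone demonstration of the same technique (parse_components and the
# sample tuples are illustrative, not Galaxy code):
def parse_components(components):
    try:
        toolshed, name, owner, changeset_revision, prior_installation_required = components
    except ValueError:
        # Pre-12/20/12 keys carry only four components.
        toolshed, name, owner, changeset_revision = components
        prior_installation_required = 'False'
    return toolshed, name, owner, changeset_revision, prior_installation_required

print(parse_components(('shed', 'repo', 'owner', 'abc123')))
print(parse_components(('shed', 'repo', 'owner', 'abc123', 'True')))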
def render_render_galaxy_repository_actions(context,repository=None):
    context.caller_stack._push_frame()
    try:
        h = context.get('h', UNDEFINED)
        workflow_name = context.get('workflow_name', UNDEFINED)
        trans = context.get('trans', UNDEFINED)
        __M_writer = context.writer()
        # SOURCE LINE 3
        __M_writer(u'\n ')
        # SOURCE LINE 4
        from tool_shed.util.encoding_util import tool_shed_encode
        in_error_state = repository.in_error_state
        tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in repository.tool_dependencies ]
        # SOURCE LINE 8
        __M_writer(u'\n <br/><br/>\n <ul class="manage-table-actions">\n <li><a class="action-button" id="repository-')
        # SOURCE LINE 11
        __M_writer(unicode(repository.id))
        __M_writer(u'-popup" class="menubutton">Repository Actions</a></li>\n <div popupmenu="repository-')
        # SOURCE LINE 12
        __M_writer(unicode(repository.id))
        __M_writer(u'-popup">\n')
        # SOURCE LINE 13
        if workflow_name:
            # SOURCE LINE 14
            __M_writer(u' <li><a class="action-button" target="galaxy_main" href="')
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='import_workflow', workflow_name=tool_shed_encode( workflow_name ), repository_id=trans.security.encode_id( repository.id ) )))
            __M_writer(u'">Import workflow to Galaxy</a></li>\n')
            pass
        # SOURCE LINE 16
        if repository.can_reinstall_or_activate:
            # SOURCE LINE 17
            __M_writer(u' <a class="action-button" href="')
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_repositories', operation='activate or reinstall', id=trans.security.encode_id( repository.id ) )))
            __M_writer(u'">Activate or reinstall repository</a>\n')
            pass
        # SOURCE LINE 19
        if in_error_state:
            # SOURCE LINE 20
            __M_writer(u' <a class="action-button" target="galaxy_main" href="')
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='reset_to_install', id=trans.security.encode_id( repository.id ), reset_repository=True )))
            __M_writer(u'">Reset to install</a>\n')
        # SOURCE LINE 21
        elif repository.can_install:
            # SOURCE LINE 22
            __M_writer(u' <a class="action-button" target="galaxy_main" href="')
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ), operation='install' )))
            __M_writer(u'">Install</a>\n')
        # SOURCE LINE 23
        elif repository.can_uninstall:
            # SOURCE LINE 24
            __M_writer(u' <a class="action-button" target="galaxy_main" href="')
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )))
            __M_writer(u'">Manage repository</a>\n <a class="action-button" target="galaxy_main" href="')
            # SOURCE LINE 25
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )))
            __M_writer(u'">Browse repository files</a>\n <a class="action-button" target="galaxy_main" href="')
            # SOURCE LINE 26
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )))
            __M_writer(u'">Get repository updates</a>\n <a class="action-button" target="galaxy_main" href="')
            # SOURCE LINE 27
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='repair_repository', id=trans.security.encode_id( repository.id ) )))
            __M_writer(u'">Repair repository</a>\n')
            # SOURCE LINE 28
            if repository.can_reset_metadata:
                # SOURCE LINE 29
                __M_writer(u' <a class="action-button" target="galaxy_main" href="')
                __M_writer(unicode(h.url_for( controller='admin_toolshed', action='reset_repository_metadata', id=trans.security.encode_id( repository.id ) )))
                __M_writer(u'">Reset repository metadata</a>\n')
                pass
            # SOURCE LINE 31
            if repository.includes_tools:
                # SOURCE LINE 32
                __M_writer(u' <a class="action-button" target="galaxy_main" href="')
                __M_writer(unicode(h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )))
                __M_writer(u'">Set tool versions</a>\n')
                pass
            # SOURCE LINE 34
            if tool_dependency_ids:
                # SOURCE LINE 35
                __M_writer(u' <a class="action-button" target="galaxy_main" href="')
                __M_writer(unicode(h.url_for( controller='admin_toolshed', action='manage_repository_tool_dependencies', tool_dependency_ids=tool_dependency_ids, repository_id=trans.security.encode_id( repository.id ) )))
                __M_writer(u'">Manage tool dependencies</a>\n')
                pass
            # SOURCE LINE 37
            __M_writer(u' <a class="action-button" target="galaxy_main" href="')
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )))
            __M_writer(u'">Deactivate or uninstall repository</a>\n')
            pass
        # SOURCE LINE 39
        __M_writer(u' </div>\n </ul>\n')
        return ''
    finally:
        context.caller_stack._pop_frame()
def get_required_repo_info_dicts(self, tool_shed_url, repo_info_dicts):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
    them to the list.  All repository_dependency entries in each of the received repo_info_dicts include all
    required repositories, so only one pass through this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
        # shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append(repo_info_dict)
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    repository_util.get_repo_info_tuple_contents(repo_info_tup)
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in ['root_key', 'description']:
                            continue
                        repository_components_tuple = container_util.get_components_from_key(key)
                        components_list = repository_util.extract_components_from_tuple(repository_components_tuple)
                        # Skip listing a repository dependency if it is required only to compile a tool dependency
                        # defined for the dependent repository since in this case, the repository dependency is really
                        # a dependency of the dependent repository's contained tool dependency, and only if that
                        # tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release.
                        only_if_compiling_contained_td = 'False'
                        if len(components_list) == 4:
                            only_if_compiling_contained_td = 'False'
                        elif len(components_list) == 5:
                            only_if_compiling_contained_td = 'False'
                        if not asbool(only_if_compiling_contained_td):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append(components_list)
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[5]
                            except IndexError:
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency
                            # defined for the dependent repository (see above comment).
                            if not asbool(only_if_compiling_contained_td):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append(components_list)
                else:
                    # We have a single repository with no dependencies.
                    components_list = [tool_shed_url, repository_name, repository_owner, changeset_revision]
                    required_repository_tups.append(components_list)
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [str(item) for item in required_repository_tup]
                encoded_required_repository_tups.append(encoding_util.encoding_sep.join(required_repository_tup))
            encoded_required_repository_str = encoding_util.encoding_sep2.join(encoded_required_repository_tups)
            encoded_required_repository_str = encoding_util.tool_shed_encode(encoded_required_repository_str)
            if repository_util.is_tool_shed_client(self.app):
                # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_shed_url)
            pathspec = ['repository', 'get_required_repo_info_dict']
            url = build_url(tool_shed_url, pathspec=pathspec)
            # Fix for handling 307 redirect not being handled nicely by urlopen() when the Request() has data provided
            url = _urlopen(url).geturl()
            response = _urlopen(url, urlencode(dict(encoded_str=encoded_required_repository_str))).read()
            if response:
                try:
                    required_repo_info_dict = json.loads(response)
                except Exception as e:
                    log.exception(e)
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict['repo_info_dicts']
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode(encoded_dict_str)
                            required_repo_info_dicts.append(decoded_dict)
                    else:
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[k] = v
                        else:
                            if v and not all_required_repo_info_dict[k]:
                                all_required_repo_info_dict[k] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                        # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                        # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                        # lists of discovered repository dependencies, but these lists will be empty in the
                        # required_repo_info_dict since dependency discovery has not yet been performed for these
                        # dictionaries.
                        required_repo_info_dict_key = next(iter(required_repo_info_dict))
                        all_repo_info_dicts_keys = [next(iter(d)) for d in all_repo_info_dicts]
                        if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                            all_repo_info_dicts.append(required_repo_info_dict)
                        else:
                            # required_repo_info_dict_key corresponds to the repo name.
                            # A single install transaction might require the installation of 2 or more repos
                            # with the same repo name but different owners or versions.
                            # Therefore, if required_repo_info_dict_key is already in all_repo_info_dicts,
                            # check that the tool id is already present. If it is not, we are dealing with the same repo name,
                            # but a different owner/changeset revision or version and we add the repo to the list of repos to be installed.
                            tool_id = required_repo_info_dict[required_repo_info_dict_key][1]
                            is_present = False
                            for repo_info_dict in all_repo_info_dicts:
                                for k, v in repo_info_dict.items():
                                    if required_repo_info_dict_key == k:
                                        if tool_id == v[1]:
                                            is_present = True
                                            break
                            if not is_present:
                                all_repo_info_dicts.append(required_repo_info_dict)
    all_required_repo_info_dict['all_repo_info_dicts'] = all_repo_info_dicts
    return all_required_repo_info_dict
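# Why next(iter(d)) replaces the older d.keys()[0] seen in other versions of
# this method: in Python 3, dict.keys() returns a view that does not support
# indexing, so the first key of a single-entry dict is taken via the iterator
# protocol instead. Standalone demonstration (the dict content is illustrative):
d = {'repo_name': ('description', 'clone_url')}
first_key = next(iter(d))  # 'repo_name'
assert first_key == list(d.keys())[0]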
def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
    them to the list.  All repository_dependency entries in each of the received repo_info_dicts include all
    required repositories, so only one pass through this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
        # shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( repo_info_dict )
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tup )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        repository_components_tuple = container_util.get_components_from_key( key )
                        components_list = suc.extract_components_from_tuple( repository_components_tuple )
                        # Skip listing a repository dependency if it is required only to compile a tool dependency
                        # defined for the dependent repository since in this case, the repository dependency is really
                        # a dependency of the dependent repository's contained tool dependency, and only if that
                        # tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release.
                        only_if_compiling_contained_td = 'False'
                        if len( components_list ) == 4:
                            only_if_compiling_contained_td = 'False'
                        elif len( components_list ) == 5:
                            only_if_compiling_contained_td = 'False'
                        if not asbool( only_if_compiling_contained_td ):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append( components_list )
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[ 5 ]
                            except:
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency
                            # defined for the dependent repository (see above comment).
                            if not asbool( only_if_compiling_contained_td ):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append( components_list )
                else:
                    # We have a single repository with no dependencies.
                    components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                    required_repository_tups.append( components_list )
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [ str( item ) for item in required_repository_tup ]
                encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
            encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
            encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
            if suc.is_tool_shed_client( self.app ):
                # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
            pathspec = [ 'repository', 'get_required_repo_info_dict' ]
            url = common_util.url_join( tool_shed_url, pathspec=pathspec )
            # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
            url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
            request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
            response = urllib2.urlopen( request ).read()
            if response:
                try:
                    required_repo_info_dict = json.loads( response )
                except Exception, e:
                    log.exception( e )
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                            required_repo_info_dicts.append( decoded_dict )
                    else:
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[ k ] = v
                        else:
                            if v and not all_required_repo_info_dict[ k ]:
                                all_required_repo_info_dict[ k ] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                        # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                        # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                        # lists of discovered repository dependencies, but these lists will be empty in the
                        # required_repo_info_dict since dependency discovery has not yet been performed for these
                        # dictionaries.
                        required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
                        all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
                        if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                            all_repo_info_dicts.append( required_repo_info_dict )
    all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
    return all_required_repo_info_dict
def get_required_repo_info_dicts(self, tool_shed_url, repo_info_dicts):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
    them to the list.  All repository_dependency entries in each of the received repo_info_dicts include all
    required repositories, so only one pass through this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
        # shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append(repo_info_dict)
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, \
                    repository_clone_url, \
                    changeset_revision, \
                    ctx_rev, \
                    repository_owner, \
                    repository_dependencies, \
                    tool_dependencies = \
                    repository_util.get_repo_info_tuple_contents(repo_info_tup)
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in ['root_key', 'description']:
                            continue
                        repository_components_tuple = container_util.get_components_from_key(key)
                        components_list = repository_util.extract_components_from_tuple(repository_components_tuple)
                        # Skip listing a repository dependency if it is required only to compile a tool dependency
                        # defined for the dependent repository since in this case, the repository dependency is really
                        # a dependency of the dependent repository's contained tool dependency, and only if that
                        # tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release.
                        only_if_compiling_contained_td = 'False'
                        if len(components_list) == 4:
                            only_if_compiling_contained_td = 'False'
                        elif len(components_list) == 5:
                            only_if_compiling_contained_td = 'False'
                        if not asbool(only_if_compiling_contained_td):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append(components_list)
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[5]
                            except IndexError:
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency
                            # defined for the dependent repository (see above comment).
                            if not asbool(only_if_compiling_contained_td):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append(components_list)
                else:
                    # We have a single repository with no dependencies.
                    components_list = [tool_shed_url, repository_name, repository_owner, changeset_revision]
                    required_repository_tups.append(components_list)
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [str(item) for item in required_repository_tup]
                encoded_required_repository_tups.append(encoding_util.encoding_sep.join(required_repository_tup))
            encoded_required_repository_str = encoding_util.encoding_sep2.join(encoded_required_repository_tups)
            encoded_required_repository_str = encoding_util.tool_shed_encode(encoded_required_repository_str)
            if repository_util.is_tool_shed_client(self.app):
                # Handle secure / insecure Tool Shed URL protocol changes and port changes.
                tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(self.app, tool_shed_url)
            pathspec = ['repository', 'get_required_repo_info_dict']
            url = build_url(tool_shed_url, pathspec=pathspec)
            # Fix for handling 307 redirect not being handled nicely by urlopen() when the Request() has data provided
            try:
                url = _urlopen(url).geturl()
            except HTTPError as e:
                if e.code == 502:
                    pass
                else:
                    raise
            payload = urlencode(dict(encoded_str=encoded_required_repository_str))
            response = _urlopen(url, payload).read()
            if response:
                try:
                    required_repo_info_dict = json.loads(unicodify(response))
                except Exception as e:
                    log.exception(e)
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict['repo_info_dicts']
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode(encoded_dict_str)
                            required_repo_info_dicts.append(decoded_dict)
                    else:
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[k] = v
                        else:
                            if v and not all_required_repo_info_dict[k]:
                                all_required_repo_info_dict[k] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
                        # of dictionaries, each of which has a single entry.  We'll check keys here rather than
                        # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
                        # lists of discovered repository dependencies, but these lists will be empty in the
                        # required_repo_info_dict since dependency discovery has not yet been performed for these
                        # dictionaries.
                        required_repo_info_dict_key = next(iter(required_repo_info_dict))
                        all_repo_info_dicts_keys = [next(iter(d)) for d in all_repo_info_dicts]
                        if required_repo_info_dict_key not in all_repo_info_dicts_keys:
                            all_repo_info_dicts.append(required_repo_info_dict)
                        else:
                            # required_repo_info_dict_key corresponds to the repo name.
                            # A single install transaction might require the installation of 2 or more repos
                            # with the same repo name but different owners or versions.
                            # Therefore, if required_repo_info_dict_key is already in all_repo_info_dicts,
                            # check that the tool id is already present. If it is not, we are dealing with the same repo name,
                            # but a different owner/changeset revision or version and we add the repo to the list of repos to be installed.
                            tool_id = required_repo_info_dict[required_repo_info_dict_key][1]
                            is_present = False
                            for repo_info_dict in all_repo_info_dicts:
                                for k, v in repo_info_dict.items():
                                    if required_repo_info_dict_key == k:
                                        if tool_id == v[1]:
                                            is_present = True
                                            break
                            if not is_present:
                                all_repo_info_dicts.append(required_repo_info_dict)
    all_required_repo_info_dict['all_repo_info_dicts'] = all_repo_info_dicts
    return all_required_repo_info_dict
def get_required_repo_info_dicts( trans, tool_shed_url, repo_info_dicts ):
    """
    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
    them to the list.  All repository_dependencies entries in each of the received repo_info_dicts include all
    required repositories, so only one pass through this method is required to retrieve all repository dependencies.
    """
    all_required_repo_info_dict = {}
    all_repo_info_dicts = []
    if repo_info_dicts:
        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
        required_repository_tups = []
        for repo_info_dict in repo_info_dicts:
            if repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( repo_info_dict )
            for repository_name, repo_info_tup in repo_info_dict.items():
                description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
                    suc.get_repo_info_tuple_contents( repo_info_tup )
                if repository_dependencies:
                    for key, val in repository_dependencies.items():
                        if key in [ 'root_key', 'description' ]:
                            continue
                        repository_components_tuple = container_util.get_components_from_key( key )
                        components_list = suc.extract_components_from_tuple( repository_components_tuple )
                        # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository since
                        # in this case, the repository dependency is really a dependency of the dependent repository's contained tool dependency, and only if
                        # that tool dependency requires compilation.
                        # For backward compatibility to the 12/20/12 Galaxy release.
                        prior_installation_required = 'False'
                        only_if_compiling_contained_td = 'False'
                        if len( components_list ) == 4:
                            prior_installation_required = 'False'
                            only_if_compiling_contained_td = 'False'
                        elif len( components_list ) == 5:
                            prior_installation_required = components_list[ 4 ]
                            only_if_compiling_contained_td = 'False'
                        if not util.asbool( only_if_compiling_contained_td ):
                            if components_list not in required_repository_tups:
                                required_repository_tups.append( components_list )
                        for components_list in val:
                            try:
                                only_if_compiling_contained_td = components_list[ 5 ]
                            except:
                                only_if_compiling_contained_td = 'False'
                            # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository
                            # (see above comment).
                            if not util.asbool( only_if_compiling_contained_td ):
                                if components_list not in required_repository_tups:
                                    required_repository_tups.append( components_list )
                else:
                    # We have a single repository with no dependencies.
                    components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
                    required_repository_tups.append( components_list )
        if required_repository_tups:
            # The value of required_repository_tups is a list of tuples, so we need to encode it.
            encoded_required_repository_tups = []
            for required_repository_tup in required_repository_tups:
                # Convert every item in required_repository_tup to a string.
                required_repository_tup = [ str( item ) for item in required_repository_tup ]
                encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
            encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
            encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
            url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
            request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
            response = urllib2.urlopen( request ).read()
            if response:
                try:
                    required_repo_info_dict = json.from_json_string( response )
                except Exception, e:
                    log.exception( e )
                    return all_repo_info_dicts
                required_repo_info_dicts = []
                for k, v in required_repo_info_dict.items():
                    if k == 'repo_info_dicts':
                        encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
                        for encoded_dict_str in encoded_dict_strings:
                            decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
                            required_repo_info_dicts.append( decoded_dict )
                    else:
                        if k not in all_required_repo_info_dict:
                            all_required_repo_info_dict[ k ] = v
                        else:
                            if v and not all_required_repo_info_dict[ k ]:
                                all_required_repo_info_dict[ k ] = v
                if required_repo_info_dicts:
                    for required_repo_info_dict in required_repo_info_dicts:
                        if required_repo_info_dict not in all_repo_info_dicts:
                            all_repo_info_dicts.append( required_repo_info_dict )
    all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
    return all_required_repo_info_dict