Code example #1 (score: 0)
def get_components_from_repo_info_dict( trans, repo_info_dict ):
    """
    Return the repository and the associated latest installable changeset_revision (including updates) for the repository defined by the received
    repo_info_dict.
    """
    for name, info_tuple in repo_info_dict.items():
        # The received repo_info_dict is expected to contain a single entry.
        ( description, clone_url, changeset_revision, ctx_rev, owner,
          repository_dependencies, tool_dependencies ) = \
            suc.get_repo_info_tuple_contents( info_tuple )
        repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
        metadata_record = suc.get_current_repository_metadata_for_changeset_revision( trans, repository, changeset_revision )
        if not metadata_record:
            continue
        return repository, metadata_record.changeset_revision
    # No entry produced a metadata record for its changeset revision.
    return None, None
Code example #2 (score: 0)
File: __init__.py  Project: remimarenco/galaxy
 def get_repository_dependencies( self, app, changeset, toolshed_url ):
     """
     Return the repository dependencies for the received changeset, or None when
     none can be determined.
     """
     # We aren't concerned with repositories of type tool_dependency_definition here if a
     # repository_metadata record is not returned because repositories of this type will never
     # have repository dependencies. However, if a readme file is uploaded, or some other change
     # is made that does not create a new downloadable changeset revision but updates the existing
     # one, we still want to be able to get repository dependencies.
     metadata_record = suc.get_current_repository_metadata_for_changeset_revision( app,
                                                                                   self,
                                                                                   changeset )
     if not metadata_record:
         return None
     if not metadata_record.metadata:
         return None
     builder = relation_builder.RelationBuilder( app, self, metadata_record, toolshed_url )
     dependencies = builder.get_repository_dependencies_for_changeset_revision()
     # Normalize an empty result to None, matching the caller contract.
     return dependencies or None
Code example #3 (score: 0)
File: __init__.py  Project: avengerpb/Galaxyondocker
 def get_repository_dependencies(self, app, changeset, toolshed_url):
     """
     Return repository dependencies for the received changeset, or None.
     """
     # We aren't concerned with repositories of type tool_dependency_definition here if a
     # repository_metadata record is not returned because repositories of this type will never
     # have repository dependencies. However, if a readme file is uploaded, or some other change
     # is made that does not create a new downloadable changeset revision but updates the existing
     # one, we still want to be able to get repository dependencies.
     record = suc.get_current_repository_metadata_for_changeset_revision(
         app, self, changeset)
     if record and record.metadata:
         builder = relation_builder.RelationBuilder(app, self, record,
                                                    toolshed_url)
         deps = builder.get_repository_dependencies_for_changeset_revision()
         if deps:
             return deps
     return None
Code example #4 (score: 0)
def export_repository( trans, tool_shed_url, repository_id, repository_name, changeset_revision, file_type, export_repository_dependencies, api=False ):
    repository = suc.get_repository_in_tool_shed( trans, repository_id )
    repositories_archive_filename = generate_repository_archive_filename( tool_shed_url,
                                                                          str( repository.name ),
                                                                          str( repository.user.username ),
                                                                          changeset_revision,
                                                                          file_type,
                                                                          export_repository_dependencies=export_repository_dependencies,
                                                                          use_tmp_archive_dir=True )
    if export_repository_dependencies:
        repo_info_dicts = get_repo_info_dicts( trans, tool_shed_url, repository_id, changeset_revision )
        repository_ids = get_repository_ids( trans, repo_info_dicts )
        ordered_repository_ids, ordered_repositories, ordered_changeset_revisions = order_components_for_import( trans, repository_ids, repo_info_dicts )
    else:
        ordered_repository_ids = []
        ordered_repositories = []
        ordered_changeset_revisions = []
        if repository:
            repository_metadata = suc.get_current_repository_metadata_for_changeset_revision( trans, repository, changeset_revision )
            if repository_metadata:
                ordered_repository_ids = [ repository_id ]
                ordered_repositories = [ repository ]
                ordered_changeset_revisions = [ repository_metadata.changeset_revision ]
    repositories_archive = None
    error_messages = ''
    lock = threading.Lock()
    lock.acquire( True )
    try:
        repositories_archive = tarfile.open( repositories_archive_filename, "w:%s" % file_type )
        exported_repository_registry = ExportedRepositoryRegistry()
        for index, repository_id in enumerate( ordered_repository_ids ):
            work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-export-er" )
            ordered_repository = ordered_repositories[ index ]
            ordered_changeset_revision = ordered_changeset_revisions[ index ]
            repository_archive, error_message = generate_repository_archive( trans,
                                                                             work_dir,
                                                                             tool_shed_url,
                                                                             ordered_repository,
                                                                             ordered_changeset_revision,
                                                                             file_type )
            if error_message:
                error_messages = '%s  %s' % ( error_messages, error_message )
            else:
                archive_name = str( os.path.basename( repository_archive.name ) )
                repositories_archive.add( repository_archive.name, arcname=archive_name )
                attributes, sub_elements = get_repository_attributes_and_sub_elements( ordered_repository, archive_name )
                elem = xml_util.create_element( 'repository', attributes=attributes, sub_elements=sub_elements )
                exported_repository_registry.exported_repository_elems.append( elem )
            suc.remove_dir( work_dir )
        # Keep information about the export in a file name export_info.xml in the archive.
        sub_elements = generate_export_elem( tool_shed_url, repository, changeset_revision, export_repository_dependencies, api )
        export_elem = xml_util.create_element( 'export_info', attributes=None, sub_elements=sub_elements )
        tmp_export_info = xml_util.create_and_write_tmp_file( export_elem, use_indent=True )
        repositories_archive.add( tmp_export_info, arcname='export_info.xml' )
        # Write the manifest, which must preserve the order in which the repositories should be imported.
        exported_repository_root = xml_util.create_element( 'repositories' )
        for exported_repository_elem in exported_repository_registry.exported_repository_elems:
            exported_repository_root.append( exported_repository_elem )
        tmp_manifest = xml_util.create_and_write_tmp_file( exported_repository_root, use_indent=True )
        repositories_archive.add( tmp_manifest, arcname='manifest.xml' )
    except Exception, e:
        log.exception( str( e ) )