Example #1
def load_editable_archive_info(archive_path, repository_spec):
    file_walker = collection_members.FileWalker(collection_path=archive_path)
    col_members = collection_members.CollectionMembers(walker=file_walker)
    file_members = list(col_members.run())

    archive_info = build_archive_info(archive_path, file_members)

    return archive_info, None
Example #2
def test_file_walker_cwd():
    cwd = os.getcwd()
    git_dir = os.path.join(cwd, '.git/')

    file_walker = collection_members.FileWalker(
        cwd,
        exclude_patterns=collection_members.DEFAULT_IGNORE_PATTERNS +
        ['*.json'])
    file_names = file_walker.walk()
    for file_name in file_names:
        assert not file_name.startswith(git_dir), \
            '%s found under %s but .git/ should be excluded by default' % (file_name, git_dir)
        assert '__pycache__' not in file_name, \
            '__pycache__ found in path %s, but __pycache__ dirs should be ignored by default' % file_name
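The test above depends on FileWalker pruning .git/ and __pycache__ by default and honoring the extra exclude_patterns passed in. FileWalker's implementation is not shown on this page; the sketch below is a minimal standalone approximation of that walk-and-exclude pattern using only the standard library, and it assumes fnmatch-style patterns (the names DEFAULT_PATTERNS and walk_with_excludes are illustrative, not mazer's).

import fnmatch
import os

# Illustrative stand-in for DEFAULT_IGNORE_PATTERNS; the real list may differ.
DEFAULT_PATTERNS = ['.git', '*/.git/*', '__pycache__', '*/__pycache__/*', '*.pyc']

def walk_with_excludes(root, exclude_patterns=None):
    """Yield file paths under root, skipping anything matching an exclude pattern."""
    patterns = DEFAULT_PATTERNS + (exclude_patterns or [])
    for dirpath, dirnames, filenames in os.walk(root):
        # prune excluded directories in place so os.walk never descends into them
        dirnames[:] = [d for d in dirnames
                       if not any(fnmatch.fnmatch(d, p) for p in patterns)]
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            if any(fnmatch.fnmatch(filename, p) or fnmatch.fnmatch(full_path, p)
                   for p in patterns):
                continue
            yield full_path

# Roughly what test_file_walker_cwd exercises:
#   for path in walk_with_excludes(os.getcwd(), ['*.json']): ...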
Example #3
def test_collection_members_post_filter():

    file_walker = collection_members.FileWalker(hello_path,
                                                exclude_patterns=['*.json'])
    coll_members = collection_members.CollectionMembers(walker=file_walker)

    members = coll_members.run()

    # just to trigger the iteration
    members_list = list(members)
    assert isinstance(members_list, list)

    log.debug('members_list: %s', pf(members_list))

    some_json_path = os.path.join(hello_path, 'some_json_file.json')
    assert some_json_path not in members_list
Example #4
def test_collection_members_init():
    # gather files from mazer src dir, not really a role
    collection_path = os.getcwd()
    file_walker = collection_members.FileWalker(collection_path)
    coll_members = collection_members.CollectionMembers(walker=file_walker)
    log.debug('coll_members: %s', coll_members)

    members = coll_members.run()

    # NOTE: there is no reason .run() couldn't return an iterable/generator
    #       that isn't actually a (sub)instance of collections.Iterable; this
    #       assert is here to catch the case where the yield is forgotten
    assert isinstance(members, collections.Iterable)

    members_list = list(members)
    assert isinstance(members_list, list)
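One caveat with the assert above: collections.Iterable was an alias for collections.abc.Iterable that was removed in Python 3.10, so on modern interpreters the check needs the abc import. A version-tolerant variant of the same check (my adjustment, not part of the original test) might look like:

try:
    from collections.abc import Iterable  # Python 3.3+
except ImportError:  # Python 2 fallback
    from collections import Iterable

def materialize(members):
    # Same intent as the test: catch a run() that returns None instead of yielding.
    assert isinstance(members, Iterable)
    return list(members)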
Example #5
def test_file_walker_rel_path():

    file_walker = collection_members.FileWalker(
        hello_path,
        exclude_patterns=collection_members.DEFAULT_IGNORE_PATTERNS +
        ['*.json'])
    rel_paths = file_walker.relative_walk()

    paths = []
    for rel_path in rel_paths:

        assert rel_path != 'some_json_file.json', \
            'Found some_json_file.json in relative_walk() results but it should be excluded'

        assert not rel_path.startswith(
            hello_path), 'rel_path (%s) starts with %s' % (rel_path,
                                                           hello_path)

        assert not rel_path.startswith(
            'releases'
        ), 'releases subdir should be ignored but was found in %s' % rel_path
        paths.append(rel_path)

    log.debug(pf(sorted(paths)))
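relative_walk() itself is not shown on this page; assuming it simply rewrites each walked path relative to the collection root, a minimal sketch would be (the helper name and walk_func parameter are hypothetical):

import os

def relative_walk(root, walk_func):
    """Yield paths from walk_func(root) rewritten relative to root."""
    for full_path in walk_func(root):
        yield os.path.relpath(full_path, root)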
Example #6
    def run(self, display_callback):

        log.debug('INFO self.collection_info: %s', self.collection_info)

        # ie, 'namespace-name-1.2.3.tar.gz', not the full path
        archive_filename_basename = \
            ARCHIVE_FILENAME_TEMPLATE.format(namespace=self.collection_info.namespace,
                                             name=self.collection_info.name,
                                             version=self.collection_info.version,
                                             extension=ARCHIVE_FILENAME_EXTENSION)

        archive_path = os.path.join(self.build_context.output_path,
                                    archive_filename_basename)
        log.debug('Building archive into archive_path: %s', archive_path)

        # The name of the top-level dir in the tar file; in this case there isn't one.
        archive_top_dir = ""
        log.debug('archive_top_dir: %s', archive_top_dir)

        # 'w:gz' opens the archive for gzip-compressed writing
        tar_file = tarfile.open(archive_path, mode='w:gz')

        # Find collection files, build a file manifest, serialize to json and add to the tarfile
        file_walker = collection_members.FileWalker(
            collection_path=self.build_context.collection_path)
        col_members = collection_members.CollectionMembers(walker=file_walker)

        log.debug('col_members: %s', col_members)

        col_file_names = col_members.run()
        col_files = collection_artifact_file_manifest.gen_file_manifest_items(
            col_file_names, self.build_context.collection_path)

        file_manifest = CollectionArtifactFileManifest(files=col_files)

        log.debug('file_manifest: %s', file_manifest)

        for col_member_file in file_manifest.files:
            top_dir = False
            # arcname will be a relative path not an abspath at this point
            rel_path = col_member_file.name or col_member_file.src_name
            if rel_path == '.':
                rel_path = ''
            archive_member_path = rel_path

            log.debug('adding %s to %s (from %s)', archive_member_path,
                      archive_path, col_member_file.name)

            log.debug('name=%s, arcname=%s, top_dir=%s', col_member_file.name,
                      archive_member_path, top_dir)

            # if top_dir:
            #     tar_file.add(col_member_file.name, arcname=archive_top_dir, recursive=False)
            # else:
            #     tar_file.add(col_member_file.name, arcname=archive_member_path, recursive=False)
            tar_file.add(col_member_file.src_name,
                         arcname=archive_member_path,
                         recursive=False)

        # Generate FILES.json contents
        # TODO/FIXME: find and use some streamable file format for the filelist (csv?)
        file_manifest_buf = json.dumps(attr.asdict(
            file_manifest, filter=filter_artifact_file_name),
                                       indent=4)

        log.debug('file_manifest_buf: %s', file_manifest_buf)

        b_file_manifest_buf = to_bytes(file_manifest_buf)
        b_file_manifest_buf_bytesio = six.BytesIO(b_file_manifest_buf)

        archive_manifest_path = collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME
        log.debug('archive_manifest_path: %s', archive_manifest_path)

        archive_file_manifest_path = collection_artifact_file_manifest.COLLECTION_FILE_MANIFEST_FILENAME
        log.debug('archive_file_manifest_path: %s', archive_file_manifest_path)

        file_manifest_tar_info = tar_file.gettarinfo(
            os.path.join(self.build_context.collection_path,
                         COLLECTION_INFO_FILENAME))

        file_manifest_tar_info.name = archive_file_manifest_path
        file_manifest_tar_info.size = len(b_file_manifest_buf)

        # Add FILES.json contents to tarball
        tar_file.addfile(tarinfo=file_manifest_tar_info,
                         fileobj=b_file_manifest_buf_bytesio)

        # addfile reads to end of bytesio, seek back to begin
        b_file_manifest_buf_bytesio.seek(0)
        file_manifest_file_chksum = chksums.sha256sum_from_fo(
            b_file_manifest_buf_bytesio)

        log.debug('file_manifest_file_chksum: %s', file_manifest_file_chksum)

        # file_manifest_file_name_in_archive = os.path.relpath(archive_file_manifest_path, self.build_context.collection_path)

        file_manifest_file_item = CollectionArtifactFile(
            src_name=collection_artifact_file_manifest.COLLECTION_FILE_MANIFEST_FILENAME,
            # The path where the file will live inside the archive
            name=collection_artifact_file_manifest.COLLECTION_FILE_MANIFEST_FILENAME,
            ftype='file',
            chksum_type='sha256',
            chksum_sha256=file_manifest_file_chksum)

        # Generate MANIFEST.json contents
        manifest = CollectionArtifactManifest(
            collection_info=self.collection_info,
            file_manifest_file=file_manifest_file_item)

        log.debug('manifest: %s', manifest)

        manifest_buf = json.dumps(
            attr.asdict(manifest, filter=filter_artifact_file_name),
            # sort_keys=True,
            indent=4)
        log.debug('manifest buf: %s', manifest_buf)

        # add MANIFEST.json to the archive
        b_manifest_buf = to_bytes(manifest_buf)
        b_manifest_buf_bytesio = six.BytesIO(b_manifest_buf)

        archive_manifest_path = os.path.join(
            archive_top_dir,
            collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME)
        log.debug('archive_manifest_path: %s', archive_manifest_path)

        # copy the uid/gid/perms from galaxy.yml to use on the manifest. Separate
        # TarInfo instances are needed for the manifest and the file manifest.
        # TODO: decide what the generated files' owner/group/perms should be (root.root 644?)
        manifest_tar_info = tar_file.gettarinfo(
            os.path.join(self.build_context.collection_path,
                         COLLECTION_INFO_FILENAME))

        manifest_tar_info.name = archive_manifest_path
        manifest_tar_info.size = len(b_manifest_buf)

        # TODO: set mtime equal to the 'build time' / build_info when we start creating that.

        tar_file.addfile(tarinfo=manifest_tar_info,
                         fileobj=b_manifest_buf_bytesio)

        log.debug('populated tarfile %s: %s', archive_path,
                  pprint.pformat(tar_file.getmembers()))

        tar_file.close()

        # Creating the release artifact could in theory work much the same as
        # serializing an object (that is effectively what it is).

        messages = [
            'Building collection: %s' % self.build_context.collection_path,
            'Created artifact: %s' % archive_path
        ]

        result = BuildResult(
            status=BuildStatuses.success,
            messages=messages,
            # errors=[],
            errors=col_members.walker.file_errors,
            manifest=manifest,
            file_manifest=file_manifest,
            artifact_file_path=archive_path)

        for message in result.messages:
            log.info(message)
            display_callback(message)

        for error in result.errors:
            log.error(error)
            display_callback(error, level='warning')

        return result
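The central trick in this run() is adding generated JSON (FILES.json and then MANIFEST.json) to the tarball without touching disk: build a TarInfo, set its name and size, hand addfile() a BytesIO, then rewind the same buffer to checksum exactly the bytes that were archived. A reduced, self-contained sketch of that pattern follows; the file names, permissions, and helper name are illustrative rather than mazer's actual values.

import hashlib
import io
import json
import tarfile

def add_json_member(tar_file, member_name, data):
    """Serialize data to JSON, add it to an open tarfile, and return its sha256."""
    b_buf = json.dumps(data, indent=4).encode('utf-8')
    fileobj = io.BytesIO(b_buf)

    tar_info = tarfile.TarInfo(name=member_name)
    tar_info.size = len(b_buf)   # addfile() reads exactly .size bytes from fileobj
    tar_info.mode = 0o644        # illustrative perms, not necessarily mazer's choice

    tar_file.addfile(tarinfo=tar_info, fileobj=fileobj)

    # addfile() consumed the buffer; seek back before hashing the same content
    fileobj.seek(0)
    return hashlib.sha256(fileobj.read()).hexdigest()

with tarfile.open('example-0.0.1.tar.gz', mode='w:gz') as tar_file:
    files_chksum = add_json_member(tar_file, 'FILES.json', {'files': []})
    add_json_member(tar_file, 'MANIFEST.json',
                    {'file_manifest_file': {'name': 'FILES.json',
                                            'chksum_sha256': files_chksum}})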
Example #7
    def run(self, display_callback):

        file_walker = collection_members.FileWalker(collection_path=self.build_context.collection_path)
        col_members = collection_members.CollectionMembers(walker=file_walker)

        log.debug('col_members: %s', col_members)
        log.debug('INFO self.collection_info: %s', self.collection_info)

        col_file_names = col_members.run()
        col_files = collection_artifact_manifest.gen_manifest_artifact_files(col_file_names,
                                                                             self.build_context.collection_path)

        manifest = CollectionArtifactManifest(collection_info=self.collection_info,
                                              files=col_files)

        log.debug('manifest: %s', manifest)

        manifest_buf = json.dumps(attr.asdict(manifest,
                                              filter=filter_artifact_file_name),
                                  # sort_keys=True,
                                  indent=4)
        # manifest_buf = yaml.safe_dump(attr.asdict(manifest),
        #                              default_flow_style=False)
        log.debug('manifest buf: %s', manifest_buf)

        # ie, 'namespace-name-1.2.3.tar.gz', not the full path
        archive_filename_basename = \
            ARCHIVE_FILENAME_TEMPLATE.format(namespace=self.collection_info.namespace,
                                             name=self.collection_info.name,
                                             version=self.collection_info.version,
                                             extension=ARCHIVE_FILENAME_EXTENSION)

        archive_path = os.path.join(self.build_context.output_path,
                                    archive_filename_basename)
        log.debug('Building archive into archive_path: %s', archive_path)

        # The name of the top-level dir in the tar file. It is
        # in the format '{collection_name}-{version}'.
        # NOTE: This doesn't follow the convention of 'foo-bar-1.2.3.tar.gz -> foo-bar-1.2.3/*'
        archive_top_dir = ARCHIVE_TOPDIR_TEMPLATE.format(collection_info=self.collection_info)

        log.debug('archive_top_dir: %s', archive_top_dir)

        # 'w:gz' opens the archive for gzip-compressed writing
        tar_file = tarfile.open(archive_path, mode='w:gz')

        # tar_file.add(archive_top_dir, arcname=archive_top_dir, recursive=False)

        for col_member_file in manifest.files:
            top_dir = False
            # arcname will be a relative path not an abspath at this point
            rel_path = col_member_file.name or col_member_file.src_name
            if rel_path == '.':
                rel_path = ''
            archive_member_path = os.path.join(archive_top_dir, rel_path)

            log.debug('adding %s to %s (from %s)', archive_member_path,
                      archive_path, col_member_file.name)

            log.debug('name=%s, arcname=%s, top_dir=%s', col_member_file.name, archive_member_path, top_dir)

            # if top_dir:
            #     tar_file.add(col_member_file.name, arcname=archive_top_dir, recursive=False)
            # else:
            #     tar_file.add(col_member_file.name, arcname=archive_member_path, recursive=False)
            tar_file.add(col_member_file.src_name, arcname=archive_member_path, recursive=False)

        # add MANIFEST.json to the archive

        b_manifest_buf = to_bytes(manifest_buf)
        b_manifest_buf_bytesio = six.BytesIO(b_manifest_buf)

        archive_manifest_path = os.path.join(archive_top_dir,
                                             collection_artifact_manifest.COLLECTION_MANIFEST_FILENAME)
        log.debug('archive_manifest_path: %s', archive_manifest_path)

        # copy the uid/gid/perms from galaxy.yml to use on the manifest
        # TODO: decide what the generated files' owner/group/perms should be (root.root 644?)
        manifest_tar_info = tar_file.gettarinfo(os.path.join(self.build_context.collection_path, COLLECTION_INFO_FILENAME))

        manifest_tar_info.name = archive_manifest_path
        manifest_tar_info.size = len(b_manifest_buf)
        # TODO: set mtime equal to the 'build time' / build_info when we start creating that.

        tar_file.addfile(tarinfo=manifest_tar_info,
                         fileobj=b_manifest_buf_bytesio)

        log.debug('populated tarfile %s: %s', archive_path,
                  pprint.pformat(tar_file.getmembers()))

        tar_file.close()

        # Creating the release artifact could in theory work much the same as
        # serializing an object (that is effectively what it is).

        messages = ['Building collection: %s' % self.build_context.collection_path,
                    'Created artifact: %s' % archive_path]

        result = BuildResult(status=BuildStatuses.success,
                             messages=messages,
                             # errors=[],
                             errors=col_members.walker.file_errors,
                             manifest=manifest,
                             artifact_file_path=archive_path)

        for message in result.messages:
            log.info(message)
            display_callback(message)

        for error in result.errors:
            log.error(error)
            display_callback(error, level='warning')

        return result