Example #1
0
def test_build_with_symlink_inside_collection(collection_input):
    """Build a collection containing symlinks that point back inside the
    collection tree, and verify the artifact materialises them as regular
    directory/file members with the expected contents."""
    source_dir, artifact_dir = collection_input

    os.makedirs(os.path.join(source_dir, 'playbooks', 'roles'))
    role_symlink = os.path.join(source_dir, 'playbooks', 'roles', 'linked')
    readme_symlink = os.path.join(source_dir, 'docs', 'README.md')

    # Create the real role the symlink will point at.
    role_target = os.path.join(source_dir, 'roles', 'linked')
    role_tasks_dir = os.path.join(role_target, 'tasks')
    os.makedirs(role_tasks_dir)
    with open(os.path.join(role_tasks_dir, 'main.yml'), 'w+') as fd:
        fd.write("---\n- hosts: localhost\n  tasks:\n  - ping:")
        fd.flush()

    os.symlink(role_target, role_symlink)
    os.symlink(os.path.join(source_dir, 'README.md'), readme_symlink)

    collection.build_collection(source_dir, artifact_dir, False)

    artifact_path = os.path.join(
        artifact_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    assert tarfile.is_tarfile(artifact_path)

    with tarfile.open(artifact_path, mode='r') as tar:
        all_members = tar.getmembers()

        task_members = [
            m for m in all_members
            if m.path.startswith('playbooks/roles/linked/tasks')
        ]
        assert len(task_members) == 2

        dir_member, file_member = task_members
        assert dir_member.name == 'playbooks/roles/linked/tasks'
        assert dir_member.isdir()
        assert file_member.name == 'playbooks/roles/linked/tasks/main.yml'
        assert file_member.isreg()

        task_fd = tar.extractfile(file_member.name)
        try:
            task_digest = secure_hash_s(task_fd.read())
        finally:
            task_fd.close()
        assert task_digest == 'f4dcc52576b6c2cd8ac2832c52493881c4e54226'

        readme_members = [m for m in all_members if m.path == 'docs/README.md']
        assert len(readme_members) == 1
        assert readme_members[0].isreg()

        readme_fd = tar.extractfile(readme_members[0].name)
        try:
            readme_digest = secure_hash_s(readme_fd.read())
        finally:
            readme_fd.close()
        assert readme_digest == '63444bfc766154e1bc7557ef6280de20d03fcd81'
Example #2
0
def create_changeset(module, stack_params, cfn):
    """Create a CloudFormation change set for an existing stack.

    :param module: AnsibleModule instance, used for fail_json on errors.
    :param stack_params: Dict of parameters passed to create_change_set;
        must contain 'StackName' and either 'TemplateBody' or 'TemplateURL'.
        'ChangeSetName' is generated here when not supplied.
    :param cfn: boto3 CloudFormation client.
    :return: Result dict describing the outcome (changed flag, output,
        and any warnings).
    """
    if 'TemplateBody' not in stack_params and 'TemplateURL' not in stack_params:
        module.fail_json(msg="Either 'template' or 'template_url' is required.")

    try:
        if 'ChangeSetName' not in stack_params:
            # Derive a deterministic name from the parameters so retries with
            # identical input map to the same change set.
            stack_params['ChangeSetName'] = (
                'Ansible-' + stack_params['StackName'] + '-' +
                secure_hash_s(json.dumps(stack_params, sort_keys=True)))
        # Bug fix: previously changeset_name was only bound when the name was
        # auto-generated, raising UnboundLocalError when a caller supplied
        # 'ChangeSetName' explicitly.
        changeset_name = stack_params['ChangeSetName']

        # Determine if this changeset already exists
        pending_changesets = list_changesets(cfn, stack_params['StackName'])
        if changeset_name in pending_changesets:
            warning = 'WARNING: '+str(len(pending_changesets))+' pending changeset(s) exist(s) for this stack!'
            result = dict(changed=False, output='ChangeSet ' + changeset_name + ' already exists.', warnings=[warning])
        else:
            cs = cfn.create_change_set(**stack_params)
            result = stack_operation(cfn, stack_params['StackName'], 'UPDATE')
            result['warnings'] = [('Created changeset named ' + changeset_name + ' for stack ' + stack_params['StackName']),
                ('You can execute it using: aws cloudformation execute-change-set --change-set-name ' + cs['Id']),
                ('NOTE that dependencies on this stack might fail due to pending changes!')]
    except Exception as err:
        error_msg = boto_exception(err)
        # CloudFormation reports a no-op update as an error; treat it as
        # an unchanged success instead of failing the task.
        if 'No updates are to be performed.' in error_msg:
            result = dict(changed=False, output='Stack is already up-to-date.')
        else:
            module.fail_json(msg=error_msg)

    if not result:
        module.fail_json(msg="empty result")
    return result
Example #3
0
    def publish_collection(self, collection_path):
        """
        Publishes a collection to a Galaxy server and returns the import task URI.

        :param collection_path: The path to the collection tarball to publish.
        :return: The import task URI that contains the import results.
        """
        display.display("Publishing collection artifact '%s' to %s %s" %
                        (collection_path, self.name, self.api_server))

        b_path = to_bytes(collection_path, errors='surrogate_or_strict')
        if not os.path.exists(b_path):
            raise AnsibleError(
                "The collection path specified '%s' does not exist." %
                to_native(collection_path))
        if not tarfile.is_tarfile(b_path):
            raise AnsibleError(
                "The collection path specified '%s' is not a tarball, use 'ansible-galaxy collection "
                "build' to create a proper release artifact." %
                to_native(collection_path))

        # Hash the artifact so the server can verify the upload's integrity.
        with open(b_path, 'rb') as tar_fd:
            artifact_sha256 = secure_hash_s(tar_fd.read(),
                                            hash_func=hashlib.sha256)

        content_type, b_form_data = prepare_multipart({
            'sha256': artifact_sha256,
            'file': {
                'filename': b_path,
                'mime_type': 'application/octet-stream',
            },
        })

        headers = {
            'Content-type': content_type,
            'Content-length': len(b_form_data),
        }

        # v3 servers expose the upload endpoint under 'artifacts'.
        if 'v3' in self.available_api_versions:
            url_parts = (self.available_api_versions['v3'],
                         'artifacts', 'collections')
        else:
            url_parts = (self.available_api_versions['v2'], 'collections')
        n_url = _urljoin(self.api_server, *url_parts) + '/'

        resp = self._call_galaxy(
            n_url,
            args=b_form_data,
            headers=headers,
            method='POST',
            auth_required=True,
            error_context_msg='Error when publishing collection to %s (%s)' %
            (self.name, self.api_server))

        return resp['task']
def _build_collection_tar(b_collection_path, b_tar_path, collection_manifest, file_manifest):
    """Assemble the collection release .tar.gz artifact.

    :param b_collection_path: Byte path to the collection source tree.
    :param b_tar_path: Byte path where the finished artifact is copied.
    :param collection_manifest: Dict serialized as MANIFEST.json; its
        file_manifest_file checksum entry is filled in here.
    :param file_manifest: Dict serialized as FILES.json, listing every file
        to pack (entries with name '.' are skipped).
    """
    files_manifest_json = to_bytes(json.dumps(file_manifest, indent=True), errors='surrogate_or_strict')
    # MANIFEST.json records the checksum of FILES.json, so FILES.json must be
    # serialized and hashed before MANIFEST.json is serialized.
    collection_manifest['file_manifest_file']['chksum_sha256'] = secure_hash_s(files_manifest_json, hash_func=sha256)
    collection_manifest_json = to_bytes(json.dumps(collection_manifest, indent=True), errors='surrogate_or_strict')

    # Build in a temp dir and copy at the end so a failed build never leaves a
    # partial artifact at the destination.
    with _tempdir() as b_temp_path:
        b_tar_filepath = os.path.join(b_temp_path, os.path.basename(b_tar_path))

        with tarfile.open(b_tar_filepath, mode='w:gz') as tar_file:
            # Add the MANIFEST.json and FILES.json file to the archive
            for name, b in [('MANIFEST.json', collection_manifest_json), ('FILES.json', files_manifest_json)]:
                b_io = BytesIO(b)
                tar_info = tarfile.TarInfo(name)
                tar_info.size = len(b)
                tar_info.mtime = time.time()
                tar_info.mode = 0o0644
                tar_file.addfile(tarinfo=tar_info, fileobj=b_io)

            for file_info in file_manifest['files']:
                if file_info['name'] == '.':
                    continue

                # arcname expects a native string, cannot be bytes
                filename = to_native(file_info['name'], errors='surrogate_or_strict')
                b_src_path = os.path.join(b_collection_path, to_bytes(filename, errors='surrogate_or_strict'))

                def reset_stat(tarinfo):
                    # Normalize permissions and drop ownership so the archive
                    # is reproducible regardless of the build host's users.
                    if tarinfo.type != tarfile.SYMTYPE:
                        existing_is_exec = tarinfo.mode & stat.S_IXUSR
                        tarinfo.mode = 0o0755 if existing_is_exec or tarinfo.isdir() else 0o0644
                    tarinfo.uid = tarinfo.gid = 0
                    tarinfo.uname = tarinfo.gname = ''

                    return tarinfo

                if os.path.islink(b_src_path):
                    # Symlinks that resolve inside the collection are stored
                    # as relative symlink members; links pointing outside fall
                    # through and are dereferenced into regular files below.
                    b_link_target = os.path.realpath(b_src_path)
                    if _is_child_path(b_link_target, b_collection_path):
                        b_rel_path = os.path.relpath(b_link_target, start=os.path.dirname(b_src_path))

                        tar_info = tarfile.TarInfo(filename)
                        tar_info.type = tarfile.SYMTYPE
                        tar_info.linkname = to_native(b_rel_path, errors='surrogate_or_strict')
                        tar_info = reset_stat(tar_info)
                        tar_file.addfile(tarinfo=tar_info)

                        continue

                # Dealing with a normal file, just add it by name.
                tar_file.add(os.path.realpath(b_src_path), arcname=filename, recursive=False, filter=reset_stat)

        shutil.copy(b_tar_filepath, b_tar_path)
        collection_name = "%s.%s" % (collection_manifest['collection_info']['namespace'],
                                     collection_manifest['collection_info']['name'])
        display.display('Created collection for %s at %s' % (collection_name, to_text(b_tar_path)))
Example #5
0
    def publish_collection(self, collection_path):
        """
        Publishes a collection to a Galaxy server and returns the import task URI.

        :param collection_path: The path to the collection tarball to publish.
        :return: The import task URI that contains the import results.
        """
        display.display("Publishing collection artifact '%s' to %s %s" % (collection_path, self.name, self.api_server))

        b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
        if not os.path.exists(b_collection_path):
            raise AnsibleError("The collection path specified '%s' does not exist." % to_native(collection_path))
        elif not tarfile.is_tarfile(b_collection_path):
            raise AnsibleError("The collection path specified '%s' is not a tarball, use 'ansible-galaxy collection "
                               "build' to create a proper release artifact." % to_native(collection_path))

        with open(b_collection_path, 'rb') as collection_tar:
            data = collection_tar.read()

        # Hand-build a multipart/form-data body: a 'sha256' field so the
        # server can verify integrity, followed by the tarball contents.
        boundary = '--------------------------%s' % uuid.uuid4().hex
        b_file_name = os.path.basename(b_collection_path)
        part_boundary = b"--" + to_bytes(boundary, errors='surrogate_or_strict')

        form = [
            part_boundary,
            b"Content-Disposition: form-data; name=\"sha256\"",
            to_bytes(secure_hash_s(data), errors='surrogate_or_strict'),
            part_boundary,
            b"Content-Disposition: file; name=\"file\"; filename=\"%s\"" % b_file_name,
            b"Content-Type: application/octet-stream",
            b"",
            data,
            b"%s--" % part_boundary,
        ]
        # Multipart parts are CRLF-delimited per RFC 2046.
        data = b"\r\n".join(form)

        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': len(data),
        }

        # v3 servers expose the upload endpoint under 'artifacts'.
        if 'v3' in self.available_api_versions:
            n_url = _urljoin(self.api_server, self.available_api_versions['v3'], 'artifacts', 'collections') + '/'
        else:
            n_url = _urljoin(self.api_server, self.available_api_versions['v2'], 'collections') + '/'

        resp = self._call_galaxy(n_url, args=data, headers=headers, method='POST', auth_required=True,
                                 error_context_msg='Error when publishing collection to %s (%s)'
                                                   % (self.name, self.api_server))
        return resp['task']
def main():
    """Ansible module entry point: render Markdown docs for a CloudFormation
    template with cfautodoc and write them to ``dest`` only when the content
    actually changed."""
    module = AnsibleModule(
        argument_spec=dict(
            dest=dict(default='README.md'),
            template=dict(required=True),
            name=dict(aliases=['template']),
            timestamp=dict(default='no', choices=['yes', 'no']),
            insert=dict(default=False)
        ),
        supports_check_mode=False
    )

    # Try to import the library
    try:
        from cfautodoc.autodoc import autodoc
    except ImportError:
        module.fail_json(msg='Install the cfautodoc tool using pip.')

    templ = module.params['name']
    dest = module.params['dest']
    insert = module.params['insert']

    # Massage the 'yes'/'no' choice into the boolean the library expects.
    timestamp = module.params['timestamp'] == 'yes'

    # Generate the Markdown
    towrite = autodoc(templ, dest, timestamp, insert)

    # Skip the write when the on-disk content already matches the render.
    if secure_hash(dest) == secure_hash_s(''.join(towrite)):
        msg = 'Documentation already up to date for {0}'.format(templ)
        module.exit_json(changed=False, written=msg)
    else:
        try:
            with open(dest, 'w') as destination:
                destination.writelines(towrite)
            msg = '{0} documentation updated to {1}'.format(templ, dest)
            module.exit_json(changed=True, written=msg)
        except Exception as exc:
            # Bug fix: fail_json requires a JSON-serializable string message;
            # passing the raw exception object breaks serialization.
            module.fail_json(msg=str(exc))
def main():
    """Ansible module entry point: render Markdown docs for a CloudFormation
    template with cfautodoc and write them to ``dest`` only when the content
    actually changed."""
    module = AnsibleModule(argument_spec=dict(dest=dict(default='README.md'),
                                              template=dict(required=True),
                                              name=dict(aliases=['template']),
                                              timestamp=dict(
                                                  default='no',
                                                  choices=['yes', 'no']),
                                              insert=dict(default=False)),
                           supports_check_mode=False)

    # Try to import the library
    try:
        from cfautodoc.autodoc import autodoc
    except ImportError:
        module.fail_json(msg='Install the cfautodoc tool using pip.')

    templ = module.params['name']
    dest = module.params['dest']
    insert = module.params['insert']

    # Massage the 'yes'/'no' choice into the boolean the library expects.
    timestamp = module.params['timestamp'] == 'yes'

    # Generate the Markdown
    towrite = autodoc(templ, dest, timestamp, insert)

    # Skip the write when the on-disk content already matches the render.
    if secure_hash(dest) == secure_hash_s(''.join(towrite)):
        msg = 'Documentation already up to date for {0}'.format(templ)
        module.exit_json(changed=False, written=msg)
    else:
        try:
            with open(dest, 'w') as destination:
                destination.writelines(towrite)
            msg = '{0} documentation updated to {1}'.format(templ, dest)
            module.exit_json(changed=True, written=msg)
        except Exception as exc:
            # Bug fix: fail_json requires a JSON-serializable string message;
            # passing the raw exception object breaks serialization.
            module.fail_json(msg=str(exc))
def test_build_with_symlink_inside_collection(collection_input):
    """Symlinks that resolve inside the collection tree are preserved as
    relative symlink members in the built artifact."""
    source_dir, artifact_dir = collection_input

    os.makedirs(os.path.join(source_dir, 'playbooks', 'roles'))
    role_symlink = os.path.join(source_dir, 'playbooks', 'roles', 'linked')
    readme_symlink = os.path.join(source_dir, 'docs', 'README.md')

    # Create the real role the symlink will point at.
    role_target = os.path.join(source_dir, 'roles', 'linked')
    role_tasks_dir = os.path.join(role_target, 'tasks')
    os.makedirs(role_tasks_dir)
    with open(os.path.join(role_tasks_dir, 'main.yml'), 'w+') as fd:
        fd.write("---\n- hosts: localhost\n  tasks:\n  - ping:")
        fd.flush()

    os.symlink(role_target, role_symlink)
    os.symlink(os.path.join(source_dir, 'README.md'), readme_symlink)

    collection.build_collection(
        to_text(source_dir, errors='surrogate_or_strict'),
        to_text(artifact_dir, errors='surrogate_or_strict'), False)

    artifact_path = os.path.join(
        artifact_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    assert tarfile.is_tarfile(artifact_path)

    with tarfile.open(artifact_path, mode='r') as tar:
        all_members = tar.getmembers()

        role_member = next(m for m in all_members
                           if m.path == 'playbooks/roles/linked')
        assert role_member.type == tarfile.SYMTYPE
        assert role_member.linkname == '../../roles/linked'

        readme_member = next(m for m in all_members
                             if m.path == 'docs/README.md')
        assert readme_member.type == tarfile.SYMTYPE
        assert readme_member.linkname == '../README.md'

        readme_fd = tar.extractfile(readme_member.name)
        try:
            readme_digest = secure_hash_s(readme_fd.read())
        finally:
            readme_fd.close()
        assert readme_digest == '63444bfc766154e1bc7557ef6280de20d03fcd81'
Example #9
0
def _get_mime_data(b_collection_path):
    """Build a multipart/form-data upload body for a collection tarball.

    :param b_collection_path: Byte path to the collection tarball.
    :return: Tuple of (body bytes, Content-Type header value). The body
        carries a 'sha256' field for integrity checking followed by the
        file contents.
    """
    with open(b_collection_path, 'rb') as tar_fd:
        b_artifact = tar_fd.read()

    boundary = '--------------------------%s' % uuid.uuid4().hex
    b_boundary = b"--" + to_bytes(boundary, errors='surrogate_or_strict')
    b_file_name = os.path.basename(b_collection_path)

    # Multipart parts are CRLF-delimited; the final boundary gets a
    # trailing '--' terminator.
    b_parts = [
        b_boundary,
        b"Content-Disposition: form-data; name=\"sha256\"",
        to_bytes(secure_hash_s(b_artifact), errors='surrogate_or_strict'),
        b_boundary,
        b"Content-Disposition: file; name=\"file\"; filename=\"%s\"" % b_file_name,
        b"Content-Type: application/octet-stream",
        b"",
        b_artifact,
        b"%s--" % b_boundary,
    ]

    return b"\r\n".join(b_parts), 'multipart/form-data; boundary=%s' % boundary