def test_fail(tmpdir):
    """Test packer error is passed."""
    params = {
        'template_path': 'src/non-existing.json',
    }
    with pytest.raises(Exception):
        cmd('out', {}, [str(tmpdir)], params=params)
def test_output(tmpdir, capfd):
    """Test output to stderr."""
    params = {
        'template_path': 'src/ami.json',
    }
    # disable test debug logging to get normal output
    del os.environ['RESOURCE_DEBUG']
    cmd('out', {}, [str(tmpdir)], params=params)
    os.environ['RESOURCE_DEBUG'] = 'true'
    out, err = capfd.readouterr()
    print(err)
    assert re.search(r'^amazon-ebs output will be in this color\.$', err, re.MULTILINE)
    assert '==> amazon-ebs: Force Deregister flag found, skipping prevalidating AMI Name' in err
def test_hipchat_notify():
    """Test posting notification to Hipchat."""
    source = {
        'uri': 'https://www.hipchat.com/v2/room/2442416/notification',
        'headers': {
            'Authorization': 'Bearer ' + HIPCHAT_TOKEN,
        },
        'method': 'POST',
    }
    params = {
        'json': {
            'color': 'green',
            'message': 'Build {BUILD_PIPELINE_NAME}/{BUILD_JOB_NAME}, nr: {BUILD_NAME} was a success!',
        }
    }
    cmd('out', source, params=params)
def test_passing_build_vars(tmpdir):
    """Test if build vars are passed."""
    params = {
        'template_path': 'src/build-vars.json',
        'build_vars': {
            'role': 'test',
        }
    }
    assert cmd('out', {}, [str(tmpdir)], params=params)
def test_check(httpbin):
    """Test if check returns the latest version number."""
    source = {
        'index': httpbin + '/links/10',
        'regex': "href='/links/10/(?P<version>[0-9]+)'",
    }
    output = cmd('check', source)
    assert output == [{'version': '9'}]
def test_empty_check(httpbin):
    """Check must return an empty response but not nothing."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
    }
    check = cmd('check', source)
    assert check == []
def test_out(tmpdir):
    """Test packing image from template."""
    params = {
        'template_path': 'src/ami.json',
    }
    output = cmd('out', {}, [str(tmpdir)], params=params)
    assert output.get('version').get('ImageId') == 'ami-01234567'
    assert output.get('metadata')[0].get('name') == 'tag_version'
def _resize_wic_file(self, increase_bytes: int, extra_space=0.2):
    bs = 1024
    increase_k = ceil((increase_bytes + increase_bytes * extra_space) / bs) + 1
    wic_k = ceil(os.stat(self._path).st_size / bs)
    logger.info('Extending the wic image; adding: {} bytes, asked {}'.format(increase_k * bs, increase_bytes))
    cmd('dd', 'if=/dev/zero', 'bs=' + str(bs), 'of=' + self._path,
        'conv=notrunc', 'oflag=append', 'count=' + str(increase_k),
        'seek=' + str(wic_k))

    parted_out = subprocess.check_output(['parted', self._path, 'print'])
    if parted_out.find(b'Partition Table: gpt') != -1:
        subprocess.check_call(['sgdisk', '-e', self._path])

    # save the last partition number for resizing. Example line for GPT:
    #   5      33.6MB  1459MB  1425MB  ext4         primary
    # and like this for msdos:
    #   2      50.3MB  688MB   638MB   primary  ext4
    # either way we can capture the first column as the last partition
    #
    # NOTE: use -3 index as parted_out will have 2x b'' items at the end
    self._last_part = int(parted_out.split(b'\n')[-3].split()[0])
    logger.info('last partition: %d' % self._last_part)
    subprocess.check_call(['parted', self._path, 'resizepart', str(self._last_part), '100%'])
def test_passing_build_vars_from_files(tmpdir):
    """Test if build vars can be read from files."""
    tmpdir.join('ami_id').write('ami-12345678')
    params = {
        'template_path': 'src/build-vars-from-file.json',
        'build_vars_from_file': {
            'source_ami': 'ami_id',
        }
    }
    assert cmd('out', {}, [str(tmpdir)], params=params)
def copy_compose_apps_to_wic(target: FactoryClient.Target, fetch_dir: str, wic_image: str, token: str,
                             apps_shortlist: list, progress: Progress):
    p = Progress(4, progress)
    apps_fetcher = TargetAppsFetcher(token, fetch_dir)
    apps_fetcher.fetch_target(target, shortlist=apps_shortlist, force=True)
    p.tick()
    apps_size_b = apps_fetcher.get_target_apps_size(target)
    p.tick()

    logger.info('Compose Apps require extra {} bytes of storage'.format(apps_size_b))
    with WicImage(wic_image, apps_size_b) as wic_image:
        if os.path.exists(wic_image.docker_data_root):
            # wic image was populated with container images data during the LmP build (/var/lib/docker);
            # remove it and repopulate it with the given images data
            logger.info('Removing existing preloaded app images from the system image')
            shutil.rmtree(wic_image.docker_data_root)
        else:
            # intel installer images won't have this directory
            _mk_parent_dir(wic_image.docker_data_root)

        if os.path.exists(wic_image.compose_apps_root):
            # wic image was populated with compose apps data during the LmP build (/var/sota/compose-apps);
            # remove it and repopulate it with the given apps data
            logger.info('Removing existing preloaded compose apps from the system image')
            shutil.rmtree(wic_image.compose_apps_root)
        else:
            # intel installer images won't have this directory
            _mk_parent_dir(wic_image.compose_apps_root)

        # copy <fetch-dir>/<target-name>/apps/* to /var/sota/compose-apps/
        cmd('cp', '-a', apps_fetcher.apps_dir(target.name), wic_image.compose_apps_root)
        # copy <fetch-dir>/<target-name>/images/* to /var/lib/docker/
        cmd('cp', '-a', apps_fetcher.images_dir(target.name), wic_image.docker_data_root)
        p.tick()

        wic_image.update_target(target)
    p.tick()
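# Hypothetical usage sketch (not part of the original module): shows how
# copy_compose_apps_to_wic() above might be driven. The fetch dir, image path and
# app shortlist are placeholders; `target`, `token` and `progress` are whatever the
# caller already holds (a FactoryClient.Target, an API token and a Progress object).
def _example_preload_apps(target: FactoryClient.Target, token: str, progress: Progress):
    copy_compose_apps_to_wic(target,
                             fetch_dir='/tmp/apps-fetch',             # scratch dir for fetched app bundles
                             wic_image='/tmp/lmp-factory-image.wic',  # system image modified in place
                             token=token,
                             apps_shortlist=['shellhttpd'],           # preload only the listed apps
                             progress=progress)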
def test_OUT(httpbin):
    """Test action with no params."""
    data = {
        'params': {
            # empty on purpose
        },
        'source': {
            'url': httpbin + '/post',
        }
    }
    result, debug = cmd('out', data)
    assert result['version'] == {}
def test_OUT_malformed_card(httpbin):
    """Test action with a malformed card."""
    data = {
        'params': {},
        'source': {
            'url': httpbin + '/status/404',
        }
    }
    result, debug = cmd('out', data)
    assert result['version'] == {}
    assert 'ERROR: ' in debug
def test_json(httpbin):
    """JSON should be passed as JSON content."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
        'json': {
            'test': 123,
        },
    }
    output = cmd('out', source)
    assert output['json']['test'] == 123
def test_data_urlencode(httpbin):
    """Test passing URL encoded data."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
        'form_data': {
            'field': {
                'test': 123,
            },
        },
    }
    output = cmd('out', source)
    assert output['form'] == {'field': '{"test": 123}'}
def test_json(httpbin):
    """JSON should be passed as JSON content."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
        'json': {
            'test': 123,
        },
        'version': {}
    }
    output = cmd('out', source, args=["/opt/resource-tests"])
    assert output['json']['test'] == 123
    assert output['version'] == {}
def test_data_ensure_ascii(httpbin):
    """Test form_data json ensure_ascii."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
        'form_data': {
            'field': {
                'test': '日本語',
            },
        },
    }
    output = cmd('out', source)
    assert output['form'] == {'field': '{"test": "日本語"}'}
def test_in_filename(httpbin, tmpdir):
    """Test downloading versioned file with predetermined filename."""
    source = {
        'uri': httpbin + '/range/{version}',
        'filename': 'filename_{version}',
    }
    in_dir = tmpdir.mkdir('work_dir')
    output = cmd('in', source, [str(in_dir)], {'version': '9'})
    assert output == {'version': {'version': '9'}, 'metadata': []}
    assert in_dir.join('filename_9').exists()
    assert len(in_dir.join('filename_9').read()) == 9
def test_OUT_simple_card(httpbin):
    """Test action with a simple card."""
    data = {
        'params': {
            'summary': 'Card Summary',
            'title': 'Card Title',
            'text': 'Card text.',
        },
        'source': {
            'url': httpbin + '/post',
        }
    }
    result, debug = cmd('out', data)
    assert result['version'] == {}
def publish(factory: str, tag: str, app_name: str) -> str:
    base = 'hub.foundries.io/' + factory + '/'
    app = base + app_name
    tagged = app + ':app-' + tag

    changed = False
    with open(os.path.join(app_name, 'docker-compose.yml')) as f:
        compose = yaml.safe_load(f)
    for name, svc in compose['services'].items():
        img = svc['image']
        if img.startswith(base):
            # this should be a container defined in this factory and
            # in its containers.git, so it should get pinned to ${TAG}
            # to work as expected
            parts = img.split(':')
            if len(parts) == 1:
                status('Image pinned to latest: %s, updating to %s' % (img, tag))
                svc['image'] = img + ':' + tag
                changed = True
            elif len(parts) == 2:
                allowed = ['${TAG}', 'latest', tag]
                if factory == 'lmp':
                    # the lmp containers repo pulls in fiotest which sets
                    # its tag to "postmerge" which is okay based on how
                    # we do tagging for that repo.
                    allowed.append('postmerge')
                if parts[1] not in allowed:
                    sys.exit('Image pinned to %s should be ${TAG} or %s' % (parts[1], tag))
                svc['image'] = parts[0] + ':' + tag
                changed = True
            else:
                sys.exit('Unexpected image value: ' + img)
    if changed:
        with open(os.path.join(app_name, 'docker-compose.yml'), 'w') as f:
            yaml.dump(compose, f)

    out = cmd('compose-publish', tagged, cwd=app_name, capture=True)
    # The publish command produces output like:
    #   = Publishing app...
    #   |-> app: sha256:fc73321368c7a805b0f697a0a845470bc022b6bdd45a8b34
    #   |-> manifest: sha256:e15e3824fc21ce13815aecb0490d60b3a32
    # We need the manifest sha so we can pin it properly in targets.json
    needle = b'|-> manifest: sha256:'
    sha = out[out.find(needle) + len(needle):].strip()
    return app + '@sha256:' + sha.decode()
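# Hypothetical usage sketch (not part of the original module): publish() above pins
# every factory-owned image in the app's docker-compose.yml to the given tag, runs
# `compose-publish`, and returns the app URI pinned by its manifest digest. The
# factory, tag and app names below are placeholders.
def _example_publish() -> str:
    pinned_uri = publish(factory='my-factory', tag='42', app_name='shellhttpd')
    # e.g. hub.foundries.io/my-factory/shellhttpd@sha256:<manifest-digest>
    return pinned_uri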
def test_data_urlencode(httpbin):
    """Test passing URL encoded data."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
        'form_data': {
            'field': {
                'test': 123,
            },
        }
    }
    output = cmd('out', source, args=["/opt/resource-tests"])
    assert output['form'] == {'field': '{"test": 123}'}
    assert output['version'] == {}
def test_params_file_inject_trim(httpbin):
    """Ensure that file content gets trimmed and injected into the param."""
    source = {
        "uri": httpbin + "/post",
        "method": "POST",
        "form_data": {
            "triggered_by": {
                "commit_sha": "-@data/padded.txt",
            },
        },
    }
    output = cmd("out", source, args=["/opt/resource-tests"])
    assert output["form"] == {"triggered_by": '{"commit_sha": "cafe430"}'}
def test_not_parsed_data(httpbin):
    """Test form_data in a standard format."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
        'parse_form_data': False,
        'form_data': {
            'firstname': 'John',
            'lastname': 'Doe',
        }
    }
    output = cmd('out', source)
    assert output['form'] == {"firstname": "John", "lastname": "Doe"}
    assert output['version'] == {}
def test_check_with_version(httpbin):
    """Test if check returns newer version numbers."""
    source = {
        'index': httpbin + '/links/10',
        'regex': "href='/links/10/(?P<version>[0-9]+)'",
    }
    version = {
        'version': '7',
    }
    output = cmd('check', source, version=version)
    assert output == [
        {'version': '8'},
        {'version': '9'},
    ]
def test_interpolation(httpbin):
    """Values should be interpolated recursively."""
    source = {
        'uri': httpbin + '/post',
        'method': 'POST',
        'json': {
            'object': {
                'test': '{BUILD_NAME}',
            },
            'array': ['{BUILD_NAME}'],
        },
    }
    output = cmd('out', source)
    assert output['json']['object']['test'] == '1'
    assert output['json']['array'][0] == '1'
def test_in_filename(httpbin, tmpdir):
    """Test downloading versioned file with predetermined filename."""
    source = {
        'uri': httpbin + '/range/{version}',
        'filename': 'filename_{version}',
    }
    in_dir = tmpdir.mkdir('work_dir')
    output = cmd('in', source, [str(in_dir)], {'version': '9'})
    assert output['version'] == {'version': '9'}
    assert {'name': 'url', 'value': httpbin + '/range/9'} in output['metadata']
    assert {'name': 'Content-Type', 'value': 'application/octet-stream'} in output['metadata']
    assert in_dir.join('filename_9').exists()
    assert len(in_dir.join('filename_9').read()) == 9
def __exit__(self, exc_type, exc_val, exc_tb):
    cmd('umount', self._mnt_dir)
    os.rmdir(self._mnt_dir)
    cmd('umount', '/dev')
    cmd('losetup', '-d', self._loop_device)
def __enter__(self):
    cmd('losetup', '-P', '-f', self._path)
    out = cmd('losetup', '-a', capture=True).decode()
    for line in out.splitlines():
        if self._path in line:
            self._loop_device = line.split(':', 1)[0]
            self._wic_device = line.split(':', 1)[0] + 'p' + str(self._last_part)
            break
    else:
        raise RuntimeError('Unable to find loop device for wic image')

    # containers don't see changes to /dev, so we have to hack around
    # this by basically mounting a new /dev. The idea was inspired by
    # this comment:
    #   https://github.com/moby/moby/issues/27886#issuecomment-257244027
    cmd('mount', '-t', 'devtmpfs', 'devtmpfs', '/dev')

    cmd('e2fsck', '-y', '-f', self._wic_device)
    if self._resized_image:
        cmd('resize2fs', self._wic_device)

    os.mkdir(self._mnt_dir)
    cmd('mount', self._wic_device, self._mnt_dir)
    return self
def test_CHECK_empty():
    """CHECK must return an empty response but not nothing."""
    result, debug = cmd('check', {})
    assert result == []
def test_IN_empty():
    """IN must return an empty version response but not nothing."""
    result, debug = cmd('in', {})
    assert result['version'] == {}
def __enter__(self):
    cmd('losetup', '-P', '-f', self._path)
    out = cmd('losetup', '-a', capture=True).decode()
    for line in out.splitlines():
        if self._path in line:
            self._loop_device = line.split(':', 1)[0]
            self._wic_device = line.split(':', 1)[0] + 'p' + str(self._last_part)
            break
    else:
        raise RuntimeError('Unable to find loop device for wic image')

    # containers don't see changes to /dev, so we have to hack around
    # this by basically mounting a new /dev. The idea was inspired by
    # this comment:
    #   https://github.com/moby/moby/issues/27886#issuecomment-257244027
    cmd('mount', '-t', 'devtmpfs', 'devtmpfs', '/dev')

    cmd('e2fsck', '-y', '-f', self._wic_device)
    if self._resized_image:
        cmd('resize2fs', self._wic_device)

    os.mkdir(self._mnt_dir)
    cmd('mount', self._wic_device, self._mnt_dir)

    installer = os.path.join(self._mnt_dir, 'rootfs.img')
    if os.path.exists(installer):
        if self._resized_image:
            self._resize_rootfs_img(installer, self._rootfs_bytes_increase)

        self._installer_mount = os.path.join('/mnt/installer_rootfs')
        os.mkdir(self._installer_mount)
        cmd('mount', '-oloop', installer, self._installer_mount)

        self.compose_apps_root = os.path.join(self._installer_mount, self.ComposeAppsRootDir)
        self.docker_data_root = os.path.join(self._installer_mount, self.DockerDataRootDir)
        self.restorable_apps_root = os.path.join(self._installer_mount, self.RestorableAppsRoot)
        self.installed_target_filepath = os.path.join(self._installer_mount, self.InstalledTargetFile)
    return self
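# Hypothetical usage sketch (not part of the original class): WicImage is used as a
# context manager, as in copy_compose_apps_to_wic() above. The constructor arguments
# (path to the .wic file plus the number of extra bytes to grow it by) are assumed
# from that call site; the path and size below are placeholders.
def _example_inspect_wic(path: str = '/tmp/lmp-factory-image.wic'):
    with WicImage(path, 10 * 1024 * 1024) as wic:
        # the rootfs is mounted by __enter__, so its app/docker paths are reachable here
        print('compose apps root:', wic.compose_apps_root)
        print('docker data root:', wic.docker_data_root)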