def upload(src_dir):
    """Send every '*.yml' asset file found under *src_dir* to Tower.

    Each file is pushed individually via ``tower_send``; progress is
    reported through ``log``.
    """
    for path in pathlib.Path(src_dir).rglob('*.yml'):
        # Human-readable asset name: file name without extension,
        # underscores turned into spaces.
        # (Removed a stale `# noqa: F841` here — the variable IS used
        # in the log message below, so the suppression was misleading.)
        asset_name = os.path.basename(
            os.path.splitext(path.name)[0].replace('_', ' '))
        log('INFO', f"Sending '{asset_name}' from '{path}'")
        tower_send(path)
        log('INFO', "Successfully sent")
def main(assets_glob, assets_schema, custom_validators):
    """Validate all files matching *assets_glob* against *assets_schema*.

    Does nothing when no schema path is supplied. Custom validator
    scripts listed in *custom_validators* are loaded into the schema.
    """
    # Validation only makes sense when a schema was provided.
    if not assets_schema:
        return
    schema = yamale.make_schema(
        assets_schema, validators=validators.load(custom_validators))
    for asset_file in sorted(glob.glob(assets_glob, recursive=True)):
        log('INFO', f"Validating {asset_file} against schema {assets_schema}")
        yamale.validate(schema, yamale.make_data(asset_file))
        log('INFO', "... finished")
def test_log(mock_click, mock_textwrap, level, fatal):
    """log() must echo the styled, indented message once, and raise
    click.Abort exactly when called with fatal=True."""
    message = "This is a message."
    mock_click.Abort = click.Abort
    mock_click.style.return_value = level
    mock_textwrap.indent.return_value = f"{level} {message}"
    try:
        # Keep only the raising call inside the try block.
        _utils.log(level, message, fatal=fatal)
    except click.Abort:
        # Aborting is only acceptable for a fatal call.
        assert fatal is True
    else:
        # Bug fix: previously a fatal call that failed to raise would
        # pass silently; not raising must correspond to fatal=False.
        assert fatal is False
        mock_click.echo.assert_called_once()
        mock_click.echo.assert_called_with(f"{level} {message}")
def main(label_id, assets_glob):
    """Cross-check job_template assets between the repository and Tower.

    Assets missing from Tower are warnings (they will be recreated);
    assets present in Tower but absent from the repository are errors
    and cause a fatal exit.
    """
    # Asset names in the repository. Only job_template assets can be
    # synchronized because we are getting assets from tower by label,
    # and labels are not available on projects or inventories.
    local_assets = set()
    for file_name in sorted(glob.glob(assets_glob, recursive=True)):
        asset = load_asset(file_name)
        if asset['asset_type'] == 'job_template':
            local_assets.add(asset['name'])
    # Asset names currently present in Tower under the given label.
    tower_assets = {
        item['name']
        for item in tower_list('job_template', [('labels', label_id)])
    }
    for asset in tower_assets & local_assets:
        log('INFO', f"'{asset}' located both in the repository and in the tower")
    # symmetric difference == disjunctive union == union without the intersection
    error = False
    for asset in tower_assets ^ local_assets:
        if asset in local_assets:
            # In the repository but not in Tower.
            log('WARNING', f"'{asset}' not found in tower ... will be recreated")
        else:
            # In Tower but not in the repository.
            error = True
            log('ERROR', (f"'{asset}' not found in repository ... will be reported "
                          "(not allowed)"))
    if error:
        log('INFO', (
            "Investigate if the asset should be deleted from tower, "
            "added to the repository, or it's label removed."
        ))
        log('ERROR', "Reported error(s) are not permitted!", fatal=True)
def load(files=None):
    """Assemble the validator registry used for schema validation.

    Starts from yamale's defaults, adds every validator defined in the
    builtin module, then overlays validators defined in optional
    user-supplied script files. Returns a dict of tag -> validator.
    """
    registry = DefaultValidators.copy()
    # Pick up every validator class exported by the builtin module.
    for attr_name in dir(builtin):
        candidate = getattr(builtin, attr_name)
        if _is_validator(candidate):
            registry[candidate.tag] = candidate
    # Overlay validators from external user scripts, if any were given.
    # NOTE(review): the scripts are exec'd with full privileges — this
    # assumes only trusted files are ever passed in; confirm callers.
    for script_path in files or []:
        log('INFO', f'Loading validators from user script {script_path!r}')
        with open(script_path, 'rb') as handle:
            compiled = compile(handle.read(), script_path, 'exec')
        exec_globals = {}
        exec_locals = {}
        exec(compiled, exec_globals, exec_locals)
        for candidate in exec_locals.values():
            if _is_validator(candidate):
                registry[candidate.tag] = candidate
    return registry
def extract(src_archive, dest_dir):
    """Unpack the zip archive *src_archive* into *dest_dir*.

    An existing *dest_dir* is removed first, so the result always
    reflects the archive contents exactly.
    """
    if os.path.isdir(dest_dir):
        log('WARNING', f"Rewriting directory: '{dest_dir}'")
        shutil.rmtree(dest_dir, ignore_errors=True)
    # NOTE(review): extractall trusts member paths inside the archive;
    # presumably only our own backup artifacts are passed here — confirm.
    with zipfile.ZipFile(src_archive, 'r') as archive:
        archive.extractall(dest_dir)
    log('INFO', f"Directory path (artifacts - extracted): '{dest_dir}'")
    log('INFO', "Successfully extracted")
def main(assets_glob):
    """Report differences between local asset files and their Tower copies.

    Exits fatally when any asset differs from its counterpart in Tower;
    assets that do not exist in Tower yet are skipped.
    """
    diff = False
    for file_name in sorted(glob.glob(assets_glob, recursive=True)):
        asset = load_asset(file_name)
        try:
            jt_data = tower_receive(asset['asset_type'], asset['name'])[0]
        except TowerCLIError:
            log('INFO', (f"Asset '{asset['name']}' doesn't exist in Tower, no "
                         "need to check for diffs"))
            continue
        # Need to parse extra vars to dict because in the assets file it
        # is YAML and in the response from tower it is JSON.
        asset['extra_vars'] = yaml.safe_load(asset['extra_vars'])
        jt_data['extra_vars'] = yaml.safe_load(jt_data['extra_vars'])
        log('INFO', f"Differentiating '{file_name}' and '{asset['name']}'")
        differences = list(dictdiffer.diff(jt_data, asset))
        # Idiom fix: truthiness check instead of `differences != []`.
        if differences:
            diff = True
            log('WARNING', (f" Mismatch, '{file_name}' is not the same as "
                            f"the '{asset['name']}' in tower!"))
            log('INFO', " Difference:")
            for d in differences:
                log('INFO', " " + json.dumps(d, indent=2))
    if diff:
        log('ERROR', "Difference(s) found!", fatal=True)
def main(destination, assets_glob):
    """Download every local asset's current definition from Tower.

    Each downloaded asset is written as a YAML file under *destination*;
    assets that do not exist in Tower are skipped.
    """
    try:
        os.makedirs(destination, exist_ok=True)
    except OSError:
        log('ERROR', f"Directory path: {destination}")
        log('ERROR', "Failed to create directory!", fatal=True)
    for file_name in sorted(glob.glob(assets_glob, recursive=True)):
        asset = load_asset(file_name)
        try:
            log('INFO', f"Downloading '{asset['name']}' ...")
            asset_data = tower_receive(asset['asset_type'], asset['name'])[0]
        except TowerCLIError:
            log('INFO', (f"... asset '{asset['name']}' does not exist in Tower"))
            continue
        # Sanitize the asset name so it is safe to use as a file name.
        safe_name = asset_data['name'].replace('/', '-').replace(' ', '_')
        file_path = os.path.join(destination, safe_name + '.yml')
        file_content = yaml.dump(
            asset_data, Dumper=Dumper, default_flow_style=False)
        try:
            log('INFO', f" File path: {file_path}")
            with open(file_path, 'w') as out:
                out.write("---\n")
                out.write(file_content)
        except EnvironmentError:
            log('ERROR', "Failed to write to the file!", fatal=True)
        log('INFO', "... downloaded")
def gitlab_download(url, token, project, artifacts_job, pipeline_id, file_path):
    """Download the 'artifacts.zip' artifact of a GitLab pipeline job.

    Aborts fatally when the pipeline or the *artifacts_job* job did not
    succeed, when the expected artifact is missing, or when *file_path*
    cannot be written.
    """
    gl = gitlab.Gitlab(url, token)
    gl.auth()
    project = gl.projects.get(project)
    pipeline = project.pipelines.get(pipeline_id)
    log('INFO', (f"Pipeline: {pipeline.id}\n"
                 f" Status: {pipeline.attributes['status']}\n"
                 f" Commit: {pipeline.attributes['sha']}\n"
                 f" URL: {pipeline.attributes['web_url']}"))
    if pipeline.attributes['status'] != 'success':
        log('ERROR', "Pipeline's status is not 'success'!", fatal=True)
    # Locate the job that produced the backup artifacts.
    job_backup = next(job for job in pipeline.jobs.list()
                      if job.name == artifacts_job)
    log('INFO', (f" Job: {artifacts_job}\n"
                 f" Status: {job_backup.attributes['status']}\n"
                 f" URL: {job_backup.attributes['web_url']}"))
    if job_backup.attributes['status'] != 'success':
        log('ERROR', "Job's status is not 'success'!", fatal=True)
    for artifact in job_backup.artifacts:
        if artifact['filename'] == 'artifacts.zip':
            log('INFO', (f" Artifact: {artifact['filename']}\n"
                         f" Format: {artifact['file_format']}\n"
                         f" Size: {artifact['size']}"))
            break
    else:
        # No 'artifacts.zip' among the job's artifacts.
        log('ERROR', "Invalid artifact!", fatal=True)
    try:
        if os.path.isfile(file_path):
            log('WARNING', f"Rewriting file: '{file_path}'")
        with open(file_path, "wb") as f:
            project.jobs.get(job_backup.id).artifacts(
                streamed=True, action=f.write)
    except EnvironmentError:
        log('ERROR', f"Failed to write to the file! Path: {file_path}",
            fatal=True)
    # Bug fix: this message was missing the f-prefix and printed the
    # literal text "{file_path}" instead of the actual path.
    log('INFO', f"File path (artifacts - archive): '{file_path}'")
    log('INFO', "Successfully downloaded")