def load_yaml_or_secret_yaml(path, force_secrets=False):
    """Load data from a YAML file, or decode it via ``padre-decoder``.

    ``path`` may be of the form ``<file>:<env_key>``; when an env key is
    present (or ``force_secrets`` is set) the ``padre-decoder`` tool is
    invoked and its JSON stdout is parsed. Otherwise the file is read
    directly as YAML.
    """
    if ":" in path:
        path, env_lookup_key = path.split(":", 1)
    else:
        env_lookup_key = None
    if not (env_lookup_key or force_secrets):
        # Plain (non-secret) file; just parse it as YAML.
        with open(path, "rb") as fh:
            return yaml.safe_load(fh.read())
    cmd = ['padre-decoder', '-f', path]
    if env_lookup_key:
        cmd += ['-e', env_lookup_key]
    res = pu.run(cmd, stdout=pu.PIPE)
    res.raise_for_status()
    # In python 3.x json.loads requires a unicode type...
    decoder_out = res.stdout
    if isinstance(decoder_out, six.binary_type):
        decoder_out = decoder_out.decode("utf8")
    return json.loads(decoder_out)
def setup_git(config, secrets):
    """Apply global git (and git-review) configuration from ``config``.

    When an admin email is configured, sets the global git user email
    and name (name derived from the email's local part). When a
    launchpad user is configured, sets the global gitreview username.
    """
    git_email = getattr(config, 'admin_email', None)
    if git_email:
        git_user = git_email.split('@')[0]
        for cmd in (('git', 'config', '--global', 'user.email', git_email),
                    ('git', 'config', '--global', 'user.name', git_user)):
            res = pu.run(cmd)
            res.raise_for_status()
    try:
        gerrit_user = config.launchpad.user
    except AttributeError:
        gerrit_user = None
    if gerrit_user:
        res = pu.run(('git', 'config', '--global',
                      'gitreview.username', gerrit_user))
        res.raise_for_status()
def load_yaml_or_secret_yaml(path, force_secrets=False):
    """Load data from a YAML file, or decode it via ``padre-decoder``.

    ``path`` may be of the form ``<file>:<env_key>``; when an env key is
    present (or ``force_secrets`` is set) the ``padre-decoder`` tool is
    invoked and its JSON stdout is parsed. Otherwise the file is read
    directly as YAML.
    """
    try:
        path, env_lookup_key = path.split(":", 1)
    except ValueError:
        env_lookup_key = None
    if env_lookup_key or force_secrets:
        cmd = [
            'padre-decoder',
            '-f', path,
        ]
        if env_lookup_key:
            cmd.extend([
                '-e', env_lookup_key,
            ])
        res = pu.run(cmd, stdout=pu.PIPE)
        res.raise_for_status()
        # Captured stdout may be bytes; json.loads only accepts bytes on
        # python >= 3.6, so decode first (this also matches the sibling
        # implementation of this function).
        res_stdout = res.stdout
        if isinstance(res_stdout, bytes):
            res_stdout = res_stdout.decode("utf8")
        data = json.loads(res_stdout)
    else:
        with open(path, "rb") as fh:
            data = yaml.safe_load(fh.read())
    return data
def run(self, playbook, timeout=0.1, on_timeout_callback=None, **kwargs):
    """Execute the command formed for ``playbook`` via ``pu.run``.

    Any extra keyword arguments are forwarded; ``close_fds`` defaults
    to True unless the caller overrides it.
    """
    kwargs.setdefault('close_fds', True)
    command = self.form_command(playbook)
    return pu.run(command, timeout=timeout,
                  on_timeout_callback=on_timeout_callback, **kwargs)
def setup_ssh(config, secrets):
    """Materialize ssh config/keys/known_hosts under ``config.ssh.create_at``.

    Writes (when configured) an ssh ``config``, ``id_rsa`` and
    ``id_rsa.pub`` with 0600 permissions, then assembles a
    ``known_hosts`` file from ``ssh-keyscan`` output for each configured
    host plus any environment-provided entries rendered via the
    ``render`` executable. No-op when ``config.ssh.create_at`` is unset.
    """
    def iter_lines_clean(blob):
        # Yield only non-empty, non-comment lines of keyscan/render output.
        for line in blob.splitlines():
            if not line or line.startswith("#"):
                continue
            else:
                yield line
    try:
        create_at = config.ssh.create_at
    except AttributeError:
        create_at = None
    if not create_at:
        return
    # Create the target directory, tolerating its prior existence.
    try:
        os.makedirs(create_at)
    except OSError as e:
        if e.errno == errno.EEXIST:
            if not os.path.isdir(create_at):
                raise
        else:
            raise
    try:
        ssh_conf = config.ssh.config
    except AttributeError:
        ssh_conf = None
    if ssh_conf:
        # NOTE(review): files are opened 'wb', so these config values are
        # presumably bytes -- confirm against the config loader.
        ssh_conf_path = os.path.join(create_at, "config")
        with open(ssh_conf_path, 'wb') as fh:
            fh.write(ssh_conf)
        os.chmod(ssh_conf_path, 0o600)
    try:
        ssh_priv = config.ssh.private_key
    except AttributeError:
        ssh_priv = None
    if ssh_priv:
        ssh_priv_path = os.path.join(create_at, "id_rsa")
        with open(ssh_priv_path, 'wb') as fh:
            fh.write(ssh_priv)
        os.chmod(ssh_priv_path, 0o600)
    try:
        ssh_pub = config.ssh.public_key
    except AttributeError:
        ssh_pub = None
    if ssh_pub:
        ssh_pub_path = os.path.join(create_at, "id_rsa.pub")
        with open(ssh_pub_path, 'wb') as fh:
            fh.write(ssh_pub)
        os.chmod(ssh_pub_path, 0o600)
    try:
        known_hosts = config.ssh.known_hosts
    except AttributeError:
        known_hosts = ()
    known_hosts_lines = []
    for host in known_hosts:
        scan_command = ['ssh-keyscan']
        # Use urlparse and fake an https address using the given host.
        # This works well with both hostnames and IPs (v4/v6), and ALSO
        # ports. (Bug fix: urllib.parse is a module, not a callable --
        # the actual parser is urllib.parse.urlparse.)
        parsed = urllib.parse.urlparse("https://{}".format(host))
        if parsed.port:
            # Bug fix: argv elements must be strings; parsed.port is an int.
            scan_command.extend(['-p', str(parsed.port)])
        scan_command.append(parsed.hostname)
        r = pu.run(scan_command, stdout=pu.PIPE, stderr=pu.PIPE)
        r.raise_for_status()
        known_hosts_lines.append("# Keyscan for '%s'" % host)
        known_hosts_lines.extend(iter_lines_clean(r.stdout))
    try:
        fetcher_func = config.plugins.env_fetcher_func
    except AttributeError:
        fetcher_func = None
    else:
        fetcher_func = utils.import_func(fetcher_func)
    render_bin = utils.find_executable("render")
    if render_bin and fetcher_func:
        # Collect known_hosts entries rendered per-environment.
        for env_name, env_topo_fn in fetcher_func(
                env_dir=config.get("env_dir")):
            r = pu.run([render_bin, '-e', env_topo_fn, 'known_hosts'],
                       stdout=pu.PIPE, stderr=pu.PIPE)
            r.raise_for_status()
            known_hosts_lines.append("# Environment '%s'" % env_name)
            known_hosts_lines.extend(iter_lines_clean(r.stdout))
    if known_hosts_lines:
        known_hosts_path = os.path.join(create_at, "known_hosts")
        with open(known_hosts_path, 'wb') as fh:
            fh.write(("# WARNING: DO NOT EDIT THIS"
                      " FILE (IT WAS AUTOGENERATED ON BOT BOOTSTRAP!!!)\n"))
            fh.write("\n".join(known_hosts_lines))
        os.chmod(known_hosts_path, 0o644)
def _run(self, downstream_url, upstream_url, upstream_branch_refs,
         upstream_tag_refs, upstream_tags_as_branches_refs,
         patch_repo_url, patch_branch):
    """Sync an upstream git repository to a downstream mirror.

    Parses the comma-separated branch/tag ref listings, clones the
    upstream repository into a temp dir, optionally validates a patch
    repository against it (opening a GitHub pull request when the
    patch-update tool auto-commits changes), then pushes the requested
    branches and tags downstream. Progress and failures are reported as
    threaded chat replies; the method returns early on any failure.
    """
    # Normalize the comma-separated branch listing, dropping blanks.
    tmp_upstream_branch_refs = []
    for upstream_branch in upstream_branch_refs.split(","):
        upstream_branch = upstream_branch.strip()
        if upstream_branch:
            tmp_upstream_branch_refs.append(upstream_branch)
    upstream_branch_refs = tmp_upstream_branch_refs
    # Normalize the comma-separated tag listing, dropping blanks.
    tmp_upstream_tags_refs = []
    for upstream_tag in upstream_tag_refs.split(","):
        upstream_tag = upstream_tag.strip()
        if upstream_tag:
            tmp_upstream_tags_refs.append(upstream_tag)
    upstream_tag_refs = tmp_upstream_tags_refs
    # Normalize "tag:branch" pairs mapping upstream tags onto branches.
    tmp_upstream_tags_as_branches_refs = []
    for upstream_tag_branch in upstream_tags_as_branches_refs.split(","):
        upstream_tag_branch = upstream_tag_branch.strip()
        if upstream_tag_branch:
            # NOTE(review): maxsplit=2 permits a third ':'-separated
            # piece that is then silently ignored -- confirm maxsplit=1
            # was not intended.
            tmp_pieces = upstream_tag_branch.split(":", 2)
            tmp_tag = tmp_pieces[0]
            tmp_branch = tmp_pieces[1]
            tmp_upstream_tags_as_branches_refs.append(
                [tmp_tag, tmp_branch])
    upstream_tags_as_branches_refs = tmp_upstream_tags_as_branches_refs
    # Derive the project name from the last non-empty url segment
    # (the `or` handles urls with a trailing slash).
    project = upstream_url.split('/')
    project = project[-1] or project[-2]
    self.message.reply_text("Syncing repository for project `%s`..." % project,
                            threaded=True, prefixed=False)
    # Make temp dir for run
    tmp_dir_prefix = "github_sync_{}".format(project)
    with utils.make_tmp_dir(dir=self.bot.config.working_dir,
                            prefix=tmp_dir_prefix) as tmp_dir:
        # Clone the source repo
        try:
            source_repo = git.Repo.clone_from(
                upstream_url, os.path.join(tmp_dir, 'source'))
            self.message.reply_text(
                ":partyparrot: Successfully loaded repository `%s`." % project,
                threaded=True, prefixed=False)
        except Exception:
            self.message.reply_text(
                ":sadparrot: Failed to load repository `%s`." % project,
                threaded=True, prefixed=False)
            return
        # Now check patches, if we know what patch repo to use
        if patch_repo_url:
            self.message.reply_text(
                "Checking patch compatibility for `%s` branch `%s`."
                % (project, patch_branch),
                threaded=True, prefixed=False)
            # Clone the patch repo
            patch_repo = git.Repo.clone_from(
                patch_repo_url, os.path.join(tmp_dir, 'patches'))
            # Remember the patch repo head so we can later detect whether
            # the update tool made an auto-commit.
            head_commit = patch_repo.head.commit.hexsha
            # Validate patches
            r = process_utils.run(
                [
                    'update-patches',
                    '--branch-override', patch_branch,
                    '--patch-repo', patch_repo.working_dir
                ],
                cwd=os.path.join(tmp_dir, "source")  # from sync() above
            )
            try:
                r.raise_for_status()
                self.message.reply_text(
                    ":gdhotdog: Patch compatibility check successful.",
                    threaded=True, prefixed=False)
            except process_utils.ProcessExecutionError:
                # Patches no longer apply cleanly; notify the admin
                # channel and abort the sync entirely.
                self.message.reply_text(
                    "Patch compatibility check failed. Please do a manual "
                    "rebase!",
                    threaded=True, prefixed=False)
                attachment = {
                    'text': (":warning:"
                             " Patches are in merge conflict in the"
                             " repository `%s`. Manual intervention"
                             " is required!") % project,
                    'mrkdwn_in': ['text'],
                    'color': su.COLORS.purple,
                }
                self.message.reply_attachments(
                    attachments=[attachment],
                    log=LOG, as_user=True,
                    text=' ',
                    channel=self.config.admin_channel,
                    unfurl_links=True)
                return
            # If we made an auto-commit, PR it
            if patch_repo.head.commit.hexsha == head_commit:
                self.message.reply_text("No patch updates detected.",
                                        threaded=True, prefixed=False)
            else:
                # Push the auto-commit to a uniquely named branch
                # (project + short hash) and open a pull request for it.
                new_branch = '{project}_{short_hash}'.format(
                    project=project,
                    short_hash=patch_repo.head.commit.hexsha[:8])
                new_refspec = 'HEAD:{branch}'.format(branch=new_branch)
                self.message.reply_text(
                    "Pushing patch updates to branch `{branch}`.".format(
                        branch=new_branch),
                    threaded=True, prefixed=False)
                patch_repo.remote().push(refspec=new_refspec)
                # Derive the "<owner>/<repo>" name github expects from the
                # patch repo url.
                patch_repo_name = patch_repo_url.split(":")[-1]
                patch_repo_name = patch_repo_name.split('.git')[0]
                gh_repo = self.bot.clients.github_client.get_repo(
                    patch_repo_name)
                # First commit-message line becomes the PR title, the
                # remainder becomes the PR body.
                title, body = patch_repo.head.commit.message.split('\n', 1)
                self.message.reply_text("Creating pull request...",
                                        threaded=True, prefixed=False)
                pr = gh_repo.create_pull(title=title, body=body.strip(),
                                         base="master", head=new_branch)
                self.message.reply_text(
                    ":gunter: Pull request created: {url}".format(
                        url=pr.html_url),
                    threaded=True, prefixed=False)
        # Finish syncing the repo by pushing the new state
        self.message.reply_text("Pushing upstream state downstream...",
                                threaded=True, prefixed=False)
        source_repo.heads.master.checkout()
        source_repo.remote().fetch()
        retval = git_utils.sync_push(
            working_folder=tmp_dir,
            target=downstream_url,
            push_tags=upstream_tag_refs,
            push_branches=upstream_branch_refs,
            push_tags_to_branches=upstream_tags_as_branches_refs)
        if retval == 0:
            self.message.reply_text(
                ":partyparrot: Successfully pushed repository `%s`."
                % project,
                threaded=True, prefixed=False)
        else:
            self.message.reply_text(
                ":sadparrot: Failed to push repository `%s`." % project,
                threaded=True, prefixed=False)
            return
    self.message.reply_text(":beers: Done.",
                            threaded=True, prefixed=False)
def test_run(self):
    """A command that succeeds has exit code 0 and raises nothing."""
    result = pu.run(['bash', '-c', 'exit 0'])
    result.raise_for_status()
    self.assertEqual(result.exit_code, 0)
def test_run_bad(self):
    """A command that fails records exit code 1 and raise_for_status raises."""
    result = pu.run(["bash", "-c", 'exit 1'],
                    stdout=pu.PIPE, stderr=pu.PIPE)
    self.assertEqual(result.exit_code, 1)
    self.assertRaises(pu.ProcessExecutionError, result.raise_for_status)
def test_run_capture(self):
    """Captured stdout of a successful echo is non-empty."""
    command = ['bash', '-c', 'echo "hi"']
    result = pu.run(command, stdout=pu.PIPE, stderr=pu.PIPE)
    result.raise_for_status()
    self.assertNotEqual("", result.stdout)