def create(
    self,
    pull_secret_file: str,
    agent_namespace: str,
    base_domain: str = consts.env_defaults.DEFAULT_BASE_DNS_DOMAIN,
    provider_image: str = "",
    hypershift_cpo_image: str = "",
    release_image: str = "",
    ssh_key: str = "",
):
    """Create a HyperShift agent cluster via the hypershift CLI.

    Assembles the `hypershift create cluster agent` command line from the
    given options and executes it from HYPERSHIFT_DIR. Optional flags are
    appended only when their corresponding argument is non-empty.
    """
    log.info(f"Creating HyperShift cluster {self.name}")
    parts = [
        f"./bin/hypershift create cluster agent --pull-secret {pull_secret_file} --name {self.name}",
        f"--agent-namespace {agent_namespace} --base-domain {base_domain}",
    ]
    if provider_image:
        log.info(f"Using provider image {provider_image}")
        parts.append(
            f"--annotations hypershift.openshift.io/capi-provider-agent-image={provider_image}"
        )
    if hypershift_cpo_image:
        log.info(
            f"Using hypershift control-plane-operator image {hypershift_cpo_image}"
        )
        parts.append(f"--control-plane-operator-image={hypershift_cpo_image}")
    if release_image:
        log.info(f"Using release image {release_image}")
        parts.append(f"--release-image={release_image}")
    if ssh_key:
        parts.append(f"--ssh-key {ssh_key}")
    cmd = " ".join(parts)
    log.info(f"Create command is: {cmd}")
    utils.run_command_with_output(cmd, cwd=HYPERSHIFT_DIR)
def installer_gather(ip, ssh_key, out_dir):
    """Gather bootstrap debug logs from a node and unpack the bundle.

    Runs `openshift-install gather bootstrap` against *ip* (used as both the
    bootstrap and master address), saves the command's stdout/stderr to the
    debug files for later inspection, extracts the generated tarball into
    *out_dir*, and removes the tarball afterwards.

    :param ip: address of the node to gather logs from
    :param ssh_key: path to the SSH private key used by the installer
    :param out_dir: directory the log bundle is extracted into
    """
    stdout, stderr, _ret = utils.run_command(
        f"{INSTALLER_BINARY} gather bootstrap --log-level debug --bootstrap {ip} --master {ip} --key {ssh_key}"
    )

    with open(INSTALLER_GATHER_DEBUG_STDOUT, "w") as f:
        f.write(stdout)

    with open(INSTALLER_GATHER_DEBUG_STDERR, "w") as f:
        f.write(stderr)

    # The installer reports the bundle path only on stderr; parse it out.
    matches = re.compile(r'.*logs captured here "(.*)".*').findall(stderr)

    if len(matches) == 0:
        logging.warning(
            f"It seems like installer-gather didn't generate any bundles, stderr: {stderr}"
        )
        return

    bundle_file_path, *_ = matches
    logging.info(f"Found installer-gather bundle at path {bundle_file_path}")

    utils.run_command_with_output(f"tar -xzf {bundle_file_path} -C {out_dir}")

    # Fixed idiom: the original abused a conditional expression as a statement
    # (`os.remove(p) if os.path.exists(p) else None`); a plain `if` is clearer
    # and has identical behavior.
    if os.path.exists(bundle_file_path):
        os.remove(bundle_file_path)
def installer_generate(openshift_release_image):
    """Generate single-node ignition configs with the openshift installer.

    Overrides the release image via OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE
    and writes the ignition config into IBIP_DIR.
    """
    logging.info("Installer generate ignitions")
    env_overrides = {
        "OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE": openshift_release_image
    }
    command = f"{INSTALLER_BINARY} create single-node-ignition-config --dir={IBIP_DIR}"
    utils.run_command_with_output(command, env=env_overrides)
def create(self, pull_secret_file: str, agent_namespace: str, provider_image: str = "", ssh_key: str = ""):
    """Create a HyperShift agent cluster via the hypershift CLI.

    Optional provider image and SSH key flags are appended only when
    their arguments are non-empty.
    """
    log.info(f"Creating HyperShift cluster {self.name}")
    flags = [
        f"./bin/hypershift create cluster agent --pull-secret {pull_secret_file} --name {self.name}",
        f"--agent-namespace {agent_namespace}",
    ]
    if provider_image:
        log.info(f"Using provider image {provider_image}")
        flags.append(
            f"--annotations hypershift.openshift.io/capi-provider-agent-image={provider_image}"
        )
    if ssh_key:
        flags.append(f"--ssh-key {ssh_key}")
    utils.run_command_with_output(" ".join(flags), cwd=HYPERSHIFT_DIR)
def dump(self, output_folder, kubeconfig_path=None):
    """Dump HyperShift cluster artifacts into *output_folder*.

    Runs `hypershift dump cluster` for this cluster, writing artifacts to
    *output_folder*. When *kubeconfig_path* is given, the command runs with
    that KUBECONFIG.

    Bug fix: previously a None kubeconfig_path was interpolated literally
    into the command line, producing `KUBECONFIG=None`; the env prefix is
    now added only when a path is actually provided.
    """
    log.info(f"Dump HyperShift cluster {self.name} to {output_folder}")
    kubeconfig_prefix = f"KUBECONFIG={kubeconfig_path} " if kubeconfig_path else ""
    utils.run_command_with_output(
        f"{kubeconfig_prefix}{HYPERSHIFT_DIR}/bin/hypershift dump cluster --name {self.name} "
        f"--artifact-dir {output_folder}"
    )
def delete(self):
    """Destroy this HyperShift agent cluster via the hypershift CLI."""
    log.info(f"Deleting HyperShift cluster {self.name}")
    command = f"./bin/hypershift destroy cluster agent --name {self.name}"
    utils.run_command_with_output(command, cwd=HYPERSHIFT_DIR)
def create(self, pull_secret_file: str, ssh_key: str = ""):
    """Create a HyperShift agent cluster via the hypershift CLI.

    The --ssh-key flag is appended only when *ssh_key* is non-empty.
    """
    log.info(f"Creating HyperShift cluster {self.name}")
    base = f"./bin/hypershift create cluster agent --pull-secret {pull_secret_file} --name {self.name}"
    full_cmd = base + (f" --ssh-key {ssh_key}" if ssh_key else "")
    utils.run_command_with_output(full_cmd, cwd=HYPERSHIFT_DIR)