def provision(self, num_master=-1, num_worker=-1):
    """ Create and apply terraform plan"""
    # Optional per-call override of node counts; -1 (the default) means
    # "use whatever the loaded configuration says".
    if num_master > -1 or num_worker > -1:
        print("Overriding number of nodes")
        if num_master > -1:
            self.conf.master.count = num_master
            print(" Masters:{} ".format(num_master))
        if num_worker > -1:
            self.conf.worker.count = num_worker
            print(" Workers:{} ".format(num_worker))
    print("Init terraform")
    # Refuses to run if a previous deployment's tfjson is still present.
    self._check_tf_deployed()
    self.utils.setup_ssh()
    init_cmd = "terraform init"
    if self.conf.terraform.plugin_dir:
        # Use pre-downloaded providers instead of fetching from the registry.
        print("Installing plugins from {}".format(
            self.conf.terraform.plugin_dir))
        init_cmd = init_cmd + " -plugin-dir=" + self.conf.terraform.plugin_dir
    self._runshellcommandterraform(init_cmd)
    self._runshellcommandterraform("terraform version")
    self._generate_tfvars_file()
    # Both commands are prefixed with the platform's environment setup
    # (e.g. sourcing credentials) and joined with ';' into one shell line.
    plan_cmd = ("{env_setup};"
                " terraform plan "
                " -out {workspace}/tfout".format(
                    env_setup=self._env_setup_cmd(),
                    workspace=self.conf.workspace))
    apply_cmd = ("{env_setup};"
                 "terraform apply -auto-approve {workspace}/tfout".format(
                     env_setup=self._env_setup_cmd(),
                     workspace=self.conf.workspace))

    # TODO: define the number of retries as a configuration parameter
    # Plan is regenerated before every apply attempt; on the last attempt
    # (retry == 4, matching range(1, 5)) the failure is re-raised.
    for retry in range(1, 5):
        print(
            Format.alert(
                "Run terraform plan - execution # {}".format(retry)))
        self._runshellcommandterraform(plan_cmd)
        print(
            Format.alert(
                "Run terraform apply - execution # {}".format(retry)))
        try:
            self._runshellcommandterraform(apply_cmd)
            break
        except Exception:
            print("Failed terraform apply n. %d" % retry)
            if retry == 4:
                print(
                    Format.alert("Failed Openstack Terraform deployment"))
                raise
        finally:
            # Runs after EVERY attempt - on success (before the break takes
            # effect), on a retried failure, and on the final re-raise - so
            # partial state is always captured.
            self._fetch_terraform_output()
def read_directory_entries(self, file):
    """Yield (type, name, stat, digest, level) tuples read from *file*.

    Iteration ends when Format.read_int returns None (end of stream).
    Entries whose names are not valid UTF-8 are logged and skipped.
    """
    while True:
        node_type = Format.read_int(file)
        if node_type is None:
            # PEP 479: 'raise StopIteration' inside a generator becomes a
            # RuntimeError on Python 3.7+; 'return' is the correct way to
            # finish the generator.
            return
        node_name = Format.read_string(file)
        node_digest = file.read(Digest.dataDigestSize())
        node_level = IntegerEncodings.binary_read_int_varlen(file)
        node_stat = unserialize_stats(file)
        try:
            node_name_decoded = unicode(node_name, 'utf8')
        except UnicodeDecodeError:
            # Narrowed from a bare 'except:' that also wrapped the yield -
            # it used to swallow any exception thrown into the generator,
            # not just bad file names.
            logging.info("Encountered bad file name in " + self.path())
            continue
        yield (node_type, node_name_decoded, node_stat, node_digest,
               node_level)
def __init__(self, conf):
    """Initialize the VMware platform; validate the configured env file."""
    super().__init__(conf, 'vmware')
    env_file = conf.vmware.env_file
    if os.path.isfile(env_file):
        return
    # Fail fast with a pointer at the offending configuration entry.
    msg = (
        f'Your VMware env file path "{env_file}" does not exist.\n\t '
        'Check the VMware env file path in your configured yaml file.')
    raise ValueError(Format.alert(msg))
def __init__(self, conf):
    """Initialize the Openstack platform; validate the openrc file path."""
    super().__init__(conf, 'openstack')
    openrc = conf.openstack.openrc
    if not os.path.isfile(openrc):
        # Fail fast so a bad credentials path surfaces before terraform runs.
        message = (f"Your openrc file path \"{openrc}\" does not exist.\n\t "
                   "Check your openrc file path in a configured yaml file")
        raise ValueError(Format.alert(message))
def __init__(self, conf):
    """Initialize the Openstack platform and seed terraform defaults."""
    super().__init__(conf, 'openstack')
    openrc_path = conf.openstack.openrc
    if not os.path.isfile(openrc_path):
        # A missing credentials file is a configuration error - stop here.
        raise ValueError(Format.alert(
            f"Your openrc file path \"{openrc_path}\" does not exist.\n\t "
            "Check your openrc file path in a configured yaml file"))
    self.platform_new_vars = {}
    # Default the internal network name to the stack name when unset.
    if not self.conf.terraform.internal_net:
        self.conf.terraform.internal_net = self.conf.terraform.stack_name
def write(self, ctx):
    """ Write the info of the current dir to database """
    packer = PackerStream.PackerOStream(self.backup, Container.CODE_DIR)
    # Children are emitted in their stored (sorted) order so every reader
    # walks files identically.
    # TODO: measure if this really makes things faster
    # (probably will with a btree db)
    for entry in self.children:
        Format.write_int(packer, entry.get_type())
        Format.write_string(packer, entry.get_name().encode('utf8'))
        packer.write(entry.get_digest())
        packer.write(
            IntegerEncodings.binary_encode_int_varlen(entry.get_level()))
        packer.write(serialize_stats(entry.get_stats()))
    self.digest = packer.get_digest()
    self.level = packer.get_level()
    return (packer.get_num_new_blocks(), packer.get_size_new_blocks())
def _runshellcommandterraform(self, cmd, env=None):
    """Running terraform command in {terraform.tfdir}/{platform}

    :param cmd: shell command line to execute
    :param env: optional extra environment variables for the subprocess
    :raises subprocess.CalledProcessError: if the command exits non-zero
    """
    cwd = self.tfdir
    # Fix: the old signature used a mutable default ('env={}') and then
    # mutated it, so entries leaked across calls sharing the default.
    # Copy the caller's mapping so it is never modified in place either.
    env = dict(env) if env else {}
    # Terraform needs PATH and SSH_AUTH_SOCK
    sock_fn = self.utils.ssh_sock_fn()
    env["SSH_AUTH_SOCK"] = sock_fn
    env["PATH"] = os.environ['PATH']
    print(Format.alert("$ {} > {}".format(cwd, cmd)))
    # NOTE(review): shell=True with an interpolated command string - cmd is
    # built internally today, but keep untrusted input out of it.
    subprocess.check_call(cmd, cwd=cwd, shell=True, env=env)
def cleanup(self):
    """ Clean up """
    # Best-effort: every step is attempted even if earlier ones fail, and a
    # single summary exception is raised at the end if anything went wrong.
    failed = False
    try:
        self._cleanup_platform()
    except Exception as ex:
        failed = True
        print(Format.alert("Received the following error {}".format(ex)))
        print("Attempting to finish cleanup")

    targets = [os.path.join(self.conf.workspace, "tfout"), self.tfjson_path]
    for target in targets:
        try:
            self.utils.runshellcommand("rm -rf {}".format(target))
        except Exception as ex:
            failed = True
            print("Received the following error {}".format(ex))
            print("Attempting to finish cleanup")

    if failed:
        raise Exception(Format.alert("Failure(s) during cleanup"))
def get_platform(conf, platform):
    """Return the platform driver matching *platform* (case-insensitive)."""
    name = platform.lower()
    if name == "openstack":
        return Openstack(conf)
    if name == "vmware":
        return VMware(conf)
    if name == "bare-metal":
        # TODO platform = Bare_metal(conf, utils)
        print("Todo: bare-metal is not available")
        sys.exit(0)
    if name == "libvirt":
        # TODO platform = Livbirt(conf, utils)
        print("Todo: libvirt is not available")
        sys.exit(0)
    raise Exception(
        Format.alert(
            "Platform Error: {} is not applicable".format(platform)))
def unserialize_stats(file):
    """Read back the preserved stat fields written by serialize_stats."""
    # One int per preserved mode, in the same fixed order they were written.
    return {mode: Format.read_int(file) for mode in STAT_PRESERVED_MODES}
def serialize_stats(stats):
    """Serialize the preserved stat fields of *stats* to a string."""
    # Renamed the buffer from 'file' to avoid shadowing the builtin.
    buf = StringIO.StringIO()
    for mode in STAT_PRESERVED_MODES:
        Format.write_int(buf, stats[mode])
    return buf.getvalue()
def _verify_tf_dependency(self): if not os.path.exists(self.tfjson_path): raise Exception(Format.alert("tf file not found. Please run terraform and try again{}"))
def _check_tf_deployed(self): if os.path.exists(self.tfjson_path): raise Exception(Format.alert(f"tf file found. Please run cleanup and try again {self.tfjson_path}"))