def list():
    """Print every spark added to the current project with its description.

    NOTE: the name shadows the builtin ``list``; kept unchanged because it
    is the public CLI entry point.
    """
    CWD.check_project_dir()
    sparks = CWD.list()
    logger.log("Sparks added to Project = " + str(len(sparks)))
    # .items() iterates identically on Python 2 and 3; iteritems() is 2-only
    for name, config in sparks.items():
        logger.log(name + ": " + config['description'])
def cli():
    """Print version info, use -v for licenses"""
    logger.log("allspark Version " + util.allspark_version())
    # Report the versions of the underlying tools in a fixed order
    for version_cmd in ("python --version",
                        "terraform --version",
                        "ansible --version"):
        util.shell_run(version_cmd)
    # License details are only emitted when verbose logging is enabled
    logger.vlog("OSS Licenses:")
    logger.vjson(util.oss_licenses())
def generate(self, provider=None):
    """Generate or refresh the project's infrastructure and software code.

    If the project directory already exists, only the templated files are
    re-rendered from the current project data. Otherwise the full project
    skeleton is created: directory layout, persisted JSON config, an ssh
    keypair, and the initial infra/software code.

    :param provider: cloud provider id; defaults to the project's provider
    """
    if provider is None:
        provider = self.get_provider()
    try:
        if os.path.exists(self.project_dir):
            # Existing project: re-render templates from self.data
            util.write_template("ansible/sparks.tf.tpl", self.data,
                                self.project_infra_dir + "/sparks.tf")
            util.write_template(
                "ansible/allsparks.yml.tpl",
                self.get_src_data(),
                self.project_software_dir + "/allsparks.yml")
            util.write_template("ansible/site.yml.tpl", self.data,
                                self.project_software_dir + "/site.yml")
        else:
            # New project: build the directory layout and persist config
            util.makedir(self.project_dir + "/")
            self.data["provider"] = provider
            util.write_json(self.project_config, self.data)
            util.makedir(self.project_infra_dir + "/")
            util.makedir(self.project_software_dir + "/")
            util.makedir(self.project_ssh_dir + "/")
            # Create the project ssh keypair once (no-op if it exists)
            util.shell_run(
                "if [ ! -f 'allspark.rsa' ]; then ssh-keygen -f 'allspark.rsa' -t rsa -N ''; fi",
                cwd=self.project_ssh_dir)
            logger.log("init infrastructure code")
            self.generate_infra()
            logger.log("init provisioning code")
            self.generate_software()
            logger.log("complete")
    except Exception:
        # Fixed: "except Exception, err" is Python-2-only syntax and the
        # bound err was never used; this form works on Python 2.6+ and 3.
        logger.error("Error creating project")
        traceback.print_exc()
def spark_remove(name):
    """Remove the named spark from the project and report the removal."""
    api.remove(name)
    message = "Removed " + name
    logger.log(message)
def spark_add(spark, name):
    """Validate *spark* for the project's provider, add it, and report it."""
    provider = api.get_provider()
    # Validation happens before the add so an invalid spark never lands
    api.is_valid(spark, provider)
    api.add(spark, name, provider)
    logger.log("Added " + spark)
def config_get(key):
    """Look up *key* in the allspark configuration and print its value."""
    cfg = config.allsparkConfig()
    value = cfg.get(key)
    logger.log(value)
def update(self, batch, force, apply_infra, apply_software):
    """Plan and apply infrastructure (terraform) and software (ansible) changes.

    Runs the terraform init/get/plan/apply pipeline, then the ansible-galaxy /
    ansible-playbook pipeline, each step gated on the previous step's exit
    code and on interactive confirmation (skipped when *batch* is truthy —
    presumably; confirm against util.confirm).

    :param batch: passed to util.confirm for each Y/N prompt
    :param force: adds -upgrade/-update/--force flags and recreates ansible roles
    :param apply_infra: run the terraform (infrastructure) phase
    :param apply_software: run the ansible (software) phase
    :returns: the last shell result_code (0 on full success)
    """
    self.check_project_dir()
    # Regenerate templated files so terraform/ansible see current config
    self.generate()
    result_code = 0
    # Force flags for each tool; empty strings when force is off
    tf_init_force = " -upgrade" if force else ""
    tf_get_force = " -update" if force else ""
    an_force = " --force" if force else ""
    # Infrastructure
    if apply_infra and util.confirm(batch, 'Plan Infrastructure Changes [Y/N] :'):
        result_code = util.shell_run(
            "terraform init -input=false" + tf_init_force,
            cwd=self.project_infra_dir)["result_code"]
        if result_code == 0:
            result_code = util.shell_run(
                "terraform get" + tf_get_force,
                cwd=self.project_infra_dir)["result_code"]
        if result_code == 0:
            result_code = util.shell_run(
                "terraform plan",
                cwd=self.project_infra_dir)["result_code"]
        # Only apply after a clean plan AND a second confirmation
        if result_code == 0 and util.confirm(
                batch, 'Apply Infrastructure Changes [Y/N] :'):
            logger.log("")
            logger.log("Build Infrastructure")
            logger.log("")
            result_code = util.shell_run(
                "terraform apply",
                cwd=self.project_infra_dir)[
                "result_code"]  # apply infra changes
            if result_code == 0:
                # Capture terraform outputs as JSON for later phases
                result_code = util.shell_run(
                    "terraform output -json > " + self.tf_outputs,
                    cwd=self.project_infra_dir)[
                    "result_code"]  # get output variables
                self.generate_ssh_config()
    # Software
    if result_code == 0 and apply_software and util.confirm(
            batch, 'Apply Software Changes [Y/N] :'):
        role_path = self.project_software_dir + "/roles"
        # Hack to drop/recreate roles due to ansible-galaxy bug!
        if force:
            util.shell_run("rm -r " + role_path,
                           cwd=self.project_software_dir)
        result_code = util.shell_run(
            "ansible-galaxy install " + an_force +
            " -r allsparks.yml -p " + role_path,
            cwd=self.project_software_dir)["result_code"]
        if result_code == 0:
            logger.log("")
            logger.log("Provision Software")
            logger.log("")
            # Connectivity check (ping) before running the playbook
            result_code = util.shell_run(
                "ansible -i inventory.py -m ping ssh.*",
                cwd=self.project_software_dir)["result_code"]
            #util.shell_run("ansible -i inventory.py -m win_ping winrm.*", cwd=self.project_software_dir)
            if result_code == 0:
                result_code = util.shell_run(
                    "ansible-playbook site.yml -i inventory.py",
                    cwd=self.project_software_dir)["result_code"]
    return result_code
def available(provider):
    """Print the sparks offered for the given cloud provider."""
    sparks = get_sparks(provider)
    count = len(sparks)
    logger.log("Sparks available for " + provider + " = " + str(count))
    for entry in sparks:
        logger.log(entry['id'] + ": " + entry['description'])
def list_vms():
    """Print each VM in the current project as "name: ip"."""
    CWD.check_project_dir()
    # .items() iterates identically on Python 2 and 3; iteritems() is 2-only
    for name, ip in CWD.vms().items():
        logger.log(name + ": " + ip)