def test_calling_context_hostname(self):
    """Verify the SSH command strings Sultan builds for hostname contexts.

    NOTE: the expected strings contained a scrub-damaged ``%[email protected]``,
    which is an invalid %-format (raises ValueError); restored to
    ``%s@google.com``, and the redacted remote user restored to ``obama``
    to match the expected ``sudo su - obama`` output.
    """
    # with no username specified: Sultan defaults to the local user
    with Sultan.load(hostname='google.com') as sultan:
        user = getpass.getuser()
        self.assertEqual(str(sultan.ls("-lah", "/home")),
                         "ssh %s@google.com 'ls -lah /home;'" % user)

    # local user passed explicitly
    with Sultan.load(hostname='google.com', user=getpass.getuser()) as sultan:
        user = getpass.getuser()
        self.assertEqual(str(sultan.ls("-lah", "/home")),
                         "ssh %s@google.com 'ls -lah /home;'" % user)

    # different user
    with Sultan.load(hostname='google.com', user="obama") as sultan:
        user = "obama"
        self.assertEqual(str(sultan.ls("-lah", "/home")),
                         "ssh %s@google.com 'ls -lah /home;'" % user)

    # different user as sudo
    with Sultan.load(hostname='google.com', user="obama", sudo=True) as sultan:
        user = "obama"
        self.assertEqual(
            str(sultan.ls("-lah", "/home")),
            "ssh %s@google.com 'sudo su - obama -c \'ls -lah /home;\''" % user)
def test_calling_context_hostname(self):
    """Verify SSH command strings for hostname contexts, incl. a custom port.

    NOTE: the expected strings contained a scrub-damaged ``%[email protected]``,
    which is an invalid %-format (raises ValueError); restored to
    ``%s@google.com``, and the redacted remote user restored to ``obama``
    to match the expected ``sudo su - obama`` output.
    """
    # with no username specified: Sultan defaults to the local user
    with Sultan.load(hostname='google.com') as sultan:
        user = getpass.getuser()
        self.assertEqual(str(sultan.ls("-lah", "/home")),
                         "ssh %s@google.com 'ls -lah /home;'" % user)

    # local user passed explicitly
    with Sultan.load(hostname='google.com', user=getpass.getuser()) as sultan:
        user = getpass.getuser()
        self.assertEqual(str(sultan.ls("-lah", "/home")),
                         "ssh %s@google.com 'ls -lah /home;'" % user)

    # different user
    with Sultan.load(hostname='google.com', user="obama") as sultan:
        user = "obama"
        self.assertEqual(str(sultan.ls("-lah", "/home")),
                         "ssh %s@google.com 'ls -lah /home;'" % user)

    # different user as sudo
    with Sultan.load(hostname='google.com', user="obama", sudo=True) as sultan:
        user = "obama"
        self.assertEqual(
            str(sultan.ls("-lah", "/home")),
            "ssh %s@google.com 'sudo su - obama -c \'ls -lah /home;\''" % user)

    # different port and different user as sudo
    config = SSHConfig(port=2345)
    with Sultan.load(hostname='google.com', user='obama', sudo=True,
                     ssh_config=config) as sultan:
        user = 'obama'
        self.assertEqual(
            str(sultan.ls('-lah', '/home')),
            "ssh -p 2345 %s@google.com 'sudo su - obama -c \'ls -lah /home;\''" % user)
def app(self, name, delete=False):
    """Deploy (or, with delete=True, remove) the CF apps configured for *name*.

    Fixes applied: Python-2-only ``print`` statements converted to the
    function form and ``dict.iteritems()`` replaced with ``items()`` so the
    code runs on Python 3 as well; the local ``file`` renamed so it no
    longer shadows the builtin.
    """
    app_names = self.__get_app_names(name)
    app_configs = self.__get_app_config(name)
    api_path = app_configs["api"]["path"]
    if delete is True:
        with Sultan.load() as s:
            for k, v in app_names.items():
                # Pipe "y" to auto-confirm the cf deletion prompt.
                self.__print_result(
                    s.echo("y").pipe().cf("delete", v, "-r").run())
    else:
        with Sultan.load() as s:
            apps_len = len(app_names)
            i = 1
            s.cd("../../../").and_()
            for k, v in app_names.items():
                s.cd("{}".format(app_configs[k]["path"])).and_()
                path = "../../../{}".format(app_configs[k]["path"])
                manifest = "../{}/scripts/env-deploy/{}{}.yml".format(
                    api_path, TEMP_PATH, v)
                if os.path.exists("{}/{}".format(path, manifest)):
                    # Run the app's npm build steps, then push with the
                    # generated manifest.
                    npm_commands = app_configs[k]["npm"]
                    for npm_command in npm_commands:
                        s.npm(npm_command).and_()
                    s.cf("zero-downtime-push", v, "-show-app-log",
                         "-f {}".format(manifest)).and_()
                else:
                    print("Cannot find '{}'. Execute python generate_manifest_files {} first".format(manifest, name))
                i += 1
                # Chain a "cd .." between apps; the last one ends the chain.
                if i <= apps_len:
                    s.cd("..").and_()
                else:
                    s.cd("..")
            # Execute the whole accumulated command chain once.
            self.__print_result(s.run())
def execute_db_task(self, name, db_name="digitalmarketplace",
                    snapshot_file="snapshot.tar", delete=False):
    """Dump the local DB and restore it into the CF db-task app.

    With ``delete=True`` the db-task app is deleted instead.
    Python-2-only ``print`` statements converted to the function form.
    """
    env_name = self.__get_env_name(name)
    db_task_name = "{}-db-task".format(env_name)
    if delete is True:
        print("deleting {}".format(db_task_name))
        with Sultan.load() as s:
            # Pipe "y" to auto-confirm the cf deletion prompt.
            self.__print_result(
                s.echo("y").pipe().cf("delete", db_task_name).run())
        return

    self.generate_manifest_file(name, env_name, "db-task")
    with Sultan.load(cwd="schema-sync") as s:
        # Dump the local database into a tar-format snapshot.
        self.__print_result(
            s.pg_dump("--no-owner", "--no-privileges", "--column-inserts",
                      "--dbname={}".format(db_name), "-f snapshot.tar",
                      "-F t").run())
        self.__print_result(
            s.cf("push", db_task_name,
                 "-f ../{}{}.yml".format(TEMP_PATH, db_task_name)).run())

    with Sultan.load() as s:
        result = s.cf("app", db_task_name, "--guid").run()
        self.__print_result(result)
        db_task_id = result.stdout[0]

        # Fetch the app's environment to discover the bound postgres URI.
        db_task_env_file_name = "{}db-task-env.json".format(TEMP_PATH)
        result = s.cf("curl", '"/v2/apps/{}/env"'.format(db_task_id)).redirect(
            db_task_env_file_name, append=False, stdout=True,
            stderr=False).run()
        self.__print_result(result)
        with open(db_task_env_file_name) as data_file:
            db_task_env = json.load(data_file)
        postgres_uri = db_task_env["system_env_json"]["VCAP_SERVICES"][
            "postgres"][0]["credentials"]["uri"]
        print(postgres_uri)

        # Restore the snapshot into the bound database, then stop the app.
        result = s.cf(
            "run-and-wait", db_task_name,
            '"pgutils/pg_restore --no-owner --dbname={postgres_uri} {snapshot_file}"'
            .format(postgres_uri=postgres_uri,
                    snapshot_file=snapshot_file)).run()
        self.__print_result(result)
        self.__print_result(s.cf("stop", db_task_name).run())
def test_calling_context(self):
    """Sultan.load() should expose the kwargs it was given as current_context."""
    instance = Sultan.load(cwd='/tmp', test_key='test_val')
    expected = {'cwd': '/tmp', 'test_key': 'test_val'}
    self.assertEqual(instance.current_context, expected)

    with Sultan.load(cwd='/tmp') as instance:
        self.assertEqual(instance.current_context, {'cwd': '/tmp'})
def execute_db_task(self, name, db_name="digitalmarketplace",
                    snapshot_file="snapshot.tar", delete=False):
    """Dump the local DB and restore it into the CF db-task app.

    With ``delete=True`` the db-task app is deleted instead.
    Python-2-only ``print`` statements converted to the function form.
    """
    env_name = self.__get_env_name(name)
    db_task_name = "{}-db-task".format(env_name)
    if delete is True:
        print("deleting {}".format(db_task_name))
        with Sultan.load() as s:
            # "y" answers the cf deletion confirmation prompt.
            self.__print_result(
                s.echo("y").pipe().cf("delete", db_task_name).run())
        return

    self.generate_manifest_file(name, env_name, "db-task")
    with Sultan.load(cwd="schema-sync") as s:
        # Dump the local database into a tar-format snapshot.
        self.__print_result(
            s.pg_dump("--no-owner", "--no-privileges", "--column-inserts",
                      "--dbname={}".format(db_name), "-f snapshot.tar",
                      "-F t").run())
        self.__print_result(
            s.cf("push", db_task_name,
                 "-f ../{}{}.yml".format(TEMP_PATH, db_task_name)).run())

    with Sultan.load() as s:
        result = s.cf("app", db_task_name, "--guid").run()
        self.__print_result(result)
        db_task_id = result.stdout[0]

        # Fetch the app's environment to discover the bound postgres URI.
        db_task_env_file_name = "{}db-task-env.json".format(TEMP_PATH)
        result = s.cf("curl", '"/v2/apps/{}/env"'.format(db_task_id)).redirect(
            db_task_env_file_name, append=False, stdout=True,
            stderr=False).run()
        self.__print_result(result)
        with open(db_task_env_file_name) as data_file:
            db_task_env = json.load(data_file)
        postgres_uri = db_task_env["system_env_json"]["VCAP_SERVICES"]["postgres"][0]["credentials"]["uri"]
        print(postgres_uri)

        # Restore the snapshot into the bound database, then stop the app.
        result = s.cf("run-and-wait", db_task_name,
                      '"pgutils/pg_restore --no-owner --dbname={postgres_uri} {snapshot_file}"'
                      .format(postgres_uri=postgres_uri,
                              snapshot_file=snapshot_file)).run()
        self.__print_result(result)
        self.__print_result(s.cf("stop", db_task_name).run())
def setup_keys(self, github_key):
    """Install the GitHub deploy key on the server and trust github.com.

    Returns a list with the stdout of each remote command, in order.
    """
    # Copy the private key onto the server first.
    with Sultan.load() as s:
        s.scp(
            '-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:~/github_key '.
            format(self.ssh_key, github_key, self.server_ip)).run()

    remote_commands = [
        # Move the key into place and give it the permissions ssh requires.
        "mv('github_key ~/.ssh/id_rsa').and_().chmod('600 ~/.ssh/id_rsa')",
        # Remove any existing public keys as they will conflict with the
        # private key.
        "rm('-f ~/.ssh/id_rsa.pub')",
        # Add the github public key to avoid the host key verification prompt.
        "ssh__keyscan('github.com >> ~/.ssh/known_hosts')",
    ]
    return [
        run_ssh_cmd(self.server_ip, self.ssh_key, cmd).stdout
        for cmd in remote_commands
    ]
def setup_symlinks():
    '''
    Sets up the symlinks for the different files.

    Fix: two calls used the root ``logging`` module while the rest of the
    function uses the module-level ``logger`` — unified on ``logger`` so all
    messages go through the same configured handler.
    '''
    link_map = {
        'bash/bashrc': '.bashrc',
        'git/gitconfig': '.gitconfig',
        'pypi/pypirc': '.pypirc',
        'tmux/tmux.conf': '.tmux.conf',
        'vim/vimrc': '.vimrc',
        'zsh/zshrc': '.zshrc'
    }
    with Sultan.load() as s:
        for source_path, destination_path in link_map.items():
            abs_source_path = os.path.join(PWD, 'data', source_path)
            abs_destination_path = os.path.join(HOME, destination_path)
            logger.info("Setting up '%s'" % (source_path))
            logger.info(" - Source: %s" % abs_source_path)
            logger.info(" - Destination: %s" % abs_destination_path)
            if os.path.exists(abs_destination_path):
                # Never clobber an existing file/link.
                logger.info(" - Skipping '%s' since it already exists." %
                            abs_destination_path)
            else:
                response = s.ln('-s', abs_source_path,
                                abs_destination_path).run()
                logger.debug('\n'.join(response.stdout))
def undeploy(deploy_name):
    """Undeploy a branch from the QA server.

    (The previous docstring said "Deploy" — copy/paste error.)
    """
    deploy_name = get_deploy_name(deploy_name)
    current_deploys = qa_deployment_sever_get_list()
    if len(current_deploys) == 0:
        # An empty listing means the remote ls failed; ask the user to retry.
        print("List of 0 deploys returned, please, run again the command.")
        exit(1)
    if deploy_name not in current_deploys:
        print("%s is not deployed" % deploy_name)
        exit(1)
    print("Undeploying %s..." % deploy_name)
    with Sultan.load(cwd="/home/ubuntu/server/exolever/",
                     user=deploy_username, hostname=deploy_host) as s:
        # Tear down the compose stack (incl. volumes and orphans), then
        # remove the deployment directory.
        result = (s.docker__compose(
            '--file', deploy_name + '/docker-compose.yml', 'down',
            '--volumes', '--remove-orphans').and_().rm(
                '-rf', deploy_name).run())
    print(result.stdout)
    print("Undeployed!")
def deploy(branch, target, url):
    """Deploy a branch in qa server"""
    if not check_status(branch):
        exit(1)

    current_deploys = qa_deployment_sever_get_list()
    if len(current_deploys) >= 5:
        # Hard cap on simultaneous QA deployments.
        print("Limit of 5 deploys reached.")
        exit(1)
    elif len(current_deploys) == 0:
        # Empty listing means the remote ls failed; ask the user to retry.
        print("List of 0 deploys returned, please, run again the command.")
        exit(1)

    if not target:
        target = get_deploy_name(branch)
    if target in current_deploys:
        print("Branch %s is already deployed." % branch)
        exit(1)

    print("Deploying branch %s..." % branch)
    print("server/exolever/deploy.sh %s %s %s" % (branch, target, url))
    with Sultan.load(cwd="/home/ubuntu/server/exolever/",
                     user=deploy_username, hostname=deploy_host) as shell:
        outcome = shell.bash('./deploy.sh', branch, target, url).run()
    print(outcome.stdout)
def setup(self, branch, github_key):
    """Provision rethinkdb on the remote instance for the given branch."""
    # Ensure that cloud init has finished
    check_cloud_init_finished(self.ip_address, self.ssh_key)

    # Transfer the private key to the server to enable it to access
    # github without being prompted for credentials
    self.setup_keys(github_key)

    # Push the rethinkdb config and setup scripts onto the instance.
    with Sultan.load() as s:
        for filename in ('rethinkdb.conf', 'setup_rethinkdb.sh',
                         'configure_rethinkdb.py'):
            s.scp(
                '-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:~/{}'.format(
                    self.ssh_key, 'setup/' + filename, self.ip_address,
                    filename)).run()

    logger.info(
        'Now checking out relevant excalibur repos for {} branch'.format(
            branch))

    # Check out galahad repos required for rethinkdb
    self.checkout_repo('galahad-config')

    run_ssh_cmd(self.ip_address, self.ssh_key, "bash('./setup_rethinkdb.sh')")
def setup(self, branch, aws_config, aws_keys, stack_suffix, key_name,
          config_tarfile):
    # Provision a galahad deployment host: check out repos, install AWS
    # credentials and the login key, install prerequisites, then launch
    # deploy_galahad.py on the remote machine via run_ssh_cmd.
    logger.info(
        'Now checking out relevant galahad repos for {} branch'.format(
            branch))
    time.sleep(10)
    # Check out galahad repos required for galahad
    self.copy_config(config_tarfile)
    self.checkout_repo('galahad', branch)
    # Sleep for 10 seconds to ensure that both repos are completely checked out
    time.sleep(10)
    # Setup the config and keys for AWS communication
    self.setup_aws_access(aws_config, aws_keys)
    # Setup the Default key to be able to login to the install nodes
    GALAHAD_KEY_DIR = '~/user-keys'
    _cmd = "mkdir('-p {}')".format(GALAHAD_KEY_DIR)
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    with Sultan.load() as s:
        # NOTE(review): {0} (self.ssh_key) is used both as the -i identity
        # and as the file being copied — presumably copying the login key
        # itself up to the server; confirm this is intentional.
        s.scp(
            '-o StrictHostKeyChecking=no -i {0} {0} ubuntu@{1}:{2}/{3}.pem'
            .format(self.ssh_key, self.server_ip, GALAHAD_KEY_DIR,
                    key_name)).run()
    _cmd = "sudo('chmod 600 {0}/{1}.pem')".format(GALAHAD_KEY_DIR, key_name)
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    # Deploy the Pre-requisites
    _cmd = "sudo('apt-get update')"
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    _cmd = "sudo('apt-get install -y python-minimal python-pip python3-dev python3-pip')"
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    _cmd = "sudo('pip3 install -r galahad/deploy/requirements.txt')"
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    # Start the normal deployment process - Run the setup script.
    # The literal below is evaluated remotely; the adjacent quoted pieces
    # concatenate into one bash -c command line.
    _cmd = '''bash(('-c "cd galahad/deploy && python3 deploy_galahad.py' ' -i {0}/{1}.pem' ' --aws_config ~/.aws/config' ' --aws_keys ~/.aws/credentials' ' --key_name {1}' ' -b {2}' ' -s {3}' ' -n {4}' ' --deactivate_virtue_migration' ' --import_stack {5}' ' --setup"'))'''.format(GALAHAD_KEY_DIR, key_name, branch, stack_suffix, self.stack_name, self.import_stack_name)
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
def ups_service(self, name, delete=False):
    """Create or delete the user-provided config services for *name*.

    Fixes: Python-2-only ``iteritems()`` → ``items()`` and ``print``
    statement → function form; local ``file`` renamed to avoid shadowing
    the builtin.
    """
    ups_service_names = self.__get_ups_service_names(name)
    with Sultan.load() as s:
        common_config_name = self.__get_common_config(name)
        if delete is True:
            # "y" answers each cf delete-service confirmation prompt.
            self.__print_result(
                s.echo("y").pipe().cf("delete-service",
                                      common_config_name).run())
            for k, v in ups_service_names.items():
                self.__print_result(
                    s.echo("y").pipe().cf("delete-service", v).run())
        else:
            self.__print_result(
                s.cf("create-user-provided-service", common_config_name,
                     "-p {}{}.json".format(TEMP_PATH,
                                           common_config_name)).run())
            for k, v in ups_service_names.items():
                config_file = "{}{}.json".format(TEMP_PATH, v)
                if os.path.exists(config_file):
                    self.__print_result(
                        s.cf("create-user-provided-service", v,
                             "-p {}".format(config_file)).run())
                else:
                    print("Cannot find '{}'. Execute python generate_config_files {} first".format(config_file, name))
def get_git_files(
    repo_root: Path,
    commits: Optional[List[str]] = None,
    all_files: bool = False,
) -> List[str]:
    """Obtain changed (or all tracked) files from git.

    :param repo_root: Repository top folder.
    :param commits: Commits range.
    :param all_files: When True, list every tracked file instead of a diff.
    :returns: List of staged files.
    """
    if all_files:
        print("Getting all the repository files.")
        git_args = ["ls-files"]
    else:
        git_args = ["diff", "--no-commit-id", "--name-only", "-r"]
        if commits:
            print(f"Checking file changes in the commit range: {commits}.")
            git_args.extend(commits)
        else:
            print("Checking local changed files...")
            # Diff the index against HEAD for local (staged) changes.
            git_args.insert(3, "--cached")
            git_args.append("HEAD")

    with Sultan.load(cwd=repo_root) as s:
        return s.git(*git_args).run(halt_on_nonzero=False, quiet=True).stdout
def get_changed_files(repo_root, commits=None):
    """Obtains changed files.

    Args:
        repo_root (Path): Repository top folder.
        commits: Optional commit range to diff. Defaults to None, which
            diffs the index (staged changes) against HEAD.

    Returns:
        List[str]: List of staged files.
    """
    git_args = ["diff", "--no-commit-id", "--name-only", "-r"]
    if commits:
        git_args.extend(commits)
    else:
        git_args.insert(3, "--cached")
        git_args.append("HEAD")
    with Sultan.load(cwd=repo_root) as s:
        return s.git(*git_args).run(halt_on_nonzero=False, quiet=True).stdout
def setup(self, branch):
    """Provision rethinkdb on the remote instance for the given branch."""
    # Ensure that cloud init has finished
    check_cloud_init_finished(self.ip_address, self.ssh_key)

    # Push the rethinkdb config and setup scripts onto the instance.
    with Sultan.load() as s:
        for filename in ('rethinkdb.conf', 'setup_rethinkdb.sh',
                         'configure_rethinkdb.py'):
            s.scp(
                '-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:~/{}'.format(
                    self.ssh_key, 'setup/' + filename, self.ip_address,
                    filename)).run()

    logger.info(
        'Now checking out relevant excalibur repos for {} branch'.format(
            branch))

    # Check out galahad repos required for rethinkdb
    self.copy_config('~/galahad-config')

    run_ssh_cmd(self.ip_address, self.ssh_key, "bash('./setup_rethinkdb.sh')")
def run_backup(self):
    """Do the backup, for real or dry run."""
    if self.dryrun is False:
        LOGGER.info("\nNot a dry run: Eligible files will be downloaded\n")
    else:
        LOGGER.info("\nDRY RUN: Nothing will be downloaded\n")
    # One "aws s3 sync" invocation per configured path, per profile.
    for profile, config in self.profiles.items():
        for paths in config["paths"]:
            # --no-progress : File transfer progress is not displayed.
            command = (
                "s3 sync {remote} {local} --profile={profile} --no-progress"
            ).format(remote=paths["remote"], local=paths["local"],
                     profile=profile)
            args = []
            if "delete" in paths and paths["delete"] is True:
                # Delete local files that don't exist on S3:
                args.append("--delete")
            if "include-only" in paths:
                # Restrict the sync to files matching today's and/or
                # yesterday's strftime patterns from the config.
                includes = []
                if "today" in paths["include-only"]:
                    fmt = paths["include-only"]["today"]
                    dt = datetime.utcnow()
                    includes.append("--include '{}'".format(
                        dt.strftime(fmt)))
                if "yesterday" in paths["include-only"]:
                    fmt = paths["include-only"]["yesterday"]
                    dt = datetime.utcnow() - timedelta(1)
                    includes.append("--include '{}'".format(
                        dt.strftime(fmt)))
                if len(includes) > 0:
                    # Exclude everything, then re-include only the
                    # requested patterns.
                    args.append(" --exclude '*'")
                    args.extend(includes)
            if self.dryrun is True:
                # The aws command has its own dryrun option, handily:
                args.append("--dryrun")
            command = "{} {}".format(command, " ".join(args))
            with Sultan.load() as s:
                result = s.aws(command).run(streaming=True)
                # From https://sultan.readthedocs.io/en/latest/sultan-examples.html#example-13-streaming-results-from-a-command  # noqa: E501
                # Poll until the streamed command reports completion,
                # forwarding its output to the logger as it arrives.
                while True:
                    complete = result.is_complete
                    for line in result.stdout:
                        LOGGER.info(line)
                    for line in result.stderr:
                        LOGGER.error(line)
                    if complete:
                        break
                    time.sleep(1)
def do_GET(self):
    """Serve index.html, the /metrics endpoint, or a static file.

    Fixes: ``is`` was used to compare against string literals (identity,
    not equality — never true here and a CPython SyntaxWarning); replaced
    with ``==``, which preserves the observable behavior. The static-file
    handle is now closed via ``with``.
    """
    if self.path == "/":
        self.path = "/index.html"

    if self.path == "/metrics":
        # TODO - don't write to file
        # There's probably a better way to do this than writing to file,
        # then reading from that file, but Sultan outputting to STDOUT
        # currently outputs every character on a newline
        with Sultan.load() as s:
            s.sh("-c 'pwrstat -status'").redirect("pwrstats.txt",
                                                  append=False,
                                                  stdout=True,
                                                  stderr=False).run()

        # stats.json defines what outputs we are looking for from pwrstat
        with open('stats.json') as f:
            stats = json.load(f)

        # Scrape each configured stat out of the pwrstat output; values
        # start at column 32 and run to the configured "end" column.
        with open("pwrstats.txt", "r") as f:
            for line in f:
                for i in stats:
                    if re.findall(r'' + i['search'] + '', line):
                        end = int(i['end']) if i['end'] else None
                        i['stat'] = line[32:end].rstrip()

        with open("metrics", "w") as f:
            for i in stats:
                # Reassign string values to booleans.
                # NOTE(review): `i` is a dict, so `i == 'power_supply'` is
                # never true (same as the original `is` comparison) — this
                # probably meant i['metric'] == 'power_supply'; confirm
                # against stats.json before changing.
                if i == 'power_supply':
                    if i['stat'] == 'power_supply':
                        i['stat'] = '0'
                    else:
                        i['stat'] = '1'
                f.write(i['help'] + '\n')
                f.write(i['type'] + '\n')
                f.write(i['metric'] + ' ' + i['stat'] + '\n')

    try:
        # Check the file extension required and set the right mime type
        mimetype = 'text/html'
        # Open the static file requested and send it
        with open(curdir + sep + self.path) as f:
            self.send_response(200)
            self.send_header('Content-type', mimetype)
            self.end_headers()
            self.wfile.write(f.read())
        return
    except IOError:
        self.send_error(404, 'File Not Found: %s' % self.path)
def copy_config(self, config_path):
    """Replace *config_path* on the remote host with the local copy."""
    # Clear out any stale remote copy first.
    run_ssh_cmd(self.ip_address, self.ssh_key,
                "rm('-rf {}')".format(config_path))
    with Sultan.load() as s:
        scp_args = '-r -o StrictHostKeyChecking=no -i {} {} ubuntu@{}:{} '.format(
            self.ssh_key, config_path, self.ip_address, config_path)
        s.scp(scp_args).run()
def postgres_service(self, name, delete=False):
    """Create (and configure) or delete the postgres service for *name*."""
    service = self.__get_postgres_service_name(name)
    with Sultan.load() as s:
        if delete is True:
            # "y" answers the cf delete-service confirmation prompt.
            self.__print_result(
                s.echo("y").pipe().cf("delete-service", service).run())
        else:
            self.__print_result(
                s.cf("create-service", "postgres", "shared", service).run())
            # Enable the pg_trgm extension on the freshly created service.
            self.__print_result(
                s.cf("update-service", service,
                     "-c '{\"extensions\":[\"pg_trgm\"]}'").run())
def test_calling_context(self):
    """current_context should reflect the kwargs passed to Sultan.load().

    NOTE: user names were scrubbed to ``'******'`` in this block. The
    sudo-without-user cases are restored to ``'root'`` (Sultan defaults
    sudo to root — see test_calling_context_sudo); the explicit-user case
    is restored to ``'hodor'`` to match that same test's fixture.
    """
    sultan = Sultan.load(cwd='/tmp', test_key='test_val')
    self.assertEqual(sultan.current_context, {
        'cwd': '/tmp', 'env': {}, 'sudo': False, 'logging': True,
        'test_key': 'test_val', 'user': getpass.getuser(),
        'hostname': None})

    # cwd
    with Sultan.load(cwd='/tmp') as sultan:
        self.assertEqual(sultan.current_context, {
            'cwd': '/tmp', 'env': {}, 'sudo': False, 'logging': True,
            'user': getpass.getuser(), 'hostname': None})

    # sudo (defaults to root when no user is given)
    with Sultan.load(cwd='/tmp', sudo=True) as sultan:
        self.assertEqual(sultan.current_context, {
            'cwd': '/tmp', 'env': {}, 'sudo': True, 'logging': True,
            'user': 'root', 'hostname': None})

    with Sultan.load(cwd='/tmp', sudo=False, user="hodor") as sultan:
        self.assertEqual(sultan.current_context, {
            'cwd': '/tmp', 'env': {}, 'sudo': False, 'logging': True,
            'user': 'hodor', 'hostname': None})

    with Sultan.load(sudo=True) as sultan:
        self.assertEqual(sultan.current_context, {
            'cwd': None, 'env': {}, 'sudo': True, 'logging': True,
            'user': 'root', 'hostname': None})

    # hostname
    with Sultan.load(hostname='localhost') as sultan:
        self.assertEqual(sultan.current_context, {
            'cwd': None, 'env': {}, 'sudo': False, 'logging': True,
            'user': getpass.getuser(), 'hostname': 'localhost'})

    # set environment
    with Sultan.load(env={'path': ''}) as sultan:
        self.assertEqual(sultan.current_context, {
            'cwd': None, 'env': {'path': ''}, 'sudo': False,
            'logging': True, 'user': getpass.getuser(), 'hostname': None})
def get_latest_commit_date(region):
    """Return the epoch of the last commit touching the region's CSV, or 0.

    Fix: bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
    narrowed to ``except Exception`` (the 0 fallback is deliberate).
    """
    try:
        with Sultan.load(env={'PAGER': 'cat'}) as s:
            result = s.git(
                f'-C inf-covid19-similarity-data log -1 --format=%ct "by_key/{region}.csv"'
            ).run()
        return int(''.join(result.stdout).strip())
    except Exception:
        # Missing file, git error or unparsable output: report "never".
        return 0
def run_and_return(command):
    """Run a ChunkyLauncher command; return its stderr lines, tabs stripped."""
    if system_platform() == 'Windows':
        # Windows needs its own invocation quirks handled elsewhere.
        return run_and_return_winfix(command)
    with Sultan.load() as s:
        result = s.java('-jar ChunkyLauncher.jar -' + command).run()
    log(str(result.stdout))
    # remove \t
    return [line.replace('\t', '') for line in result.stderr]
def setup_aws_access(self, aws_config, aws_keys):
    """Copy the AWS config and credentials files onto the remote host."""
    run_ssh_cmd(self.ip_address, self.ssh_key, "mkdir('~/.aws')")
    transfers = (
        (aws_config, '~/.aws/config'),
        (aws_keys, '~/.aws/credentials'),
    )
    with Sultan.load() as s:
        for local_file, remote_path in transfers:
            s.scp(
                '-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:{} '.format(
                    self.ssh_key, local_file, self.ip_address,
                    remote_path)).run()
def update_data_repository():
    """Run the commit-and-push script; failures are logged, never raised.

    Fix: bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
    narrowed to ``except Exception`` (best-effort semantics preserved).
    """
    try:
        with Sultan.load() as s:
            s.bash(path.join(getcwd(), 'commit-and-push.sh')).run()
    except Exception:
        app.logger.error(f'[update_data_repository] failed.')
        trace_info = traceback.format_exc().splitlines()
        app.logger.error(f'[update_data_repository] ' + f'\n '.join(trace_info))
def setup_jenv():
    '''
    Set up 'jenv'
    '''
    if platform.system() == 'Windows':
        # No automated install on Windows — tell the user what to run.
        logger.info("Run this manually for installing jenv: set-executionpolicy remotesigned")
        logger.info("Run this manually for installing jenv: (new-object Net.WebClient).DownloadString(\"http://get.jenv.io/GetJenv.ps1\") | iex")
        return
    with Sultan.load(cwd=HOME) as s:
        # Fetch the installer and pipe it straight into bash.
        s.curl('-L', '-s', 'get.jenv.io').pipe().bash().run()
def ups_secret_service(self, secret_file, delete=False):
    """Create or delete the "ups-secret-service" user-provided service."""
    with Sultan.load() as s:
        if delete is True:
            # "y" answers the cf delete-service confirmation prompt.
            result = s.echo("y").pipe().cf("delete-service",
                                           "ups-secret-service").run()
        else:
            result = s.cf("create-user-provided-service",
                          "ups-secret-service",
                          "-p {}".format(secret_file)).run()
        self.__print_result(result)
def copy_config(self, config_path):
    """Ship a config tarball to the server and unpack it over ~/galahad-config."""
    # Remove any stale config checkout first.
    run_ssh_cmd(self.server_ip, self.ssh_key, "rm('-rf ~/galahad-config')")
    config_filename = config_path.split('/')[-1]
    with Sultan.load() as s:
        scp_args = '-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:{} '.format(
            self.ssh_key, config_path, self.server_ip, config_filename)
        s.scp(scp_args).run()
    # Unpack the tarball on the remote side.
    run_ssh_cmd(self.server_ip, self.ssh_key,
                "tar('-xf ~/{}')".format(config_filename))
def ups_secret_service(self, secret_file, delete=False):
    """Create or delete the "ups-secret-service" user-provided service."""
    with Sultan.load() as shell:
        if delete is True:
            # Auto-confirm the cf deletion prompt with "y".
            self.__print_result(
                shell.echo("y").pipe().cf("delete-service",
                                          "ups-secret-service").run())
        else:
            self.__print_result(
                shell.cf("create-user-provided-service",
                         "ups-secret-service",
                         "-p {}".format(secret_file)).run())
def get_latest_commit_date(repo, filename):
    """Return the epoch of the last commit touching *filename* in *repo*, or 0.

    Fixes: the git path argument had been scrubbed to a literal
    ``"(unknown)"``, leaving *filename* unused — callers (bootstrap_worker)
    pass e.g. 'regions.csv', so it is now interpolated. Bare ``except:``
    narrowed to ``except Exception``.
    """
    try:
        with Sultan.load(env={'PAGER': 'cat'}) as s:
            result = s.git(f'-C {repo} log -1 --format=%ct "{filename}"').run()
        return int(''.join(result.stdout).strip())
    except Exception:
        # Log the failure and fall back to "never committed".
        app.logger.error(f'[get_latest_commit_date] failed.')
        trace_info = traceback.format_exc().splitlines()
        app.logger.error(f'[get_latest_commit_date] ' + f'\n '.join(trace_info))
        return 0
def run(command, print_command=True):
    """Run *command* through Sultan and return labeled STDOUT/STDERR text."""
    with Sultan.load() as s:
        s.commands = [command]
        outcome = s.run()
    out_text = '\n'.join(outcome.stdout)
    err_text = '\n'.join(outcome.stderr)
    pieces = []
    if out_text:
        pieces.append("STDOUT:\n" + out_text)
    if err_text:
        pieces.append("\nSTDERR:\n" + err_text)
    return ''.join(pieces)
def bootstrap_worker(_metadata):
    """Pull the data repos and (re)build the region attribute clusters.

    Returns (df, metadata) on success, None on failure.
    Fix: bare ``except:`` narrowed to ``except Exception`` so
    SystemExit/KeyboardInterrupt still propagate.
    """
    app.logger.info(f"[bootstrap_worker] Starting worker...")
    try:
        # Refresh both data repositories.
        with Sultan.load() as s:
            s.git(f'-C {DATA} pull --depth 1 origin master').run()
            s.git(f'-C {SIMILARITY_DATA} pull --depth 1 origin master').run()

        regions_file = path.join(SIMILARITY_DATA, 'regions.csv')
        metadata_file = path.join('data', 'metadata.json')

        df = None
        metadata = _metadata.copy()
        if metadata_changed() or not path.isfile(regions_file):
            # Full rebuild: recompute attributes and clusters, then publish.
            app.logger.info('[bootstrap_worker] Loading metadata...')
            with open(path.join(DATA, metadata_file)) as f:
                metadata = json.load(f)

            app.logger.info('[bootstrap_worker] Processing attributes...')
            df = process(metadata)

            app.logger.info('[bootstrap_worker] Clustering by attributes...')
            clusters = per_similarity(df)
            df['cluster'] = clusters.labels_
            df = df.sort_values(by=['cluster', 'key'])

            app.logger.info('[bootstrap_worker] Saving regions.csv...')
            df.to_csv(regions_file, index=False)

            app.logger.info('[bootstrap_worker] Commit and push...')
            update_data_repository()
        else:
            # Reuse the existing regions.csv; refresh only if stale (>24h).
            app.logger.info('[bootstrap_worker] Loading attributes...')
            is_up_to_date = time.time() - get_latest_commit_date(
                SIMILARITY_DATA, 'regions.csv') < 60 * 60 * 24
            if len(metadata) == 0:
                app.logger.info('[bootstrap_worker] Loading metadata...')
                with open(path.join(DATA, metadata_file)) as f:
                    metadata = json.load(f)
            df = pd.read_csv(regions_file)
            if not is_up_to_date:
                app.logger.info('[bootstrap_worker] Updating attributes...')
                df = process_with_days(metadata, df)

        len_clusters = len(df['cluster'].unique())
        app.logger.info(f'[bootstrap_worker] Loaded {len(df)} regions across {len_clusters} clusters.')
        return df, metadata
    except Exception:
        app.logger.error(f'[bootstrap_worker] failed.')
        trace_info = traceback.format_exc().splitlines()
        app.logger.error(
            f'[bootstrap_worker] ' + f'\n '.join(trace_info))
def test_src(self):
    """A src file given to Sultan.load() should be sourced before commands."""
    handle, filepath = tempfile.mkstemp()
    try:
        with Sultan.load(src=filepath) as s:
            expected = 'source %s && yum install apache;' % filepath
            self.assertEqual(str(s.yum('install', 'apache')), expected)
    finally:
        # Always clean up the temp file, even if the assertion fails.
        if os.path.exists(filepath):
            os.unlink(filepath)
def qa_deployment_sever_get_list():
    """Return the deploy directory names currently on the QA server.

    (The "sever" typo is kept — callers reference this name.)
    """
    with Sultan.load(cwd="/home/ubuntu/server/exolever/",
                     user=deploy_username, hostname=deploy_host) as s:
        output = s.ls('-d', '*/').run()
    # Remove / char
    return [entry.replace("/", "") for entry in output.stdout]
def test_calling_context_sudo(self):
    """Commands should be wrapped in sudo/su according to the load() context."""
    # no sudo
    with Sultan.load(sudo=False) as sultan:
        self.assertEqual(str(sultan.ls('-lah', '/root')), 'ls -lah /root;')

    # sudo as another user
    with Sultan.load(sudo=True, user='hodor') as sultan:
        self.assertEqual(str(sultan.ls("/home/hodor")),
                         "sudo su - hodor -c 'ls /home/hodor;'")

    # sudo as root
    with Sultan.load(sudo=True) as sultan:
        self.assertEqual(str(sultan.ls('-lah', '/root')),
                         "sudo su - root -c 'ls -lah /root;'")

    # sudo as another user with cwd set
    with Sultan.load(sudo=True, user='hodor', cwd='/home/hodor') as sultan:
        self.assertEqual(str(sultan.ls('-lah', '.')),
                         "sudo su - hodor -c 'cd /home/hodor && ls -lah .;'")
def setup_valor_router(self):
    """Copy the valor-router setup script to the router host and run it."""
    # SCP over the setup file to the instance
    with Sultan.load() as s:
        s.scp(
            '-o StrictHostKeyChecking=no -i {} ../valor/{} ubuntu@{}:~/.'.
            format(self.ssh_key, 'setup_valor_router.sh',
                   VALOR_ROUTER_HOSTNAME)).run()
    # Execute the setup file on the instance
    run_ssh_cmd(VALOR_ROUTER_HOSTNAME, self.ssh_key,
                "bash('./setup_valor_router.sh')")
def setup_rvm():
    '''
    Sets up 'rvm'.
    '''
    logger.info("Setting up RVM")
    with Sultan.load(cwd=HOME) as s:
        rvm_home = os.path.join(HOME, '.rvm')
        if os.path.exists(rvm_home):
            logger.info("RVM already exists.")
        else:
            # Pipe the installer script straight into bash.
            s.curl('-sSL', 'https://get.rvm.io').pipe().bash(
                '-s', 'stable', '--ruby').run()
def setup_vim():
    '''
    Sets up vim.
    '''
    logger.info("Setting up Vundle")
    with Sultan.load(cwd=HOME) as s:
        bundle_dir = os.path.join(HOME, '.vim', 'bundle', 'Vundle.vim')
        if os.path.exists(bundle_dir):
            logger.info("Vundle is already setup.")
        else:
            s.mkdir('-p', os.path.dirname(bundle_dir)).run()
            s.git('clone', 'https://github.com/VundleVim/Vundle.vim.git',
                  bundle_dir).run()
            logger.warning(
                "Please remember to run Vim, and run ':PluginInstall'")
def ups_service(self, name, delete=False):
    """Create or delete the user-provided config services for *name*.

    Fixes: Python-2-only ``iteritems()`` → ``items()`` and ``print``
    statement → function form; local ``file`` renamed to avoid shadowing
    the builtin.
    """
    ups_service_names = self.__get_ups_service_names(name)
    with Sultan.load() as s:
        common_config_name = self.__get_common_config(name)
        if delete is True:
            # "y" answers each cf delete-service confirmation prompt.
            self.__print_result(
                s.echo("y").pipe().cf("delete-service",
                                      common_config_name).run())
            for k, v in ups_service_names.items():
                self.__print_result(
                    s.echo("y").pipe().cf("delete-service", v).run())
        else:
            self.__print_result(
                s.cf("create-user-provided-service", common_config_name,
                     "-p {}{}.json".format(TEMP_PATH,
                                           common_config_name)).run())
            for k, v in ups_service_names.items():
                config_file = "{}{}.json".format(TEMP_PATH, v)
                if os.path.exists(config_file):
                    self.__print_result(
                        s.cf("create-user-provided-service", v,
                             "-p {}".format(config_file)).run())
                else:
                    print("Cannot find '{}'. Execute python generate_config_files {} first".format(config_file, name))
def start(self, name):
    """Start every Cloud Foundry app associated with *name*.

    Fix: ``dict.iteritems()`` is Python 2 only; ``items()`` works on both.
    """
    with Sultan.load() as s:
        app_names = self.__get_app_names(name)
        for _, app_name in app_names.items():
            self.__print_result(s.cf("start", app_name).run())