def app(self, name, delete=False):
    """Deploy (or delete) every Cloud Foundry app belonging to *name*.

    :param name: Environment name used to look up app names and configs.
    :param delete: When True, delete the apps instead of deploying them.
    """
    app_names = self.__get_app_names(name)
    app_configs = self.__get_app_config(name)
    api_path = app_configs["api"]["path"]
    if delete is True:
        with Sultan.load() as s:
            # FIX: dict.items() replaces Python-2-only iteritems().
            for k, v in app_names.items():
                # "y" is piped in to auto-confirm the interactive delete prompt.
                self.__print_result(
                    s.echo("y").pipe().cf("delete", v, "-r").run())
    else:
        with Sultan.load() as s:
            apps_len = len(app_names)
            i = 1
            s.cd("../../../").and_()
            for k, v in app_names.items():
                s.cd("{}".format(app_configs[k]["path"])).and_()
                path = "../../../{}".format(app_configs[k]["path"])
                # FIX: renamed `file` -> `manifest` (was shadowing the builtin).
                manifest = "../{}/scripts/env-deploy/{}{}.yml".format(
                    api_path, TEMP_PATH, v)
                if os.path.exists("{}/{}".format(path, manifest)):
                    for npm_command in app_configs[k]["npm"]:
                        s.npm(npm_command).and_()
                    s.cf("zero-downtime-push", v, "-show-app-log",
                         "-f {}".format(manifest)).and_()
                else:
                    # FIX: print() function works on both Python 2 and 3.
                    print("Cannot find '{}'. Execute python generate_manifest_files {} first".format(manifest, name))
                i += 1
                # Return to the parent dir; the last iteration omits .and_()
                # so the command chain terminates cleanly.
                if i <= apps_len:
                    s.cd("..").and_()
                else:
                    s.cd("..")
            self.__print_result(s.run())
def test_run_basic(self, m_subprocess):
    """A mocked subprocess run should surface its stdout via response.stdout."""
    m_subprocess.Popen = mock.Mock()
    popen = m_subprocess.Popen()
    popen.communicate.return_value = ("sample_response", "")
    popen.returncode = 0
    response = Sultan().ls("-lah /tmp").run()
    self.assertTrue(m_subprocess.Popen().communicate.called)
    self.assertEqual(response.stdout, ["sample_response"])
def cmd_magick_convert_array():
    """Convert le.jpg into a 100x100 RGBA C header via ImageMagick convert."""
    try:
        Sultan().convert(
            "le.jpg -define h:format=rgba -depth 8 -size 100x100 rgba100x100.h"
        ).run()
    except Exception as e:
        print(e)
def test_run_custom_script(self):
    """A custom script should run silently and write the expected file."""
    try:
        runner = Sultan()
        result = runner.myscript(where=self.dir_path).run()
        self.assertEqual(len(result.stdout), 0)
        result = runner.cat(self.output_filepath).run()
        expected = ['Donec sapien turpis, mattis vel urna sed, iaculis aliquam purus.']
        self.assertEqual(result.stdout, expected)
    finally:
        # Always clean up the scratch directory, even on assertion failure.
        shutil.rmtree(self.dir_path)
def execute_db_task(self, name, db_name="digitalmarketplace", snapshot_file="snapshot.tar", delete=False):
    """Create the db-task app, restore a pg snapshot into it, then stop it.

    :param name: Environment name.
    :param db_name: Local database to dump.
    :param snapshot_file: Snapshot archive restored on the remote side.
    :param delete: When True, delete the db-task app instead.
    """
    env_name = self.__get_env_name(name)
    db_task_name = "{}-db-task".format(env_name)
    if delete is True:
        # FIX: print() function works on both Python 2 and 3.
        print("deleting {}".format(db_task_name))
        with Sultan.load() as s:
            self.__print_result(
                s.echo("y").pipe().cf("delete", db_task_name).run())
        return
    self.generate_manifest_file(name, env_name, "db-task")
    with Sultan.load(cwd="schema-sync") as s:
        # Dump the local DB into a tar-format snapshot.
        self.__print_result(
            s.pg_dump("--no-owner", "--no-privileges", "--column-inserts",
                      "--dbname={}".format(db_name), "-f snapshot.tar",
                      "-F t").run())
        self.__print_result(
            s.cf("push", db_task_name,
                 "-f ../{}{}.yml".format(TEMP_PATH, db_task_name)).run())
    with Sultan.load() as s:
        result = s.cf("app", db_task_name, "--guid").run()
        self.__print_result(result)
        db_task_id = result.stdout[0]
        db_task_env_file_name = "{}db-task-env.json".format(TEMP_PATH)
        # Capture the app env (contains the bound postgres credentials).
        result = s.cf("curl", '"/v2/apps/{}/env"'.format(db_task_id)).redirect(
            db_task_env_file_name, append=False, stdout=True,
            stderr=False).run()
        self.__print_result(result)
        with open(db_task_env_file_name) as data_file:
            db_task_env = json.load(data_file)
        postgres_uri = db_task_env["system_env_json"]["VCAP_SERVICES"][
            "postgres"][0]["credentials"]["uri"]
        print(postgres_uri)
        result = s.cf(
            "run-and-wait", db_task_name,
            '"pgutils/pg_restore --no-owner --dbname={postgres_uri} {snapshot_file}"'
            .format(postgres_uri=postgres_uri,
                    snapshot_file=snapshot_file)).run()
        self.__print_result(result)
        self.__print_result(s.cf("stop", db_task_name).run())
def test_calling_context(self):
    """Arguments given to Sultan.load must be reflected in current_context."""
    loaded = Sultan.load(cwd='/tmp', test_key='test_val')
    expected = {'cwd': '/tmp', 'test_key': 'test_val'}
    self.assertEqual(loaded.current_context, expected)
    with Sultan.load(cwd='/tmp') as ctx_sultan:
        self.assertEqual(ctx_sultan.current_context, {'cwd': '/tmp'})
def take_picture_intent(self, message):
    """Announce readiness, snap a webcam photo, then confirm verbally."""
    # Processing takes a moment, so prompt the user first.
    self.speak_dialog("ready")
    wait_while_speaking()
    # Play the shutter sound just before capturing.
    play_wav(self.shutter_sound)
    # Take the photo with fswebcam.
    Sultan().fswebcam("-r 640x480 --no-banner ~/webcam/image.jpg").run()
    # Comment on having taken the photo.
    self.speak_dialog("picture")
def cmd_magick_convert_image(aname, astring):
    """Run ImageMagick convert on <aname>.<astring> for supported extensions.

    :param aname: File name without extension.
    :param astring: File extension (jpeg/jpg/bmp/png are accepted).
    """
    try:
        # Idiom fix: tuple membership replaces the chained `or` comparisons.
        if astring in ("jpeg", "jpg", "bmp", "png"):
            filename = aname + "." + astring
            # NOTE: source and destination are the same path, matching the
            # original behavior (an in-place re-encode).
            Sultan().convert(filename + " " + filename).run()
            print("Converted image:" + filename)
        else:
            print("Unsupported Filename Extension")
    except Exception as e:
        print(e)
def execute_db_task(self, name, db_name="digitalmarketplace", snapshot_file="snapshot.tar", delete=False):
    """Create the db-task app, restore a pg snapshot into it, then stop it.

    :param name: Environment name.
    :param db_name: Local database to dump.
    :param snapshot_file: Snapshot archive restored on the remote side.
    :param delete: When True, delete the db-task app instead.
    """
    env_name = self.__get_env_name(name)
    db_task_name = "{}-db-task".format(env_name)
    if delete is True:
        # FIX: print() function works on both Python 2 and 3.
        print("deleting {}".format(db_task_name))
        with Sultan.load() as s:
            self.__print_result(
                s.echo("y").pipe().cf("delete", db_task_name).run())
        return
    self.generate_manifest_file(name, env_name, "db-task")
    with Sultan.load(cwd="schema-sync") as s:
        # Dump the local DB into a tar-format snapshot.
        self.__print_result(
            s.pg_dump("--no-owner", "--no-privileges", "--column-inserts",
                      "--dbname={}".format(db_name), "-f snapshot.tar",
                      "-F t").run())
        self.__print_result(
            s.cf("push", db_task_name,
                 "-f ../{}{}.yml".format(TEMP_PATH, db_task_name)).run())
    with Sultan.load() as s:
        result = s.cf("app", db_task_name, "--guid").run()
        self.__print_result(result)
        db_task_id = result.stdout[0]
        db_task_env_file_name = "{}db-task-env.json".format(TEMP_PATH)
        # Capture the app env (contains the bound postgres credentials).
        result = s.cf("curl", '"/v2/apps/{}/env"'.format(db_task_id)).redirect(
            db_task_env_file_name, append=False, stdout=True,
            stderr=False).run()
        self.__print_result(result)
        with open(db_task_env_file_name) as data_file:
            db_task_env = json.load(data_file)
        postgres_uri = db_task_env["system_env_json"]["VCAP_SERVICES"][
            "postgres"][0]["credentials"]["uri"]
        print(postgres_uri)
        result = s.cf(
            "run-and-wait", db_task_name,
            '"pgutils/pg_restore --no-owner --dbname={postgres_uri} {snapshot_file}"'
            .format(postgres_uri=postgres_uri,
                    snapshot_file=snapshot_file)).run()
        self.__print_result(result)
        self.__print_result(s.cf("stop", db_task_name).run())
def test_run_advanced(self):
    """Chained mkdir/touch commands should yield a directory we can list."""
    runner = Sultan()
    try:
        (runner
         .mkdir("-p /tmp/mytestdir")
         .mkdir("-p /tmp/mytestdir/foobar")
         .touch("/tmp/mytestdir/a")
         .touch("/tmp/mytestdir/b")
         .run())
        listing = runner.ls("-1 /tmp/mytestdir/").run()
        self.assertEqual(listing, ['a', 'b', 'foobar'])
    finally:
        # Clean up the scratch directory regardless of outcome.
        if os.path.exists('/tmp/mytestdir'):
            shutil.rmtree('/tmp/mytestdir')
def test_command_generation(self):
    """str() of a chained command must render the expected shell string."""
    cases = [
        (("install", "gcc"), "yum install gcc;"),
        (("install", "-y", "gcc"), "yum install -y gcc;"),
        (("install -y gcc",), "yum install -y gcc;"),
    ]
    # A fresh Sultan per case, mirroring the original's three instances.
    for args, expected in cases:
        self.assertEqual(str(Sultan().yum(*args)), expected)
def setup(self, branch, aws_config, aws_keys, stack_suffix, key_name, config_tarfile):
    # Provision the deploy server end-to-end: copy config, check out repos,
    # set up AWS access and the login key, install prerequisites, then launch
    # deploy_galahad.py on the remote host.
    logger.info(
        'Now checking out relevant galahad repos for {} branch'.format(
            branch))
    time.sleep(10)
    # Check out galahad repos required for galahad
    self.copy_config(config_tarfile)
    self.checkout_repo('galahad', branch)
    # Sleep for 10 seconds to ensure that both repos are completely checked out
    time.sleep(10)
    # Setup the config and keys for AWS communication
    self.setup_aws_access(aws_config, aws_keys)
    # Setup the Default key to be able to login to the install nodes
    GALAHAD_KEY_DIR = '~/user-keys'
    _cmd = "mkdir('-p {}')".format(GALAHAD_KEY_DIR)
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    # Copy the ssh key itself to the server so later installs can use it
    # ({0} appears twice: once as -i identity, once as the file to copy).
    with Sultan.load() as s:
        s.scp(
            '-o StrictHostKeyChecking=no -i {0} {0} ubuntu@{1}:{2}/{3}.pem'
            .format(self.ssh_key, self.server_ip, GALAHAD_KEY_DIR,
                    key_name)).run()
    _cmd = "sudo('chmod 600 {0}/{1}.pem')".format(GALAHAD_KEY_DIR, key_name)
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    # Deploy the Pre-requisites
    _cmd = "sudo('apt-get update')"
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    _cmd = "sudo('apt-get install -y python-minimal python-pip python3-dev python3-pip')"
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    _cmd = "sudo('pip3 install -r galahad/deploy/requirements.txt')"
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
    # Start the normal deployment process - Run the setup script.
    # NOTE(review): _cmd is a code snippet evaluated remotely by run_ssh_cmd;
    # the adjacent single-quoted literals inside it concatenate into one
    # bash -c command line when evaluated.
    _cmd = '''bash(('-c "cd galahad/deploy && python3 deploy_galahad.py' ' -i {0}/{1}.pem' ' --aws_config ~/.aws/config' ' --aws_keys ~/.aws/credentials' ' --key_name {1}' ' -b {2}' ' -s {3}' ' -n {4}' ' --deactivate_virtue_migration' ' --import_stack {5}' ' --setup"'))'''.format(GALAHAD_KEY_DIR, key_name, branch, stack_suffix, self.stack_name, self.import_stack_name)
    run_ssh_cmd(self.server_ip, self.ssh_key, _cmd)
def setup_keys(self, github_key):
    """Install the github deploy key on the server and pre-trust github.com.

    Returns a list with the stdout of each remote command, in order.
    """
    with Sultan.load() as s:
        s.scp(
            '-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:~/github_key '.
            format(self.ssh_key, github_key, self.server_ip)).run()
    outputs = []
    move_cmd = "mv('github_key ~/.ssh/id_rsa').and_().chmod('600 ~/.ssh/id_rsa')"
    outputs.append(run_ssh_cmd(self.server_ip, self.ssh_key, move_cmd).stdout)
    # Remove any existing public key as it would conflict with the private key.
    outputs.append(
        run_ssh_cmd(self.server_ip, self.ssh_key,
                    "rm('-f ~/.ssh/id_rsa.pub')").stdout)
    # Add github's public key to known_hosts to avoid the verification prompt.
    outputs.append(
        run_ssh_cmd(self.server_ip, self.ssh_key,
                    "ssh__keyscan('github.com >> ~/.ssh/known_hosts')").stdout)
    return outputs
def setup_symlinks():
    '''
    Sets up the symlinks for the different files.
    '''
    link_map = {
        'bash/bashrc': '.bashrc',
        'git/gitconfig': '.gitconfig',
        'pypi/pypirc': '.pypirc',
        'tmux/tmux.conf': '.tmux.conf',
        'vim/vimrc': '.vimrc',
        'zsh/zshrc': '.zshrc'
    }
    with Sultan.load() as s:
        for source_path, destination_path in link_map.items():
            abs_source_path = os.path.join(PWD, 'data', source_path)
            abs_destination_path = os.path.join(HOME, destination_path)
            logger.info("Setting up '%s'" % (source_path))
            logger.info(" - Source: %s" % abs_source_path)
            logger.info(" - Destination: %s" % abs_destination_path)
            if os.path.exists(abs_destination_path):
                # FIX: use the module-level `logger` consistently; the
                # original called the root `logging` module here, bypassing
                # this module's configured handlers/level.
                logger.info(" - Skipping '%s' since it already exists." %
                            abs_destination_path)
            else:
                response = s.ln('-s', abs_source_path,
                                abs_destination_path).run()
                logger.debug('\n'.join(response.stdout))
def midi_hookup():
    """Open the MIDI ports and launch vmpk listening on port 10.

    midi channels:
            | vmpkIN   | vmpkOUT
    --------|----------|------------
    sBoxIN  | no       | no
    --------|----------|-------------
    sBoxOUT | yes      | no
    """
    out_port = mido.open_output(10)
    in_port = mido.open_input(10)
    # Launch vmpk on port 10.
    Sultan().vpmk("-im 10").run()
def get_git_files(
    repo_root: Path,
    commits: Optional[List[str]] = None,
    all_files: bool = False,
) -> List[str]:
    """Obtain changed files.

    :param repo_root: Repository top folder.
    :param commits: Commits range.
    :returns: List of staged files.
    """
    if all_files:
        print("Getting all the repository files.")
        git_args = ["ls-files"]
    elif commits:
        print(f"Checking file changes in the commit range: {commits}.")
        git_args = ["diff", "--no-commit-id", "--name-only", "-r", *commits]
    else:
        print("Checking local changed files...")
        # --cached compares the index against HEAD (staged changes).
        git_args = ["diff", "--no-commit-id", "--name-only", "--cached",
                    "-r", "HEAD"]
    with Sultan.load(cwd=repo_root) as s:
        return s.git(*git_args).run(halt_on_nonzero=False, quiet=True).stdout
def ups_service(self, name, delete=False):
    """Create (or delete) the user-provided services for *name*.

    :param name: Environment name.
    :param delete: When True, delete the services instead of creating them.
    """
    ups_service_names = self.__get_ups_service_names(name)
    with Sultan.load() as s:
        common_config_name = self.__get_common_config(name)
        if delete is True:
            self.__print_result(
                s.echo("y").pipe().cf("delete-service",
                                      common_config_name).run())
            # FIX: dict.items() replaces Python-2-only iteritems().
            for k, v in ups_service_names.items():
                self.__print_result(
                    s.echo("y").pipe().cf("delete-service", v).run())
        else:
            self.__print_result(
                s.cf("create-user-provided-service", common_config_name,
                     "-p {}{}.json".format(TEMP_PATH,
                                           common_config_name)).run())
            for k, v in ups_service_names.items():
                # FIX: renamed `file` -> `config_file` (builtin shadowing).
                config_file = "{}{}.json".format(TEMP_PATH, v)
                if os.path.exists(config_file):
                    self.__print_result(
                        s.cf("create-user-provided-service", v,
                             "-p {}".format(config_file)).run())
                else:
                    # FIX: print() function works on both Python 2 and 3.
                    print("Cannot find '{}'. Execute python generate_config_files {} first".format(config_file, name))
def get_changed_files(repo_root, commits=None):
    """Obtains changed files.

    Args:
        repo_root (Path): Repository top folder.
        commits: Commit range. Defaults to None (use staged changes).

    Returns:
        List[str]: List of staged files.
    """
    git_args = ["diff", "--no-commit-id", "--name-only", "-r"]
    if commits:
        git_args += list(commits)
    else:
        # No range given: compare the index (staged files) against HEAD.
        git_args.insert(3, "--cached")
        git_args.append("HEAD")
    with Sultan.load(cwd=repo_root) as s:
        return s.git(*git_args).run(halt_on_nonzero=False, quiet=True).stdout
def undeploy(deploy_name):
    """Deploy a branch in qa server"""
    deploy_name = get_deploy_name(deploy_name)
    current_deploys = qa_deployment_sever_get_list()
    if not current_deploys:
        print("List of 0 deploys returned, please, run again the command.")
        exit(1)
    if deploy_name not in current_deploys:
        print("%s is not deployed" % deploy_name)
        exit(1)
    print("Undeploying %s..." % deploy_name)
    with Sultan.load(cwd="/home/ubuntu/server/exolever/",
                     user=deploy_username,
                     hostname=deploy_host) as s:
        compose_file = deploy_name + '/docker-compose.yml'
        # Tear down the compose stack (including volumes/orphans), then
        # remove the deployment directory.
        result = (s.docker__compose('--file', compose_file, 'down',
                                    '--volumes', '--remove-orphans')
                  .and_().rm('-rf', deploy_name).run())
        print(result.stdout)
        print("Undeployed!")
def setup(self, branch, github_key):
    """Prepare the rethinkdb server: keys, config files, repos, setup script."""
    # Ensure that cloud init has finished
    check_cloud_init_finished(self.ip_address, self.ssh_key)
    # Transfer the private key to the server to enable it to access
    # github without being prompted for credentials.
    self.setup_keys(github_key)
    transfers = [
        ('setup/rethinkdb.conf', '~/rethinkdb.conf'),
        ('setup/setup_rethinkdb.sh', '~/setup_rethinkdb.sh'),
        ('setup/configure_rethinkdb.py', '~/configure_rethinkdb.py'),
    ]
    with Sultan.load() as s:
        for local_file, remote_name in transfers:
            s.scp('-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:{}'.format(
                self.ssh_key, local_file, self.ip_address, remote_name)).run()
    logger.info(
        'Now checking out relevant excalibur repos for {} branch'.format(
            branch))
    # Check out galahad repos required for rethinkdb
    self.checkout_repo('galahad-config')
    run_ssh_cmd(self.ip_address, self.ssh_key, "bash('./setup_rethinkdb.sh')")
def deploy(branch, target, url):
    """Deploy a branch in qa server"""
    if not check_status(branch):
        exit(1)
    current_deploys = qa_deployment_sever_get_list()
    if len(current_deploys) >= 5:
        print("Limit of 5 deploys reached.")
        exit(1)
    if len(current_deploys) == 0:
        print("List of 0 deploys returned, please, run again the command.")
        exit(1)
    # Default the target name from the branch when not given explicitly.
    target = target or get_deploy_name(branch)
    if target in current_deploys:
        print("Branch %s is already deployed." % branch)
        exit(1)
    print("Deploying branch %s..." % branch)
    print("server/exolever/deploy.sh %s %s %s" % (branch, target, url))
    with Sultan.load(cwd="/home/ubuntu/server/exolever/",
                     user=deploy_username,
                     hostname=deploy_host) as s:
        result = s.bash('./deploy.sh', branch, target, url).run()
        print(result.stdout)
def setup(self, branch):
    """Prepare the rethinkdb server: config files, galahad-config, setup script."""
    # Ensure that cloud init has finished
    check_cloud_init_finished(self.ip_address, self.ssh_key)
    transfers = [
        ('setup/rethinkdb.conf', '~/rethinkdb.conf'),
        ('setup/setup_rethinkdb.sh', '~/setup_rethinkdb.sh'),
        ('setup/configure_rethinkdb.py', '~/configure_rethinkdb.py'),
    ]
    with Sultan.load() as s:
        for local_file, remote_name in transfers:
            s.scp('-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:{}'.format(
                self.ssh_key, local_file, self.ip_address, remote_name)).run()
    logger.info(
        'Now checking out relevant excalibur repos for {} branch'.format(
            branch))
    # Check out galahad repos required for rethinkdb
    self.copy_config('~/galahad-config')
    run_ssh_cmd(self.ip_address, self.ssh_key, "bash('./setup_rethinkdb.sh')")
def do_GET(self):
    """Serve static files; for /metrics, refresh stats from pwrstat first."""
    if self.path == "/":
        self.path = "/index.html"
    if self.path == "/metrics":
        # TODO - don't write to file
        # There's probably a better way to do this than writing to file,
        # then reading from that file, but Sultan outputting to STDOUT
        # currently outputs every character on a newline
        with Sultan.load() as s:
            s.sh("-c 'pwrstat -status'").redirect("pwrstats.txt",
                                                  append=False,
                                                  stdout=True,
                                                  stderr=False).run()
        # stats.json defines what outputs we are looking for from pwrstat
        with open('stats.json') as f:
            stats = json.load(f)
        with open("pwrstats.txt", "r") as f:
            for line in f:
                for i in stats:
                    if re.findall(r'' + i['search'] + '', line):
                        if i['end']:
                            end = int(i['end'])
                        else:
                            end = None
                        # pwrstat value column starts at character 32.
                        i['stat'] = line[32:end].rstrip()
        with open("metrics", "w") as f:
            for i in stats:
                # Reassign string values to booleans.
                # FIX: the original used `is` here, which tests object
                # identity, not string equality.
                # NOTE(review): `i` is a dict, so `i == 'power_supply'` can
                # never be true as written -- this almost certainly should
                # compare a field such as i['metric']; confirm against the
                # schema in stats.json before changing the condition.
                if i == 'power_supply':
                    if i['stat'] == 'power_supply':
                        i['stat'] = '0'
                    else:
                        i['stat'] = '1'
                f.write(i['help'] + '\n')
                f.write(i['type'] + '\n')
                f.write(i['metric'] + ' ' + i['stat'] + '\n')
    try:
        # Check the file extension required and set the right mime type
        mimetype = 'text/html'
        # Open the static file requested and send it
        f = open(curdir + sep + self.path)
        self.send_response(200)
        self.send_header('Content-type', mimetype)
        self.end_headers()
        self.wfile.write(f.read())
        f.close()
        return
    except IOError:
        self.send_error(404, 'File Not Found: %s' % self.path)
def run_backup(self):
    """Do the backup, for real or dry run."""
    if self.dryrun is False:
        LOGGER.info("\nNot a dry run: Eligible files will be downloaded\n")
    else:
        LOGGER.info("\nDRY RUN: Nothing will be downloaded\n")
    for profile, config in self.profiles.items():
        for paths in config["paths"]:
            # --no-progress : File transfer progress is not displayed.
            command = (
                "s3 sync {remote} {local} --profile={profile} --no-progress"
            ).format(remote=paths["remote"],
                     local=paths["local"],
                     profile=profile)
            extra_args = []
            if paths.get("delete") is True:
                # Delete local files that don't exist on S3:
                extra_args.append("--delete")
            if "include-only" in paths:
                spec = paths["include-only"]
                include_flags = []
                if "today" in spec:
                    stamp = datetime.utcnow().strftime(spec["today"])
                    include_flags.append("--include '{}'".format(stamp))
                if "yesterday" in spec:
                    stamp = (datetime.utcnow() -
                             timedelta(1)).strftime(spec["yesterday"])
                    include_flags.append("--include '{}'".format(stamp))
                if include_flags:
                    # Excluding everything first makes the includes exclusive.
                    extra_args.append(" --exclude '*'")
                    extra_args.extend(include_flags)
            if self.dryrun is True:
                # The aws command has its own dryrun option, handily:
                extra_args.append("--dryrun")
            command = "{} {}".format(command, " ".join(extra_args))
            with Sultan.load() as s:
                result = s.aws(command).run(streaming=True)
                # Streaming pattern from the Sultan docs (example 13).
                while True:
                    complete = result.is_complete
                    for line in result.stdout:
                        LOGGER.info(line)
                    for line in result.stderr:
                        LOGGER.error(line)
                    if complete:
                        break
                    time.sleep(1)
def postgres_service(self, name, delete=False):
    """Create (with the pg_trgm extension) or delete the postgres service."""
    service_name = self.__get_postgres_service_name(name)
    with Sultan.load() as s:
        if delete is True:
            # "y" auto-confirms the interactive delete prompt.
            self.__print_result(
                s.echo("y").pipe().cf("delete-service", service_name).run())
        else:
            self.__print_result(
                s.cf("create-service", "postgres", "shared",
                     service_name).run())
            self.__print_result(
                s.cf("update-service", service_name,
                     "-c '{\"extensions\":[\"pg_trgm\"]}'").run())
def __init__(self, hostname="localhost", configuration=None,
             configuration_path=DEFAULT_CONFIG_PATH):
    """Initialize the router with its configuration and a Sultan runner."""
    super(Router, self).__init__(hostname)
    self.configuration = configuration
    self.configuration_path = configuration_path
    # Shared shell runner for this router instance.
    self.sultan = Sultan()
def copy_config(self, config_path):
    """Replace the remote config directory with a fresh recursive copy."""
    # Clear out any stale copy on the server first.
    run_ssh_cmd(self.ip_address, self.ssh_key,
                "rm('-rf {}')".format(config_path))
    with Sultan.load() as s:
        s.scp('-r -o StrictHostKeyChecking=no -i {} {} ubuntu@{}:{} '.format(
            self.ssh_key, config_path, self.ip_address, config_path)).run()
def test_calling_context(self):
    """Verify current_context for various Sultan.load argument combinations."""
    def ctx(**overrides):
        # Baseline context with per-case overrides applied on top.
        base = {'cwd': None, 'env': {}, 'sudo': False, 'logging': True,
                'user': getpass.getuser(), 'hostname': None}
        base.update(overrides)
        return base

    loaded = Sultan.load(cwd='/tmp', test_key='test_val')
    self.assertEqual(loaded.current_context,
                     ctx(cwd='/tmp', test_key='test_val'))
    # cwd
    with Sultan.load(cwd='/tmp') as sultan:
        self.assertEqual(sultan.current_context, ctx(cwd='/tmp'))
    # sudo
    with Sultan.load(cwd='/tmp', sudo=True) as sultan:
        self.assertEqual(sultan.current_context,
                         ctx(cwd='/tmp', sudo=True, user='******'))
    with Sultan.load(cwd='/tmp', sudo=False, user="******") as sultan:
        self.assertEqual(sultan.current_context,
                         ctx(cwd='/tmp', user='******'))
    with Sultan.load(sudo=True) as sultan:
        self.assertEqual(sultan.current_context,
                         ctx(sudo=True, user='******'))
    # hostname
    with Sultan.load(hostname='localhost') as sultan:
        self.assertEqual(sultan.current_context, ctx(hostname='localhost'))
    # set environment
    with Sultan.load(env={'path': ''}) as sultan:
        self.assertEqual(sultan.current_context, ctx(env={'path': ''}))
def run_and_return(command):
    """Run a ChunkyLauncher command, log its stdout, return cleaned stderr."""
    if system_platform() == 'Windows':
        return run_and_return_winfix(command)
    with Sultan.load() as s:
        result = s.java('-jar ChunkyLauncher.jar -' + command).run()
        log(str(result.stdout))
        # Strip the tab characters ChunkyLauncher embeds in stderr lines.
        return [line.replace('\t', '') for line in result.stderr]
def update_data_repository():
    """Run commit-and-push.sh; log (but deliberately swallow) any failure."""
    try:
        with Sultan.load() as s:
            s.bash(path.join(getcwd(), 'commit-and-push.sh')).run()
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the intended best-effort
        # behavior without masking interpreter-level signals.
        app.logger.error('[update_data_repository] failed.')
        trace_info = traceback.format_exc().splitlines()
        app.logger.error('[update_data_repository] ' + '\n '.join(trace_info))
def get_latest_commit_date(region):
    """Return the unix timestamp of the latest commit touching the region CSV.

    Falls back to 0 when git fails (e.g. the file is not tracked yet).
    """
    try:
        # PAGER=cat stops git from invoking an interactive pager.
        with Sultan.load(env={'PAGER': 'cat'}) as s:
            result = s.git(
                f'-C inf-covid19-similarity-data log -1 --format=%ct "by_key/{region}.csv"'
            ).run()
            return int(''.join(result.stdout).strip())
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        return 0
def setup_aws_access(self, aws_config, aws_keys):
    """Copy the AWS config and credentials into ~/.aws on the remote host."""
    run_ssh_cmd(self.ip_address, self.ssh_key, "mkdir('~/.aws')")
    uploads = [
        (aws_config, '~/.aws/config '),
        (aws_keys, '~/.aws/credentials '),
    ]
    with Sultan.load() as s:
        for local_file, remote_path in uploads:
            s.scp('-o StrictHostKeyChecking=no -i {} {} ubuntu@{}:{}'.format(
                self.ssh_key, local_file, self.ip_address, remote_path)).run()
def run(command, print_command=True):
    """Execute *command* via Sultan and return labelled STDOUT/STDERR text."""
    with Sultan.load() as s:
        s.commands = [command]
        out = s.run()
    stdout = '\n'.join(out.stdout)
    stderr = '\n'.join(out.stderr)
    pieces = []
    if stdout:
        pieces.append("STDOUT:\n" + stdout)
    if stderr:
        pieces.append("\nSTDERR:\n" + stderr)
    return "".join(pieces)
def setup_jenv():
    '''
    Set up 'jenv'
    '''
    if platform.system() == 'Windows':
        # No automated install path on Windows; tell the user what to run.
        logger.info("Run this manually for installing jenv: set-executionpolicy remotesigned")
        logger.info("Run this manually for installing jenv: (new-object Net.WebClient).DownloadString(\"http://get.jenv.io/GetJenv.ps1\") | iex")
        return
    with Sultan.load(cwd=HOME) as s:
        s.curl('-L', '-s', 'get.jenv.io').pipe().bash().run()
def play_video_audio_intent(self, message):
    """Download the audio of the searched YouTube video and play it.

    The full utterance is available via message.data.get('utterance');
    only the captured search query is needed here.
    """
    video_url = self.getResults(message.data.get("search_query"))
    # Processing takes a moment, so let the user know first.
    self.speak_dialog("downloading")
    downloader = Sultan()
    # Remove any existing output file before downloading.
    rm("/tmp/output.wav")
    # Double underscores map to a hyphen in Sultan command names (youtube-dl).
    downloader.youtube__dl(
        "-x --audio-format wav -o '/tmp/output.%(ext)s' " + video_url).run()
    # Play the result through the audio service.
    self.audio_service.play('file:///tmp/output.wav')
def ups_secret_service(self, secret_file, delete=False):
    """Create or delete the 'ups-secret-service' user-provided service."""
    with Sultan.load() as s:
        if delete is True:
            # "y" auto-confirms the interactive delete prompt.
            self.__print_result(
                s.echo("y").pipe().cf("delete-service",
                                      "ups-secret-service").run())
        else:
            self.__print_result(
                s.cf("create-user-provided-service", "ups-secret-service",
                     "-p {}".format(secret_file)).run())
def setup_rvm():
    '''
    Sets up 'rvm'.
    '''
    logger.info("Setting up RVM")
    with Sultan.load(cwd=HOME) as s:
        rvm_home = os.path.join(HOME, '.rvm')
        if not os.path.exists(rvm_home):
            # Install a stable ruby via the official rvm bootstrap script.
            s.curl('-sSL', 'https://get.rvm.io').pipe().bash(
                '-s', 'stable', '--ruby').run()
        else:
            logger.info("RVM already exists.")
def setup_vim():
    '''
    Sets up vim.
    '''
    logger.info("Setting up Vundle")
    with Sultan.load(cwd=HOME) as s:
        vundle_dir = os.path.join(HOME, '.vim', 'bundle', 'Vundle.vim')
        if not os.path.exists(vundle_dir):
            s.mkdir('-p', os.path.dirname(vundle_dir)).run()
            s.git('clone', 'https://github.com/VundleVim/Vundle.vim.git',
                  vundle_dir).run()
            logger.warning("Please remember to run Vim, and run ':PluginInstall'")
        else:
            logger.info("Vundle is already setup.")
def ups_service(self, name, delete=False):
    """Create (or delete) the user-provided services for *name*.

    :param name: Environment name.
    :param delete: When True, delete the services instead of creating them.
    """
    ups_service_names = self.__get_ups_service_names(name)
    with Sultan.load() as s:
        common_config_name = self.__get_common_config(name)
        if delete is True:
            self.__print_result(
                s.echo("y").pipe().cf("delete-service",
                                      common_config_name).run())
            # FIX: dict.items() replaces Python-2-only iteritems().
            for k, v in ups_service_names.items():
                self.__print_result(
                    s.echo("y").pipe().cf("delete-service", v).run())
        else:
            self.__print_result(
                s.cf("create-user-provided-service", common_config_name,
                     "-p {}{}.json".format(TEMP_PATH,
                                           common_config_name)).run())
            for k, v in ups_service_names.items():
                # FIX: renamed `file` -> `config_file` (builtin shadowing).
                config_file = "{}{}.json".format(TEMP_PATH, v)
                if os.path.exists(config_file):
                    self.__print_result(
                        s.cf("create-user-provided-service", v,
                             "-p {}".format(config_file)).run())
                else:
                    # FIX: print() function works on both Python 2 and 3.
                    print("Cannot find '{}'. Execute python generate_config_files {} first".format(config_file, name))
def start(self, name):
    """Start every Cloud Foundry app belonging to *name*."""
    with Sultan.load() as s:
        app_names = self.__get_app_names(name)
        # FIX: dict.items() replaces Python-2-only iteritems(), making the
        # method runnable on both Python 2 and 3.
        for k, v in app_names.items():
            self.__print_result(s.cf("start", v).run())