def setup(config_file, deployment_id, ssh_key_file):
    mesos_marathon_setup_cmd = "python mesos_marathon_setup.py --config_file " + config_file + \
        " --deployment_id " + deployment_id + " --ssh_key_file " + ssh_key_file
    shell_call(mesos_marathon_setup_cmd)
    hydra_setup_cmd = "python hydra_setup_script.py --deployment_id " + deployment_id
    shell_call(hydra_setup_cmd)

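These commands are built by plain string concatenation, which breaks if any argument contains spaces or shell metacharacters. A minimal alternative sketch using only the standard library: hand subprocess an argument list so no shell quoting is needed (the script names are the ones invoked above; setup_subprocess is a hypothetical variant, not part of the original script):

import subprocess

def setup_subprocess(config_file, deployment_id, ssh_key_file):
    # Same two invocations as setup() above, expressed as argument lists;
    # subprocess.call() runs the program directly, so arguments containing
    # spaces or metacharacters need no quoting.
    subprocess.call(["python", "mesos_marathon_setup.py",
                     "--config_file", config_file,
                     "--deployment_id", deployment_id,
                     "--ssh_key_file", ssh_key_file])
    subprocess.call(["python", "hydra_setup_script.py",
                     "--deployment_id", deployment_id])
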
def create(self):
    if self.image == "ubuntu-12-04" or self.image == "ubuntu-14-04":
        cmd = "gcloud compute disks create " + self.name + " --image " + self.image + " --type " + self.type + \
            " --size=" + self.size + " -q"
    elif self.image is None:
        cmd = "gcloud compute disks create " + self.name + " --type " + self.type + " --size=" + self.size + " -q"
    else:
        cmd = "gcloud compute disks create " + self.name + " --source-snapshot " + self.image + \
            " --type " + self.type + " --size=" + self.size + " -q"
    print("disk_cmd=%s" % cmd)
    shell_call(cmd)

def add(lang):
    langs = getlangs(lang)
    puts(u"Adding %s" % ', '.join(langs))
    for loc in langs:
        with indent(2):
            puts(u"Generating PO for %s" % loc)
            shell_call(u"django-admin.py makemessages -l %(lang)s "
                       u"-e py,html,email,txt" % {'lang': loc})
            for app in I18N_APPS:
                with indent(4):
                    puts(u"Generating PO for app %s" % app)
                    with chdir(os.path.join(REPO_ROOT, app)):
                        shell_call(u"django-admin.py makemessages "
                                   u"-d djangojs -l %(lang)s" % {'lang': loc})
        puts(colored.green("successfully generated %s" % loc))

def compile_mo(lang=None):
    langs = getlangs(lang)
    puts(u"Compiling %s" % ', '.join(langs))
    for loc in langs:
        with indent(2):
            puts(u"Compiling %s" % loc)
            shell_call(u"django-admin.py compilemessages -l %(lang)s " % {'lang': loc})
            for app in I18N_APPS:
                with indent(4):
                    puts(u"Compiling app %s" % app)
                    with chdir(os.path.join(REPO_ROOT, app)):
                        shell_call(u"django-admin.py compilemessages -l %(lang)s" % {'lang': loc})
        puts(colored.green("successfully compiled %s" % loc))

def create(self, common_section):
    # Write the public key as "user:key", the format the GCE metadata
    # server expects for the sshKeys entry.
    pathname = "/tmp/gce_key.txt"
    with open(common_section.sshkey) as f:
        lines = f.readlines()
    with open(pathname, 'w') as tfile:
        tfile.write(self.user_name + ":" + lines[0])
    cmd = "gcloud compute instances create " + self.name + " --machine-type " + self.machine_type + \
        " --network " + common_section.network + \
        " --maintenance-policy MIGRATE --scopes https://www.googleapis.com/auth/cloud-platform " \
        "--disk name=" + self.disk_list[0].name + ",mode=rw,boot=yes,auto-delete=yes --disk name=" + \
        self.disk_list[1].name + \
        ",mode=rw,boot=no,auto-delete=yes --no-address --tags no-ip --metadata-from-file sshKeys=" + pathname
    print("create_instance_cmd = %s" % cmd)
    shell_call(cmd)
    self.ip = self.get_ip()
    return self.ip

def join(self): """Uses pandaseq 2.7 to join the foward and reverse reads together. See https://github.com/neufeld/pandaseq""" # Special case for new primers that don't join # rev_primer_name = self.info['primers']['reverse']['name'] not_joining_primers = ("1132R", "1000R") if rev_primer_name in not_joining_primers: print "No overlap special case" self.trim_and_concat.run() return # Special case for primers that highly overlap # high_overlap_primers = ("806R",) if rev_primer_name in high_overlap_primers: print "High overlap special case, using mothur" result = sh.mothur("#make.contigs(ffastq=%s, rfastq=%s);" % (self.uncomrpessed_pair.fwd, self.uncomrpessed_pair.rev)) if "ERROR" in result.stdout: raise Exception("Mothur didn't run correctly") # Move things # #shutil.move(self.tax.centers.prefix_path + '.align', self.mothur_aligned) #shutil.move(self.tax.centers.prefix_path + '.align.report', self.p.mothur_report) return # Default case # command = 'pandaseq27 -T 1 -f %s -r %s -u %s -F 1> %s 2> %s' command = command % (self.fwd, self.rev, self.unassembled.path, self.assembled.path, self.assembled.p.out) shell_call(command) # Because it exits with status 1 https://github.com/neufeld/pandaseq/issues/40
def run_experiment():
    shell_call(oracle_cmd)
    shell_call(sleep_cmd)
    shell_call(exp_cmd)
    output = os.popen(tail_cmd).read()
    sp = output.strip().split('\n')
    if 'CALL:' in sp[0] and 'ORACLE:' in sp[1]:
        t1 = int(sp[0].strip().split(':')[1])
        t2 = int(sp[1].strip().split(':')[1])
        return t2 - t1
    else:
        return -1

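run_experiment() assumes the tailed log ends with a 'CALL:<int>' line followed by an 'ORACLE:<int>' line. A small, testable sketch of just that parsing step; the sample string is hypothetical, shaped only by what the code above expects:

def parse_oracle_delta(output):
    # Mirrors the parsing in run_experiment(): first line 'CALL:<int>',
    # second line 'ORACLE:<int>'; returns -1 on anything else.
    sp = output.strip().split('\n')
    if len(sp) >= 2 and 'CALL:' in sp[0] and 'ORACLE:' in sp[1]:
        t1 = int(sp[0].strip().split(':')[1])
        t2 = int(sp[1].strip().split(':')[1])
        return t2 - t1
    return -1

assert parse_oracle_delta("CALL:100\nORACLE:250\n") == 150  # hypothetical output
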
from framework.utils.data.text_indexer import TextIndexer
from word_classifier.data import ClassifierData
from framework.evaluator import Evaluator
from shell_command import shell_call
import framework.utils.common as utils
from time import time
import numpy as np
import os
import sys

params = utils.load_param_file(sys.argv[1])
params['num_classes'] = len(params['keywords']) + 1
release_cmd = 'python3 ../tools/release_model.py %s' % sys.argv[1]
shell_call(release_cmd)
vocab_file = os.path.join(utils.get_dict_value(params, 'output_location'), 'vocab.pkl')
release_dir = os.path.join(utils.get_dict_value(params, 'output_location'), params['model_name'])
graphdef_file = os.path.join(release_dir, params['model_name'] + '.graphdef')
ckpt = os.path.join(utils.get_dict_value(params, 'output_location'),
                    utils.get_dict_value(params, 'model_name') + '.ckpt')
e = Evaluator.load_graphdef(graphdef_file)
e.dump_variable_sizes()
i = TextIndexer.from_file(vocab_file)
test_data = ClassifierData.get_data_from_dirs(
    ['/mnt/work/training_data/statmt.tokenized/valid'], params=params)
#test_data = ClassifierData.get_data(params=params)
model_results = []

def callcmd(cmd):
    print("executing: %s" % cmd)
    shell_call(cmd)

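A quick usage sketch; the path is only an illustration, and Python 3's shlex.quote (standard library) protects arguments that might contain spaces:

import shlex

callcmd("ls -l " + shlex.quote("/tmp/some dir"))
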
else:
    keywords_cmd = ''
params_cmd = 'python3 %s/params2json.py %s' % (script_path, paramsfile)
params = utils.load_param_file(paramsfile)
model_name = params['model_name']
release_dir_name = model_name
release_cmds = []
release_cmds.append('mkdir %s' % os.path.join(model_dirname, release_dir_name))
release_files = [
    model_dirname + '/*.graphdef',
    model_dirname + '/*.json',
    model_dirname + '/release.timestamp.txt',
]
for src_file in release_files:
    release_cmds.append(
        'cp -rvf %s %s' % (src_file, os.path.join(model_dirname, release_dir_name)))
copy2repo = 'python3 %s/copy2repo.py --paramsfile %s --release_dir_name %s' % (
    script_path, os.path.join(model_dirname, 'params.py'), release_dir_name)
cmds = [freeze_cmd, params_cmd] + release_cmds
#cmds = [freeze_cmd, vocab_cmd, params_cmd] + release_cmds + [keywords_cmd, copy2repo]
for c in cmds:
    if len(c) > 0:
        print("EXECUTING: %s" % c)
        shell_call(c)

                    help='SSH public key absolute path. It is used for passwordless login to cloud '
                         'instances. Default is ~/.ssh/id_rsa.pub')
# parser.add_argument('--cont', '-t', action='store_true',
#                     help='If your script fails for any reason in the middle of something, use this flag. '
#                          'This flag will resume the script from the failed step. ')
parser.add_argument('--clean', '-c', action='store_true', help='cleanup instances')
args = parser.parse_args()
ssh_key_file = args.ssh_key_file
config_file = args.config_file
deployment_id = args.deployment_id
local_work_dir = os.environ['HOME']
config = ConfigParser.ConfigParser()
config.read(config_file)
sections = config.sections()
if args.clean:
    # TODO: Needs to be updated. This should be a function and should clean the instances according to the supplied tag.
    print("==> Removing deployment nodes")
    f = open(local_work_dir + '/.' + deployment_id + '_mesos_all_ips', 'r')
    for ip in f:
        ip = ip.rstrip()
        # setup_helpers.delete_instance(config, ip)
    shell_call("rm " + local_work_dir + "/." + deployment_id + "_mesos_all_ips")
    shell_call("rm " + local_work_dir + "/." + deployment_id + "_mesos_masters_ips")
    shell_call("rm " + local_work_dir + "/." + deployment_id + "_mesos_slaves_ips")
else:
    setup(config_file, deployment_id, ssh_key_file)

def join(self): """Uses pandaseq 2.8""" self.assembled.remove() command = 'pandaseq28 -T 1 -f %s -r %s -u %s -F 1> %s 2> %s' command = command % (self.fwd_path, self.rev_path, self.unassembled.path, self.assembled.path, self.assembled.p.out) shell_call(command) # Because it exits with status 1 https://github.com/neufeld/pandaseq/issues/40
#! /usr/bin/python
# Import the shell_call helper from the shell_command package.
from shell_command import shell_call

shell_call("ls *.py")

# The link below lists the shell commands you may want to run through this program:
# https://ss64.com/bash/
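
Every snippet above imports shell_call from the shell_command package. If that package is not installed, a minimal stand-in sketch, assuming shell_call just runs the command through the shell and returns its exit status (the behavior of subprocess.call with shell=True); the real package may do more:

import subprocess

def shell_call(cmd):
    # Assumed stand-in for shell_command.shell_call: run cmd through the
    # shell and return the process exit status.
    return subprocess.call(cmd, shell=True)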