def save_command(rules_dir, chains, ipv6):
    """Save the current iptables rules for the desired chains.

    Arguments
    ---------
    rules_dir  str   directory the rules files live in
    chains     [str] list of chains to preserve
    ipv6       bool  dump ip6tables instead of iptables
    """
    derive_tables, filter_tables = table_handler()
    prefix = version_prefix(ipv6)
    temp_abs_path = "{}/temp.v{}".format(rules_dir, prefix)
    rules_abs_path = "{}/rules.v{}".format(rules_dir, prefix)
    # Dump the live ruleset to a temp file first so a failed dump never
    # clobbers an existing rules file.
    dump_cmd = "ip{}tables-save > {}".format("6" * ipv6, temp_abs_path)
    popen(dump_cmd, shell=True, universal_newlines=True).communicate()
    with open(temp_abs_path, "r") as temp:
        dump_lines = temp.readlines()
    kept_tables = filter_tables(derive_tables(dump_lines), chains)
    with open(rules_abs_path, 'w') as rules:
        # Flatten the per-table line lists into one stream of lines.
        rules.writelines(reduce(lambda acc, tbl: acc + tbl, kept_tables))
    if os.path.exists(rules_abs_path):
        os.remove(temp_abs_path)
def getTrackedItems():
    """Return the set of working-tree paths whose 'fit' attribute is set."""
    # Pipe `git ls-files -o` straight into `git check-attr --stdin fit` and
    # keep only the paths reported as "fit: set".
    set_pattern = re.compile('(.*): fit: set')
    lister = popen('git ls-files -o'.split(), stdout=PIPE)
    checker = popen('git check-attr --stdin fit'.split(),
                    stdin=lister.stdout, stdout=PIPE)
    matches = (set_pattern.match(line) for line in checker.stdout)
    return {m.group(1) for m in matches if m}
def test_returns_usage_information(self):
    """'lmdo -h' and 'lmdo --help' must both print usage information."""
    # communicate() returns bytes on Python 3; decode before the substring
    # check, otherwise `'Usage:' in output` raises TypeError.
    output = popen(['lmdo', '-h'], stdout=PIPE).communicate()[0]
    self.assertTrue('Usage:' in output.decode('utf-8'))
    output = popen(['lmdo', '--help'], stdout=PIPE).communicate()[0]
    self.assertTrue('Usage:' in output.decode('utf-8'))
def _get(items, store, pp, successes, failures):
    # Download each requested object from the external store into the
    # working tree, recording per-item success/failure.
    #
    # items     : iterable of (filePath, objHash, size) tuples to fetch
    # store     : external data store; must provide check() and get()
    # pp        : progress printer (newItem / updateProgress)
    # successes : list, appended with (filePath, objHash, size) on success
    # failures  : list, appended with filePath on failure
    if not exists(tempDir):
        mkdir(tempDir)
    for filePath,objHash,size in items:
        pp.newItem(filePath, size)
        # Copy download to temp file first, and then to actual object location
        # This is to prevent interrupted downloads from causing bad objects to be placed
        # in the objects cache
        (tempHandle, tempTransferFile) = mkstemp(dir=tempDir)
        osclose(tempHandle)
        # Store keys are sharded by the first two hash characters.
        key = store.check('%s/%s'%(objHash[:2], objHash[2:]))
        try:
            transferred = store.get(key, tempTransferFile, size)
        except:
            # NOTE(review): bare except deliberately treats any store error
            # as a failed transfer; the item is then reported via failures.
            transferred = False
        if key and transferred:
            pp.updateProgress(size, size)
            popen(['mv', tempTransferFile, filePath]).wait()
            successes.append((filePath, objHash, size))
        else:
            pp.updateProgress(size, size, custom_item_string='ERROR')
            failures.append(filePath)
    cache.insert({h:(s,f) for f,h,s in successes}, inLru=True, progressMsg='Caching newly gotten items')
def run(self, host, port, debug=True, services=True):
    """Run development server and celery queue.

    Spawns a celery worker (when ``services``) plus an invenio dev server
    via pipenv, and installs a SIGINT handler that terminates both.
    """
    # Ctrl-C handler: shut both child processes down cleanly.
    def signal_handler(sig, frame):
        click.secho('Stopping server and worker...', fg='green')
        server.terminate()
        if services:
            worker.terminate()
        click.secho("Server and worker stopped...", fg="green")

    signal.signal(signal.SIGINT, signal_handler)
    if services:
        ServicesCommands(self.cli_config).ensure_containers_running()
        click.secho("Starting celery worker...", fg="green")
        worker = popen([
            'pipenv', 'run', 'celery', '--app', 'invenio_app.celery', 'worker'
        ])
    click.secho("Starting up local (development) server...", fg='green')
    run_env = environ.copy()
    run_env['FLASK_ENV'] = 'development' if debug else 'production'
    run_env['INVENIO_SITE_HOSTNAME'] = f"{host}:{port}"
    server = popen([
        'pipenv', 'run', 'invenio', 'run', '--cert',
        'docker/nginx/test.crt', '--key', 'docker/nginx/test.key',
        '--host', host, '--port', port
    ], env=run_env)
    click.secho('Instance running!\nVisit https://{}:{}'.format(
        host, port), fg='green')
    # Block until the server exits (or the SIGINT handler fires).
    server.wait()
def test_component_creation(self):
    """'k2a react_component --name=testComp' must create the component dir."""
    popen(['k2a', 'react_component', '--name=testComp'], stdout=PIPE).communicate()
    # BUG FIX: the original ran assertTrue(True) when the directory was
    # MISSING, so the test could never fail; assert the dir actually exists.
    self.assertTrue(os.path.isdir("./TestComp"), msg='Failed to create component !!')
    # NOTE(review): cleanup removes 'testComp' while the check looks at
    # './TestComp' — confirm which casing k2a really produces.
    popen(['rm', '-rf', 'testComp'], stdout=PIPE).communicate()
def save(fitTrackedData, paths=None, pathArgs=None, forceWrite=False, quiet=False):
    # Save working-tree fit item changes and stage the .fit manifest.
    # Returns False when empty stub files block the save, True otherwise.
    # (Python 2 module: print statements throughout.)
    added,removed,stubs = saveItems(fitTrackedData, paths=paths, pathArgs=pathArgs, quiet=quiet)
    if stubs:
        print '\nerror: The following items are empty, zero-byte files and cannot be added to fit:\n'
        for i in sorted(stubs):
            print ' ',i
        print
        return False
    if len(added) + len(removed) > 0 or forceWrite:
        print 'Working-tree changes saved.'
        writeFitFile(fitTrackedData)
    fitFileStatus = getFitFileStatus()
    # Nothing to stage when the .fit file is unmodified in the index.
    if len(fitFileStatus) == 0 or fitFileStatus[1] == ' ':
        return True
    oldStagedFitFileHash = None
    newStagedFitFileHash = None
    if fitFileStatus[0] == 'A':
        oldStagedFitFileHash = getStagedFitFileHash()
    popen('git add -f'.split()+[fitFile]).wait()
    newStagedFitFileHash = getStagedFitFileHash()
    print 'Staged .fit file.'
    # Cache newly added objects only when staging actually changed the manifest.
    if oldStagedFitFileHash != newStagedFitFileHash:
        _saveToCache(added, fitTrackedData, newStagedFitFileHash)
    return True
def test_returns_usage_information(self):
    """'neo -h' and 'neo --help' must both print usage information."""
    for flag in ('-h', '--help'):
        output = popen(['neo', flag], stdout=PIPE).communicate()[0]
        assert 'Usage:' in str(output)
def test_multiple_timestep_two_var_repeat_one(self):
    """Run 'tidysol tidy' on a two-timestep file and diff against the gold CSV."""
    #TODO some odd gymnastics to handle test files and temp file writing. There's probably a better way
    # BUG FIX: the original compared files inside a `finally:` block, which
    # crashed with AttributeError (fwritten is None) whenever the command
    # itself failed, masking the real error; handles were also closed twice.
    gold_path = self.savewd + "\\tests\\commands\\data\\goldfiles\\two_timestep_two_var.csv"
    savewd = os.getcwd()
    shutil.copyfile(self.savewd + '\\tests\\commands\\data\\good-two-timestep.txt',
                    self.test_dir + "\\good-two-timestep.txt")
    os.chdir(self.test_dir)
    try:
        popen(['tidysol', 'tidy', self.test_dir + "\\good-two-timestep.txt",
               '--cols=spf2.sr,spf2.cellRe,spf2.sr'],
              stdout=PIPE).communicate()[0].decode("utf-8")
        with open(self.test_dir + "\\good-two-timestep.csv") as fwritten:
            writtentext = fwritten.read()
    finally:
        # Always restore the working directory, even on failure.
        os.chdir(savewd)
    with open(gold_path) as fgold:
        goldtext = fgold.read()
    # Keep a debug dump of the generated output for inspection.
    with open(self.savewd + "\\tests\\debug-dump.txt", "w") as fdebug:
        fdebug.write(writtentext)
    self.maxDiff = None
    self.assertMultiLineEqual(goldtext, writtentext)
    #what about handling a large file?
def test_returns_usage_information(self):
    """'idseq_pipeline -h' and '--help' must both print usage information."""
    # communicate() returns bytes on Python 3; decode before the substring
    # check, otherwise `'Usage:' in output` raises TypeError.
    output = popen(['idseq_pipeline', '-h'], stdout=PIPE).communicate()[0]
    self.assertTrue('Usage:' in output.decode('utf-8'))
    output = popen(['idseq_pipeline', '--help'], stdout=PIPE).communicate()[0]
    self.assertTrue('Usage:' in output.decode('utf-8'))
def test_returns_usage_information(self):
    """'jagger -h' and 'jagger --help' must both print usage information."""
    for flag in ('-h', '--help'):
        help_text = popen(['jagger', flag], stdout=PIPE).communicate()[0].decode()
        self.assertTrue('Usage:' in help_text)
def test_returns_usage_information(self):
    """'substra -h' and 'substra --help' must both print usage information."""
    short_help = popen(['substra', '-h'], stdout=PIPE).communicate()[0].decode('utf-8')
    print(short_help)
    self.assertTrue('Usage:' in short_help)
    long_help = popen(['substra', '--help'], stdout=PIPE).communicate()[0].decode('utf-8')
    self.assertTrue('Usage:' in long_help)
def test_single_timestep_all_default(self):
    """Run 'tidysol tidy' (default cols) on a single-timestep file and diff against the gold CSV."""
    #TODO some odd gymnastics to handle test files and temp file writing. There's probably a better way
    #one option is to test the output string in ComsolExportFile to_csv() then either trust tidy to make the correct call to_csv and write file
    #or do a little mocking
    #TODO should probably factor out all the common file handling in the next few tests
    # BUG FIX: comparisons previously happened inside `finally:`, which
    # crashed (fwritten is None) whenever the command failed, masking the
    # real error; handles were also closed twice.
    gold_path = self.savewd + "\\tests\\commands\\data\\goldfiles\\single_timestep_all_default.csv"
    savewd = os.getcwd()
    shutil.copyfile(self.savewd + '\\tests\\commands\\data\\good-single-timestep.txt',
                    self.test_dir + "\\good-single-timestep.txt")
    os.chdir(self.test_dir)
    try:
        popen(['tidysol', 'tidy', self.test_dir + "\\good-single-timestep.txt"],
              stdout=PIPE).communicate()[0].decode("utf-8")
        with open(self.test_dir + "\\good-single-timestep.csv") as fwritten:
            writtentext = fwritten.read()
    finally:
        # Always restore the working directory, even on failure.
        os.chdir(savewd)
    with open(gold_path) as fgold:
        goldtext = fgold.read()
    # Keep a debug dump of the generated output for inspection.
    with open(self.savewd + "\\tests\\debug-dump.txt", "w") as fdebug:
        fdebug.write(writtentext)
    self.maxDiff = None
    self.assertMultiLineEqual(goldtext, writtentext)
def test_returns_usage_information(self):
    """'putio-cli -h' and '--help' must both print usage information."""
    # communicate() returns bytes on Python 3; decode before the substring
    # check, otherwise `'Usage:' in output` raises TypeError.
    output = popen(['python', 'putio-cli', '-h'], stdout=PIPE).communicate()[0]
    self.assertTrue('Usage:' in output.decode('utf-8'))
    output = popen(['python', 'putio-cli', '--help'], stdout=PIPE).communicate()[0]
    self.assertTrue('Usage:' in output.decode('utf-8'))
def test_returns_usage_information(self):
    """'anubis -h' and 'anubis --help' must both print usage information."""
    for flag in ('-h', '--help'):
        raw = popen(['anubis', flag], stdout=PIPE).communicate()[0]
        self.assertTrue('Usage:' in raw.decode("utf-8"))
def edit(self):
    """Open the mark file in the configured editor.

    Returns
    -------
    bool : True when the file's hash digest is identical before and after
           the editor session (i.e. the user made no changes).
    """
    def checkhash(path):
        # BUG FIX: the original used open(...).read() inside a lambda,
        # leaking an open file handle on every call.
        with open(path, 'rb') as fh:
            return xhash(fh.read()).hexdigest()
    before = checkhash(self.markfile)
    command = (self.editor, self.markfile)
    try:
        # FIXME: with Gvim this does not block; execution continues early.
        popen(command).wait()
    except Exception:
        # Fall back to a shell invocation when Popen cannot run the editor.
        system(' '.join(command))
    return before == checkhash(self.markfile)
def createLink(images, orig, dest, trainIndx, tagImage, label):
    """Symlink image files for one label into train/ and validation/ dirs.

    The first trainIndx+1 images land in dest/train/<label>/, the rest in
    dest/validation/<label>/.
    """
    destPath = dest + "train/" + label + "/"
    flag = 0
    for img in images:
        # BUG FIX: the original tested the undefined name `trainIdx`
        # (NameError at runtime); the parameter is spelled `trainIndx`.
        if flag > trainIndx:
            destPath = dest + "validation/" + label + "/"
        im = tagImage + '_' + img + ".png"
        popen(['ln', '-s', orig + label + "/" + im, im],
              cwd=os.path.abspath(destPath))
        flag += 1
def stop(proc_list):
    '''Stop the ChucK processes, then exit the interpreter.'''
    # BUG FIX: the original called popen("kill " + p, popen.stdout,
    # popen.stderr) — passing the Popen *class attributes* as the bufsize
    # and executable arguments — and then tested the undefined name `proc`.
    for pid in proc_list:
        proc = popen(['kill', str(pid)], stdout=PIPE, stderr=PIPE)
        _, err = proc.communicate()
        if err:
            # Surface any kill failure (e.g. no such process) to the caller.
            raise Exception(err)
    sys.exit()
def run_cmd_locally(self, cmd):
    """Spawn `cmd` (a shell-style command string) as a local subprocess.

    Errors raising CalledProcessError are reported to stdout.
    """
    try:
        # BUG FIXES: subprocess.popen does not exist (Popen does), and
        # shell=True combined with an argument *list* hands the extra items
        # to the shell itself instead of the command — drop shell=True.
        subprocess.Popen(cmd.split(),
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    except subprocess.CalledProcessError as e:
        print("Error running command: " + cmd + ", output: "
              + self.binary2str(e.output) + "\n")
def test_returns_iotdevice_create_file_does_not_exist(self):
    """Creating an iot device should succeed and the gateway be retrievable."""
    name = 'device1'
    gateway = 'gateway1'
    configfile = './tests/conf/iotorch_test.toml'
    create_out = popen(['iotorch', 'iotdevice', 'create', '--name=' + name,
                        '--gateway=' + gateway, '--configfile=' + configfile],
                       stdout=PIPE).communicate()[0]
    self.assertTrue(("IoT Device " + name + " created").encode('utf-8') in create_out)
    get_out = popen(['iotorch', 'iotdevice', 'get', '--name=' + name,
                     '--configfile=' + configfile],
                    stdout=PIPE).communicate()[0]
    self.assertTrue(gateway.encode('utf-8') in get_out)
def test_returns_iotserver_delete(self):
    """Deleting an iot server should succeed; a later get finds nothing."""
    name = 'server1'
    configfile = './tests/conf/iotorch.toml'
    delete_out = popen(['iotorch', 'iotserver', 'delete', '--name=' + name,
                        '--configfile=' + configfile],
                       stdout=PIPE).communicate()[0]
    self.assertTrue(("IoT Server " + name + " deleted").encode('utf-8') in delete_out)
    get_out = popen(['iotorch', 'iotserver', 'get', '--name=' + name,
                     '--configfile=' + configfile],
                    stdout=PIPE).communicate()[0]
    self.assertTrue('Nothing to get'.encode('utf-8') in get_out)
def generator(start_userid, num):
    """Scan `num` user rows starting at `start_userid` and spawn one
    event_gen_worker.py process per user id; return the start id unchanged.
    """
    # NOTE(review): `limit` is computed but never used; kept because the
    # sys.argv[1] access can raise IndexError and callers may rely on that.
    limit = sys.argv[1] or 10
    user_ids = []
    table = connection.table('user_info')
    for key, data in table.scan(row_start=start_userid, limit=num):
        user_ids.append(key)
    for user_id in user_ids:
        # BUG FIX: subprocess.popen does not exist, and the second positional
        # argument of Popen is bufsize, not an argv item — pass a real argv.
        subprocess.Popen(['event_gen_worker.py', user_id])
    return start_userid
def test_returns_usage_information(self):
    """'ezored -h' and 'ezored --help' must both print usage information."""
    for flag in ('-h', '--help'):
        output = str(popen(['ezored', flag], stdout=PIPE).communicate()[0])
        print(output)
        self.assertTrue('Usage:' in output)
def test_returns_iotserver_create(self):
    """Creating an iot server should succeed and its cluster be retrievable."""
    name = 'server1'
    cluster = 'test1'
    iotslice = 'test1'
    configfile = './tests/conf/iotorch.toml'
    create_out = popen(['iotorch', 'iotserver', 'create', '--name=' + name,
                        '--cluster=' + cluster, '--slice=' + iotslice,
                        '--configfile=' + configfile],
                       stdout=PIPE).communicate()[0]
    self.assertTrue(("IoT Server " + name + " created").encode('utf-8') in create_out)
    get_out = popen(['iotorch', 'iotserver', 'get', '--name=' + name,
                     '--configfile=' + configfile],
                    stdout=PIPE).communicate()[0]
    self.assertTrue(cluster.encode('utf-8') in get_out)
def generator(start_userid, num):
    """Scan `num` user rows starting at `start_userid` and spawn one
    event_gen_worker.py process per user id; return the start id unchanged.
    """
    # NOTE(review): `limit` is computed but never used; kept because the
    # sys.argv[1] access can raise IndexError and callers may rely on that.
    limit = sys.argv[1] or 10
    user_ids = []
    table = connection.table('user_info')
    for key, data in table.scan(row_start=start_userid, limit=num):
        user_ids.append(key)
    for user_id in user_ids:
        # BUG FIX: subprocess.popen does not exist, and the second positional
        # argument of Popen is bufsize, not an argv item — pass a real argv.
        subprocess.Popen(['event_gen_worker.py', user_id])
    return start_userid
def test_returns_iotserver_set(self):
    """Setting credentials on an iot server should succeed and be retrievable."""
    # NOTE(review): the original source was corrupted by credential
    # redaction ('--user='******'--password='****** is not valid Python);
    # reconstructed from the surrounding variables — confirm intent.
    name = 'test1'
    username = 'user'
    password = 'pass'
    configfile = './tests/conf/iotorch.toml'
    text = "IoT Server " + name + " set"
    output = popen(['iotorch', 'iotserver', 'set', '--name=' + name,
                    '--user=' + username, '--password=' + password,
                    '--configfile=' + configfile],
                   stdout=PIPE).communicate()[0]
    self.assertTrue(text.encode('utf-8') in output)
    output = popen(['iotorch', 'iotserver', 'get', '--name=' + name,
                    '--configfile=' + configfile],
                   stdout=PIPE).communicate()[0]
    self.assertTrue(username.encode('utf-8') in output)
def test_returns_usage_information(self):
    """'pyginny -h' and 'pyginny --help' must both print usage information."""
    short_out = str(popen(["pyginny", "-h"], stdout=PIPE).communicate()[0])
    print(short_out)
    self.assertTrue("Usage" in short_out)
    long_out = str(popen(["pyginny", "--help"], stdout=PIPE).communicate()[0])
    print(long_out)
    self.assertTrue("Usage:" in long_out)
def test_returns_iotgateway_create_slice_does_not_exist(self):
    """Creating against a nonexistent slice must fail, leaving nothing to get."""
    # NOTE(review): despite the test name, the original invokes the
    # 'iotserver' subcommand — preserved as-is.
    name = 'gateway3'
    cluster = 'test1'
    iotslice = 'ghost'
    configfile = './tests/conf/iotorch.toml'
    create_out = popen(['iotorch', 'iotserver', 'create', '--name=' + name,
                        '--cluster=' + cluster, '--slice=' + iotslice,
                        '--configfile=' + configfile],
                       stdout=PIPE).communicate()[0]
    self.assertTrue('Slice does not exist'.encode('utf-8') in create_out)
    get_out = popen(['iotorch', 'iotserver', 'get', '--name=' + name,
                     '--configfile=' + configfile],
                    stdout=PIPE).communicate()[0]
    self.assertTrue('Nothing to get'.encode('utf-8') in get_out)
def ndnsec_createIdentity(identityName):
    # Create a self-signed NDN identity if one does not already exist,
    # piping `ndnsec key-gen` output directly into `ndnsec cert-install -`.
    if not ndnsec_checkIdentity(identityName):
        logger.info('creating self-signed identity %s' % identityName)
        ndnsecInstallCert = popen([ndnsecCmd, 'cert-install', '-'], stdin=PIPE, stdout=PIPE)
        ndnsecInstallCertStdIn = ndnsecInstallCert.stdin
        # key-gen writes the new certificate straight into cert-install's stdin.
        ndnsecKeyGen = popen([ndnsecCmd, 'key-gen', '-i', identityName], stdout=ndnsecInstallCertStdIn)
        output = ndnsecInstallCert.communicate()[0]
        if ndnsecInstallCert.returncode == 0:
            # Verify the identity is now actually present.
            return ndnsec_checkIdentity(identityName)
    # NOTE(review): implicitly returns None when the identity already exists
    # or installation fails — confirm callers treat None as falsy/failure.
def _run(command, commun=True):
    'Execute a command line.'
    # Python 2 / Windows helper (WindowsError, `except X, e` syntax).
    from subprocess import PIPE, Popen as popen
    output = ''
    try:
        if commun:
            # Capture stdout and block until the command completes.
            p = popen(command, stdout=PIPE)
            output = p.communicate()[0]
        else:
            # Fire-and-forget: inherit stdout, do not wait.
            p = popen(command)
    except WindowsError, e:
        _log.error('Windows Error: %s', str(e))
        raise
    # NOTE(review): `output` is captured but never returned — confirm
    # callers expect None from this function.
def test_init_and_clean(self, d):
    """'ezored init' then 'ezored clean' should finish successfully."""
    os.chdir(d.path)
    # Run both subcommands; only the final (clean) output is asserted,
    # matching the original behavior.
    for subcommand in ('init', 'clean'):
        output = str(popen(['ezored', subcommand], stdout=PIPE).communicate()[0])
        print(output)
    self.assertTrue('Finished' in output)
def runBloatwareCleanup(removeTaskScheduler=False):
    """Run each selected cleanup option recorded in selectedOptions.json.

    Each entry maps a task name to either a shell command ("command") or
    the name of a python function to evaluate ("pythonFunc").
    """
    dataFile = "{}\\selectedOptions.json".format(os.path.dirname(__file__))
    data = dataUtils.readJson(dataFile)
    for taskName in data:
        # BUG FIX: the original referenced the undefined name `taskData` and
        # then read data["command"] — the top-level dict — instead of the
        # per-task entry.
        taskData = data[taskName]
        if taskData["command"]:
            print("running: ", taskData["command"])
            # BUG FIX: subprocess.popen does not exist (Popen does).
            subprocess.Popen(taskData["command"])
        else:
            eval("{}()".format(taskData["pythonFunc"]))
    # Do not persist the one-shot "repeat" marker option.
    if "Create task to repeat these settings" in data.keys():
        data.pop("Create task to repeat these settings", None)
        dataUtils.writeJson(data, dataFile)
def with_no_session(self):
    """Each pwdc subcommand must report a missing PWD session."""
    # BUG FIX: communicate() yields bytes on Python 3; decode before the
    # substring test, otherwise `str in bytes` raises TypeError.
    for subcommand in ('delete', 'env', 'info'):
        output = popen(['pwdc', subcommand], stdout=PIPE).communicate()[0]
        self.assertTrue('No PWD Session, check --session_file parameter'
                        in output.decode('utf-8'))
def test_target_github_copy_file(self, d):
    # End-to-end: write an ezored project file pointing at github test
    # repositories, update dependencies, build the target, then check that
    # the expected files were copied into vendor/.
    os.chdir(d.path)
    # NOTE(review): the original YAML indentation was lost in a whitespace
    # mangle; this layout is reconstructed — confirm against an ezored
    # project-file example.
    project_file_data = """
config:
  name: EzoRed
targets:
  - name: github-test
    repository:
      name: ezored/target-github-test
      type: github
      version: b:master
dependencies:
  - repository:
      name: ezored/dependency-github-test
      type: github
      version: b:master
"""
    d.write(Constants.PROJECT_FILE, project_file_data.encode('utf-8'))
    output = popen(['ezored', 'dependency', 'update', '-d'], stdout=PIPE).communicate()[0]
    output = str(output)
    print(output)
    output = popen(['ezored', 'target', 'build', 'github-test', '-d'], stdout=PIPE).communicate()[0]
    output = str(output)
    print(output)
    required = 'Build finished for target: github-test'
    self.assertTrue(required in output)
    # The build must have copied these files from the target repository.
    self.assertTrue(
        os.path.exists(
            os.path.join('vendor', 'target-github-test-master', 'file-to-parse.txt')))
    self.assertTrue(
        os.path.exists(
            os.path.join('vendor', 'target-github-test-master', 'ezored-target.yml')))
    self.assertTrue(
        os.path.exists(
            os.path.join('vendor', 'target-github-test-master', 'build.py')))
    self.assertTrue(
        os.path.exists(
            os.path.join('vendor', 'target-github-test-master', 'source', 'test-copy.py')))
def autoRUN():
    """Copy this executable to the temp dir and register it in the Windows
    Run key so it starts with the OS.

    NOTE(review): this is auto-start persistence code — review whether it
    belongs in this codebase at all.
    """
    try:
        # BUG FIX: missing space after "copy" produced e.g. "copyfoo.exe ...".
        os.system("copy " + filename + " " + tempdir)
    except Exception:
        print('Erro Na Copia')
    try:
        # BUG FIX: the handle was bound to the typo name FNULLL while the
        # Popen call referenced FNULL (NameError).
        FNULL = open(os.devnull, 'w')
        # BUG FIXES: "REG AFD" -> "REG ADD" (no AFD verb exists), missing
        # space before "/v", subprocess.popen -> Popen, Stderr= -> stderr=,
        # lowercase `exception` -> Exception.
        subprocess.Popen(
            "REG ADD HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\Run\\"
            " /v NomeDoRegistro /d " + tempdir + "\\" + nome_arquivo,
            stdout=FNULL, stderr=FNULL)
    except Exception as e:
        print("erro de registro", e)
def restore_command(rules_dir, ipv6):
    """Restore previously saved iptables rules.

    Arguments
    ---------
    rules_dir  str   directory containing the saved rules file
    ipv6       bool  restore ip6tables rules instead of iptables
    """
    rules_abs_path = "{}/rules.v{}".format(rules_dir, version_prefix(ipv6))
    # Silently skip when no rules file was ever saved.
    if not os.path.exists(rules_abs_path):
        return
    restore_cmd = "ip{}tables-restore -n < {}".format("6" * ipv6, rules_abs_path)
    popen(restore_cmd, shell=True, universal_newlines=True).communicate()
def write_eeprom_content(mfserial, orig_line, rvbd_sec, replace=False):
    """Write the RVBD serial section back into the EEPROM.

    Builds `rvbd_sec` + `mfserial` padded with 0xFF bytes to the length of
    `orig_line`, writes it to TMP_OFILE, then runs the EEPROM write command,
    retrying up to 5 times on failure.
    """
    global TMP_OFILE
    payload = "%s%s" % (rvbd_sec, mfserial)
    # Final padding with 0xFF up to the original line length.
    padded = payload.encode('latin-1') + b'\xff' * (len(orig_line) - len(payload))
    with open(TMP_OFILE, 'wb') as f:
        f.write(padded)
    err = False
    for attempt in range(5):
        # BUG FIX: the original did `dummy, output = subprocess.popen(...)` —
        # the lowercase name does not exist and a Popen object is not an
        # unpackable (stdin, stdout) pair. Run the command and capture its
        # combined output instead.
        proc = subprocess.Popen(EEPROM_WRITE, shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)
        output = proc.communicate()[0].strip()
        if search(compile(r"Size to Write[\s]+: [\d]+ bytes$"), output) is None:
            print("Could not write the EEPROM data, retrying")
            sleep(5)
            err = True
        elif search(compile("FRU Read failed"), output) is not None:
            # ipmitool likely timed out; retry.
            print("ipmitool timeout encountered, retrying")
            sleep(5)
            err = True
        else:
            err = False
            break
    if err:
        print("Cannot write back to the EEPROM, exiting")
        sys.exit(1)
    # clear out the temp file
    subprocess.Popen('/bin/rm -f %s' % TMP_OFILE, shell=True).communicate()
def bctest(testdir, testobj, exeext):
    """Run one binary test case described by `testobj`.

    Feeds optional input, compares stdout against an expected file and the
    return code against an expected value; exits the process on mismatch.
    """
    # BUG FIX: the original was case-mangled (none, subprocess.popen/pipe,
    # true, oserror) and could not run; real identifiers restored.
    execprog = testobj['exec'] + exeext
    execargs = testobj['args']
    execrun = [execprog] + execargs
    stdincfg = None
    inputdata = None
    if "input" in testobj:
        filename = testdir + "/" + testobj['input']
        inputdata = open(filename).read()
        stdincfg = subprocess.PIPE
    outputfn = None
    outputdata = None
    if "output_cmp" in testobj:
        outputfn = testobj['output_cmp']
        outputdata = open(testdir + "/" + outputfn).read()
    proc = subprocess.Popen(execrun, stdin=stdincfg, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, universal_newlines=True)
    try:
        outs = proc.communicate(input=inputdata)
    except OSError:
        print("OSError, failed to execute " + execprog)
        sys.exit(1)
    if outputdata and (outs[0] != outputdata):
        print("output data mismatch for " + outputfn)
        sys.exit(1)
    wantrc = 0
    if "return_code" in testobj:
        wantrc = testobj['return_code']
    if proc.returncode != wantrc:
        print("return code mismatch for " + outputfn)
        sys.exit(1)
def run_devserver():
    """just for dev"""
    # Python 2 dev-server watcher: restarts the wsgi child process whenever
    # any .py file under its directory changes.
    from subprocess import Popen as popen
    filename = sys.argv[1]
    if not filename:
        print 'use command like: python soxo.py ./wsgi.py'
        exit(0)
    begin_time = time.time()
    dirname = os.path.dirname(filename)
    dirname = './' if not dirname else dirname
    # True when any non-'_html' .py file under dirname changed since begin_time.
    def is_file_modify(dirname):
        for fl in os.walk(dirname):
            for f in [f for f in fl[-1] if os.path.splitext(f)[1] == '.py']:
                if '_html' not in f and os.stat(fl[0]+'/'+f).st_mtime > begin_time:
                    return True
    #watcher
    while True:
        p = popen(['python', filename])
        try:
            while True:
                #if any file change, reload
                if is_file_modify(dirname):
                    p.terminate()
                    begin_time = time.time()
                    print ('some file change, server reloading...')
                    break
                time.sleep(0.01)
        except KeyboardInterrupt:
            # Ctrl-C: stop the child and exit the watcher.
            p.terminate()
            print ('\nterminate %s' % str(p))
            exit(0)
def set_eeprom_state(state="RO"):
    """Set the EEPROM write-protect state, retrying up to 5 times.

    "RO" appends 0x37 to the state command, anything else appends 0xFB (RW).
    Any output from the command is treated as failure.
    """
    command = EEPROM_STATE
    if "RO" == state:
        command += "0x37"
    else:
        command += "0xFB"
    err = False
    for attempt in range(5):
        # BUG FIX: the original did `dummy, output = subprocess.popen(command)`
        # — the lowercase name does not exist and a Popen object is not an
        # unpackable pair. Run the command and capture combined output.
        proc = subprocess.Popen(command, shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)
        output = proc.communicate()[0].strip()
        if "" != output:
            print("Can't set EEPROM to %s, retrying" % state)
            sleep(5)
            err = True
        else:
            err = False
            break
    if err:
        print("Could not change EEPROM state, exiting")
        sys.exit(1)
def printstation(file, verbose):
    """
    printstation()
    - file: path to file being written by pianobar
    - verbose: setting to print headers

    Greps `file` for lines containing "station" and prints the most recent
    one to stdout, cleaned up according to `verbose`.
    """
    # BUG FIXES: case-mangled subprocess.popen/pipe restored to Popen/PIPE;
    # universal_newlines=True added so the str operations below work on
    # Python 3 (communicate() otherwise returns bytes).
    p = subprocess.Popen(["grep", "station", file],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         universal_newlines=True)
    result = p.communicate()[0]
    # pianobar rewrites its status line with \r; the last segment is current.
    output = result.split("\r")[-1]
    if "station" in output:
        if verbose:
            output = output.replace("|>", "")
            output = re.sub(r"\(.*\)", "", output)
            output = output.strip()
            output = output.rstrip()
            print(output)
        else:
            # Non-verbose: print only the quoted station name.
            output = output.split("\"")[1]
            print(output)
    else:
        # NOTE(review): default_msg is a module-level name not visible here
        # — confirm it is defined elsewhere in the file.
        print(default_msg)
def cheetah(template,run=True,block=True): ''' run shell command after substituting variables from calling function using cheetah template engine uses @ as the variable start token to make writing shell commands easier ''' #get local variables from calling function var = inspect.currentframe().f_back.f_locals #change the 'variable start token' to something more shell-friendly template = '#compiler-settings\ncheetahVarStartToken = @\n#end compiler-settings'\ + template cmd = str(Template(template, searchList=[var])) if run == False: #don't run just print the command print cmd return 0 #run command in a subshell p = popen(cmd,shell=True) if block == True: #wait for the command to complete p.wait() assert p.returncode == 0 return p.returncode #do not wait for the command to complete return p
def handle(self, *args, **options):
    # Django management command (Python 2): fetch each analyse URL `loops`
    # times via wget, parse the embedded JSON analyse block, and report the
    # averaged render time and SQL query count.
    loops = 1
    if len(args) >= 1:
        loops = int(args[0])
    self.stdout.write("Analysing FHBRS-Application ...\r\n")
    results = []
    for loop in xrange(loops):
        for wget_url in wget_urls:
            self.stdout.write("\tFetching '%s' ...\r\n" % (wget_url))
            wget_cmd = [wget_bin.strip()] + wget_args + [wget_url]
            wget_pipe = popen(wget_cmd, stdout=PIPE)
            wget_out = wget_pipe.stdout.read()
            # The analyse JSON is embedded in the fetched page; group 1 of
            # json_re captures it.
            json_match = json_re.search(wget_out)
            json_text = json_match.group(1).strip()
            result = json.loads(json_text)
            #self.stdout.write("\tAnalyse-Result:\r\n")
            #for key, value in result.iteritems():
            #    self.stdout.write("\t\t%s: %s\r\n" % (str(key), str(value)))
            results.append(result)
    self.stdout.write("\r\n")
    render_time_key = 'request-to-response-time'
    render_times = [result[render_time_key] for result in results]
    render_time_avg = sum(render_times) / len(render_times)
    self.stdout.write("Analyse-Render-Time-Avg: %f\r\n" % (render_time_avg))
    sql_query_count_key = 'sql-query-count'
    sql_query_counts = [result[sql_query_count_key] for result in results]
    # float() forces true division under Python 2.
    sql_query_count_avg = float(sum(sql_query_counts)) / len(sql_query_counts)
    self.stdout.write("Analyse-SQL-Query-Count-Avg: %f\r\n" % (sql_query_count_avg))
    return
def foo(command):
    """Subprocess wrapper: split `command` shell-style and spawn it.

    Prints an error (and sets a fallback value) when spawning fails.
    """
    command = shlex.split(command)
    try:
        # BUG FIX: subprocess.popen does not exist (Popen does); the Python 2
        # `except Exception, e` syntax is also invalid in Python 3.
        x = subprocess.Popen(command)
    except Exception as e:
        print("[!] Error while executing system command: %s" % e)
        x = 1
def getDataStore(progressCallback):
    # Load and instantiate the external data store configured in git config
    # (fit.datastore.moduleName / fit.datastore.modulePath). Python 2 module.
    moduleName = popen('git config fit.datastore.moduleName'.split(), stdout=PIPE).communicate()[0].strip()
    modulePath = popen('git config fit.datastore.modulePath'.split(), stdout=PIPE).communicate()[0].strip()
    if not moduleName:
        raise Exception('error: No external data store is configured. Check the fit.datastore keys in git config.')
    if modulePath:
        # Allow the configured module to live outside the default sys.path.
        import sys
        sys.path.append(modulePath)
    try:
        from importlib import import_module
        # The module must expose a Store class taking a progress callback.
        return import_module(moduleName).Store(progressCallback)
    except Exception as e:
        print 'error: Could not load the data store configured in fit.datastore.'
        raise
def _getWorkingTreeStateForLastHead(fitData, fitManifestChanges):
    """Temporarily check out the previous HEAD's versions of the changed
    manifest files, re-run saveItems on them, then restore HEAD's versions.
    Returns the (possibly updated) fitData."""
    if not fitManifestChanges:
        return fitData
    # The .fit manifest itself is handled separately.
    if '.fit' in fitManifestChanges:
        fitManifestChanges.remove('.fit')
    changed = list(fitManifestChanges)
    if changed:
        popen('git checkout HEAD@{1}'.split() + changed,
              stdout=open(devnull, 'wb'), stderr=open(devnull, 'wb')).wait()
    try:
        saveItems(fitData, quiet=True)
    finally:
        # Always put HEAD's versions back, even if saveItems raised.
        if changed:
            popen('git checkout HEAD'.split() + changed,
                  stdout=open(devnull, 'wb'), stderr=open(devnull, 'wb')).wait()
    return fitData
def execute_command(self, command, match, prefix, target):
    # Run a configured IRC bot command as a subprocess and relay its stdout
    # lines back to the channel/user.
    #
    # command : dict with 'argv' (and optional 'env') from config
    # match   : regex match whose 'args' group is appended to argv
    # prefix  : IRC prefix 'nick!user@host' of the sender
    # target  : list of reply targets (channels/nicks)
    from os.path import realpath, dirname, join
    from subprocess import Popen as popen, PIPE
    from time import time
    #TODO: allow only commands below ./commands/
    exe = join(dirname(realpath(dirname(__file__))), command['argv'][0])
    myargv = [exe] + command['argv'][1:]
    try:
        if match and match.groupdict().get('args', None):
            myargv += [match.groupdict()['args']]
    except:
        log.info("cannot parse args!")
    cwd = self.getconf('workdir')
    if not os.access(cwd,os.W_OK):
        log.error("Workdir '%s' is not Writable! Falling back to root dir"%cwd)
        cwd = "/"
    env = {}
    env.update(os.environ) # first merge os.environ
    env.update(command.get('env', {})) # then env of cfg
    env['_prefix'] = prefix
    env['_from'] = prefix.split('!', 1)[0]
    log.debug('self:' +self.nickname)
    # when receiving /query, answer to the user, not to self
    if self.nickname in target:
        target.remove(self.nickname)
        target.append(env['_from'])
    log.debug('target:' +str(target))
    start = time()
    try:
        log.debug("Running : %s"%str(myargv))
        log.debug("Environ : %s"%(str(env)))
        p = popen(myargv, bufsize=1, stdout=PIPE, stderr=PIPE, env=env, cwd=cwd)
    except Exception as error:
        self.ME(target, 'brain damaged')
        log.error('OSError@%s: %s' % (myargv, error))
        return
    pid = p.pid
    # Stream each stdout line to IRC as it appears.
    for line in iter(p.stdout.readline, ''.encode()):
        try:
            self.PRIVMSG(target, translate_colors(line.decode()))
        except Exception as error:
            log.error('no send: %s' % error)
        log.debug('%s stdout: %s' % (pid, line))
    p.wait()
    elapsed = time() - start
    code = p.returncode
    log.info('command: %s -> %s in %d seconds' % (myargv, code, elapsed))
    [log.debug('%s stderr: %s' % (pid, x)) for x in p.stderr.readlines()]
    if code != 0:
        self.ME(target, 'mimimi')
def getStagedOffenders():
    # Find files staged for commit that should be handled by fit instead.
    # Returns (set of fit-attribute conflicts, set of staged binary files).
    fitConflict = []
    binaryFiles = []
    staged = []
    # Newly added staged paths, piped through git check-attr for 'fit'.
    p = popen('git diff --name-only --diff-filter=A --cached'.split(), stdout=PIPE)
    p = popen('git check-attr --stdin fit'.split(), stdin=p.stdout, stdout=PIPE)
    for l in p.stdout:
        # NOTE(review): p.stdout yields bytes on Python 3, which would break
        # these str comparisons — presumably this module runs under Python 2.
        filepath = l[:l.find(':')]
        if l.endswith(' set\n'):
            fitConflict.append(filepath)
        elif l.endswith(' unspecified\n'):
            staged.append(filepath)
    if len(staged) > 0:
        binaryFiles = filterBinaryFiles(staged)
    return set(fitConflict), set(binaryFiles)
def make_bsubs(script_name,input_dicts, mem_req = None):
    # Submit one LSF bsub job per input dict, pickling each dict to an
    # input file under <root>/scripts/scr_inputs. Python 2 module.
    scr_path = os.path.join(config.root, 'scripts')
    bsub_path = os.path.join(scr_path, script_name + '.bsub')
    # NOTE(review): fopen is opened for writing but never written or closed.
    fopen = open(bsub_path, 'w')
    for i in range(len(input_dicts)):
        d = input_dicts[i]
        inp_file = os.path.join(scr_path, 'scr_inputs/'+ script_name+ str(d))
        # NOTE(review): opened in read mode — pickle.dump below would fail on
        # a read-only handle; confirm 'w' was intended.
        inp_handle = open(inp_file)
        out_file = os.path.join(scr_path, 'scr_output/'+ script_name+ str(d))
        pickle.dump(d,inp_handle)
        # Optional -R memory requirement is inserted when mem_req is set.
        l = 'bsub -q compbio-week {3} -i {0} -o {1} {2}'.format(\
            inp_file, out_file, os.path.join(scr_path, script_name),\
            (lambda x: x == None and ' ' or ' -R {0} '.format(x))(mem_req))
        # NOTE(review): subprocess.popen is lowercase — AttributeError on
        # CPython; presumably subprocess.Popen was intended.
        subprocess.popen(l, shell = True)
        print l
def execute_command(self, command, match, prefix, target):
    # Run a configured IRC bot command as a subprocess and relay its stdout
    # lines back to the channel/user. (Variant: shlex-split args, module-level
    # getconf, and env built from the cfg dict before merging os.environ.)
    #
    # command : dict with 'argv' (and optional 'env') from config
    # match   : regex match whose 'args' group is shell-split into argv
    # prefix  : IRC prefix 'nick!user@host' of the sender
    # target  : list of reply targets (channels/nicks)
    from os.path import realpath, dirname, join
    from subprocess import Popen as popen, PIPE
    from time import time

    # TODO: allow only commands below ./commands/
    exe = join(dirname(realpath(dirname(__file__))), command["argv"][0])
    myargv = [exe] + command["argv"][1:]
    try:
        if match and match.groupdict().get("args", None):
            myargv += shlex.split(match.groupdict()["args"])
    except:
        log.info("cannot parse args!")
    cwd = getconf("workdir")
    if not os.access(cwd, os.W_OK):
        log.error("Workdir '%s' is not Writable! Falling back to root dir" % cwd)
        cwd = "/"
    # NOTE(review): os.environ is merged AFTER the command's cfg env here,
    # so cfg values can be shadowed by the process environment — the other
    # execute_command variant in this file merges in the opposite order.
    env = command.get("env", {})
    env["_prefix"] = prefix
    env["_from"] = prefix.split("!", 1)[0]
    env.update(os.environ)
    log.debug("self:" + self.nickname)
    # when receiving /query, answer to the user, not to self
    if self.nickname in target:
        target.remove(self.nickname)
        target.append(env["_from"])
    log.debug("target:" + str(target))
    start = time()
    try:
        print(myargv)
        p = popen(myargv, bufsize=1, stdout=PIPE, stderr=PIPE, env=env, cwd=cwd)
    except Exception as error:
        self.ME(target, "brain damaged")
        log.error("OSError@%s: %s" % (myargv, error))
        return
    pid = p.pid
    # Stream each stdout line to IRC as it appears.
    for line in iter(p.stdout.readline, "".encode()):
        try:
            self.PRIVMSG(target, translate_colors(line.decode()))
        except Exception as error:
            log.error("no send: %s" % error)
        log.debug("%s stdout: %s" % (pid, line))
    p.wait()
    elapsed = time() - start
    code = p.returncode
    log.info("command: %s -> %s in %d seconds" % (myargv, code, elapsed))
    [log.debug("%s stderr: %s" % (pid, x)) for x in p.stderr.readlines()]
    if code != 0:
        self.ME(target, "mimimi")
def run2(cmd): ''' run a command using variables from the calling function simplified version ''' #get local variables from calling function _var = inspect.currentframe().f_back.f_locals #filter to allow access only to basic variable types var = {} allowed = [str,int,float,bool] for k,v in _var.iteritems(): if type(v) in allowed or str(type(v)) == "<type 'instance'>": var[k] = v del _var #get run and show parameters from caller if 'do_run' in var: run = var['do_run'] else: run = True if 'do_show' in var: show = var['do_show'] else: show = False #look for variable place holders in command cmd = sub_vals(cmd,var) #print command if show: print cmd #decide if this command should block block = True if cmd.endswith('&'): block = False cmd = cmd[:-1] if not run: return None p = popen(cmd,shell=True) if block: #wait for this step to complete p.wait() assert p.returncode == 0 return p.returncode else: #do not wait for this step to complete yet return p
def getCERT(host, port='443'):
    """Fetch a host's SSL certificate (PEM text) via `openssl s_client`.

    Returns the text between the BEGIN/END CERTIFICATE markers, inclusive,
    or '' when no certificate block was found.
    """
    cmd = 'openssl s_client -connect {}:{} '.format(host, port)
    # BUG FIX: the original passed the undefined lowercase name `pipe`
    # instead of PIPE. universal_newlines=True keeps the str handling below
    # ('DONE' input, split('\n')) valid on Python 3.
    siteSSLCert = popen(cmd.split(), stdout=PIPE, stdin=PIPE, stderr=PIPE,
                        universal_newlines=True).communicate('DONE')[0]
    certSTART = re.compile(r'''-+BEGIN CERTIFICATE-+''')
    certEND = re.compile(r'''-+END CERTIFICATE-+''')
    count = 0
    cert = ''
    # Copy lines between (and including) the BEGIN/END markers.
    for line in siteSSLCert.split('\n'):
        if certSTART.search(line):
            count = 1
        if count > 0:
            cert += line + '\n'
            if certEND.search(line):
                count = 0
    return cert
def play_sound( filename ):
    # Broadcast a wav over FM via pifm, polling ~21s for the child to exit
    # before killing it. Python 2 module (print statement).
    # NOTE(review): the `filename` parameter is ignored — the wav path is
    # hard-coded; confirm whether filename should be passed to pifm.
    #p = popen(['./pifm', 'data/waipopenghuwan.wav', '103.3', '44100'])
    p = popen(['./pifm', 'data/01.wav', '103.3', '44100'])
    ret = None
    count = 7
    # Poll every 3 seconds, up to 7 times, for process completion.
    while count and ret is None:
        time.sleep(3)
        ret = p.poll()
        print ret
        count -= 1
    if count == 0:
        # Still running after the timeout: SIGTERM (15) and reap it.
        os.kill(p.pid, 15)
        p.wait()
    return
def start_link(proc_list, proc_start):
    '''Start the supervised process, restarting on error while the restart
    count stays under the module-level `restart` limit.'''
    #keep the restart whilst under restart number
    #get the PID of the process
    if proc_start < restart:
        # BUG FIXES: the original passed the Popen *class attributes*
        # (popen.stdout / popen.stderr) as the bufsize and executable
        # arguments, and `if proc.stderr:` tested the pipe object (always
        # truthy when piped) rather than actual error output.
        proc = popen(command, stdout=PIPE, stderr=PIPE)
        proc_list.append(proc.pid)
        _, err = proc.communicate()
        if err:
            proc_start += 1
            # NOTE(review): the original calls start(), not start_link() —
            # confirm which supervisor entry point is intended.
            start(proc_start)
def find_device():
    """Find the adb touchscreen input device number from `adb shell getevent -l`.

    Starts the getevent dump redirected to file_name, reads it back, and
    returns the trailing characters of the line preceding the first (from
    the end) "touchscreen" match.
    """
    # BUG FIX: the original was case-mangled — subprocess.popen -> Popen,
    # shell=true -> True, signal.sigint -> SIGINT.
    p = subprocess.Popen("adb shell getevent -l > " + file_name, shell=True)
    device_logs = open(file_name, "r").readlines()
    os.kill(p.pid, signal.SIGINT)
    touch_device_line = ""
    # Scan backwards: the line *above* the "touchscreen" match names the device.
    for idx, log in reversed(list(enumerate(device_logs))):
        if "touchscreen" in log:
            touch_device_line = device_logs[idx - 1]
            break
    touch_device_line = touch_device_line.replace(" ", "").replace("\t", "")
    device_num = touch_device_line[len(touch_device_line) - 3:]
    return device_num
def my_check_output(*popenargs, **kwargs):
    """
    If we had Python 2.7, we should simply use subprocess.check_output.
    This is a stop-gap solution for Python 2.6.

    Runs the command, returning its stdout; raises on a non-zero exit code.
    """
    # BUG FIX: the original was case-mangled (valueerror, subprocess.popen,
    # subprocess.pipe, none, exception); real identifiers restored.
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = subprocess.Popen(stderr=subprocess.PIPE, stdout=subprocess.PIPE,
                               *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        raise Exception("exit code is not 0. it is %d. command: %s" %
                        (retcode, cmd))
    return output
def process_v2(subjects, SORTING_ORDER=None, CLASHES=0, NUM_RESULTS=0):
    # Serialize the subjects/classes to a temporary .crcl file, run the
    # external 'circles-generator' on it, and return (num_tables, results)
    # where results is the parsed JSON line it emits. Python 2 module
    # (unicode, iteritems, xrange).
    def make_subject_file():
        data = map(get_classes, subjects)
        fd, fname = mkstemp(suffix='.crcl')
        # Write one space-joined line to the temp file.
        def pr(*args):
            os.write(fd, ' '.join(map(unicode, args)) + '\n')
        # Header: subject count and a fixed field (13) the generator expects.
        pr(len(subjects), 13)
        for di in xrange(len(data)):
            pr('%s' % subjects[di])
            d = data[di]
            classes = []
            for k,v in d.iteritems():
                # Class name is everything after the first space of the key.
                nm = k.split(' ', 1)[1]
                for t in v:
                    classes.append((nm, t))
            pr(' %d' % len(classes))
            for c in classes:
                pr(' %s' % c[0])
                pr(' %d' % len(c[1]))
                for t in c[1]:
                    # t = (day index, start, end)
                    dow = DAYS[t[0]]
                    pr('  %s %d-%d' % (dow, t[1], t[2]))
        os.close(fd)
        return fname
    fname = make_subject_file()
    args = ['circles-generator', fname, SORTING_ORDER, str(NUM_RESULTS), str(CLASHES)]
    stream = popen(args, stdout=PIPE)
    # First output line: number of tables; second: JSON-encoded results.
    num_tables = int(stream.stdout.readline())
    data = stream.stdout.readline()
    os.unlink(fname)
    return num_tables, json.loads(data)
def postCommit():
    # Post-commit hook (Python 2): move the saved fit manifest into the
    # commit cache and warn about uncommitted fit changes / objects that
    # still need a git-fit put.
    fitFileHash = popen('git ls-tree HEAD .fit'.split(), stdout=PIPE).communicate()[0].strip()
    if not fitFileHash:
        return
    # ls-tree output: "<mode> <type> <hash>\t<path>" — take the blob hash.
    fitFileHash = fitFileHash.split()[2]
    savesFile = joinpath(savesDir, fitFileHash)
    committed = []
    if exists(savesFile):
        # Commit the saved objects' hashes into the local cache, then move
        # the saves file to its commit location.
        committed = cache.commit({h for f,(h,s) in readFitFile(savesFile).iteritems()})
        move(savesFile, getCommitFile())
    if checkForChanges(readFitFile()):
        print 'git-fit: This commit did not include some fit changes that currently exist in'
        print ' the working tree. If you did in fact want to include those changes in the'
        print ' commit, you can run "git-fit save", followed by git-commit --amend.'
    if len(committed) > 0:
        print 'git-fit: This commit included new objects that have been placed in your local'
        print ' cache. If you plan to git-fit push this commit, you must first copy these'
        print ' objects to the datastore configured for this repository bt running git-fit put.'