def check_file_in_db(self, selectfield, table, field, file):
    """Check if a file has already been put in this table.

    Return True if the file should be reprocessed.
    Return False if the processing should be skipped.
    """
    self.connect()
    query = "SELECT H.%s FROM %s AS H WHERE H.%s = '%s';" % (selectfield, table, field, file)
    if self.opts.verbose:
        print query
    self.DBcursor.execute(query)
    found_file = self.DBcursor.fetchone()[0]
    self.close()
    if found_file:
        if self.opts.verbose:
            print "Found '%s' in the DB" % found_file
        if os.path.isfile(os.path.join(self.dest_dir, found_file)):
            return False
        # Force to write files but delete previous first
        if self.opts.force:
            cmd = "rm -rf %s*" % os.path.join(self.dest_dir, found_file)
            os.system(cmd)
            return True
        else:
            return True
    else:
        return True
def run_debugger(testname, pythonfile, pydb_opts='', args='', outfile=None):
    global srcdir, builddir, pydir
    rightfile = os.path.join(srcdir, 'data', "%s.right" % testname)
    os.environ['PYTHONPATH'] = os.pathsep.join(sys.path)
    cmdfile = os.path.join(srcdir, "%s.cmd" % testname)
    outfile = "%s.out" % testname
    outfile_opt = '--output=%s ' % outfile
    # print "builddir: %s, cmdfile: %s, outfile: %s, rightfile: %s" % \
    #     (builddir, cmdfile, outfile, rightfile)
    if os.path.exists(outfile):
        os.unlink(outfile)
    cmd = "%s --command %s %s %s %s %s" % \
        (pydb_path, cmdfile, outfile_opt, pydb_opts, pythonfile, args)
    os.system(cmd)
    fromfile = rightfile
    fromdate = time.ctime(os.stat(fromfile).st_mtime)
    fromlines = open(fromfile, 'U').readlines()
    tofile = outfile
    todate = time.ctime(os.stat(tofile).st_mtime)
    tolines = open(tofile, 'U').readlines()
    diff = list(difflib.unified_diff(fromlines, tolines, fromfile, tofile,
                                     fromdate, todate))
    if len(diff) == 0:
        os.unlink(outfile)
    for line in diff:
        print line,
    return len(diff) == 0
def test_example1(self):
    testdir = path(l2emod.__file__).parent / 'testtex'
    fn = testdir / 'example1.tex'
    print "file %s" % fn
    with make_temp_directory() as tmdir:
        nfn = '%s/%s' % (tmdir, fn.basename())
        os.system('cp %s/* %s' % (testdir, tmdir))
        os.chdir(tmdir)
        l2e = latex2edx(nfn, output_dir=tmdir)
        l2e.convert()
        xbfn = nfn[:-4] + '.xbundle'
        self.assertTrue(os.path.exists(xbfn))
        # xb = open(xbfn).read()
        # self.assertIn('<chapter display_name="Unit 1" start="2013-11-22" url_name="Unit_1">', xb)
        xml = etree.parse(xbfn).getroot()
        chapter = xml.find('.//chapter')
        self.assertTrue(chapter.get('display_name') == 'Unit 1')
        self.assertTrue(chapter.get('start') == '2013-11-22')
        self.assertTrue(chapter.get('url_name') == 'Unit_1')
        cfn = path(tmdir) / 'course/2013_Fall.xml'
        self.assertTrue(os.path.exists(cfn))
        cfn = path(tmdir) / 'chapter/Unit_1.xml'
        self.assertTrue(os.path.exists(cfn))
        # self.assertIn('<sequential display_name="Introduction" due="2013-11-22" url_name="Introduction"', open(cfn).read())
        xml = etree.parse(cfn).getroot()
        seq = xml.find('.//sequential')
        self.assertTrue(seq.get('display_name') == 'Introduction')
        self.assertTrue(seq.get('due') == '2013-11-22')
        self.assertTrue(seq.get('url_name') == 'Introduction')
        self.assertIn('<problem url_name="p1"/>', open(cfn).read())
def controllerXbox(self):
    # loop around xbox events
    while True:
        self.speed = 0
        # pygame.event.pump()
        # if self.pressed[K_ESCAPE]: break
        for event in xbox_read.event_stream(deadzone=12000):
            # if either the up/down button is pressed, set the Y axis accordingly
            if (event.key == 'RT' or event.key == 'LT'):
                if event.key == 'RT' and event.value > 1:
                    self.speed = int(event.value)/17
                    self.moveY(0)
                elif event.key == 'LT' and event.value > 1:
                    self.speed = int(event.value)/17
                    self.moveY(2)
                else:
                    self.moveY(1)
            # if either the left/right button is pressed, set the X axis accordingly
            if (event.key == 'dl' or event.key == 'dr'):
                if event.key == 'dl' and event.value == 1:
                    self.moveX(0)
                elif event.key == 'dr' and event.value == 1:
                    self.moveX(2)
                else:
                    self.moveX(1)
            # Runs the move function
            self.move(self.speed)
            # if(event.key == 'guide'): break
            os.system(['clear', 'cls'][os.name == 'nt'])
def display(files, card_random, stack_random, wildcard, reverse):
    stacks = sfile.read_stack_files(files)
    stacks.sort()
    if card_random:
        card_random = 'random'
    if stack_random:
        stack_random = 'randomstack'
    if wildcard:
        wildcard = 'wildcard'
    if reverse:
        reverse = 'reverse'
    session = ContentObject(stacks, card_random, stack_random, wildcard, reverse)
    print """
Type 'Q' to stop anytime, RETURN to continue studying.
"""
    print "Your arguments:"
    print "\n", session.mode
    prompt(session)
    while True:
        os.system('cls' if os.name == 'nt' else 'clear')
        print session.fetch()
        prompt(session)
        print session.fetch()
        prompt(session)
def testSavedModel(self):
    """Test PLS model saving

    Test to assure that a saved pls model gives the same predictions
    as before saving."""
    # Create a pls model
    pls = AZorngPLS.PLSLearner(self.train_data)
    # Calculate classification accuracy
    Acc = evalUtilities.getClassificationAccuracy(self.test_data, pls)
    # Save the model
    scratchdir = os.path.join(AZOC.SCRATCHDIR, "scratchdir" + str(time.time()))
    os.mkdir(scratchdir)
    modelPath = os.path.join(scratchdir, "PLSModel")
    pls.write(modelPath)
    # Read in the model
    plsM = AZorngPLS.PLSread(modelPath)
    # Calculate classification accuracy
    savedAcc = evalUtilities.getClassificationAccuracy(self.test_data, plsM)
    # Test that the accuracy of the two classifiers is the exact same
    self.assertEqual(Acc, savedAcc)
    # Remove the scratch directory
    os.system("/bin/rm -rf " + scratchdir)
def convert_mp3_to_wav(filename, sample_frequency):
    ext = filename[-4:]
    if (ext != '.mp3'):
        return
    files = filename.split('/')
    orig_filename = files[-1][0:-4]
    new_path = ''
    if (filename[0] == '/'):
        new_path = '/'
    for i in xrange(len(files) - 1):
        new_path += files[i] + '/'
    tmp_path = new_path + 'tmp'
    new_path += 'wave'
    if not os.path.exists(new_path):
        os.makedirs(new_path)
    if not os.path.exists(tmp_path):
        os.makedirs(tmp_path)
    filename_tmp = tmp_path + '/' + orig_filename + '.mp3'
    new_name = new_path + '/' + orig_filename + '.wav'
    sample_freq_str = "{0:.1f}".format(float(sample_frequency) / 1000.0)
    cmd = 'lame -a -m m {0} {1}'.format(quote(filename), quote(filename_tmp))
    os.system(cmd)
    cmd = 'lame --decode {0} {1} --resample {2}'.format(
        quote(filename_tmp), quote(new_name), sample_freq_str
    )
    os.system(cmd)
    return new_name
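# Hypothetical usage sketch (not part of the original code). Assuming `lame` is on the
# PATH and `quote` is shlex/pipes quote, the call below would create sibling `tmp/` and
# `wave/` directories next to the input and return the path of the resampled WAV:
# wav_path = convert_mp3_to_wav('/music/album/track01.mp3', 16000)
# # wav_path would be roughly '/music/album/wave/track01.wav'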
def save(x, filename, bzip2=False, gzip=False):
    """
    save(x, filename):

    Saves x to a file. Pretty much the only constraint on x is that
    it have no circular references (it must be Python pickle-able).
    This uses the pickle module, so data you save is *guaranteed*
    to be readable by future versions of Python.

    INPUT:
        x -- almost arbitrary object
        filename -- a string

    OUTPUT:
        Creates a file named filename, from which the object x
        can be reconstructed.
    """
    o = open(filename, "w")
    # Note: don't use protocol 2 here (use 1), since loading doesn't work
    # on my extension types.
    cPickle.dump(x, o, 1)
    o.close()
    if bzip2:
        os.system("bzip2 -f %s" % filename)
    if gzip:
        os.system("gzip -f %s" % filename)
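# Minimal usage sketch (an assumption, not from the original module): with cPickle and
# os already imported, this pickles a dict to /tmp/data.sobj and then compresses it in
# place to /tmp/data.sobj.bz2 by shelling out to the bzip2 CLI.
# save({'answer': 42}, '/tmp/data.sobj', bzip2=True)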
def aria_down(url, path=None, curt_max=1, split=5, proxy='http://127.0.0.1:8087', useragent=None):
    dl_list = 'dl_list.txt'
    if os.path.exists(dl_list):
        os.remove(dl_list)
    # Write the list of files to download into a txt file
    try:
        with open(dl_list, 'w') as f:
            if isinstance(url, list):
                for i in url:
                    f.write(i + '\n')
            else:
                f.write(url + '\n')
    except IOError as err:
        print('File error:' + str(err))
    # Build the download command
    command = 'aria2c -j' + str(curt_max) + ' -s' + str(split)
    if path is not None:
        command += ' --dir="' + path + '"'
    if proxy is not None:
        command += ' --http-proxy="' + proxy + '"'
    if useragent is not None:
        command += ' --user-agent="' + useragent + '"'
    command += ' -i "' + dl_list + '"'
    print(command)
    # Invoke aria2c to perform the download
    os.system(command)
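# Hypothetical usage sketch (not part of the original code): download two files into
# /tmp/downloads through the default local proxy, two jobs at a time.
# aria_down(['http://example.com/a.iso', 'http://example.com/b.iso'],
#           path='/tmp/downloads', curt_max=2)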
def git_commit():
    ''' git commit command '''
    print("git commit")
    t = time.localtime()
    os.system("git commit -m '%s'" % time.strftime("%Y年%m月%d日%H时%M分%S秒"))
def fRegistroDiario(queDB):
    bSalir = False
    while not bSalir:
        os.system('clear')
        print 'NOOXS - Mantenimiento de BD nooxsense'
        print '*** Tabla Registro Diario ***'
        print
        print
        print 'SOLO PARA PINES DIGITALES Y EN MODO OUTPUT'
        print
        print '*******************************************'
        print '|           |     |      |   minutos en    |'
        print '|   Fecha   |Disp | PIN  |    1   |    0   |'
        print '*******************************************'
        sSQL = 'SELECT fecha, cod_dispositivo, PIN_num, min1, min0 FROM registrodiario ;'
        cursor.execute(sSQL)
        aFilas = cursor.fetchall()
        x = 0
        dFechaAnterior = aFilas[0][0]
        for aRegistro in aFilas:
            if dFechaAnterior != aRegistro[0]:
                dFechaAnterior = aRegistro[0]
                print
                print
            print '|{0:10} | {1:5d} | {2:4} | {3:5d}| {4:5d} |'.format(str(aRegistro[0]), aRegistro[1], aRegistro[2], aRegistro[3], aRegistro[4])
            print '–––––––––––––––––––––––––––––––––––––––––––'
            x = x + 1
            if x == 100:
                x = 0
                y = raw_input('pulse una tecla para continuar...')
        print
        print
        iOp = raw_input('Pulsa cualquier tecla para volver ')
        bSalir = True
def clean_ftp_root(logger):
    clean_cmd = '%s -s -y -q -size=%s %s' % (os.path.join(cwd, 'agestore.exe'), ftp_root_bytes, ftp_root)
    logger.info('Start clean up ftp_root folder with cmd %s' % clean_cmd)
    try:
        os.system(clean_cmd)
    except:
        logger.error('Clean ftp root failed, error info %s' % clean_cmd)
def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
    """Given a zipped revision, unzip it and run the test."""
    print 'Trying revision %s...' % str(revision)

    # Create a temp directory and unzip the revision into it.
    cwd = os.getcwd()
    tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
    UnzipFilenameToDir(zip_file, tempdir)

    # Hack: Chrome OS archives are missing icudtl.dat; try to copy it from
    # the local directory.
    if context.platform == 'chromeos':
        icudtl_path = 'third_party/icu/source/data/in/icudtl.dat'
        if not os.access(icudtl_path, os.F_OK):
            print 'Couldn\'t find: ' + icudtl_path
            sys.exit()
        os.system('cp %s %s/chrome-linux/' % (icudtl_path, tempdir))

    os.chdir(tempdir)

    # Run the build as many times as specified.
    testargs = ['--user-data-dir=%s' % profile] + args
    # The sandbox must be run as root on Official Chrome, so bypass it.
    if ((context.is_official or context.flash_path) and
            context.platform.startswith('linux')):
        testargs.append('--no-sandbox')
    if context.flash_path:
        testargs.append('--ppapi-flash-path=%s' % context.flash_path)
        # We have to pass a large enough Flash version, which currently needs not
        # be correct. Instead of requiring the user of the script to figure out and
        # pass the correct version we just spoof it.
        testargs.append('--ppapi-flash-version=99.9.999.999')

    runcommand = []
    for token in shlex.split(command):
        if token == '%a':
            runcommand.extend(testargs)
        else:
            runcommand.append(
                token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
                replace('%s', ' '.join(testargs)))

    results = []
    for _ in range(num_runs):
        subproc = subprocess.Popen(runcommand,
                                   bufsize=-1,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        (stdout, stderr) = subproc.communicate()
        results.append((subproc.returncode, stdout, stderr))

    os.chdir(cwd)
    try:
        shutil.rmtree(tempdir, True)
    except Exception:
        pass

    for (returncode, stdout, stderr) in results:
        if returncode:
            return (returncode, stdout, stderr)
    return results[0]
def speak(voicedata, num):
    print(str((newsnum+1) - num))
    tts = gTTS(text=voicedata, lang='hi')
    tts.save("E:\SandBox\Python\eradioJockey/temp/" + str(num+1) + ".mp3")
    shutil.copy('E:\SandBox\Python\eradioJockey\\assets\\alert.wav', 'temp')
    newname = "ren E:\SandBox\Python\eradioJockey\\temp\\alert.wav" + ' ' + str(num) + 'a' + '.wav'
    os.system(newname)
def animate(self, event):
    import os
    if os.path.exists("angle_rot_anim"):
        os.system("rm -rf angle_rot_anim")
    os.mkdir("angle_rot_anim")
    frame = 0
    fname = "angle_rot_anim/frame%05i.png" % frame
    self.start(None)
    self.g.screenShot(fname=fname)
    roll = float(self.roll_angle.GetValue())
    roll = -1*roll*pi/180
    for r in linspace(0, roll, 50):
        self.show_transform([r])
        time.sleep(.1)
        frame += 1
        fname = "angle_rot_anim/frame%05i.png" % frame
        self.g.screenShot(fname=fname)
    for r in linspace(0, self.pitch_angle, 20):
        self.show_transform([roll, r])
        time.sleep(.1)
        frame += 1
        fname = "angle_rot_anim/frame%05i.png" % frame
        self.g.screenShot(fname=fname)
    for r in linspace(0, self.yaw_angle, 50):
        self.show_transform([roll, self.pitch_angle, r])
        time.sleep(.1)
        frame += 1
        fname = "angle_rot_anim/frame%05i.png" % frame
        self.g.screenShot(fname=fname)
def test_init_manifest_packageid(self):
    comm.setUp()
    os.chdir(comm.XwalkPath)
    comm.clear("org.xwalk.test")
    os.mkdir("org.xwalk.test")
    cmd = (
        comm.HOST_PREFIX + comm.PackTools +
        "crosswalk-app manifest " + comm.XwalkPath +
        "org.xwalk.test --platforms=android --package-id=org.xwalk.test"
    )
    os.system(cmd)
    with open(comm.ConstPath + "/../tools/org.xwalk.test/manifest.json") as json_file:
        data = json.load(json_file)
    updatecmd = (
        comm.HOST_PREFIX + comm.PackTools +
        "crosswalk-app manifest " + comm.XwalkPath +
        "org.xwalk.test --platforms=android --package-id=org.test.foo"
    )
    os.system(updatecmd)
    with open(comm.ConstPath + "/../tools/org.xwalk.test/manifest.json") as json_file_update:
        updatedata = json.load(json_file_update)
    comm.clear("org.xwalk.test")
    self.assertEquals(data["xwalk_package_id"].strip(os.linesep), "org.xwalk.test")
    self.assertEquals(updatedata["xwalk_package_id"].strip(os.linesep), "org.test.foo")
def test_compare_values_border_cases(self):
    """tests the condition where BSR values are near the border regions
    differentiated by the function"""
    tdir = tempfile.mkdtemp(prefix="filetest_",)
    fpath = os.path.join(tdir, "group1_pruned")
    fp = open(fpath, "w")
    fp.write("      E2348_69_all\n")
    fp.write("IpaH3 0.03\n")
    fp.write("LT    0.00\n")
    fp.write("ST2   0.00\n")
    fp.write("bfpB  0.81\n")
    fp.write("stx2a 0.07")
    fp.close()
    npath = os.path.join(tdir, "group2_pruned")
    np = open(npath, "w")
    np.write("      H10407_all\n")
    np.write("IpaH3 0.03\n")
    np.write("LT    0.80\n")
    np.write("ST2   1.00\n")
    np.write("bfpB  0.00\n")
    np.write("stx2a 0.79")
    np.close()
    self.assertEqual(compare_values(fpath, npath, "0.8", "0.4"),
                     ([0.81], [0.80, 1.00], [0.03, 0.0, 0.0, 0.81, 0.07]))
    shutil.rmtree(tdir)
    os.system("rm group*_out.txt")
def share(filename):
    # TODO: Move this connection handling into a function in Kano Utils
    import subprocess

    if not is_internet():
        subprocess.call(['sudo', 'kano-settings', '4'])

    if not is_internet():
        return 'You have no internet'

    success, _ = login_using_token()
    if not success:
        os.system('kano-login 3')
        success, _ = login_using_token()
        if not success:
            return 'Cannot login'

    data = json.loads(request.data)
    filename, filepath = _save(data)
    success, msg = upload_share(filepath, filename, APP_NAME)
    if not success:
        return msg

    increment_app_state_variable_with_dialog(APP_NAME, 'shared', 1)
    return ''
def testMetaDataHandleForSavingModel(self):
    """Test the handling of SaveModel for Data with Meta Attributes"""
    # Test the save of a model created from a train data with meta attributes
    self.assert_(len(self.WMetaTest.domain.getmetas()) >= 1, "The dataset WMetaTest should have Meta Attributes")
    plsM = AZorngPLS.PLSLearner(self.WMetaTest)
    AccNoMetaBefore = evalUtilities.getClassificationAccuracy(self.NoMetaTrain, plsM)
    AccWMetaBefore = evalUtilities.getClassificationAccuracy(self.WMetaTest, plsM)
    # Save the model
    scratchdir = os.path.join(AZOC.SCRATCHDIR, "scratchdir" + str(time.time()))
    os.mkdir(scratchdir)
    modelPath = os.path.join(scratchdir, "PLSModel")
    plsM.write(modelPath)
    # Read in the model
    plsR = AZorngPLS.PLSread(modelPath)
    self.assert_(len(plsR.imputer.defaults.domain.getmetas()) == 0, "There shouldn't be any Meta data now!")
    # Calculate classification accuracy
    AccNoMetaAfter = evalUtilities.getClassificationAccuracy(self.NoMetaTrain, plsR)
    AccWMetaAfter = evalUtilities.getClassificationAccuracy(self.WMetaTest, plsR)
    # Test that the accuracy of the model before and after saving is the same
    self.assertEqual(AccNoMetaBefore, AccNoMetaAfter, "NoMeta: Predictions after loading saved model were different")
    self.assertEqual(AccWMetaBefore, AccWMetaAfter, "WMeta: Predictions after loading saved model were different")
    self.assertEqual(round(AccWMetaAfter, 9), round(0.888888888889, 9), "Accuracy was not the expected value!")
    self.assertEqual(round(AccNoMetaAfter, 9), round(0.605769230769, 9), "Accuracy was not the expected value!")
    # Remove the scratch directory
    os.system("/bin/rm -rf " + scratchdir)
def main(wrld):
    i = ""
    global world, hit, starting
    world = open("maps/world" + str(wrld)).read().split("\n")
    os.system("clear")
    print ""
    if(starting):
        render(world, user.X, user.Y)
        print ' ' * ((len(world[1])/2)-4) + "WELCOME TO"
        print ' ' * ((len(world[1])/2)-1) + "BARF"
        i = " "
        sys.stdout.write("What is your name? > ")
        user.name = raw_input()
        starting = False
    while not i in ["quit", "q", "exit"]:
        hit = False
        sys.stdout.write(user.name + "> ")
        i = raw_input()
        os.system("clear")
        if(i in ["go north", "north", "w"]):
            goUp()
        if(i in ["go south", "south", "s"]):
            goDown()
        if(i in ["go west", "west", "a"]):
            goLeft()
        if(i in ["go east", "east", "d"]):
            goRight()
        if user.health > 0:
            if not hit:
                print ""
                render(world, user.X, user.Y)
            print "you have " + str(user.health) + " health, and " + str(user.strength) + " strength."
        else:
            print "You have died\n\n"
            raw_input()
            i = "q"
def submitJob(input_dir, motif_file, dest):
    """For each file, it submits a job to the queue.

    The job type is set to be Normal which means it must take
    at most 12 hours to be finished."""
    motif_name = path.basename(motif_file)
    job_name = "%s_overl" % (motif_name)
    dest = path.join(dest, motif_name)
    system('mkdir "%s"' % dest)
    cmd = " ".join([
        "python",
        "~/codes/Motif.Pairs/Overlapping_motifs/count_overlapping_motifs.py \\\n",
        '-d "%s" \\\n' % input_dir,
        '-i "%s" \\\n' % motif_file,
        '-c 0.5 \\\n',
        '-o "%s" \n' % dest,
    ])
    with open("job_%s_overlapping.sh" % job_name, "w") as inf:
        inf.write("\n".join([
            '#!/bin/bash',
            '#BSUB -L /bin/bash',
            '#BSUB -o "%s.stdout"' % job_name,
            '#BSUB -e "%s.stderr"' % job_name,
            '#BSUB -J "%s"' % job_name,
            '#BSUB -M 50000000',
            '#BSUB -R rusage[mem=50000]',
            '#BSUB normal',
            '',
            cmd,
        ]))
    system('bsub < "%s"' % ("job_%s_overlapping.sh" % job_name))
    return 0
def bbnet():
    "Create network and run Buffer Bloat experiment"
    print "starting mininet ...."
    # Seconds to run iperf; keep this very high
    seconds = 3600
    start = time()
    # Reset to known state
    topo = StarTopo(n=args.n, bw_host=args.bw_host,
                    delay='%sms' % args.delay,
                    bw_net=args.bw_net, maxq=args.maxq, diff=args.diff)
    net = Mininet(topo=topo, host=CPULimitedHost, link=TCLink,
                  autoPinCpus=True, controller=OVSController)
    net.start()
    dumpNodeConnections(net.hosts)
    net.pingAll()
    print args.diff
    if args.diff:
        print "Differentiate Traffic Between iperf and wget"
        os.system("bash tc_cmd_diff.sh")
    else:
        print "exec tc_cmd.sh"
        os.system("bash tc_cmd.sh %s" % args.maxq)
    sleep(2)
    ping_latency(net)
    print "Initially, the delay between two hosts is around %dms" % (int(args.delay)*2)
    h2 = net.getNodeByName('h2')
    h1 = net.getNodeByName('h1')
    h1.cmd('cd ./http/; nohup python2.7 ./webserver.py &')
    h1.cmd('cd ../')
    h2.cmd('iperf -s -w 16m -p 5001 -i 1 > iperf-recv.txt &')
    CLI(net)
    h1.cmd("sudo pkill -9 -f webserver.py")
    h2.cmd("rm -f index.html*")
    Popen("killall -9 cat", shell=True).wait()
def can_build():
    if (os.name != "posix" or sys.platform == "darwin"):
        return False

    # Check the minimal dependencies
    x11_error = os.system("pkg-config --version > /dev/null")
    if (x11_error):
        print("pkg-config not found.. x11 disabled.")
        return False

    x11_error = os.system("pkg-config x11 --modversion > /dev/null ")
    if (x11_error):
        print("X11 not found.. x11 disabled.")
        return False

    x11_error = os.system("pkg-config xcursor --modversion > /dev/null ")
    if (x11_error):
        print("xcursor not found.. x11 disabled.")
        return False

    x11_error = os.system("pkg-config xinerama --modversion > /dev/null ")
    if (x11_error):
        print("xinerama not found.. x11 disabled.")
        return False

    x11_error = os.system("pkg-config xrandr --modversion > /dev/null ")
    if (x11_error):
        print("xrandr not found.. x11 disabled.")
        return False

    return True
def crearPefil(self):
    if sys.platform == 'linux2':
        os.system("cp -r /usr/share/pyventa/perfil/* " + libutil.home())
    else:
        os.system("xcopy \usr\share\pyventa\perfil \"%s\" /i /a /e /k" % self.home)
    msgBox = QtGui.QMessageBox(QtGui.QMessageBox.Information, "Reinicio programado",
                               "<h2>La operacion ha tenido exito</h2><br><p>Ahora se recopilaran los datos necesarios para la base de datos, despues de eso el programa se cerrara para establecer las configuraciones.</p>.",
                               QtGui.QMessageBox.Close, self)
    msgBox.exec_()
def generate_icarus_executable(package, name, suffix, defines=config.default_defines, extraargs={}):
    builddir = os.path.join(config.builddir, package)
    if name in compatibles[package]:
        dependencies = compatibles[package][name]
        dependencies = list(dependencies)
        dependencies.append('uhd/dut_qa_wrapper.v')
    else:
        dependencies = incompatibles[package][name]
    included_dependencies = set()
    inputfiles = []
    for d in dependencies:
        pck, fn = d2pd(package, d)
        if (pck, fn) not in included_dependencies:
            generate_block(pck, fn, extraargs, included_dependencies, inputfiles)
    inputfilestr = ' '.join(inputfiles)
    print(inputfilestr)
    executable = name + suffix
    executable = os.path.join(builddir, executable)
    definestr = make_define_string(defines)
    cmd = ("iverilog -o {executable} {definestr} {inputfiles}"
           ).format(executable=executable, definestr=definestr,
                    inputfiles=inputfilestr)
    logger.debug(cmd)
    os.system(cmd)
    return executable
def main():
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    os.system("pip install certifi")
    print "Copying certifi's cacert.pem"
    import certifi
    shutil.copy2(certifi.where(), 'agkyra/resources/cacert.pem')
def mesh_gen(h0):
    line = ''
    line = line + '// Creating a mesh for the unit circle\n '
    line = line + '// Maximal width in the mesh h0=0.1\n '
    line = line + 'h0= ' + str(h0) + ';\n '
    line = line + 'radius = 1.0;\n'
    line = line + '// Creating the points\n'
    line = line + 'Point(1) = {0, 0, 0, h0};\n'
    line = line + 'Point(2) = {-radius, 0, 0, h0};\n'
    line = line + 'Point(3) = {0, radius, 0, h0};\n'
    line = line + 'Point(4) = {radius, 0, 0, h0};\n'
    line = line + 'Point(5) = {0, -radius, 0, h0};\n'
    line = line + 'Circle(6) = {2, 1, 3};\n'
    line = line + 'Circle(7) = {3, 1, 4};\n'
    line = line + 'Circle(8) = {4, 1, 5};\n'
    line = line + 'Circle(9) = {5, 1, 2};\n'
    line = line + '// Define a surface by a Line Loop\n'
    line = line + 'Line Loop(10) = {6, 7, 8, 9};\n'
    line = line + 'Plane Surface(11) = {10};\n'
    line = line + '// Define the physical entities\n'
    line = line + 'Physical Line(101) = {6, 7, 8, 9};\n'
    line = line + 'Physical Surface(201) = {11};'
    # Write to the file
    f = open('circle.geo', 'w')
    f.write(line)
    f.close()
    # Generate the msh file
    os.system('gmsh circle.geo -2')
def render_list(files):
    for f in files:
        os.system('lessc %s %s.css' % (f, f))
        if (f in includes):
            say('re-compiling %d dependencies' % len(includes[f]))
            render_list(includes[f])
    say('re-compiled %d files' % len(files))
def test_merge(self):
    testdir = path(l2emod.__file__).parent / 'testtex'
    with make_temp_directory() as tmdir:
        fn = testdir / 'example1.tex'
        print "file %s" % fn
        nfn = '%s/%s' % (tmdir, fn.basename())
        os.system('cp %s/* %s' % (testdir, tmdir))
        os.chdir(tmdir)
        l2e = latex2edx(nfn, output_dir=tmdir)
        l2e.convert()
        fn = testdir / 'example2.tex'
        print "file %s" % fn
        nfn = '%s/%s' % (tmdir, fn.basename())
        l2e = latex2edx(nfn, output_dir=tmdir, do_merge=True)
        l2e.convert()
        cfn = path(tmdir) / 'course/2013_Fall.xml'
        self.assertTrue(os.path.exists(cfn))
        self.assertIn('<chapter url_name="Unit_1"', open(cfn).read())
        self.assertIn('<chapter url_name="Unit_2"', open(cfn).read())
        cfn = path(tmdir) / 'chapter/Unit_1.xml'
        self.assertTrue(os.path.exists(cfn))
        cfn = path(tmdir) / 'chapter/Unit_2.xml'
        self.assertTrue(os.path.exists(cfn))
def simpleTest():
    "Create and test a simple network"
    topo = DiamondTopo(k=4)
    net = Mininet(topo=topo, link=TCLink, controller=RemoteController)
    if args.sim != 1:
        print "Adding real interfaces"
        s1 = net.getNodeByName('s1')
        s3 = net.getNodeByName('s3')
        addRealIntf(net, args.intf1, s1)
        addRealIntf(net, args.intf2, s3)
        opts = '-D -o UseDNS=no -u0'
        rootnode = sshd(net, opts=opts)
        h2 = net.getNodeByName('h2')
        h2.cmd('iperf -s -p 5001 -i 1 > iperf-recv_TCP.txt &')
        h2.cmd('iperf -s -p 5003 -u -i 1 > iperf-recv_UDP.txt &')
    else:
        net.start()
    CLI(net)
    os.system('killall -9 iperf')
    if args.sim != 1:
        net.hosts[0].cmd('killall -9 dhcpd')
        for host in net.hosts:
            host.cmd('kill %' + '/usr/sbin/sshd')
        stopNAT(rootnode)
    net.stop()