def check_file_in_db(self, selectfield, table, field, file):
    """Check if a file has already been put in this table.

    Return True if the file should be (re)processed.
    Return False if the processing should be skipped (a DB record
    exists and its recorded output file is still on disk).
    """
    self.connect()
    # Bind the value through the DB-API to avoid SQL injection.
    # Identifiers (select field, table, field name) cannot be bound and
    # are interpolated -- they are assumed to come from trusted config.
    query = "SELECT H.%s FROM %s AS H WHERE H.%s = %%s;" % (selectfield, table, field)
    if self.opts.verbose:
        print(query)
    self.DBcursor.execute(query, (file,))
    row = self.DBcursor.fetchone()  # None when there is no matching row
    self.close()
    # The original indexed fetchone()[0] directly, which raised a
    # TypeError whenever the file had never been recorded.
    found_file = row[0] if row is not None else None
    if not found_file:
        return True  # never processed before
    if self.opts.verbose:
        print("Found '%s' in the DB" % found_file)
    if os.path.isfile(os.path.join(self.dest_dir, found_file)):
        return False  # output already present: skip reprocessing
    # Record exists but the output file is gone; with --force also wipe
    # any partial leftovers before reprocessing.
    if self.opts.force:
        os.system("rm -rf %s*" % os.path.join(self.dest_dir, found_file))
    return True
def simpleTest():
    "Create and test a simple network"
    # Build a diamond-shaped Mininet topology driven by a remote controller.
    topo = DiamondTopo(k=4)
    net = Mininet(topo=topo,link=TCLink,controller=RemoteController)
    if args.sim!=1:
        # Non-simulation mode: attach two real NICs to the edge switches
        # and start sshd on the hosts via a root-namespace node.
        print "Adding real interfaces"
        s1 = net.getNodeByName('s1')
        s3 = net.getNodeByName('s3')
        addRealIntf(net,args.intf1,s1)
        addRealIntf(net,args.intf2,s3)
        opts = '-D -o UseDNS=no -u0'  # sshd: foreground, no DNS lookups
        rootnode=sshd(net, opts=opts)
        h2 = net.getNodeByName('h2')
        # Background iperf sinks: TCP on 5001, UDP on 5003, logs to files.
        h2.cmd('iperf -s -p 5001 -i 1 > iperf-recv_TCP.txt &')
        h2.cmd('iperf -s -p 5003 -u -i 1 > iperf-recv_UDP.txt &')
    else:
        # Pure simulation: just start the emulated network.
        # NOTE(review): net.start() is only called in this branch --
        # presumably sshd() starts the network in the other one; confirm.
        net.start()
    CLI(net)
    # Interactive session finished: tear everything down.
    os.system('killall -9 iperf' )
    if args.sim!=1:
        net.hosts[0].cmd('killall -9 dhcpd')
        for host in net.hosts:
            host.cmd('kill %'+ '/usr/sbin/sshd')
        stopNAT(rootnode)
    net.stop()
def test_merge(self):
    # End-to-end check of latex2edx merge mode: convert example1.tex,
    # then convert example2.tex with do_merge=True, and verify that the
    # resulting course XML references the chapters from BOTH sources.
    testdir = path(l2emod.__file__).parent / 'testtex'
    with make_temp_directory() as tmdir:
        fn = testdir / 'example1.tex'
        print "file %s" % fn
        nfn = '%s/%s' % (tmdir, fn.basename())
        os.system('cp %s/* %s' % (testdir, tmdir))  # work on a scratch copy
        os.chdir(tmdir)
        l2e = latex2edx(nfn, output_dir=tmdir)
        l2e.convert()
        fn = testdir / 'example2.tex'
        print "file %s" % fn
        nfn = '%s/%s' % (tmdir, fn.basename())
        l2e = latex2edx(nfn, output_dir=tmdir, do_merge=True)
        l2e.convert()
        # The merged course file must reference both units...
        cfn = path(tmdir) / 'course/2013_Fall.xml'
        self.assertTrue(os.path.exists(cfn))
        self.assertIn('<chapter url_name="Unit_1"', open(cfn).read())
        self.assertIn('<chapter url_name="Unit_2"', open(cfn).read())
        # ...and each chapter file must exist on disk.
        cfn = path(tmdir) / 'chapter/Unit_1.xml'
        self.assertTrue(os.path.exists(cfn))
        cfn = path(tmdir) / 'chapter/Unit_2.xml'
        self.assertTrue(os.path.exists(cfn))
def mesh_gen(h0):
    """Generate a Gmsh mesh of the unit circle.

    h0 -- target maximal element width passed to Gmsh.

    Writes 'circle.geo' (circle of radius 1 centred at the origin built
    from four arcs) in the current directory, then invokes
    ``gmsh circle.geo -2`` to produce the 2-D mesh.
    """
    geo = [
        '// Creating a mesh for the unit circle',
        # The original comment hard-coded "h0=0.1" even though h0 is a
        # parameter; reflect the actual value instead.
        '// Maximal width in the mesh h0=%s' % h0,
        'h0= ' + str(h0) + ';',
        'radius = 1.0;',
        '// Creating the points',
        'Point(1) = {0, 0, 0, h0};',
        'Point(2) = {-radius, 0, 0, h0};',
        'Point(3) = {0, radius, 0, h0};',
        'Point(4) = {radius, 0, 0, h0};',
        'Point(5) = {0, -radius, 0, h0};',
        'Circle(6) = {2, 1, 3};',
        'Circle(7) = {3, 1, 4};',
        'Circle(8) = {4, 1, 5};',
        'Circle(9) = {5, 1, 2};',
        '// Define a surface by a Line Loop',
        'Line Loop(10) = {6, 7, 8, 9};',
        'Plane Surface(11) = {10};',
        '// Tag the boundary and the surface as physical groups',
        'Physical Line(101) = {6, 7, 8, 9};',
        'Physical Surface(201) = {11};',
    ]
    # 'with' guarantees the handle is closed even if the write fails.
    with open('circle.geo', 'w') as f:
        f.write('\n'.join(geo))
    # Generate the msh file (prints an error and does nothing if gmsh
    # is not installed; os.system does not raise).
    os.system('gmsh circle.geo -2')
def run_debugger(testname, pythonfile, pydb_opts='', args='', outfile=None):
    # Run pydb on `pythonfile` driven by the canned command file
    # <testname>.cmd, capture its output, and diff it against the
    # expected output <testname>.right.  Returns True when they match.
    # NOTE(review): the `outfile` parameter is immediately overwritten
    # below, so callers cannot actually choose the output file name.
    global srcdir, builddir, pydir
    rightfile = os.path.join(srcdir, 'data', "%s.right" % testname)
    # Make sure the spawned pydb sees the same module search path.
    os.environ['PYTHONPATH']=os.pathsep.join(sys.path)
    cmdfile = os.path.join(srcdir, "%s.cmd" % testname)
    outfile = "%s.out" % testname
    outfile_opt = '--output=%s ' % outfile
    # print "builddir: %s, cmdfile: %s, outfile: %s, rightfile: %s" % \
    #     (builddir, cmdfile, outfile, rightfile)
    if os.path.exists(outfile):
        os.unlink(outfile)  # start from a clean capture file
    cmd = "%s --command %s %s %s %s %s" % \
          (pydb_path, cmdfile, outfile_opt, pydb_opts, pythonfile, args)
    os.system(cmd)
    # Compare expected vs. actual output, labelling the diff with
    # file names and modification times.
    fromfile = rightfile
    fromdate = time.ctime(os.stat(fromfile).st_mtime)
    fromlines = open(fromfile, 'U').readlines()
    tofile = outfile
    todate = time.ctime(os.stat(tofile).st_mtime)
    tolines = open(tofile, 'U').readlines()
    diff = list(difflib.unified_diff(fromlines, tolines, fromfile,
                                     tofile, fromdate, todate))
    if len(diff) == 0:
        os.unlink(outfile)  # keep the .out file only on failure
    for line in diff:
        print line,
    return len(diff) == 0
def test_example1(self):
    # Convert example1.tex and verify the generated xbundle and course
    # XML: chapter attributes, course file presence, and the first
    # sequential's attributes.
    testdir = path(l2emod.__file__).parent / 'testtex'
    fn = testdir / 'example1.tex'
    print "file %s" % fn
    with make_temp_directory() as tmdir:
        nfn = '%s/%s' % (tmdir, fn.basename())
        os.system('cp %s/* %s' % (testdir, tmdir))  # work on a scratch copy
        os.chdir(tmdir)
        l2e = latex2edx(nfn, output_dir=tmdir)
        l2e.convert()
        # The .xbundle is written next to the copied source file.
        xbfn = nfn[:-4] + '.xbundle'
        self.assertTrue(os.path.exists(xbfn))
        # xb = open(xbfn).read()
        # self.assertIn('<chapter display_name="Unit 1" start="2013-11-22" url_name="Unit_1">', xb)
        xml = etree.parse(xbfn).getroot()
        chapter = xml.find('.//chapter')
        self.assertTrue(chapter.get('display_name') == 'Unit 1')
        self.assertTrue(chapter.get('start') == '2013-11-22')
        self.assertTrue(chapter.get('url_name') == 'Unit_1')
        cfn = path(tmdir) / 'course/2013_Fall.xml'
        self.assertTrue(os.path.exists(cfn))
        cfn = path(tmdir) / 'chapter/Unit_1.xml'
        self.assertTrue(os.path.exists(cfn))
        # self.assertIn('<sequential display_name="Introduction" due="2013-11-22" url_name="Introduction"', open(cfn).read())
        xml = etree.parse(cfn).getroot()
        seq = xml.find('.//sequential')
        self.assertTrue(seq.get('display_name') == 'Introduction')
        self.assertTrue(seq.get('due') == '2013-11-22')
        self.assertTrue(seq.get('url_name') == 'Introduction')
        self.assertIn('<problem url_name="p1"/>', open(cfn).read())
def generate_icarus_executable(package, name, suffix,
                               defines=config.default_defines,
                               extraargs=None):
    """Compile a block with Icarus Verilog and return the executable path.

    package/name select the design; suffix is appended to the executable
    name; defines become the -D define string; extraargs is forwarded to
    generate_block for each dependency.
    """
    # Avoid the shared-mutable-default pitfall of `extraargs={}` (the
    # dict was created once at definition time and handed to every call).
    if extraargs is None:
        extraargs = {}
    builddir = os.path.join(config.builddir, package)
    if name in compatibles[package]:
        # Compatible blocks get the QA wrapper appended to their deps.
        dependencies = list(compatibles[package][name])
        dependencies.append('uhd/dut_qa_wrapper.v')
    else:
        dependencies = incompatibles[package][name]
    # Resolve each dependency exactly once, collecting generated files.
    included_dependencies = set()
    inputfiles = []
    for d in dependencies:
        pck, fn = d2pd(package, d)
        if (pck, fn) not in included_dependencies:
            generate_block(pck, fn, extraargs, included_dependencies,
                           inputfiles)
    inputfilestr = ' '.join(inputfiles)
    print(inputfilestr)
    executable = os.path.join(builddir, name + suffix)
    definestr = make_define_string(defines)
    cmd = ("iverilog -o {executable} {definestr} {inputfiles}"
           ).format(executable=executable, definestr=definestr,
                    inputfiles=inputfilestr)
    logger.debug(cmd)
    os.system(cmd)
    return executable
def submitJob(input_dir, motif_file, dest):
    """For each file, it submits a job to the queue.

    The job type is set to be Normal which means it must take at most
    12 hours to be finished."""
    motif_name = path.basename(motif_file)
    job_name = "%s_overl" % (motif_name)
    dest = path.join(dest, motif_name)
    system('mkdir "%s"' % dest)  # per-motif output directory
    # Command the batch job will run: count overlapping motifs with a
    # 0.5 cutoff, writing into the per-motif directory.
    cmd = " ".join([
        "python",
        "~/codes/Motif.Pairs/Overlapping_motifs/count_overlapping_motifs.py \\\n",
        '-d "%s" \\\n' % input_dir,
        '-i "%s" \\\n' % motif_file,
        '-c 0.5 \\\n',
        '-o "%s" \n' % dest,
        ])
    # Write an LSF (bsub) submission script for this motif.
    # NOTE(review): '#BSUB normal' looks malformed -- the queue flag is
    # normally '#BSUB -q normal'; confirm against the cluster docs.
    with open("job_%s_overlapping.sh" % job_name, "w") as inf:
        inf.write("\n".join([
            '#!/bin/bash',
            '#BSUB -L /bin/bash',
            '#BSUB -o "%s.stdout"' % job_name,
            '#BSUB -e "%s.stderr"' % job_name,
            '#BSUB -J "%s"' % job_name,
            '#BSUB -M 50000000',
            '#BSUB -R rusage[mem=50000]',
            '#BSUB normal',
            '',
            cmd,
            ]))
    system('bsub < "%s"' % ("job_%s_overlapping.sh" % job_name))
    return 0
def can_build():
    # X11 builds require a POSIX host (not macOS) and the pkg-config
    # metadata for x11, xcursor, xinerama and xrandr.
    if (os.name != "posix" or sys.platform == "darwin"):
        return False
    # pkg-config itself must exist before we can query any module.
    if os.system("pkg-config --version > /dev/null"):
        print("pkg-config not found.. x11 disabled.")
        return False
    # Probe each required X11 module; any miss disables the build.
    checks = (
        ("pkg-config x11 --modversion > /dev/null ", "X11"),
        ("pkg-config xcursor --modversion > /dev/null ", "xcursor"),
        ("pkg-config xinerama --modversion > /dev/null ", "xinerama"),
        ("pkg-config xrandr --modversion > /dev/null ", "xrandr"),
    )
    for probe, label in checks:
        if os.system(probe):
            print("%s not found.. x11 disabled." % label)
            return False
    return True
def controllerXbox(self):
    # loop around xbox events
    while True:
        self.speed = 0
        # pygame.event.pump()
        # if self.pressed[K_ESCAPE]: break
        for event in xbox_read.event_stream(deadzone=12000):
            # if either the up/down button is pressed, set the Y axes to
            # match: RT drives one way, LT the other; trigger pressure
            # (event.value) scales the speed.
            if (event.key == 'RT' or event.key == 'LT'):
                if event.key == 'RT' and event.value > 1:
                    self.speed = int(event.value)/17  # scale raw trigger value down
                    self.moveY(0)
                elif event.key == 'LT' and event.value > 1:
                    self.speed = int(event.value)/17
                    self.moveY(2)
                else:
                    self.moveY(1)  # trigger released: neutral Y
            # if either the left/right button is pressed, set the X axes to
            if(event.key == 'dl' or event.key == 'dr'):
                if event.key == 'dl' and event.value == 1:
                    self.moveX(0)
                elif event.key == 'dr' and event.value == 1:
                    self.moveX(2)
                else:
                    self.moveX(1)  # d-pad released: neutral X
            # Runs the move function
            self.move(self.speed)
            #if(event.key == 'guide'): break
            # Clear the terminal between events (POSIX clear / Windows cls).
            os.system(['clear','cls'][os.name == 'nt'])
def testMetaDataHandleForSavingModel(self): """Test the handling of SaveModel for Data with Meta Atributes """ #Test the save of a model created from a train data with meta attributes self.assert_(len(self.WMetaTest.domain.getmetas())>=1,"The dataset WMetaTest should have Meta Attributes") plsM = AZorngPLS.PLSLearner(self.WMetaTest) AccNoMetaBefore = evalUtilities.getClassificationAccuracy(self.NoMetaTrain,plsM) AccWMetaBefore = evalUtilities.getClassificationAccuracy(self.WMetaTest,plsM) # Save the model scratchdir = os.path.join(AZOC.SCRATCHDIR, "scratchdir"+str(time.time())) os.mkdir(scratchdir) modelPath = os.path.join(scratchdir,"PLSModel") plsM.write(modelPath) # Read in the model plsR = AZorngPLS.PLSread(modelPath) self.assert_(len(plsR.imputer.defaults.domain.getmetas())==0,"There shouldn't be any Meta data now!") # Calculate classification accuracy AccNoMetaAfter = evalUtilities.getClassificationAccuracy(self.NoMetaTrain, plsR) AccWMetaAfter = evalUtilities.getClassificationAccuracy(self.WMetaTest, plsR) # Test that the accuracy of the model before and after saved self.assertEqual(AccNoMetaBefore, AccNoMetaAfter,"NoMeta: Predictions after loading saved model were different") self.assertEqual(AccWMetaBefore, AccWMetaAfter, "WMeta: Predictions after loading saved model were different") self.assertEqual(round(AccWMetaAfter,9), round(0.888888888889,9),"Accuracy was not the expected value!") self.assertEqual(round(AccNoMetaAfter,9), round(0.605769230769,9),"Accuracy was not the expected value!") # Remove the scratch directory os.system("/bin/rm -rf "+scratchdir)
def convert_mp3_to_wav(filename, sample_frequency):
    """Convert an .mp3 file to a mono, resampled .wav using lame.

    The wav is written to a sibling 'wave/' directory; an intermediate
    mono mp3 goes to a sibling 'tmp/' directory.  Returns the path of
    the new wav file, or None when `filename` is not an .mp3.
    """
    if filename[-4:] != '.mp3':
        return
    # os.path replaces the original manual '/'-splitting loop (which was
    # Python-2-only via xrange and produced '//' for absolute paths).
    base_dir = os.path.dirname(filename)
    orig_filename = os.path.basename(filename)[:-4]  # strip '.mp3'
    tmp_path = os.path.join(base_dir, 'tmp')
    new_path = os.path.join(base_dir, 'wave')
    for d in (new_path, tmp_path):
        if not os.path.exists(d):
            os.makedirs(d)
    filename_tmp = os.path.join(tmp_path, orig_filename + '.mp3')
    new_name = os.path.join(new_path, orig_filename + '.wav')
    # lame expects the target rate in kHz with one decimal, e.g. '44.1'.
    sample_freq_str = "{0:.1f}".format(float(sample_frequency) / 1000.0)
    # Pass 1: downmix to a mono mp3.  Pass 2: decode + resample to wav.
    cmd = 'lame -a -m m {0} {1}'.format(quote(filename), quote(filename_tmp))
    os.system(cmd)
    cmd = 'lame --decode {0} {1} --resample {2}'.format(
        quote(filename_tmp), quote(new_name), sample_freq_str
    )
    os.system(cmd)
    return new_name
def test_compare_values_border_cases(self):
    """tests the condition where BSR values are near the border regions
    differentiated by the function"""
    work_dir = tempfile.mkdtemp(prefix="filetest_",)
    # Two pruned BSR matrices with values straddling the 0.8/0.4 cutoffs.
    group1 = os.path.join(work_dir, "group1_pruned")
    with open(group1, "w") as handle:
        handle.write(" E2348_69_all\n"
                     "IpaH3 0.03\n"
                     "LT 0.00\n"
                     "ST2 0.00\n"
                     "bfpB 0.81\n"
                     "stx2a 0.07")
    group2 = os.path.join(work_dir, "group2_pruned")
    with open(group2, "w") as handle:
        handle.write(" H10407_all\n"
                     "IpaH3 0.03\n"
                     "LT 0.80\n"
                     "ST2 1.00\n"
                     "bfpB 0.00\n"
                     "stx2a 0.79")
    self.assertEqual(compare_values(group1, group2, "0.8", "0.4"),
                     ([0.81], [0.80, 1.00], [0.03, 0.0, 0.0, 0.81, 0.07]))
    shutil.rmtree(work_dir)
    # compare_values leaves group*_out.txt side-effect files behind.
    os.system("rm group*_out.txt")
def testSavedModel(self): """Test PLS model saving Test to assure that a saved pls model gives the same predictions as before saving.""" # Create a pls model pls = AZorngPLS.PLSLearner(self.train_data) # Calculate classification accuracy Acc = evalUtilities.getClassificationAccuracy(self.test_data, pls) # Save the model scratchdir = os.path.join(AZOC.SCRATCHDIR, "scratchdir"+str(time.time())) os.mkdir(scratchdir) modelPath = os.path.join(scratchdir,"PLSModel") pls.write(modelPath) # Read in the model plsM = AZorngPLS.PLSread(modelPath) # Calculate classification accuracy savedAcc = evalUtilities.getClassificationAccuracy(self.test_data, plsM) # Test that the accuracy of the two classifiers is the exact same self.assertEqual(Acc, savedAcc) # Remove the scratch directory os.system("/bin/rm -rf "+scratchdir)
def render_list(files):
    # Compile every LESS source in `files`, then recursively recompile
    # whatever depends on it (dependency map is the module-level
    # `includes` dict; progress is reported through `say`).
    for source in files:
        os.system('lessc %s %s.css' % (source, source))
        if source in includes:
            say('re-compiling %d dependencies' % len(includes[source]))
            render_list(includes[source])
    say('re-compiled %d files' % len(files))
def animate(self, event):
    # Render a frame-by-frame screenshot sequence of the roll, pitch and
    # yaw rotations into angle_rot_anim/frame%05i.png.
    import os
    if os.path.exists("angle_rot_anim"):
        os.system("rm -rf angle_rot_anim")  # start from a clean directory
    os.mkdir("angle_rot_anim")
    frame = 0
    fname = "angle_rot_anim/frame%05i.png" % frame
    self.start(None)
    self.g.screenShot(fname=fname)
    # Roll angle comes from the UI control (degrees); negate and convert
    # to radians before sweeping.
    roll = float(self.roll_angle.GetValue())
    roll = -1*roll*pi/180
    # Roll sweep: 50 frames from 0 to the target roll.
    for r in linspace(0, roll, 50):
        self.show_transform([r])
        time.sleep(.1)  # let the view settle before the screenshot
        frame += 1
        fname = "angle_rot_anim/frame%05i.png" % frame
        self.g.screenShot(fname=fname)
    # Pitch sweep (roll held fixed): 20 frames.
    for r in linspace(0, self.pitch_angle, 20):
        self.show_transform([roll, r])
        time.sleep(.1)
        frame += 1
        fname = "angle_rot_anim/frame%05i.png" % frame
        self.g.screenShot(fname=fname)
    # Yaw sweep (roll and pitch held fixed): 50 frames.
    for r in linspace(0, self.yaw_angle, 50):
        self.show_transform([roll, self.pitch_angle, r])
        time.sleep(.1)
        frame += 1
        fname = "angle_rot_anim/frame%05i.png" % frame
        self.g.screenShot(fname=fname)
def crearPefil(self):
    # Copy the default profile skeleton into the user's home directory,
    # then tell the user the app will restart to apply the settings.
    if sys.platform == 'linux2':
        # NOTE(review): 'linux2' only matches Python 2 on Linux; on
        # Python 3 sys.platform is 'linux' -- confirm intended runtime.
        os.system("cp -r /usr/share/pyventa/perfil/* "+libutil.home())
    else:
        # NOTE(review): the backslashes below ('\usr', '\share', ...)
        # survive only because Python 2 leaves unknown escapes intact;
        # this literal is a SyntaxError on Python 3 ('\u' escape).
        # Verify the intended Windows source path.
        os.system("xcopy \usr\share\pyventa\perfil \"%s\" /i /a /e /k"%self.home)
    # Modal info dialog (Spanish UI text is user-facing and kept as-is).
    msgBox=QtGui.QMessageBox(QtGui.QMessageBox.Information,"Reinicio programado","<h2>La operacion ha tenido exito</h2><br><p>Ahora se recopilaran los datos necesarios para la base de datos, despues de eso el programa se cerrara para establecer las configuraciones.</p>.",QtGui.QMessageBox.Close,self)
    msgBox.exec_()
def main():
    # Run from the directory containing this script so the relative
    # destination path below resolves correctly.
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    # NOTE(review): installs with whatever 'pip' is first on PATH, which
    # may not match the interpreter running this script.
    os.system("pip install certifi")
    print "Copying certifi's cacert.pem"
    # Imported here because certifi may only exist after the install above.
    import certifi
    shutil.copy2(certifi.where(), 'agkyra/resources/cacert.pem')
def main(wrld):
    # Top-level game loop for map number `wrld`: load the map, greet the
    # player on first run, then translate typed commands into movement
    # until the player quits or dies.
    # NOTE(review): indentation reconstructed from a collapsed source --
    # verify branch nesting against the original file.
    i=""
    global world, hit, starting
    world = open("maps/world" + str(wrld)).read().split("\n")
    os.system("clear")
    print ""
    if(starting):
        # First entry: draw the map, show the banner centred on the map
        # width, and ask for the player's name.
        render(world, user.X, user.Y)
        print ' ' * ((len(world[1])/2)-4) + "WELCOME TO"
        print ' ' * ((len(world[1])/2)-1) + "BARF"
        i = " "
        sys.stdout.write("What is your name? > ")
        user.name = raw_input()
        starting = False
    while not i in ["quit", "q", "exit"]:
        hit = False
        sys.stdout.write(user.name + "> ")
        i = raw_input()
        os.system("clear")
        # Movement commands (long form, short form, or WASD).
        if(i in [ "go north", "north", "w" ]):
            goUp()
        if(i in [ "go south", "south", "s" ]):
            goDown()
        if(i in [ "go west", "west", "a" ]):
            goLeft()
        if(i in [ "go east", "east", "d" ]):
            goRight()
        if user.health> 0:
            if not hit:
                # Nothing attacked us this turn: redraw and show stats.
                print ""
                render(world, user.X, user.Y)
                print "you have " + str(user.health) + " health, and " + str(user.strength) + " strength."
        else:
            # Out of health: end the session on the next loop check.
            print "You have died\n\n"
            raw_input()
            i = "q"
def bbnet():
    "Create network and run Buffer Bloat experiment"
    print "starting mininet ...."
    # Seconds to run iperf; keep this very high
    seconds = 3600
    start = time()
    # Reset to known state
    topo = StarTopo(n=args.n, bw_host=args.bw_host,
                    delay='%sms' % args.delay,
                    bw_net=args.bw_net, maxq=args.maxq, diff=args.diff)
    net = Mininet(topo=topo, host=CPULimitedHost, link=TCLink,
                  autoPinCpus=True, controller=OVSController)
    net.start()
    dumpNodeConnections(net.hosts)
    net.pingAll()  # sanity-check connectivity before shaping traffic
    print args.diff
    # Install the tc queue/shaping rules (differentiated or plain).
    if args.diff:
        print "Differentiate Traffic Between iperf and wget"
        os.system("bash tc_cmd_diff.sh")
    else:
        print "exec tc_cmd.sh"
        os.system("bash tc_cmd.sh %s" % args.maxq)
    sleep(2)  # give tc a moment to settle
    ping_latency(net)
    print "Initially, the delay between two hosts is around %dms" % (int(args.delay)*2)
    h2 = net.getNodeByName('h2')
    h1 = net.getNodeByName('h1')
    # h1 serves HTTP; h2 runs the iperf sink the experiment measures.
    h1.cmd('cd ./http/; nohup python2.7 ./webserver.py &')
    h1.cmd('cd ../')
    h2.cmd('iperf -s -w 16m -p 5001 -i 1 > iperf-recv.txt &')
    CLI( net )
    # Tear down the helper processes started above.
    # NOTE(review): net.stop() is never called here -- confirm cleanup
    # is handled by the caller.
    h1.cmd("sudo pkill -9 -f webserver.py")
    h2.cmd("rm -f index.html*")
    Popen("killall -9 cat", shell=True).wait()
def test_init_manifest_packageid(self):
    """`crosswalk-app manifest --package-id` must set xwalk_package_id in
    manifest.json, both on first creation and when updating an existing
    manifest."""
    comm.setUp()
    os.chdir(comm.XwalkPath)
    comm.clear("org.xwalk.test")  # start from a clean project directory
    os.mkdir("org.xwalk.test")
    cmd = (
        comm.HOST_PREFIX + comm.PackTools +
        "crosswalk-app manifest " + comm.XwalkPath +
        "org.xwalk.test --platforms=android --package-id=org.xwalk.test"
    )
    os.system(cmd)
    with open(comm.ConstPath + "/../tools/org.xwalk.test/manifest.json") as json_file:
        data = json.load(json_file)
    # Second run: update the package id of the already-created manifest.
    updatecmd = (
        comm.HOST_PREFIX + comm.PackTools +
        "crosswalk-app manifest " + comm.XwalkPath +
        "org.xwalk.test --platforms=android --package-id=org.test.foo"
    )
    os.system(updatecmd)
    with open(comm.ConstPath + "/../tools/org.xwalk.test/manifest.json") as json_file_update:
        updatedata = json.load(json_file_update)
    comm.clear("org.xwalk.test")
    # assertEquals is a deprecated alias; assertEqual is the supported name.
    self.assertEqual(data["xwalk_package_id"].strip(os.linesep), "org.xwalk.test")
    self.assertEqual(updatedata["xwalk_package_id"].strip(os.linesep), "org.test.foo")
def share(filename):
    # Upload the user's saved creation to Kano World.  Requires internet
    # access and a logged-in account; returns an error string on failure
    # and '' on success.
    # TODO: Move this connection handling into a function in Kano Utils
    import subprocess
    if not is_internet():
        # Open the settings UI on the network page so the user can connect.
        subprocess.call(['sudo', 'kano-settings', '4'])
    if not is_internet():
        return 'You have no internet'
    success, _ = login_using_token()
    if not success:
        os.system('kano-login 3')  # interactive login dialog, then retry
        success, _ = login_using_token()
    if not success:
        return 'Cannot login'
    # The request body carries the creation data; _save writes it to disk
    # and returns the final name/path to upload.
    data = json.loads(request.data)
    filename, filepath = _save(data)
    success, msg = upload_share(filepath, filename, APP_NAME)
    if not success:
        return msg
    increment_app_state_variable_with_dialog(APP_NAME, 'shared', 1)
    return ''  # empty string == success
def save(x, filename, bzip2=False, gzip=False):
    """
    save(x, filename): Saves x to a file.

    Pretty much the only constraint on x is that it have no circular
    references (it must be Python pickle-able).  This uses the pickle
    module, so data you save is *guaranteed* to be readable by future
    versions of Python.

    INPUT:
       x -- almost arbitrary object
       filename -- a string
       bzip2/gzip -- optionally compress the written file in place

    OUTPUT:
       Creates a file named filename, from which the object x can be
       reconstructed.
    """
    # Pickles are binary data: open in 'wb' (the original text-mode 'w'
    # corrupts the stream on Windows).  'with' closes the handle even if
    # dump() raises.
    with open(filename, "wb") as o:
        # Note: don't use protocol 2 here (use 1), since loading doesn't
        # work on my extension types.
        cPickle.dump(x, o, 1)
    if bzip2:
        os.system("bzip2 -f %s" % filename)
    if gzip:
        os.system("gzip -f %s" % filename)
def fRegistroDiario(queDB):
    # Console report of the `registrodiario` table: minutes spent at
    # logic 1 / logic 0 per day, device and digital pin, paginated every
    # 100 rows.  Uses the module-level DB `cursor`.
    # NOTE(review): header strings were reflowed from a collapsed source;
    # the original column alignment may have differed.
    bSalir = False
    while not bSalir:
        os.system('clear')
        print 'NOOXS - Mantenimiento de BD nooxsense'
        print '*** Tabla Registro Diario ***'
        print
        print
        print 'SOLO PARA PINES DIGITALES Y EN MODO OUTPUT'
        print
        print '*******************************************'
        print '| | | | minutos en |'
        print '| Fecha |Disp |PIN | 1 | 0 |'
        print '*******************************************'
        sSQL='SELECT fecha, cod_dispositivo, PIN_num, min1,min0 FROM registrodiario ;'
        cursor.execute(sSQL)
        aFilas=cursor.fetchall()
        x = 0
        dFechaAnterior = aFilas[0][0]
        for aRegistro in aFilas:
            if dFechaAnterior != aRegistro[0]:
                # Blank separator whenever the date changes.
                dFechaAnterior = aRegistro[0]
                print
                print
            print '|{0:10} | {1:5d} | {2:4} | {3:5d}| {4:5d} |'.format(str(aRegistro[0]), aRegistro[1], aRegistro[2],aRegistro[3],aRegistro[4])
            print '–––––––––––––––––––––––––––––––––––––––––––'
            x = x +1
            if x == 100:
                # Pause every 100 rows so the listing doesn't scroll away.
                x= 0
                y=raw_input('pulse una tecla para continuar...')
        print
        print
        iOp = raw_input('Pulsa cualquier tecla para volver ')
        bSalir= True  # single pass: any key returns to the caller
def display(files, card_random, stack_random, wildcard, reverse):
    # Interactive flash-card session over the stacks read from `files`.
    # NOTE(review): loop structure and banner whitespace reconstructed
    # from a collapsed source -- confirm against the original.
    stacks = sfile.read_stack_files(files)
    stacks.sort()
    # Normalise the boolean flags into the mode keywords ContentObject
    # expects (falsy values are passed through unchanged).
    if card_random:
        card_random = 'random'
    if stack_random:
        stack_random = 'randomstack'
    if wildcard:
        wildcard = 'wildcard'
    if reverse:
        reverse = 'reverse'
    session = ContentObject(stacks, card_random, stack_random, wildcard, reverse)
    print """
Type 'Q' to stop anytime, RETURN to continue studying.
"""
    print "Your arguments:"
    print "\n", session.mode
    prompt(session)
    while True:
        # Clear the screen, then show the card front and back in turn,
        # prompting after each (prompt() handles the Q-to-quit input).
        os.system('cls' if os.name == 'nt' else 'clear')
        print session.fetch()
        prompt(session)
        print session.fetch()
        prompt(session)
def clean_ftp_root(logger):
    """Trim the ftp_root folder down to ftp_root_bytes using agestore.exe.

    Best-effort: failures are logged through `logger` and not raised.
    """
    clean_cmd = '%s -s -y -q -size=%s %s' % (
        os.path.join(cwd, 'agestore.exe'), ftp_root_bytes, ftp_root)
    logger.info('Start clean up ftp_root folder with cmd %s' % clean_cmd)
    try:
        os.system(clean_cmd)
    except Exception as exc:
        # The original bare `except:` logged the command instead of the
        # actual error; log the exception and keep the cleanup best-effort.
        logger.error('Clean ftp root failed, error info %s' % exc)
def git_commit():
    ''' git commit command

    Commits the current index with a timestamp message (Chinese
    year/month/day hour/minute/second format).
    '''
    print("git commit")
    t = time.localtime()
    # Pass the captured time explicitly so the message reflects `t`
    # (previously `t` was assigned but never used and strftime re-read
    # the clock internally).
    os.system("git commit -m '%s'" % time.strftime("%Y年%m月%d日%H时%M分%S秒", t))
def speak(voicedata, num):
    # Synthesise `voicedata` (Hindi) to temp/<num+1>.mp3 via gTTS and
    # stage a copy of the alert sound renamed to <num>a.wav.
    print(str((newsnum+1) - num))  # items remaining (newsnum is a module global)
    tts = gTTS(text=voicedata, lang='hi')
    # NOTE(review): absolute Windows paths are hard-coded and mix '/'
    # with '\'; escapes like '\S' only survive because they are not
    # valid escape sequences.  Consider os.path.join + a base constant.
    tts.save("E:\SandBox\Python\eradioJockey/temp/"+str(num+1)+".mp3")
    shutil.copy('E:\SandBox\Python\eradioJockey\\assets\\alert.wav', 'temp')
    # Windows 'ren' renames the copied alert file to '<num>a.wav'.
    newname = "ren E:\SandBox\Python\eradioJockey\\temp\\alert.wav"+' '+str(num)+'a'+'.wav'
    os.system(newname)
def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
    """Given a zipped revision, unzip it and run the test.

    Runs `command` (a template; %a/%p/%s are expanded) `num_runs` times
    and returns the first failing (returncode, stdout, stderr) triple,
    or the first run's triple when every run succeeds.
    """
    print 'Trying revision %s...' % str(revision)
    # Create a temp directory and unzip the revision into it.
    cwd = os.getcwd()
    tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
    UnzipFilenameToDir(zip_file, tempdir)
    # Hack: Chrome OS archives are missing icudtl.dat; try to copy it from
    # the local directory.
    if context.platform == 'chromeos':
        icudtl_path = 'third_party/icu/source/data/in/icudtl.dat'
        if not os.access(icudtl_path, os.F_OK):
            print 'Couldn\'t find: ' + icudtl_path
            sys.exit()
        os.system('cp %s %s/chrome-linux/' % (icudtl_path, tempdir))
    os.chdir(tempdir)
    # Run the build as many times as specified.
    testargs = ['--user-data-dir=%s' % profile] + args
    # The sandbox must be run as root on Official Chrome, so bypass it.
    if ((context.is_official or context.flash_path) and
            context.platform.startswith('linux')):
        testargs.append('--no-sandbox')
    if context.flash_path:
        testargs.append('--ppapi-flash-path=%s' % context.flash_path)
        # We have to pass a large enough Flash version, which currently needs not
        # be correct. Instead of requiring the user of the script to figure out and
        # pass the correct version we just spoof it.
        testargs.append('--ppapi-flash-version=99.9.999.999')
    # Expand the launch-command template: %a -> test args (as separate
    # argv entries), %p -> absolute browser path, %s -> args as a string.
    runcommand = []
    for token in shlex.split(command):
        if token == '%a':
            runcommand.extend(testargs)
        else:
            runcommand.append(
                token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
                replace('%s', ' '.join(testargs)))
    results = []
    for _ in range(num_runs):
        subproc = subprocess.Popen(runcommand,
                                   bufsize=-1,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        (stdout, stderr) = subproc.communicate()
        results.append((subproc.returncode, stdout, stderr))
    os.chdir(cwd)  # leave tempdir so it can be deleted
    try:
        shutil.rmtree(tempdir, True)
    except Exception:
        pass  # best-effort cleanup of the unzipped revision
    # Any failing run wins; otherwise report the first run's result.
    for (returncode, stdout, stderr) in results:
        if returncode:
            return (returncode, stdout, stderr)
    return results[0]
def aria_down(url, path=None, curt_max=1, split=5, proxy='http://127.0.0.1:8087', useragent=None):
    """Download one URL or a list of URLs with aria2c.

    url       -- a single URL string or a list of URL strings
    path      -- optional download directory (aria2c --dir)
    curt_max  -- max concurrent downloads (-j)
    split     -- connections per download (-s)
    proxy     -- HTTP proxy URL, or None to disable
    useragent -- optional User-Agent header
    """
    dl_list = 'dl_list.txt'
    # Start from a fresh URL list file.
    if os.path.exists(dl_list):
        os.remove(dl_list)
    # Write the download list to the txt file (one URL per line).
    try:
        with open(dl_list, 'w') as f:
            if isinstance(url, list):
                for i in url:
                    f.write(i + '\n')
            else:
                f.write(url + '\n')
    except IOError as err:
        print('File error:' + str(err))
    # Build the aria2c command line.
    command = 'aria2c -j' + str(curt_max) + ' -s' + str(split)
    if path is not None:
        command += ' --dir="' + path + '"'
    if proxy is not None:
        command += ' --http-proxy="' + proxy + '"'
    if useragent is not None:
        command += ' --user-agent="' + useragent + '"'
    command += ' -i "' + dl_list + '"'
    print(command)
    # Invoke aria2c to perform the download (failure is not propagated;
    # os.system just returns a non-zero status).
    os.system(command)
def createPartition(self):
    # Pipe an sfdisk script into the target disk: one partition entry
    # (start sector 8) plus three empty slots, confirmed with 'y'.
    script = 'printf "8,\n;0,0\n;0,0\n;0,0\ny\n" | sfdisk -f -uS '
    status = os.system(script + self.disk_path)
    # os.system returns the raw wait status; the high byte holds the
    # shell's exit code.
    return status >> 8
import os
import sys

# Full experiment pipeline, executed sequentially through the shell:
# preprocessing, sequence taggers, aspect extraction over each tagger's
# predictions, then the sentiment classifiers.
PIPELINE = [
    'python processing.py',
    'python runTools.py CRF',
    'python runTools.py MEMM',
    'python runTools.py MaxEnt',
    'python aspectExtraction.py HMM',
    'python aspectExtraction.py baseline',
    'python aspectExtraction.py CRF crf++_pred.txt',
    'python aspectExtraction.py MEMM memm_pred.txt',
    'python aspectExtraction.py MaxEnt maxent_pred.txt',
    'python sentimentPredict.py SVM',
    'python sentimentPredict.py logreg',
    'python sentimentPredict.py nn',
    'python sentimentPredict.py adaboost',
]

for step in PIPELINE:
    os.system(step)
def imap(host_ip,desc):
    # Interactive chooser for nmap IMAP NSE scripts (imap-brute,
    # imap-capabilities, imap-ntlm-info).  Each branch prints the
    # script's reference text, asks for ports / extra args / an output
    # file name, shells out to nmap, then re-enters this menu.
    # NOTE(review): user-entered values are concatenated into
    # subprocess.call(..., shell=True) commands -- shell-injection risk;
    # a list argv with shell=False would be safer.
    # NOTE(review): the long banner strings were reflowed from a
    # collapsed source; exact original line breaks may differ.
    import sys
    import os
    import subprocess
    from ANSE import exit_msg
    desc = desc          # no-op rebindings kept from the original
    host_ip = host_ip
    os.system('clear')
    print("""\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m\033[94m
+Choose your NSE script for IMAP servers:
[1] imap-brute
[2] imap-capabilities
[3] imap-ntlm-info
[0] back\033[0m\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m""")
    option=input("Enter your NSE script no:")
    os.system('clear')
    if option == "1":
        # --- imap-brute: password brute-forcing (intrusive) -----------
        print("""\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m\033[94m
File imap-brute
Script types: portrule
Categories: brute, intrusive
Download: http://nmap.org/svn/scripts/imap-brute.nse
User Summary
Performs brute force password auditing against IMAP servers using either LOGIN, PLAIN, CRAM-MD5, DIGEST-MD5 or NTLM authentication.
Script Arguments
imap-brute.auth authentication mechanism to use LOGIN, PLAIN, CRAM-MD5, DIGEST-MD5 or NTLM
passdb, unpwdb.passlimit, unpwdb.timelimit, unpwdb.userlimit, userdb See the documentation for the unpwdb library.
creds.[service], creds.global See the documentation for the creds library.
brute.credfile, brute.delay, brute.emptypass, brute.firstonly, brute.guesses, brute.mode, brute.passonly, brute.retries, brute.threads, brute.unique, brute.useraspass See the documentation for the brute library.
smbdomain, smbhash, smbnoguest, smbpassword, smbtype, smbusername See the documentation for the smbauth library.
Example Usage
nmap -p 143,993 --script imap-brute <host>
Default Option Used in script:
nmap -p 143,993 --script [script name] [arg] [host_ip] -oN [file_name]\033[0m\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m""")
        port_select=input("Set Default option-port-143,993[Y/N]:")
        if port_select == "Y" or port_select == "y":
            # Default ports 143 (imap) and 993 (imaps).
            default_port="143,993"
            arg=input("Enter argument if you need or press just enter:")
            file_name=input("Enter your file name to save:")
            output="-oN"+' '+"output/"+host_ip+"-"+file_name+".txt"
            subprocess.call('nmap --script imap-brute -p '+' '+default_port+' '+arg+' '+host_ip+' '+output,shell=True)
            imap(host_ip,desc)
        elif port_select == "N" or port_select == "n":
            custom_port=input("Enter your Custom port:")
            arg=input("Enter argument if you need or press just enter:")
            file_name=input("Enter your file name to save:")
            output="-oN"+' '+"output/"+host_ip+"-"+file_name+".txt"
            subprocess.call('nmap --script imap-brute -p '+' '+custom_port+' '+arg+' '+host_ip+' '+output,shell=True)
            imap(host_ip,desc)
        else:
            # Anything else: show the main description and quit.
            os.system('clear')
            print(desc)
            sys.exit(exit_msg)
    elif option == "2":
        # --- imap-capabilities: enumerate server capabilities (safe) ---
        print("""\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m\033[94m
File imap-capabilities
Script types: portrule
Categories: default, safe
Download: http://nmap.org/svn/scripts/imap-capabilities.nse
User Summary
Retrieves IMAP email server capabilities. IMAP4rev1 capabilities are defined in RFC 3501. The CAPABILITY command allows a client to ask a server what commands it supports and possibly any site-specific policy.
Script Arguments
smbdomain, smbhash, smbnoguest, smbpassword, smbtype, smbusername See the documentation for the smbauth library.
Example Usage
nmap -sV -sC <target>
Default Option Used in script:
nmap -sV --script [script name] [arg] [host_ip] -oN [file_name]\033[0m\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m""")
        port_select=input("Set Default option-no-port[Y/N]:")
        if port_select == "Y" or port_select == "y":
            # Default: no -p flag, rely on -sV service detection.
            arg=input("Enter argument if you need or press just enter:")
            file_name=input("Enter your file name to save:")
            output="-oN"+' '+"output/"+host_ip+"-"+file_name+".txt"
            subprocess.call('nmap -sV --script imap-capabilities'+' '+arg+' '+host_ip+' '+output,shell=True)
            imap(host_ip,desc)
        elif port_select == "N" or port_select == "n":
            custom_port=input("Enter your Custom port:")
            arg=input("Enter argument if you need or press just enter:")
            file_name=input("Enter your file name to save:")
            output="-oN"+' '+"output/"+host_ip+"-"+file_name+".txt"
            subprocess.call('nmap -sV --script imap-capabilities -p '+' '+custom_port+' '+arg+' '+host_ip+' '+output,shell=True)
            imap(host_ip,desc)
        else:
            os.system('clear')
            print(desc)
            sys.exit(exit_msg)
    elif option == "3":
        # --- imap-ntlm-info: NTLM info disclosure (safe discovery) -----
        print("""\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m\033[94m
File imap-ntlm-info
Script types: portrule
Categories: default, discovery, safe
Download: http://nmap.org/svn/scripts/imap-ntlm-info.nse
User Summary
This script enumerates information from remote IMAP services with NTLM authentication enabled. Sending an IMAP NTLM authentication request with null credentials will cause the remote service to respond with a NTLMSSP message disclosing information to include NetBIOS, DNS, and OS build version.
Script Arguments
mssql.domain, mssql.instance-all, mssql.instance-name, mssql.instance-port, mssql.password, mssql.protocol, mssql.scanned-ports-only, mssql.timeout, mssql.username See the documentation for the mssql library.
smtp.domain See the documentation for the smtp library.
randomseed, smbbasic, smbport, smbsign See the documentation for the smb library.
smbdomain, smbhash, smbnoguest, smbpassword, smbtype, smbusername See the documentation for the smbauth library.
Example Usage
nmap -p 143,993 --script imap-ntlm-info <target>ost>
Default Option Used in script:
nmap -p 143,993 --script [script name] [arg] [host_ip] -oN [file_name]\033[0m\033[37m
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\033[0m""")
        port_select=input("Set Default option-port-143,993[Y/N]:")
        if port_select == "Y" or port_select == "y":
            default_port="143,993"
            arg=input("Enter argument if you need or press just enter:")
            file_name=input("Enter your file name to save:")
            output="-oN"+' '+"output/"+host_ip+"-"+file_name+".txt"
            subprocess.call('nmap --script imap-ntlm-info -p '+' '+default_port+' '+arg+' '+host_ip+' '+output,shell=True)
            imap(host_ip,desc)
        elif port_select == "N" or port_select == "n":
            custom_port=input("Enter your Custom port:")
            arg=input("Enter argument if you need or press just enter:")
            file_name=input("Enter your file name to save:")
            output="-oN"+' '+"output/"+host_ip+"-"+file_name+".txt"
            subprocess.call('nmap --script imap-ntlm-info -p '+' '+custom_port+' '+arg+' '+host_ip+' '+output,shell=True)
            imap(host_ip,desc)
        else:
            os.system('clear')
            print(desc)
            sys.exit(exit_msg)
    elif option == "0":
        # Back to the per-service scan menu.
        from ANSE import service_scan
        service_scan(host_ip, desc)
    else:
        # Unknown option: show the main description and quit.
        os.system('clear')
        print(desc)
        sys.exit(exit_msg)
# Attach the point symbolizer to the rule, the rule to the style, and
# register the style on the map (r, s, pointStyle and map come from the
# preceding part of this script, outside this view).
r.symbols.append(pointStyle)
s.rules.append(r)
map.append_style("mapStyle", s)
# Adding point layer
layerPoint = mapnik.Layer("pointLayer")
layerPoint.datasource = mapnik.Shapefile(file=os.path.join("data", "map.shp"))
layerPoint.styles.append("mapStyle")
# Adding polygon
layerPoly = mapnik.Layer("polyLayer")
layerPoly.datasource = mapnik.Shapefile(
    file=os.path.join("data", "ne_110m_land.shp"))
layerPoly.styles.append("mapStyle")
# Add layers to map -- polygons first so the points render on top.
map.layers.append(layerPoly)
map.layers.append(layerPoint)
# Set boundaries for the Netherlands
boundsLL = (5, 51, 7, 54.5)  #(minx, miny, maxx,maxy)
map.zoom_to_box(mapnik.Box2d(*boundsLL))  # zoom to bbox
mapnik.render_to_file(map, os.path.join("figs", "map3.png"), "png")
print "All done - check content"
# Deleting the created shapefiles (map.shp etc.)
os.system('./clean.sh')
# statuscode, statusmessage, header = webservice.getreply() #print "Response: ", statuscode, statusmessage #print "headers: ", header # # res = webservice.getfile().read() res = webservice.getresponse().read() # print res #exit(1) # Parse the response into the DOM certconfig = parseString(res) fileName = certconfig.getElementsByTagName('Filename')[0] htmlFile.write(' <td>' + getText(fileName.childNodes) + '</td>') htmlFile.write(' </tr>') # print HTML tail htmlFile.write('</table>') htmlFile.write('</body></html') #Close the HTML File htmlFile.close() # Launch the file in a browser print "Report generation complete, launching browser to view report file " + options.outputFile os.system("start " + options.outputFile)
def main():
    """roomservice entry point.

    Resolve the device tree for the product named in sys.argv[1]: in
    dependencies-only mode just fetch the dependencies recorded in the local
    manifest; otherwise search the org's GitHub account for a matching
    ``device_<vendor>_<device>`` repository, add it to the manifest, sync
    it, and fetch its dependencies.
    """
    global DEBUG
    try:
        # argv[2] requests dependencies-only mode.  NOTE(review): argv
        # entries are strings, so the integer 1 in this list can never
        # match -- presumably only 'true' was ever intended.
        depsonly = bool(sys.argv[2] in ['true', 1])
    except IndexError:
        depsonly = False

    if os.getenv('ROOMSERVICE_DEBUG'):
        DEBUG = True

    product = sys.argv[1]
    # "vendor_device" -> "device"; with no underscore, find() yields -1 and
    # the slice already covers the whole string, so `or product` is a no-op
    # safety net.
    device = product[product.find("_") + 1:] or product

    if depsonly:
        repo_path = get_from_manifest(device)
        if repo_path:
            fetch_dependencies(repo_path)
        else:
            # Fix: the two adjacent literals previously concatenated to
            # "anon-existing"; add the missing space.
            print("Trying dependencies-only mode on a "
                  "non-existing device tree?")
        sys.exit()

    print("Device {0} not found. Attempting to retrieve device repository from "
          "{1} Github (http://github.com/{1}).".format(device, org_display))

    githubreq = urllib.request.Request(
        "https://api.github.com/search/repositories?"
        "q={0}+user:{1}+in:name+fork:true".format(device, org_display))
    add_auth(githubreq)

    repositories = []
    try:
        result = json.loads(urllib.request.urlopen(githubreq).read().decode())
    except urllib.error.URLError:
        print("Failed to search GitHub")
        sys.exit()
    except ValueError:
        print("Failed to parse return data from GitHub")
        sys.exit()
    for res in result.get('items', []):
        repositories.append(res)

    for repository in repositories:
        repo_name = repository['name']
        # Only accept repos named exactly device_*_<device>.
        if not (repo_name.startswith("device_") and
                repo_name.endswith("_" + device)):
            continue
        print("Found repository: %s" % repository['name'])

        # NOTE(review): oddly-named helper -- presumably detects the
        # branch/revision to fall back to; confirm its definition elsewhere
        # in this file before renaming.
        fallback_branch = detect_revissquid(repository)

        # Strip the "device_" prefix (7 chars) and the "_<device>" suffix
        # to recover the vendor/manufacturer name.
        manufacturer = repo_name[7:-(len(device) + 1)]
        repo_path = "device/%s/%s" % (manufacturer, device)
        adding = [{'repository': repo_name, 'target_path': repo_path}]

        add_to_manifest(adding, fallback_branch)

        print("Syncing repository to retrieve project.")
        os.system('repo sync --force-sync --no-tags --current-branch --no-clone-bundle %s' % repo_path)
        print("Repository synced!")

        fetch_dependencies(repo_path, fallback_branch)
        print("Done")
        sys.exit()

    print("Repository for %s not found in the %s Github repository list." %
          (device, org_display))
    print("If this is in error, you may need to manually add it to your "
          "%s" % custom_local_manifest)
# Open the TensorBoard UI in the user's default browser, then launch the
# (blocking) TensorBoard server over the current directory's logs.
import os
import sys
import webbrowser

# TensorBoard's default serving address.
url = 'http://localhost:6006/'

# new=0: reuse an existing browser window where possible.
webbrowser.open(url, new=0, autoraise=True)

# Blocks until TensorBoard is terminated; the browser tab (opened above)
# will connect once the server is up.
os.system('tensorboard --logdir=./')
args = parser.parse_args() assert os.path.isdir(str(args.kenlm)), "kenlm directory not found - '{d}'".format( d=args.kenlm ) lm_dir = os.path.join(args.dst, "lm") os.makedirs(lm_dir, exist_ok=True) # download LM sys.stdout.write("\nDownloading Librispeech LM - {lm}...\n\n".format(lm=lm)) sys.stdout.flush() arpa_file = os.path.join(lm_dir, lm + ".arpa") os.system( "wget -O - http://www.openslr.org/resources/11/{lm}.arpa.gz | " "gunzip -c > {o}".format(lm=lm, o=arpa_file) ) # temporary arpa file in lowercase sys.stdout.write("\nSaving ARPA LM file in binary format ...\n\n") sys.stdout.flush() os.system( "cat {arpa} | tr '[:upper:]' '[:lower:]' > {arpa}.tmp".format(arpa=arpa_file) ) binary = os.path.join(args.kenlm, "build", "bin", "build_binary") os.system("{bin} {i}.tmp {o}".format(bin=binary, i=arpa_file, o=arpa_file + ".bin")) os.remove("{arpa}.tmp".format(arpa=arpa_file)) # write words to lexicon.txt file dict_file = os.path.join(lm_dir, "lexicon.txt") sys.stdout.write("\nWriting Lexicon file - {d}...\n\n".format(d=dict_file))
# ================================= SELF-FILLED SECTION ===================================== # The name of the running example file. (without training "_torch.py" ) example_name = "bert" # The training arguments # 1. Usually we set the epochs:2, batch_size:2, max_train_steps_per_epoch:10 # 2. The expression for the following setup is "--epochs 2 --batch_size 8 --max_train_steps_per_epoch 10" # 3. The syntax of this expression is different from run_notebook.py train_info = "--epochs 2 --batch_size 4 --max_train_steps_per_epoch 10 --max_eval_steps_per_epoch 10" # Do you want to run "fastestimator test"? (bool) need_test = False # ============================================================================================== stderr_file = os.path.abspath( os.path.join(__file__, "..", "run_torch_stderr.txt")) if os.path.exists(stderr_file): os.remove(stderr_file) source_dir = get_apphub_source_dir_path(__file__) py_file = os.path.join(source_dir, example_name + "_torch.py") result = os.system("fastestimator train {} {} 2>> {}".format( py_file, train_info, stderr_file)) if need_test: result += os.system("fastestimator test {} {} 2>> {}".format( py_file, train_info, stderr_file)) if result: raise ValueError("{} fail".format(py_file))
def raise_alarm():
    """Sound a short alarm: play a 1-second 440 Hz sine tone via SoX `play`."""
    import os
    tone_seconds = 1  # second
    tone_hz = 440  # Hz
    sox_cmd = ('play --no-show-progress --null --channels 1 synth %s sine %f'
               % (tone_seconds, tone_hz))
    os.system(sox_cmd)
def cls():
    """Clear the terminal screen.

    Uses the Windows ``cls`` command on NT systems and ``clear`` elsewhere;
    the original always ran ``cls``, which fails on POSIX terminals.
    Returns the exit status reported by os.system().
    """
    return os.system('cls' if os.name == 'nt' else 'clear')


# Clear the screen once at startup, as before.
cls()
import pymysql as MySQLdb
import os
from time import sleep

# For every video row already marked as scraped ('Veri çekildi' = "data
# fetched"), run the hourly-analysis script once, pausing 30s between runs.
db = MySQLdb.connect("ip", "user", "password", "db_name")
try:
    cursor = db.cursor()
    query = "SELECT * FROM yenivideo where durum= 'Veri çekildi' "
    cursor.execute(query)
    # One analysis pass per matching row; the row contents themselves are
    # not used, only the row count matters.
    for _ in cursor.fetchall():
        os.system("python3 /home/yonetici/verianaliz/saatlik_analiz.py")
        sleep(30)
finally:
    # The original script leaked the connection; always release it.
    cursor.close()
    db.close()
def main():
    """Manager entry point: set device state, apply environment-driven
    process filters, seed default params, show a loading spinner while
    preparing, then run the manager loop and clean up (or uninstall)."""
    # the flippening!  Forces the Android display rotation setting
    # (user_rotation=1) via the `content` shell tool.
    os.system('LD_LIBRARY_PATH="" content insert --uri content://settings/system --bind name:s:user_rotation --bind value:i:1')

    # Environment flags prune entries from the managed-process table before
    # anything is started.
    if os.getenv("NOLOG") is not None:
        del managed_processes['loggerd']
        del managed_processes['tombstoned']
    if os.getenv("NOUPLOAD") is not None:
        del managed_processes['uploader']
    if os.getenv("NOVISION") is not None:
        del managed_processes['visiond']
    if os.getenv("LEAN") is not None:
        # Minimal mode: drop all logging/upload related daemons.
        del managed_processes['uploader']
        del managed_processes['loggerd']
        del managed_processes['logmessaged']
        del managed_processes['logcatd']
        del managed_processes['tombstoned']
        del managed_processes['proclogd']
    if os.getenv("NOCONTROL") is not None:
        del managed_processes['controlsd']
        del managed_processes['radard']

    # support additional internal only extensions
    try:
        import selfdrive.manager_extensions
        selfdrive.manager_extensions.register(register_managed_process)
    except ImportError:
        pass

    params = Params()
    params.manager_start()

    # set unset params -- first-boot defaults; existing values are preserved.
    if params.get("IsMetric") is None:
        params.put("IsMetric", "0")
    if params.get("RecordFront") is None:
        params.put("RecordFront", "0")
    if params.get("IsFcwEnabled") is None:
        params.put("IsFcwEnabled", "1")
    if params.get("HasAcceptedTerms") is None:
        params.put("HasAcceptedTerms", "0")
    if params.get("IsUploadVideoOverCellularEnabled") is None:
        params.put("IsUploadVideoOverCellularEnabled", "1")
    if params.get("IsDriverMonitoringEnabled") is None:
        params.put("IsDriverMonitoringEnabled", "1")
    if params.get("IsGeofenceEnabled") is None:
        params.put("IsGeofenceEnabled", "-1")
    if params.get("SpeedLimitOffset") is None:
        params.put("SpeedLimitOffset", "0")
    if params.get("LongitudinalControl") is None:
        params.put("LongitudinalControl", "0")
    if params.get("LimitSetSpeed") is None:
        params.put("LimitSetSpeed", "0")

    # is this chffrplus?  PASSIVE env overrides the stored Passive param.
    if os.getenv("PASSIVE") is not None:
        params.put("Passive", str(int(os.getenv("PASSIVE"))))

    if params.get("Passive") is None:
        raise Exception("Passive must be set to continue")

    # put something on screen while we set things up
    if os.getenv("PREPAREONLY") is not None:
        spinner_proc = None
    else:
        spinner_text = "chffrplus" if params.get("Passive") == "1" else "openpilot"
        spinner_proc = subprocess.Popen(
            ["./spinner", "loading %s" % spinner_text],
            cwd=os.path.join(BASEDIR, "selfdrive", "ui", "spinner"),
            close_fds=True)
    try:
        manager_update()
        manager_init()
        manager_prepare()
    finally:
        # Always dismiss the spinner, even if preparation failed.
        if spinner_proc:
            spinner_proc.terminate()

    if os.getenv("PREPAREONLY") is not None:
        return

    # SystemExit on sigterm
    signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(1))

    try:
        manager_thread()
    except Exception:
        traceback.print_exc()
        crash.capture_exception()
    finally:
        # Tear down all spawned processes no matter how the loop exited.
        cleanup_all_processes(None, None)

    # Uninstall was requested from the UI while we were running.
    if params.get("DoUninstall") == "1":
        uninstall()
import os for i in range(0, 1): dataset_list = [ "large_gowallah", "large_graph_youtube", "large_twt_snap" ] ##,"large_graph_youtube"]#""large_gowallah"]#,""]#:##],"large_graph_youtube"]#["large_graph_youtube"]#,"large_graph_youtube"] model_path = open('bestRlModel.txt', 'r').read() for dataset in dataset_list: # out_rl_time_bestModel_performance=open('GCOMB_RESULTS/{}'.format(dataset) + '.txt', 'w') # for line in read_rl_time_bestModel[-2:-1]: #print(time, model_path) for sampling_neighborhood in [ 0.75 ]: ##, 0.9,0.1, 0.3, 0.5,0.7]:#0.1, 0.3, 0.5, 0.7, for budget in [ 15, 25, 50, 100, 150, 200 ]: # [100]:#[ 15, 25, 50,100,150,200]:#150,200]:#15,25,50,100, graph_path = "./GraphSAGE-master/real_data/{}/test/_bp".format( dataset) command = "python get_output.py " + graph_path + " " + model_path.replace( "\n", "") + " " + str(budget) + " " + str( sampling_neighborhood) + " None" print(command) # command="python get_output.py " + graph_path os.system(command) # # command = "python get_output.py ./GraphSAGE-master/real_data/large_graph_youtube/test/_bp"
def manager_update():
    """Install the bundled VPN (when present) and refresh the APKs."""
    vpn_dir = os.path.join(BASEDIR, "vpn")
    if os.path.exists(vpn_dir):
        cloudlog.info("installing vpn")
        os.system(os.path.join(vpn_dir, "install.sh"))
    update_apks()
def uninstall():
    """Schedule a factory wipe and reboot the device into recovery."""
    cloudlog.warning("uninstalling")
    # Recovery reads this command file on boot and performs the data wipe.
    with open('/cache/recovery/command', 'w') as f:
        f.write('--wipe_data\n')
    # IPowerManager.reboot(confirm=false, reason="recovery", wait=true)
    os.system("service call power 16 i32 0 s16 recovery i32 1")
for section in range(nSection): begin = section*maxFiles end = min(begin + maxFiles, nFiles) FileNames = files[begin:end] FileNamesStr = " ".join(str(i) for i in FileNames) print "@@ Writing run script..." jds = "%ssubmit.jds" %Dirname fout = open(jds, "w") print>>fout, "# Job description file for condor job" print>>fout, """executable = /cms/scratch/jdj0715/nanoAOD/src/nano/analysis/test/topmass/batch_Top/ttbar.sh universe = vanilla log = condor.log getenv = True should_transfer_files = YES when_to_transfer_output = ON_EXIT output = %s/job_%d.log error = %s/job_%d.err transfer_input_files = NanoAOD queue""" % (Dirname, count, Dirname, count) fout.close() count += 1 #jobName = analysis+'_'+datasetName subBatch = "condor_submit -batch-name %s -append 'arguments=%s %s' %s" %(datasetName ,datasetName,FileNamesStr, jds) #print createbatch print subBatch os.system(subBatch)
os._exit(os.wait()[1]) if __name__ == "__main__": neos_update_required = os.path.isfile("/init.qcom.rc") \ and (not os.path.isfile("/VERSION") or int(open("/VERSION").read()) < 8) if neos_update_required: # update continue.sh before updating NEOS if os.path.isfile(os.path.join(BASEDIR, "scripts", "continue.sh")): from shutil import copyfile copyfile(os.path.join(BASEDIR, "scripts", "continue.sh"), "/data/data/com.termux/files/continue.sh") # run the updater print("Starting NEOS updater") subprocess.check_call(["git", "clean", "-xdf"], cwd=BASEDIR) os.system(os.path.join(BASEDIR, "installer", "updater", "updater")) raise Exception("NEOS outdated") elif os.path.isdir("/data/neoupdate"): from shutil import rmtree rmtree("/data/neoupdate") unblock_stdout() import glob import shutil import hashlib import importlib import subprocess import traceback from multiprocessing import Process
import sys
import os

# Choose the requirements file matching the running interpreter version.
if sys.version_info >= (3, 3):
    requirements = "py3k-requirements.txt"
elif (2, 6) <= sys.version_info < (3, 0):
    requirements = "requirements.txt"
else:
    raise AssertionError("only support 2.6, 2.7, 3.3")

# Passing "dev" as the first CLI argument selects the dev- variant.
is_dev = len(sys.argv) > 1 and sys.argv[1] == "dev"

if __name__ == "__main__":
    if is_dev:
        requirements = "dev-%s" % requirements
    os.system("pip install -r %s --use-mirrors" % requirements)
from selenium import webdriver #from selenium.webdriver.common.keys import Keys from selenium.webdriver.chrome.options import Options import os import pandas as pd #hide browser op = Options() op.add_argument('--headless') browser = webdriver.Chrome(options=op) #url = 'http://140.138.155.188:888/login.aspx' testUrl - 1 while True: os.system('cls') print("Welcome to TRA Inquiry System\n") choice = eval(input("1.查詢\n2.退出\n?")) if choice == 2: break url = 'https://tip.railway.gov.tw/tra-tip-web/tip/tip001/tip112/gobytime' #testUrl - 2 browser.get(url) #date date = input("\n出發日期(xxxx/xx/xx): ") browser.find_element_by_id('rideDate').send_keys(date) #startStation arrival = input("\n出發站: ") browser.find_element_by_id('startStation').send_keys(arrival) #endStation
return img def make_datum(img, label): return caffe.proto.caffe_pb2.Datum( channels=3, width=IMAGE_WIDTH, height=IMAGE_HEIGHT, label=label, data=np.rollaxis(img, axis=2, start=0).tostring()) # If already exsit previous lmdb folders, remove them os.system('rm -rf ' + train_lmdb) os.system('rm -rf ' + validation_lmdb) ''' # for python 3.5+ # there is a quicker way by using glob # you need to add 'import glob' to do this train_data = [img for img in glob.glob("root_dir/train/*/*.jpg")] validation_data = [img for img in glob.glob("root_dir/validation/*/*.jpg")] ''' # univarsal way # a list to store all images' path i = 0 train_data = [] for root, dirnames, filenames in os.walk(JPG_train_path): i = i + 1
def clear_screen():
    """Clear the terminal: `cls` on Windows (NT), `clear` everywhere else."""
    command = 'cls' if os.name == "nt" else 'clear'
    os.system(command)
exit(f"{W}[{B}*{W}] {R}" + js['message'] + W) else: exit(js) #Spammer().Payu('083113226393','haha') def update(): r = requests.get('http://auxcrewtbdrpg.com/update.txt') if '1.3' in str(r.text): return r.text.replace('\\033', '\033').replace('\\n', '\n') else: return '' while True: os.system('clear') try: print(f''' █████████ █▄█████▄█ █▼▼▼▼▼ _-_-__--_-__-_-_--__- █ _-_-_-_-_-_-_-_-_-_- █▲▲▲▲▲ _-_-__-_-_-_-_-_-_-_- █████████ __██____██___ ╔════════════════╕ ║ WELCOME │ ╠════════════════╡ ║ SPAM SMS │
def log_debug(msg, filename=str(FILEPATH + 'debug.log')):
    """Append *msg* (plus a trailing newline) to the debug log file.

    The original shelled out to ``echo "msg" >> file`` via os.system, which
    breaks -- or executes embedded commands -- when *msg* contains double
    quotes, backticks or ``$``.  Writing the file directly is both safe and
    faster.
    """
    with open(filename, 'a') as log_file:
        log_file.write('{}\n'.format(msg))
#!/usr/bin/python import commands import os import webbrowser os.system("sudo cat /etc/ansible/awsapache | cut -s -d' ' -f1 > /etc/ansible/apacheipread.txt") lines = [line.rstrip('\n') for line in open('/etc/ansible/apacheipread.txt')] for f in lines: url = 'https://'+f+'/adhoc.html' b = webbrowser.get('firefox') b.open_new_tab(url)
def get_count(flag):
    """Feed *flag* on stdin to ~/baleful_unpack under the Pin inscount tool,
    then return the executed-instruction count parsed from inscount.out."""
    pin_cmd = ('echo "%s" | ../../../pin -t obj-ia32/inscount0.so '
               '-o inscount.out -- ~/baleful_unpack' % flag)
    os.system(pin_cmd)
    # inscount.out has the form "Count <n>"; the second field is the count.
    with open("inscount.out") as result_file:
        return int(result_file.read().split(" ")[1])
def compile(self, file_name): print("The_Coolest v1.0") print("Copyright (c) 2019: Lauren, Marcel, Arlenis") file = None try: file = open(file_name) except: print("(0, 0) - CompilerError: No existe el fichero {}".format( file_name)) exit(0) data = file.readlines() data = "".join(data) file.close() file_name = file_name[:-3] + '.mips' compiler = Compiler() compiler.tokenize(data) if not len(compiler.errors): ast = compiler.parse(data) if not len(compiler.errors): context_ = context.Context() pvisitor = PrintVisitor() pvisitor_cil = PrintVisitorCIL() # print("----------------AST----------------") # print(pvisitor.visit(ast, -1)) res = compiler._checksemantic(ast, context_) # print("----------------Check Semantic----------------") # print(pvisitor.visit(ast, -1)) if not len(compiler.errors) and not (res == "ERROR"): # print("----------------Ordenado----------------") # print(pvisitor.visit(ast, -1)) cil_types = compiler._ciltypes(ast, context_.hierarchy) # print("----------------CIL Types----------------") # print(pvisitor_cil.visit(cil_types, -1)) # print("----------------CIL----------------") cil_ast = compiler._cooltocil(ast, cil_types) # print(pvisitor_cil.visit(cil_ast, -1)) myfile = open(file_name, 'w') mips = VisitorMIPS() res = mips.visit(cil_ast) myfile.write(res) myfile.close() else: # print("------------Errors---------------") # print("Errores sem'anticos: ") # myfile.write("Errores sem'anticos: ") for i, error in enumerate(compiler.errors): # error = "Error {}: {} ".format(i+1, error) print(error) # myfile.write(error) exit(0) else: # print("------------Errors---------------") # myfile.write("Errores sint'acticos: ") # print("Errores sint'acticos: ") for i, error in enumerate(compiler.errors): # error = "Error {}: {} ".format(i+1, error) print(error) # myfile.write(error) exit(0) else: # print("------------Errors---------------") # myfile.write("Errores sint'acticos: ") # print("Errores sint'acticos: ") for i, error in enumerate(compiler.errors): # error = 
"Error {}: {} ".format(i+1, error) print(error) # myfile.write(error) exit(0) os.system('spim -f ' + file_name)
def main(window): global logo_img,beer_menu_rows_fit_error,food_menu_rows_fit_error,merch_menu_rows_fit_error,BEERS_LINE_SPACE,HEAPS_LINE_SPACE,MERCH_LINE_SPACE,logo_x,logo_end_x,menu_state,ls,rs,ts,bs,tl,tr,bl,br,MENU_CHANGE_PERIOD #, menu_state_timestamp curses.start_color() curses.init_pair(GREEN, curses.COLOR_GREEN, curses.COLOR_BLACK) curses.curs_set(0) menu_opts = menu_dict() # prompt_len = len(prompt_str) toggle_cursor = False toggle_char = '_' scroll_cnt = 0 scroll_speed = 7 #5 logo_img = get_art(logo_font, logo_text) logo_len = longest_str(logo_img) if CENTER_LOGO: logo_x = (max_dimensions(window)[1] - logo_len) // 2 else: logo_x = max_dimensions(window)[1] - logo_len - 3 #4) #10) logo_end_x = logo_x + logo_len #longest_str(logo_img) ls=curses.ACS_PLUS #LTEE rs=curses.ACS_PLUS #RTEE ts=curses.ACS_HLINE #curses.ACS_PLMINUS bs=curses.ACS_HLINE #'=' tl=curses.ACS_ULCORNER tr=curses.ACS_URCORNER bl=curses.ACS_SSBB br=curses.ACS_LRCORNER if DEBUG_BEER: if os.path.exists(beer_debug_file): os.system(f'rm {beer_debug_file}') beer_dict_out = json.dumps(menu_opts[0], indent=4) if any(menu_opts[1].values()): beer_dict_out += '\n' + json.dumps(menu_opts[1], indent=4) os.system(f'echo {beer_dict_out} > {beer_debug_file}') log_debug(('_'*24)+'\n\n', beer_debug_file) if DEBUG_HEAPS: if os.path.exists(food_debug_file): os.system(f'rm {food_debug_file}') heap_dict_out = json.dumps(menu_opts[2], indent=4) os.system(f'echo {heap_dict_out} > {food_debug_file}') log_debug(('_'*24)+'\n\n', food_debug_file) if DEBUG_MERCH: if os.path.exists(merch_debug_file): os.system(f'rm {merch_debug_file}') merch_dict_out = json.dumps(menu_opts[-1], indent=4) os.system(f'echo {merch_dict_out} > {merch_debug_file}') log_debug(('_'*24)+'\n\n', merch_debug_file) EXCLUSIVE_DEBUG = DEBUG_BEER ^ DEBUG_HEAPS ^ DEBUG_MERCH menu_state_timestamp = time.time() while True: scroll_cnt %= 1000000 window.erase() # window.addstr(2,2,str(scroll_cnt),curses.color_pair(WHITE)) # 
window.addstr(2,2,str(max_dimensions(window)[0]),curses.color_pair(WHITE)) # window.addstr(3,2,str(max_dimensions(window)[1]),curses.color_pair(WHITE)) if scroll_cnt % scroll_speed != 0: draw_logo(window, logo_img, attrs=[curses.A_BOLD]) #, curses.A_UNDERLINE]) #, curses.A_REVERSE]) #, curses.A_BLINK]) if menu_state == MERCH: MENU_CHANGE_PERIOD = 10 elif menu_state == HEAPS: MENU_CHANGE_PERIOD = 15 else: MENU_CHANGE_PERIOD = 30 if time.time() - menu_state_timestamp >= MENU_CHANGE_PERIOD: # if time.time() - menu_state_timestamp >= 60: # global logo_state # logo_state = LOGO_WRAP_LEFT menu_state_timestamp = time.time() menu_state = (menu_state + 1) % len(menu_state_list) # state = menu_state_list[menu_state] if EXCLUSIVE_DEBUG: if DEBUG_BEER: if menu_state != BEERS1 or menu_state != BEERS2: menu_state = BEERS1 elif DEBUG_HEAPS: menu_state = HEAPS else: menu_state = MERCH menu = menu_opts[menu_state] if not any(menu.values()): # or menu_state == MERCH: menu_state_timestamp = time.time() menu_state = (menu_state + 1) % len(menu_state_list) menu = menu_opts[menu_state] draw_menu(window, menu) if beer_menu_rows_fit_error: # TODO: also add in a check for num items in list (if over 6 then either decrease line space or add to BEERS2) BEERS_LINE_SPACE -= 1 beer_menu_rows_fit_error = False if food_menu_rows_fit_error: HEAPS_LINE_SPACE -= 1 food_menu_rows_fit_error = False if merch_menu_rows_fit_error: MERCH_LINE_SPACE -= 1 merch_menu_rows_fit_error = False if LOGO_SCROLL: if scroll_cnt % scroll_speed == 0: scroll_logo(window, logo_img) prompt_dir = 'merch' if menu_state == MERCH else 'food' if menu_state == HEAPS else 'beer' window.addstr(0,1,prompt_str.format(prompt_dir)) if scroll_cnt % 2 == 0: toggle_char = '_' if toggle_cursor else ' ' toggle_cursor = not toggle_cursor window.addch(0,1+len(prompt_str.format(prompt_dir)),toggle_char, curses.color_pair(WHITE)) window.refresh() time.sleep(LOOP_SLEEP) scroll_cnt += 1
import sys
import os
import time
import shlex

# Back up the path given as argv[1] into ./backup/<timestamp>.zip.

# Check argument
if len(sys.argv) == 1:
    print('Please input a source path')
    exit(1)

# Set a source and a destination
source = str(sys.argv[1])
target_dir = os.getcwd() + '/backup'
target = target_dir + os.sep + time.strftime('%Y%m%d_%H%M%S') + '.zip'

# Create a folder if the destination does not exist
if not os.path.exists(target_dir):
    os.mkdir(target_dir)

# Build the zip command.  shlex.quote protects paths containing spaces or
# shell metacharacters -- the original interpolated them unquoted, so such
# paths broke the command (and argv[1] could inject shell syntax).
zip_command = 'zip -r {0} {1}'.format(shlex.quote(target), shlex.quote(source))

# Run the backup
print('Zip command is : ')
print(zip_command)
print('Running :')
if os.system(zip_command) == 0:
    print('Successful backup to', target)
else:
    print('Backup Failed')
import numpy as np
import pandas as pd
import os
from glob import glob
from scipy.stats import kurtosis
from multiprocessing import cpu_count, Pool
from tsfresh.feature_extraction import extract_features
import sys
argvs = sys.argv
import utils

# Feature-set identifier; prefixes this script's output files.
PREF = 'f007'

# Remove any previous train/test outputs for this feature set before rebuilding.
os.system(f'rm ../data/t*_{PREF}*')
os.system(f'rm ../feature/t*_{PREF}*')


def quantile(n):
    """Return a percentile aggregator named 'q<n>'.

    The rename gives each generated feature column a distinct, readable name
    when the callable is used as an aggregation function.
    """
    def quantile_(x):
        return np.percentile(x, n)
    quantile_.__name__ = 'q%s' % n
    return quantile_


def kurt(x):
    """Named wrapper around scipy's kurtosis, for use as an aggregator."""
    return kurtosis(x)