def _apply_process(run, site, fileno, input_prefix, processed_output_path):
    """Multiprocessing-friendly call of process (all ADs within a hall together)."""
    logging.debug('[process] Running on Run %d, file %d', run, fileno)
    muons_location = os.path.join(input_prefix, f'muons_{run}_{fileno:>04}.root')
    num_events = -1
    debug = False
    ads = common.dets_for(site, run)
    for ad in ads:
        logging.debug(
            '[process] Running on Run %d, file %d, EH%d-AD%d', run, fileno, site, ad
        )
        # Build the destination path first: if it already exists there is
        # nothing left to do for this AD.
        output_location = os.path.join(
            processed_output_path,
            f'EH{site}',
            f'processed_ad{ad}',
            f'out_ad{ad}_{run}_{fileno:>04}.root',
        )
        if os.path.isfile(output_location):
            logging.debug('[process] Found existing file. Skipping. %s', output_location)
            continue
        events_location = os.path.join(
            input_prefix, f'events_ad{ad}_{run}_{fileno:>04}.root'
        )
        process.main(
            num_events,
            events_location,
            muons_location,
            output_location,
            (run, fileno),
            ad,
            debug,
        )
    logging.debug('[process] Finished Run %d, file %d', run, fileno)
def main():
    """Train the selected model with label-aware K-fold CV and write a submission.

    NOTE(review): Python 2 script. Relies on module-level names defined
    elsewhere in the file: NeuralNet, max_rows, num_folds, process,
    kfold_train, create_submission — confirm they exist at call time.
    """
    # Model selection from the command line; only 'nn' is supported.
    if len(sys.argv) > 1 and sys.argv[1] == 'nn':
        model = NeuralNet()
    else:
        print "ya didn't enter an nn param ya jabronie, what kinda shit do you think this is?????"
        sys.exit()
    # Build the cleaned feature CSVs on first run only.
    if not os.path.isfile('data/clean_features.csv'):
        print "Processing data... "
        process.main()
    print "Loading processed data..."
    # nrows=max_rows caps the rows read — presumably for faster iteration; confirm.
    train_x = pd.read_csv('data/clean_features.csv', nrows=max_rows)
    train_y = pd.read_csv('data/clean_labels.csv', nrows=max_rows)
    test_x = pd.read_csv('data/clean_test.csv', nrows=max_rows)
    act_test = pd.read_csv('data/act_test.csv', nrows=max_rows)
    test_ids = act_test['activity_id']
    print "Creating folds and cleaning data..."
    # Fold by people_id so the same person never appears in both train and
    # validation splits; the id column is then dropped from the features.
    folds = LabelKFold(train_x['people_id'], n_folds=num_folds)
    train_x = train_x.drop('people_id', axis=1)
    test_x = test_x.drop('people_id', axis=1)
    kfold_train(model, train_x, train_y, folds)
    create_submission(model, test_x, test_ids)
def main():
    """Disable touch auto-repeat, then run process.main() forever at a 5 s cadence."""
    touch.enable_repeat(False)  # one-time setup before entering the loop
    while True:
        process.main()
        # Throttle the loop so it does not spin at 100% CPU.
        time.sleep(5)
def main():
    """Ingest, process, sync.

    Parses the --headless and --clock flags, ensures the data directory
    exists, then runs the ingest, process, and sync stages in order.
    """
    # exist_ok=True avoids the check-then-create race of the original
    # os.path.exists() + os.makedirs() pair.
    os.makedirs("data", exist_ok=True)
    parser = argparse.ArgumentParser(description='Ingest virus sequences')
    parser.add_argument('--headless', action='store_true',
                        help='Run firefox in headless state (requires xvfb and x11vnc)',
                        required=False)
    parser.add_argument('--clock', action='store_true',
                        help='Run ntpdate to fix date', required=False)
    args = vars(parser.parse_args())
    headless = args['headless']
    clock = args['clock']
    # The flags are wrapped in lists — presumably ingest/sync expose an
    # argv-style main(); confirm against their definitions.
    ingest.main([headless])
    process.main()
    sync.main([clock])
def submit():
    """Handle the submit form: either a sequence ID or an uploaded sequence file."""
    if request.method == 'POST':
        seqId = request.form.get('seqid')
        seq = request.files.get('seq')
        # User entered a sequence ID
        if seqId:
            process.write_gbk(seqId)
            strand, seq, position = process.main(seqId)
            # img_filename = os.path.join(app.config['UPLOAD_FOLDER'], 'visualize.png')
            return render_template('result.html', strand=strand, seq=seq, position=position)
        # User uploaded a file
        elif seq:
            filename = secure_filename(seq.filename)
            print(filename)
            seq.save(os.path.join(UPLOAD_PATH, filename))
            print('文件上传成功')
            # Visualize FASTA files
            # NOTE(review): this branch only prints — no visualization call;
            # looks unfinished, confirm intended behavior.
            if filename.lower().endswith(
                    '.fasta') or filename.lower().endswith('.fa'):
                print("fasta文件可视化")
            # Visualize GenBank files
            if filename.lower().endswith('.gbk') or filename.lower().endswith(
                    '.genbank'):
                print('gbk文件可视化')
                # NOTE(review): placement of visualize3 inside the gbk branch
                # reconstructed from collapsed source — confirm.
                visualize3(filename)
            return render_template('result1.html')  # to be revised
    return render_template('submit.html')
def main():
    """Query the NSE API for the configured devices and write a GeoJSON result file."""
    # create logger to save
    FORMAT = '%(asctime)-15s %(message)s'
    logging.basicConfig(format=FORMAT, level=logging.DEBUG)
    ## parameters
    # API to call
    url = "https://data.nse.sg"
    nse_directory = os.path.dirname(os.path.realpath(__file__))
    # device_file="%s/exp1_devices_notripswithdata_Sept29(2)_short.csv" % nse_directory
    device_file = "%s/nse_v4_deployment1.csv" % nse_directory
    current_date = "2015-10-02"
    # geojson ouput file
    geo_json_file = "%s/result.geojson" % nse_directory
    # load list of device IDs from file
    # NOTE(review): `devices` is never used afterwards — process.main() is
    # given the file path instead; this read appears to serve only as an
    # early validation that the file exists and parses. Confirm.
    logging.info("Load device IDs")
    try:
        with open(device_file, 'r') as csvfile:
            # One integer device ID per non-blank line; a malformed line will
            # raise ValueError, which is NOT caught here — confirm acceptable.
            devices = [int(line.strip()) for line in csvfile if line.strip()]
    except IOError as e:
        logging.error("Failed to load device IDs: %s" % e.strerror)
        sys.exit(10)
    # return results from process
    results, geojson = process.main(url, device_file, current_date=current_date, testing=True)
    # Assemble the FeatureCollection by hand from the pre-serialized features.
    with open(geo_json_file, 'w') as fout:
        fout.write('{ "type": "FeatureCollection",\n "features": [\n' + \
                   ', '.join(geojson) + \
                   '\n ]\n }')
def test_message(message):
    """Socket handler: run the payload through process.main and broadcast the result."""
    logger.info('testing')
    prediction = process.main(message['data'])
    emit('my response', {'data': prediction}, broadcast=True)
def main(): today = datetime.datetime.now().strftime('%Y-%m-%d') log("***********************" + str(today) + "******************************") log("writemc:" + str(today) + " start at " + str(time.asctime())) download.main() log("donwnload:" + str(today) + " end at " + str(time.asctime())) #print " end at "+str(time.asctime()) process.main() log("process:" + str(today) + " end at " + str(time.asctime())) print "process:" + str(today) + " end at " + str(time.asctime()) processcl.main() log("processcl:" + str(today) + " end at " + str(time.asctime())) print "processcl:" + str(today) + " end at " + str(time.asctime()) processdh.main() log("processdh:" + str(today) + " end at " + str(time.asctime())) print "processdh:" + str(today) + " end at " + str(time.asctime()) importmemdh.main() log("importmemdh:" + str(today) + " end at " + str(time.asctime())) print "importmemdh:" + str(today) + " end at " + str(time.asctime()) processrep.main() log("processrep:" + str(today) + " end at " + str(time.asctime())) print "processrep:" + str(today) + " end at " + str(time.asctime()) shangxiaxian_jisuan.main() log("shangxiaxian_jisuan" + str(today) + " end at " + str(time.asctime())) print "shangxiaxian_jisuan" + str(today) + " end at " + str(time.asctime()) #importmem.main("select braid from branch where braid = '02058'") #log("impormem:"+str(today)+" end at "+str(time.asctime())) log("writemc:" + str(today) + " end at " + str(time.asctime()))
def main(): today = datetime.datetime.now().strftime("%Y-%m-%d") log("***********************" + str(today) + "******************************") log("writemc:" + str(today) + " start at " + str(time.asctime())) download.main() log("donwnload:" + str(today) + " end at " + str(time.asctime())) # print " end at "+str(time.asctime()) process.main() log("process:" + str(today) + " end at " + str(time.asctime())) print "process:" + str(today) + " end at " + str(time.asctime()) processcl.main() log("processcl:" + str(today) + " end at " + str(time.asctime())) print "processcl:" + str(today) + " end at " + str(time.asctime()) processdh.main() log("processdh:" + str(today) + " end at " + str(time.asctime())) print "processdh:" + str(today) + " end at " + str(time.asctime()) importmemdh.main() log("importmemdh:" + str(today) + " end at " + str(time.asctime())) print "importmemdh:" + str(today) + " end at " + str(time.asctime()) processrep.main() log("processrep:" + str(today) + " end at " + str(time.asctime())) print "processrep:" + str(today) + " end at " + str(time.asctime()) shangxiaxian_jisuan.main() log("shangxiaxian_jisuan" + str(today) + " end at " + str(time.asctime())) print "shangxiaxian_jisuan" + str(today) + " end at " + str(time.asctime()) # importmem.main("select braid from branch where braid = '02058'") # log("impormem:"+str(today)+" end at "+str(time.asctime())) log("writemc:" + str(today) + " end at " + str(time.asctime()))
def get(self, page=None):
    """Route GET requests.

    'data'  -> run process.main() and serve the resulting file
    'list'  -> serve the lexically greatest (most recent) S3 key
    other non-empty page -> 404
    None or '' -> render the home page
    """
    if page == "data":
        try:
            data_file = process.main()
        except Exception as e:
            return self.error(log.exc(e))
        return self.file(data_file)
    if page == "list":
        try:
            keys = s3.list_contents()
            keys.sort()
            most_recent = keys[-1]  # ascending sort, so the last key is newest
        except Exception as e:
            return self.error(log.exc(e))
        return self.text(most_recent)
    elif page:
        # BUGFIX: was `elif len(page):`, which raised TypeError when page was
        # None — the default for a bare GET of the home page. Truthiness
        # preserves the old behavior for '' (falls through to home).
        return self.not_found()
    log.info("Home")
    return self.render("home.html", {})
def setup():
    """One-time preparation: fetch the raw data, then run the processing step."""
    download.main()
    process.main()
import pandas as pd
import json
import process
import ast
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import matplotlib.image as mpimg

# logging.txt holds the repr of a nested list: rows of per-pixel RGB triples.
with open('logging.txt') as f:
    store = f.read()

# SECURITY: ast.literal_eval parses literals only. The original used eval(),
# which would execute arbitrary code if logging.txt were ever tampered with.
a = ast.literal_eval(store)

# Rebuild the image pixel by pixel: a[y][x] is the RGB triple at (x, y).
width = len(a[0])
height = len(a)
new_image = Image.new('RGB', (width, height))
data = new_image.load()
for y in range(height):
    for x in range(width):
        data[x, y] = tuple(a[y][x])
new_image.save('foo.png', 'png')

# Round-trip the saved PNG through matplotlib for a visual check.
img = mpimg.imread('foo.png')
imgplot = plt.imshow(img)
plt.show()

# Feed the raw pixel matrix to the processing pipeline.
res = process.main(a)
# Append a session header to the incident history log, then enter the
# 15-minute update loop.
# NOTE(review): fileDir, starttime_int, starttime_str, starttime_str_local,
# MajorEvent_RecordFileName, Roadwork_RecordFileName, Dropbox_dir,
# total_count, and check() are defined earlier in this file (outside this
# chunk). Python 2 script (print statements, xrange).
Incident_RecordFileName = fileDir + 'Incident_update_history.txt'
with open(Incident_RecordFileName, 'a+') as f:
    f.write('>>>' + '\n')
    f.write('Program started at ' + str(starttime_int) + ' @UTC time:' + starttime_str + ' @local time:' + starttime_str_local + '\n')
# Mirror all three history files into the Dropbox folder.
shutil.copy(MajorEvent_RecordFileName, Dropbox_dir)
shutil.copy(Roadwork_RecordFileName, Dropbox_dir)
shutil.copy(Incident_RecordFileName, Dropbox_dir)
print "[INFO] History record prepared"
# Main polling loop: refresh the source files, reprocess, then count down.
while True:
    print
    print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>'
    total_count += 1
    fileList = check()
    print
    print "[INFO] Finished updating, This is the ", total_count, "th update"
    process.main(True, fileList)
    print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>'
    print
    # 900 one-second ticks = 15 minutes, with an in-place countdown display
    # ('\r' rewrites the same terminal line each second).
    for i in xrange(900, 0, -1):
        time.sleep(1)
        sys.stdout.write('\rCountdown for next update: %04s' % str(i))
        sys.stdout.flush()
workers.put((pid, wp)) while not workers.empty(): pid, wp = workers.get() scraper.save_prices(wp) try: os.waitpid(pid, 0) except OSError: pass sys.stdout.flush() sys.stderr.flush() return True if __name__ == '__main__': # Auto-change directory name = sys.argv[0] dir_path = os.path.dirname(name) if not dir_path == '': os.chdir(dir_path) if main() == True: # Do data processing import process process.main() sys.exit(0)
def process(self, w, data=None): print "process" nameFile = self.nameFileText.get_text() nameDataMapFile = self.dataFileText.get_text() resultFile = process.main(nameFile, nameDataMapFile) self.outFileText.set_text(resultFile)
'''
Executes process.py, chars.py, undet.py in order.
'''
import process
import chars
import undet

# Run the three pipeline stages sequentially; each stage presumably consumes
# the previous stage's output — confirm against the individual modules.
process.main()
chars.main()
undet.main()
def main():
    """Refresh the Spotify data, then run the processing step."""
    logging.basicConfig(level=logging.INFO)
    logging.info('Pulling data from Spotify')
    refresh()        # fetch the latest data from Spotify
    process.main()   # transform the freshly pulled data
def test_list_processer(self, mock_args):
    """main() in list mode should call WaypointListProcessor.get_trips."""
    with patch.object(WaypointListProcessor, 'get_trips', return_value=[]) as spy:
        main()
        self.assertEqual(spy.called, True)
def test_stream_processer(self, mock_args):
    """main() in stream mode should call WaypointStreamProcessor.process_waypoint."""
    with patch.object(WaypointStreamProcessor, 'process_waypoint', return_value=[]) as spy:
        main()
        self.assertEqual(spy.called, True)
def test_process_with_invalid_source(self, mock_args):
    # main() is expected to bail out via sys.exit on an invalid source.
    with self.assertRaises(SystemExit) as sys_ex:
        main()
    # NOTE(review): placed outside the `with` block — the only position where
    # it actually executes (inside, the SystemExit would skip it). Also note
    # it asserts exit code 0; confirm invalid input is really meant to exit
    # "successfully" rather than with a nonzero code.
    self.assertEqual(sys_ex.exception.code, 0)
def get(self):
    """GET handler: trigger the processing pipeline."""
    process.main()
def testAOP(self):
    """Run process.main with 'aop.data' appended to the command line.

    The original appended to sys.argv permanently, leaking the extra
    argument into every subsequently run test; try/finally guarantees the
    global argv is restored even if process.main raises.
    """
    sys.argv.append("aop.data")
    try:
        process.main()
    finally:
        sys.argv.pop()  # undo the append so other tests see a clean argv