def main():
    """Generate one report .docx per recognised code: copy the template,
    then fill in the code text, a random phrase, a picture and the code id.

    Fix: paths used to mix '/' and hard-coded '\\' separators, which only
    worked on Windows; all paths now go through os.path.join.
    """
    init.init()
    cd = code_pic.main()  # e.g. {'5812': 'woca'}: maps code id -> decoded text
    with open(os.path.join(os.getcwd(), 'settings', 'codesave.json'), 'w') as f:
        json.dump(cd, f)
    with open(os.path.join(os.getcwd(), 'settings', 'zjie.json'), 'r',
              encoding='utf-8') as f2:
        content = f2.read()
        # Strip a UTF-8 BOM if the file was saved with one.
        if content.startswith(u'\ufeff'):
            content = content.encode('utf8')[3:].decode('utf8')
        py = json.loads(content)
    bg_dir = os.path.join(os.getcwd(), 'bg')
    pic_dir = os.path.join(os.getcwd(), 'pic')
    # First pass: create one copy of the template per code.
    for i in cd:
        shutil.copyfile('mb.docx', os.path.join(bg_dir, str(i) + '.docx'))
    # Second pass: fill each copy in and save it in place.
    for i in cd:
        doc = Document(os.path.join(bg_dir, str(i) + '.docx'))
        table = doc.tables[0]
        table.cell(1, 4).text = cd.get(i)
        table.cell(1, 6).text = random.choice(list(py.values()))
        run = table.cell(1, 0).paragraphs[0].add_run()
        run.add_picture(os.path.join(pic_dir, str(i) + '.png'),
                        width=Inches(4.0), height=Inches(2.5))
        doc.paragraphs[10].add_run("%s" % i).font.size = Pt(20)
        doc.paragraphs[11].add_run("%s" % i).font.size = Pt(20)
        doc.save(os.path.join(bg_dir, str(i) + '.docx'))
def parse_arg(arg):
    """Dispatch a single command name to its handler; unknown names are ignored."""
    handlers = {
        "init": init,
        "hash-object": hash_object,
        "cat-file": cat_file,
    }
    handler = handlers.get(arg)
    if handler is not None:
        handler()
def create_share_folder(share_name, comment, options=None, share_dir=None):
    """Create an on-disk folder for a new share and register it with oscar.

    Returns a (success, error_code) tuple; error_code is None on success.
    """
    # Refuse names that are already registered as shares.
    if oscar.get_share(share_name):
        return (False, "SHAREALREADYEXISTS")
    if share_dir:
        base_dir = os.path.dirname(share_dir)
    else:
        base_dir = _share_folder_base
        # Python 2: encode unicode names so the filesystem path is bytes.
        share_dir = os.path.join(
            base_dir,
            share_name.encode("utf-8") if isinstance(share_name, unicode) else share_name)
    if os.path.exists(share_dir):
        return (False, "DIRALREADYEXISTS")
    # NOTE(review): NOACCESS is reported only when base_dir is BOTH not a
    # directory AND not writable; an `or` may have been intended — confirm.
    if not os.path.isdir(base_dir) and not os.access(base_dir, os.W_OK):
        return (False, "NOACCESS")
    os.mkdir(share_dir)
    init.init(share_dir)
    if options:
        config.put_all(share_dir, options)
    rst = oscar.register_share(
        oscar.Share(share_name, share_dir, comment=comment, guest_ok=True,
                    writable=True))
    return (rst, None)
def main():
    """Command-line entry point: dispatch the first argument to the matching
    VCS operation; commands taking a value read it from the second argument."""
    args = sys.argv[1:]
    if len(args) < 1:
        logging.warning('Usage: python <filename> <function> <parameters..>')
    elif args[0] == 'init':
        init()
    elif args[0] == 'add':
        data_to_copy_path = args[1]
        add(data_to_copy_path)
    elif args[0] == 'commit':
        message = args[1]
        commit(message)
    elif args[0] == 'status':
        status()
    elif args[0] == 'checkout':
        commit_id = args[1]
        checkout(commit_id)
    elif args[0] == 'graph':
        graph()
    elif args[0] == 'branch':
        branch_name = args[1]
        branch(branch_name)
    elif args[0] == 'merge':
        branch_name = args[1]
        # Bug fix: the branch name was read from args[1] but never passed
        # to merge().
        merge(branch_name)
def _read_public_key(public_key_abs_path):
    """Read and parse a PKCS#1 RSA public key from *public_key_abs_path*."""
    with open(public_key_abs_path, 'r') as f:
        return rsa.PublicKey.load_pkcs1(f.read())


def get_public_key(args):
    """Get public key.

    Returns a (public_key, public_key_abs_path) tuple.  If the key file does
    not exist yet, offers to initialise the utility first; exits with status 1
    if the user declines.  (The duplicated load-and-parse code was factored
    into _read_public_key.)
    """
    keys_location_rel_path = settings.cfg.get("keys", "keys_location_rel_path")
    keys_location_abs_path = expanduser(keys_location_rel_path)
    public_key_file_name = settings.cfg.get("keys", "public_key_file_name")
    public_key_abs_path = join(keys_location_abs_path, public_key_file_name)
    if exists(public_key_abs_path) and isfile(public_key_abs_path):
        return _read_public_key(public_key_abs_path), public_key_abs_path
    print(settings.messages["_ask_init_fbi_utility"])
    try:
        answer = raw_input()  # Python 2
    except NameError:
        answer = input()      # Python 3
    if answer.strip().lower() in ('y', 'yes', 'yep'):
        # Init the fbi utility; the key file must exist afterwards.
        init(args)
        return _read_public_key(public_key_abs_path), public_key_abs_path
    print(settings.messages["_error_EncodeFbiUtilityNotInited"])
    exit(1)
def main (argv):
    # Command-line dispatcher for the backup tool (Python 2 syntax).
    logger = initLogger();
    if (len(argv) == 0):
        print "Missing argument. Options: init, store, list, test, get, restore";
    elif (argv[0] == "init"):
        # Create/prepare the archive directory.
        init.init(archiveDir);
    elif (argv[0] == "store"):
        if (len(argv) < 2):
            print "Usage: mybackup store <directory>";
        else:
            store.store(archiveDir, argv[1], logger);
    elif (argv[0] == "list"):
        # A second argument restricts the listing to matching backups.
        if (len(argv) < 2):
            listBackups.list(archiveDir)
        else:
            listBackups.list(archiveDir, argv[1])
    elif (argv[0] == "get"):
        if (len(argv) < 2):
            print "Usage: mybackup get <pattern>";
        else:
            restore.getFile(archiveDir, argv[1]);
    elif (argv[0] == "restore"):
        # Restore everything, optionally filtered by argv[1].
        if (len(argv) < 2):
            restore.restoreAll(archiveDir)
        else:
            restore.restoreAll(archiveDir, argv[1])
    elif (argv[0] == "test"):
        # Verify archive integrity.
        test.test(archiveDir, logger)
    else:
        print "Unknown option: "+argv[0];
def main_junit():
    """Discover all tests next to this file and run them with the
    XML-producing runner (JUnit-style reports under test-reports/)."""
    from init import init
    init()
    import xmlrunner
    test_dir = os.path.dirname(__file__)
    suite = unittest.TestLoader().discover(test_dir)
    runner = xmlrunner.XMLTestRunner(output='test-reports')
    runner.run(suite)
def main() -> None:
    """Entry point: run `init` when requested, otherwise the normal commands.

    Does nothing when no subcommand is given.
    """
    argv = sys.argv
    if len(argv) <= 1:
        return
    if argv[1] == 'init':
        init()
    else:
        commands()
def __init__(self):
    # Build the Qt widget, run project initialisation, then show the window.
    super().__init__()
    init.init(self)
    # Perform identification/login before the window becomes visible.
    self.identificate.execute(self)
    # Route SIGINT (Ctrl+C) to the application's own handler.
    signal.signal(signal.SIGINT, self.sigIntHandler)
    self.show()
def main():
    """Game entry loop: initialise, then repeatedly ask the player whether to
    explore (1) or return to the workroom (2)."""
    print("开始游戏。\n-----------------------")
    init.init()
    while True:
        # Robustness fix: non-numeric input used to crash with ValueError;
        # now the prompt is simply repeated.
        try:
            choice = int(input("你想探索还是回到工作室?(1, 2)"))
        except ValueError:
            continue
        if choice == 1:
            explore.explore()
        elif choice == 2:
            workroom.work()
def __init__(self):
    """Interactively build a new army: ask for a faction, initialise the
    rules data, create an empty army list and add the first detachment."""
    print("Army Builder Version 1.0")
    print("Please enter the faction of the army you are creating:")
    chosen_faction = input(">> ")
    init.init(chosen_faction)
    # Start from an empty army list for the chosen faction.
    self.army = ArmyList(chosen_faction)
    self.add_detachment()
    print(self.army)
def Setup(self):
    # Wire up the pyglet window callbacks, initialise the simulation state,
    # then start the pyglet main loop (blocks until the window closes).

    @self._window.event
    def on_draw():
        # Flat 2D orthographic rendering of the line data on a white background.
        glClearColor(1, 1, 1, 1)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        glDisable(GL_DEPTH_TEST)
        glDisable(GL_TEXTURE_2D)
        glDisable(GL_LIGHTING)
        glViewport(0, 0, self._window.width, self._window.height)
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        # Slightly padded unit square so the plot edges stay visible.
        glOrtho(-1e-2, 1.0 + 1e-2, -1e-2, 1.0 + 1e-2, -2, 2)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        glEnableClientState(GL_VERTEX_ARRAY)
        self._lines.Draw()

    @self._window.event
    def on_mouse_drag(x, y, dx, dy, buttons, modifiers):
        # Track the drag position in normalised [0,1] window coordinates;
        # positions on/outside the window border are ignored.
        w, h = self._window.get_size()
        x = x / float(w)
        y = y / float(h)
        mx, my = self._mx, self._my
        if x >= 1.0:
            return
        if x <= 0.0:
            return
        if y >= 1.0:
            return
        if y <= 0.0:
            return
        self._mx = x
        self._my = y
        #self._step = buttons == 1

    @self._window.event
    def on_mouse_release(x, y, button, modifiers):
        pass
        #self._step = False

    # Initialise sim data
    init.init()
    #sources.MakeSupernova(1e51,2e33)
    # Wind / radiation source parameters (alternate values kept for reference).
    self.windlum = 3e34  # 2e38
    self.windml = 1e19  # 2e22
    #sources.MakeWind(self.windlum,self.windml)
    self.Sphotons = 1e48
    #sources.MakeRadiation(self.Sphotons)
    # Set up rendering
    s = 0.02
    glEnable(GL_BLEND)
    glPointSize(2)
    # Set up pyglet to run at ~60 fps, stepping the simulation each frame.
    pyglet.clock.set_fps_limit(60)
    pyglet.clock.schedule_interval(self.Step, 1.0 / 60.0)
    pyglet.app.run()
def main():
    """Initialise the application, then serve transactions until Ctrl+C."""
    init.init()
    listener = ServerListener(TransactionDispatcher)
    try:
        listener.start(NET_LOCAL['HOST'], NET_LOCAL['PORT'], NET_LOCAL['QUEUE'])
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl+C.
        listener.stop()
        exit()
def startGame():  # game start function
    # Connect-Four console game: ask who moves first, then alternate turns
    # until the game ends.  All user-facing strings are Korean.
    init(con)
    print()
    print("******************************************")
    print("****커넥트 포 게임에 오신걸 환영합니다****")
    print("******************************************")
    print("******************************************")
    print("*****made by 김민섭 임재민 이정섭******")
    print("******************************************")
    print()
    game_input = ''  # reset the menu input
    order = 0  # decides whose turn it is (1 = player, -1 = AI)
    board = [[0] * 7 for i in range(6)]  # game board, 6 rows x 7 columns
    status = []  # running game status
    # Ask until the player chooses to move first (1) or second (2).
    while game_input != '1' and game_input != '2':
        game_input = input("선공은 1, 후공은 2를 선택하세요 : ")
        if game_input == '1':
            # Show the empty board before the player's first move.
            print()
            print("* 1 2 3 4 5 6 7 *")
            print("+ - - - - - - - + ")
            print('F ○○○○○○○ #')
            print('E ○○○○○○○ #')
            print('D ○○○○○○○ #')
            print('C ○○○○○○○ #')
            print('B ○○○○○○○ #')
            print('A ○○○○○○○ #')
            print("+ - - - - - - - + ")
            print()
            print("플레이어 먼저 시작합니다")
            order = 1  # player moves first
            break
        elif game_input == '2':
            print("인공지능이 먼저 시작합니다")
            order = -1  # AI moves first
            break
        else:
            print("잘못 입력하셨습니다. 다시 선택하세요.")
    # Main loop: play one move per iteration until the game is over.
    while True:
        board, order, prev_coord, status = GameStatus(board, order, status)
        game_over, winner = gameOver(board, prev_coord, order)
        if game_over:
            print("******************************************")
            print("************* 게임 오버 *************")
            if winner == 1:
                print("************* 승리 *************")
            elif winner == -1:
                print("************* 패배 *************")
            else:
                print("************* 무승부 *************")
            print("******************************************")
            print()
            break
def main():
    # CLI dispatcher for the backup tool: init / restore / get / test / list /
    # store.  Everything except `init` requires the archive to be ready.
    cmds = sys.argv
    if len(cmds) > 1:
        ui = cmds[1].lower()
        if ui == 'init':
            init()
        if backup.readyToBackup():
            # Restore all archived files, to a given directory or the default.
            if ui == 'restore' and len(backup.getIndex()) > 0:
                if len(cmds) > 2:
                    restore(os.path.join(backup.restDir, cmds[2]))
                else:
                    print("Restoring to default location %s " % backup.restDir)
                    restore(backup.restDir)
                print("All files restored.")
            elif ui == 'get':
                # Retrieve files matching a pattern.
                try:
                    get(cmds[2])
                except IndexError:
                    print("Please give a pattern eg. 'file' ")
            elif ui == 'test':
                # Verify the archive and report any bad paths.
                fileEntries = validateArchiveIndex()
                invalidFiles = validateFiles()
                #print(fileEntries)
                #print("Correct Entries: %s " % fileEntries['Correct Entries'])
                #print("Incorrect Entries: %s " % archiveList['Incorrect Entries'])
                if fileEntries['Erroneous Paths'] != []:
                    print("Erroneous Paths: %d" % len(fileEntries['Erroneous Paths']))
                    print('\n'.join(' {}: {}'.format(*x)
                          for x in enumerate(fileEntries['Erroneous Paths'],1)))
            elif ui == 'list':
                # List archived files, optionally filtered by a pattern.
                if len(cmds) > 2:
                    print('\n'.join('{}: {}'.format(*x)
                          for x in enumerate(listFiles(cmds[2]),1)))
                else:
                    listFiles()
                    print('\n'.join('{}: {}'.format(*x)
                          for x in enumerate(listFiles(),1)))
            elif ui == 'store' and len(cmds) > 2:
                if os.path.isdir(cmds[2]):
                    store(cmds[2])
                else:
                    print("invalid directory")
        else:
            # NOTE(review): "run 'cat' init" looks like a typo for "run 'init'".
            print("not ready... run 'cat' init")
    else:
        print("run 'init' to initialise the program")
def experiment(datasets, log=False, n_jobs=1, nosave=False, error_type=None, arg_seeds=None): """Run expriments on all datasets for all splits""" # set logger for experiments if log: logging.captureWarnings(False) logging.basicConfig(filename='logging_{}.log'.format( datetime.datetime.now()), level=logging.DEBUG) # set seeds for experiments np.random.seed(config.root_seed) split_seeds = np.random.randint(10000, size=config.n_resplit) experiment_seed = np.random.randint(10000) # run experiments for dataset in datasets: if log: logging.debug("{}: Experiment on {}".format( datetime.datetime.now(), dataset['data_dir'])) for i, seed in enumerate(split_seeds): if arg_seeds is not None: if i not in arg_seeds: continue if utils.check_completed(dataset, seed, experiment_seed): print( "Ignore {}-th experiment on {} that has been completed before." .format(i, dataset['data_dir'])) continue tic = time.time() init(dataset, seed=seed, max_size=config.max_size) clean(dataset, error_type) one_split_experiment(dataset, n_retrain=config.n_retrain, n_jobs=n_jobs, nosave=nosave, seed=experiment_seed, error_type=error_type) toc = time.time() t = (toc - tic) / 60 remaining = t * (len(split_seeds) - i - 1) if log: logging.debug( "{}: {}-th experiment takes {} min. Estimated remaining time: {} min" .format(datetime.datetime.now(), i, t, remaining))
def test_Option_class():
    """
    Checks list facilities in the Options class and that the object
    can be inialised successfully
    """
    init.init("Necron")
    items = [
        init.WargearItem("Tesla carbine"),
        init.WargearItem("Gauss cannon"),
        init.WargearItem("Gauss blaster")
    ]
    option = option_parser.Option(items)
    # Indexing must delegate to the wrapped item list.
    assert option[1] == items[1]
    # Iteration must yield the wrapped items in order.
    for index, wargear in enumerate(option):
        assert wargear == items[index]
def set(path, key, value, id=None):
    """Insert or update one metadata (key, value) pair for a file.

    If *id* is not given, the file's rowid is looked up by *path*.
    Returns True on success, False on any sqlite error.
    (Fixes: removed unused locals; the connection used to leak on error.)
    """
    conn = None
    try:
        conn = lwdb.init()
        curr = conn.cursor()
        if id is None:
            curr.execute('SELECT rowid, path FROM file WHERE path = ?', (path,))
            file_data = curr.fetchone()
            f = File(file_data[0], file_data[1])
        else:
            # Object is incomplete: only id/path are known, which suffices here.
            f = File(id, path)
        try:
            # Try the insert first; fall back to an update when the
            # (file_id, key) row already exists.
            curr.execute('INSERT INTO file_metadata (file_id,key,value) VALUES (?,?,?)',
                         (f.id, key, value))
        except sqlite3.IntegrityError:
            curr.execute('UPDATE file_metadata SET value = ? WHERE file_id = ? AND key = ?',
                         (value, f.id, key))
        f.get_all_props(curr)
        conn.commit()
        conn.close()
        return True
    except sqlite3.Error as e:
        print('file::set()', path, key, value, e)
        if conn is not None:
            conn.close()
        return False
def project_loading():
    """Reload project state; 204 on success, 500 with the error text otherwise."""
    try:
        models, data, index_to_id, id_to_index = init(clear=True)
    except Exception as e:
        return Response(str(e), status=500, mimetype='application/json')
    return Response(status=204, mimetype='application/json')
def run():
    """Start the JSON-generation worker and the HTTP server as separate processes."""
    csv_file, limit = init.init()
    # Producer: builds the JSON output file from the CSV input.
    worker = multiprocessing.Process(target=generate_json_and_write_file,
                                     args=(csv_file, limit))
    worker.start()
    # Server process runs independently of the producer.
    http_server = multiprocessing.Process(target=server.run)
    http_server.start()
def list(filter=None, id=None, path=None):
    """Fetch project files, optionally filtered by LIKE pattern, exact path,
    or rowid.  Raises ProjectFileNotFoundError when nothing matches.

    NOTE: shadows the builtin `list`; the name is kept for API compatibility.
    Fixes: `len(...) is 0` relied on CPython small-int identity; the
    connection used to leak when the not-found error was raised.
    """
    conn = lwdb.init()
    curr = conn.cursor()
    try:
        if filter is None and id is None and path is None:
            curr.execute('SELECT rowid, path, mode FROM project_file')
        elif filter is not None and id is None and path is None:
            curr.execute('SELECT rowid, path, mode FROM project_file WHERE path LIKE ?', (filter,))
        elif path is not None and id is None:
            curr.execute('SELECT rowid, path, mode FROM project_file WHERE path = ?', (path,))
        elif id is not None:
            curr.execute('SELECT rowid, path, mode FROM project_file WHERE rowid = ?', (id,))
        file_data = curr.fetchall()
        if not file_data:
            raise lwfexcept.ProjectFileNotFoundError(None)
        return [ProjectFile(d[0], d[1], d[2]) for d in file_data]
    finally:
        # Always release the connection, on success and on every error path.
        conn.close()
def main():
    """Main function: pygame event loop driving the animation and the
    in-game console until the window is closed."""
    pygame.init()
    screen = pygame.display.set_mode((600, 400))
    clock = pygame.time.Clock()
    animation, event_queue, console = init.init()
    # Let console commands change the frame rate at runtime.
    event_queue.subscribe("SET_FPS", set_fps)
    terminated = False
    while not terminated:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                terminated = True
            elif event.type == pygame.KEYUP and event.key == pygame.K_RETURN:
                # Return toggles the console; closing it executes the command.
                console.toggle_activate()
                if not console.active:
                    console.execute()
            elif event.type == pygame.TEXTINPUT and console.active:
                console.add_text(event.text)
            elif (event.type == pygame.KEYUP
                    and event.key == pygame.K_BACKSPACE
                    and console.active):
                console.remove_last_char()
            elif event.type == pygame.KEYUP and event.key == pygame.K_ESCAPE:
                # Escape clears a console error state.
                console.errored = False
            elif event.type == pygame.KEYUP and event.key == pygame.K_F1:
                # F1 restarts the animation; F2 stops it.
                animation.active = True
                animation.counter = 0
            elif event.type == pygame.KEYUP and event.key == pygame.K_F2:
                animation.active = False
        event_queue.update()
        render(screen, entities=[animation, console])
        clock.tick(FPS)
    pygame.display.quit()
def main(args):
    # Run one end-to-end test session (Python 2): initialise the environment,
    # then run the tester against the produced result folder.
    # Returns (finished, result_folder).
    args.config = None
    finished = False
    result_folder = ''
    try:
        # init log
        logger.info('Initialisation starts')
        logger.info('apk:\t\t\t%s', args.apk_file)
        logger.info('appium port:\t\t%s', args.appium_port)
        logger.info('appium back port:\t%s', args.appium_back_port)
        logger.info('proxy port:\t\t%s', args.proxy_port)
        logger.info('system port:\t\t%s', args.appium_system_port)
        finished, result_folder = init(args)
        if finished:
            logger.info('Main program starts')
            logger.info('apk:\t\t\t%s', args.apk_file)
            logger.info('appium port:\t\t%s', args.appium_port)
            logger.info('appium back port:\t%s', args.appium_back_port)
            logger.info('proxy port:\t\t%s', args.proxy_port)
            logger.info('system port:\t\t%s', args.appium_system_port)
            finished = tester(result_folder, args)
            # Facebook IdP runs need a long cool-down before the next session.
            if args.idp_name == 'fb':
                time.sleep(30 * 60)
        else:
            print "Initialisation fail"
    except Exception:
        logger.exception("exception: ")
    return finished, result_folder
def SvcDoRun(self):
    """Windows-service entry point: start the dialer and UI threads, then
    idle until the RUNNING flag is cleared."""
    self.logger.info("Service start ")
    init.init()
    self.netkeeper = Netkeeper()
    self.mainwindow = MainWindow()
    # SECURITY(review): credentials are hard-coded here; move them to config.
    thread1 = threading.Thread(target=self.netkeeper.autoDail,
                               args=('17702737125@hkd', '09152287',
                                     self.mainwindow.autoupdate))
    thread1.setDaemon(True)
    thread1.start()
    # Bug fix: the target used to be `self.mainwindow.mainloop()` (with
    # parentheses), which ran the main loop synchronously on this thread and
    # handed its return value to Thread.  Pass the callable itself instead.
    thread2 = threading.Thread(target=self.mainwindow.mainloop)
    thread2.setDaemon(True)
    thread2.start()
    self.tray = TrayIcon(self.netkeeper)
    while init.settings.RUNNING:
        time.sleep(1)
def list(file=None, complete=None, limit=None, curr=None):
    """Return queued events as (rowid, decoded_json) tuples, optionally
    filtered by file id or completion flag and optionally limited.

    When *curr* is supplied the caller owns the connection; otherwise one is
    opened and closed here.  (Shadows the builtin `list`; name kept for API.)
    """
    rows = []
    own_connection = curr is None
    if own_connection:
        conn = lwdb.init()
        curr = conn.cursor()
    # Issue the narrowest query matching the given filters.
    if file is not None:
        if limit is not None:
            curr.execute('SELECT rowid,json FROM queue WHERE file_id = ? LIMIT ?', (file, limit,))
        else:
            curr.execute('SELECT rowid,json FROM queue WHERE file_id = ?', (file,))
    elif complete is not None:
        if limit is not None:
            curr.execute('SELECT rowid,json FROM queue WHERE complete = ? LIMIT ?', (complete, limit,))
        else:
            curr.execute('SELECT rowid,json FROM queue WHERE complete = ?', (complete,))
    elif limit is not None:
        curr.execute('SELECT rowid,json FROM queue LIMIT ?', (limit,))
    else:
        curr.execute('SELECT rowid,json FROM queue')
    for rowid, payload in curr.fetchall():
        rows.append((rowid, json.loads(payload)))
    if own_connection:
        conn.close()
    return rows
async def post(request):
    """Handle one crawl payload: transform the text, fan the results out to
    the Link Analysis and Indexing services, and report success/failure."""
    unique_task_id = uuid.uuid1()
    try:
        #Pull init data from file
        initilization_data = init()
        logging.warning("{0} - received a request".format(unique_task_id))
        #Receive data from Crawling
        crawling_data = await request.json()
        #Run our functions
        output_json = Text_Transformation_controller(crawling_data, initilization_data['max_n_gram_size'])
        #Send metadata to Link Analysis
        r = requests.post(initilization_data['Link_Analysis_address'], json.dumps(output_json['metadata']))
        #Send data to Indexing
        r = requests.post(initilization_data['Indexing_address'], json.dumps(output_json))
        #Tell Crawling that this was a success
        response_obj = {"status": 200}
        logging.warning("{0} - Successfully finished processing request.".format(unique_task_id))
        return web.Response(text=json.dumps(response_obj), status=200)
    except Exception as e:
        # Bug fix: the aiohttp request object is not JSON-serialisable, so
        # the old `json.dumps(request, indent=4)` raised inside this handler
        # and masked the real error.  Also: logging.warn is deprecated.
        logging.error("{0} - An Exception occurred.\nREQUEST:\n{1}".format(
            unique_task_id, repr(request)), exc_info=True)
        response_obj = {"status": 500, "message": "Incorrect JSON Format: "}
        logging.warning("{0} - Finished processing request in error.".format(unique_task_id))
        return web.Response(text=json.dumps(response_obj), status=500)
def main():
    """CLI for the Band Protocol layer: node / init / clear / info."""
    parser = argparse.ArgumentParser(description='Band Protocol Layer')
    parser.add_argument('instruction', help='Instruction sets')
    parser.add_argument('--debug', dest='debug', action='store_true')
    parser.add_argument('--key', help='Key to find value')
    args = parser.parse_args()
    # Table-driven dispatch; unknown instructions fall through to the raise.
    dispatch = {
        'node': lambda: node(args.debug),
        'init': lambda: init(),
        'clear': lambda: clear(),
        'info': lambda: info(args.key),
    }
    action = dispatch.get(args.instruction)
    if action is None:
        raise Exception("This instruction is not supported")
    action()
def run():
    # Drive repeated particle-tracking simulations over one month, writing
    # track files under tracks/ (figure generation is currently disabled).
    # Make sure necessary directories exist
    if not os.path.exists('tracks'):
        os.makedirs('tracks')
    if not os.path.exists('figures'):
        os.makedirs('figures')
    # THREDDS model output used both for the grid and the tracking run.
    loc = 'http://barataria.tamu.edu:8080/thredds/dodsC/NcML/txla_nesting6.nc'
    grid = tracpy.inout.readgrid(loc)
    overallstartdate = datetime(2007, 5, 1, 12, 1)
    overallstopdate = datetime(2007, 6, 1, 12, 1)
    # overallstartdate = datetime(2008, 8, 1, 12, 1)
    # overallstopdate = datetime(2008, 9, 1, 12, 1)
    date = overallstartdate
    # Start from the beginning and add days on for loop
    # keep running until we hit the next month
    while date < overallstopdate:
        # Simulation name, e.g. '2007-05-01T12'.
        name = date.isoformat()[0:13]
        # If the particle trajectories have not been run, run them
        if not os.path.exists('tracks/' + name + '.nc') and \
           not os.path.exists('tracks/' + name + 'gc.nc'):
            # Read in simulation initialization
            nstep, N, ndays, ff, tseas, ah, av, lon0, lat0, z0, zpar, do3d, doturb, \
                grid, dostream, T0, U, V = init.init(date, loc, grid=grid)
            # pdb.set_trace()
            # Run tracpy
            # Save directly to grid coordinates
            lonp, latp, zp, t, grid, T0, U, V \
                = tracpy.run.run(loc, nstep, ndays, ff, date, tseas, ah, av, \
                                 lon0, lat0, z0, zpar, do3d, doturb, name, N=N, \
                                 grid=grid, dostream=dostream, T0=T0, U=U, V=V, savell=False)
        # # If basic figures don't exist, make them
        # if not os.path.exists('figures/' + name + '*.png'):
        #     # Read in and plot tracks
        #     d = netCDF.Dataset('tracks/' + name + '.nc')
        #     lonp = d.variables['lonp'][:]
        #     latp = d.variables['latp'][:]
        #     # tracpy.plotting.tracks(lonp, latp, name, grid=grid)
        #     # tracpy.plotting.hist(lonp, latp, name, grid=grid, which='hexbin')
        #     d.close()
        #
        # # Do transport plot
        # tracpy.plotting.transport(name='all_f/N=5_dx=8/25days', fmod=date.isoformat()[0:13],
        #     extraname=date.isoformat()[0:13],
        #     Title='Transport on Shelf, for a week from ' + date.isoformat()[0:13], dmax=1.0)
        # Increment by 24 hours for next loop, to move through more quickly
        # nh = nh + 24
        # NOTE(review): the comment above says 24 hours but the code advances
        # 4 hours per iteration — confirm which is intended.
        date = date + timedelta(hours=4)
def boot(args=None):  # Define boot function
    """Pick the runlevel and hand control to init.

    The runlevel comes from a `--init <level>` argument pair when present,
    otherwise defaults to 2.  Exits with the system code returned by init.
    """
    # Fix: avoid the shared-mutable-default-argument pitfall; behavior for
    # existing callers (args omitted or passed explicitly) is unchanged.
    args = [] if args is None else args
    if "--init" in args:  # If runlevel was declared with --init argument,
        level = args[args.index("--init") + 1]  # set it to given value,
    else:  # else set it to 2
        level = 2
    # Execute init with given runlevel, then exit with the returned systemcode.
    exit(init.init(level))
def main(unused_argv):
    # Load options/vocab, build a PolyLM model in inference mode, restore the
    # saved checkpoint and run the WiC (word-in-context) evaluation.
    options, vocab, multisense_vocab, tf_config = init.init()
    model = polylm.PolyLM(vocab, options,
                          multisense_vocab=multisense_vocab, training=False)
    with tf.Session(config=tf_config) as sess:
        # Third argument presumably makes a missing checkpoint fatal — confirm
        # against PolyLM.attempt_restore.
        model.attempt_restore(sess, options.model_dir, True)
        util.wic(model, vocab, sess, options)
def generate_operators():
    """Scan every repo's modified diff file for (minus, plus) line pairs,
    derive a mutation operator from each pair, and write the unique operators
    to mutation_operators.txt (rejection statistics accumulate in `init`).

    Fix: the output file and each per-repo diff file were never closed;
    both are now managed with context managers.
    """
    repo_names = getRepoNames()
    new_operators = []
    init.init()
    with open('mutation_operators.txt', 'w') as output:
        for repo in repo_names:
            print("collecting operators form repo : " + repo + " .....")
            with open('../resources/diffs_modified/' + repo + '_diff_modified', 'r') as diff_file:
                minus = None
                plus = None
                for line in diff_file:
                    # Lines arrive as alternating minus/plus pairs.
                    if minus is None:
                        minus = line.strip()
                    elif plus is None:
                        plus = line.strip()
                    if plus is not None:
                        # parse both minus and plus, skipping docstring lines
                        if not (("'''" in minus) or ("'''" in plus) or ('"""' in minus) or ('"""' in plus)):
                            token_minus = parse(minus[1:])
                            token_plus = parse(plus[1:])
                            new_operator = compareTok(token_minus, token_plus)
                            if new_operator is not None:
                                if new_operator in new_operators:
                                    init.duplicate += 1
                                else:
                                    output.write(str(new_operator) + "\n")
                                    new_operators.append(new_operator)
                        # Reset for the next pair.
                        minus = None
                        plus = None
    print("size : " + str(len(new_operators)))
    print("collecting done")
    #return new_operators
    # Rejection statistics gathered by the parsing helpers.
    print("less and equal Than 2 : " + str(init.lessThan2))
    print("more and equal than 10 tokens : " + str(init.moreThan10))
    print("same afterToken, beforeToken : " + str(init.same))
    print("more and equal than 4 identifiers : " + str(init.moreThan4Iden))
    print("3 identifers in a row : " + str(init.inArow))
    print("unmatched bracket : " + str(init.unmatchedBracket))
    print("duplicate : " + str(init.duplicate))
def main():
    # Load the trained click-model parameters and print a click probability
    # for every training feature row (Python 2 / PaddlePaddle v1 API).
    parameters_in_train , cost , inference , feeding = init.init()
    with open('model/params_pass_0.tar', 'r') as f:
        parameters = paddle.parameters.Parameters.from_tar(f)
    # The inference feed must not contain the label column.
    infer_dict = copy.copy(feeding)
    del infer_dict['isclick']
    itera = wd.train()()
    for feature in itera:
        prediction = paddle.infer(inference,
                                  parameters=parameters,
                                  input=[feature],
                                  feeding=infer_dict)
        # (score+1)/2 presumably maps a [-1, 1] output to [0, 1] — confirm
        # against the model's output activation.
        print 'predict = '+ str((prediction[0][0]+1)/2) + ' isclick = '+ str(feature[2])
def main():
    """Run the nanny game: show the tutorial once, then play rounds forever
    (each finished round immediately starts a new one)."""
    global tutorialShown
    # Bug fix: the game used to restart by calling main() recursively at the
    # end, which grows the call stack with every finished round; loop instead.
    while True:
        init.init()
        print("good morning")
        if not tutorialShown:
            draw.tutorial()
            tutorialShown = True
        draw.menu()
        # One round: process input and redraw until the round is won or lost.
        while 1:
            game.processUserInput()
            draw.moveNanny()
            if game.isGameWon():
                draw.win()
                break
            if game.isGameover():
                draw.gameover()
                break
            draw.render()
def main():
    # Interactive helper for publishing notebooks to a git repo and generating
    # notebook viewer links.  All user-facing strings are Chinese.
    prompt = int(
        input(
            '输入模式:\n 1. 将此文件夹新建成git仓库并上传\n 2. 更新git仓库内容\n 3. 生成自己git仓库notebook链接\n 4. 生成他人git仓库notebook链接\n: '
        ))
    if prompt == 1:
        # Mode 1: turn this folder into a git repo and upload it.
        init()
        print('初始化仓库并上传文件完成')
    elif prompt == 2:
        # Mode 2: push updated content.
        upload()
        print('上传文件已完成')
    elif prompt == 3:
        # Mode 3: generate a link for a notebook in one's own repo.
        notebook = search_notebook()
        #print(notebook)
        # If the folder holds exactly one notebook, link it directly.
        if len(notebook) == 1:
            link = generate_link(notebook[0])
            print('notebook链接为:')
            print(link)
        elif len(notebook) > 1:
            # Several notebooks found: let the user pick one by number.
            print('检测到多个notebook,请选择一个')
            for index in range(len(notebook)):
                option = index + 1
                print(str(option) + '. ' + str(notebook[index]))
            user_choice = int(input('请输入序号: '))
            link = generate_link1(notebook[user_choice - 1])
            print('notebook链接为:')
            print(link)
            # If pyperclip misbehaves, comment out the next two lines.
            print('已将链接复制到剪切板')
            copy(link)
        elif len(notebook) == 0:
            print('未能检测到notebook文件,关闭程序。')
    elif prompt == 4:
        # Mode 4: build a link for a notebook in someone else's repo.
        git_repo_link = input('请输入该git仓库网址:')
        notebook_path = input('请输入notebook在该仓库的相对地址:')
        link = generate_link2(git_repo_link, notebook_path)
        print(link)
        # If pyperclip misbehaves, comment out the next two lines.
        print('已将链接复制到剪切板')
        copy(link)
    else:
        print('无法识别指令,关闭程序。')
def __init__(self, tmplate_column_names, file_path):
    # Load a swagger/YAML spec and pre-compute everything later steps need.
    # NOTE: chdir's into the spec's directory — a process-wide side effect.
    # NOTE(review): tmplate_column_names is unused here — confirm whether a
    # subclass or later code relies on it.
    self.file_path = file_path
    tmp = self.file_path.split('/')
    self.path = "./" + tmp[-1]                # file name relative to its dir
    self.dir = "/".join(tmp[0:len(tmp) - 1])  # containing directory
    os.chdir(self.dir)
    self.yaml = YamlReader(self.path).fechFile()
    self.init_data = init.init(self.yaml, self.fetch_init_data())
    self.swaggerPaths = YamlProcessor(self.yaml).YamlProcessorMain()
    self.pars = SwaggerSpecAnalys(
        self.swaggerPaths).SwaggerSpecAnalysMain()
def main():
    """Initialise the application; on failure, offer retry / exit / reinstall.

    Fix: the original had two sibling `except Exception` clauses on one try;
    the second could never run (the first already catches everything), so
    errors raised inside the recovery code escaped unhandled.  The recovery
    path now has its own try/except.
    """
    try:
        init.init()
    except Exception:
        try:
            init.graceful_exit()
            import traceback
            print(traceback.print_exception(*init.sys.exc_info()))
            print("Fatal error!!")
            v = input(prompt)
            if v == '1':
                # Retry from scratch.
                main()
            elif v == '3':
                init.sys.exit(0)
            else:
                # Any other answer: reinstall, then retry.
                import install
                install.main(install.args)
                main()
        except Exception:
            # Extremely fatal error
            print("ERROR")
def play(filename):
    # Load a local playlist and play it on repeat, forever (Python 2: xrange).
    state = init.init()
    console.authenticate(state)
    my_playlist = localplaylist.LocalPlaylist()
    my_playlist.load(filename)
    state['player_path'] = playlist.player_path
    # Outer loop restarts the playlist once the last track finishes.
    while True:
        for i in xrange(my_playlist.count()):
            song = my_playlist.get(i)
            console.play_track(state, song)
def delete(id):
    """Remove the queue row with the given rowid; True on success, False on
    sqlite errors (which are logged to stdout)."""
    try:
        conn = lwdb.init()
        cursor = conn.cursor()
        cursor.execute('DELETE FROM queue WHERE rowid IN (?)', [id])
        conn.commit()
        conn.close()
        return True
    except sqlite3.Error as e:
        print('queue::delete', e)
        return False
def animate(self):
    # Animate phase-plane trajectories for the currently selected system,
    # using the UI fields for initial conditions, plot bounds and count.
    # NOTE(review): the nested lorentz_deriv definition below is truncated in
    # this capture — its body is not visible here.
    dxfunction, dx2function, dxic = init.init(self.fun)
    self.enableZoom()
    #Function to call phase plane plot function
    # Two initial-condition points read from the UI entry widgets.
    IC0 = array([float(self.X0.get()), float(self.Y0.get())])
    IC1 = array([float(self.X1.get()), float(self.Y1.get())])
    # Plot window bounds from the UI: [xmin, ymin, xmax, ymax].
    dim = [float(self.Xmin.get()), float(self.Ymin.get()), float(self.Xmax.get()), float(self.Ymax.get())]
    N_trajectories = int(self.numPlots.get())
    # Python 2 tuple-parameter syntax (removed in Python 3).
    def lorentz_deriv((x, y, z), t0, sigma=10., beta=8./3, rho=28.0):
def __init__(self, source_name="camera", calibrate=True, create_gui=True):
    # Build the capture -> calibration -> scale/offset display pipeline and
    # enter the main loop.
    # TODO: way too much responsibility...

    # set and start the GUI, if it exists
    if create_gui:
        app = QtGui.QApplication(sys.argv)
        self.gui = init.init(self)
        # NOTE(review): sys.exit(app.exec_()) blocks here, so the pipeline
        # setup below only runs when create_gui is False — confirm intended.
        sys.exit(app.exec_())
    # load source, pause, record, and play the video
    self.is_recording = False
    self.recorded = []  # TODO: set defaults with dc .py
    self.displays = []  # update
    self.camera_gain = (
        0.0
    )  # TODO: not right yet!!!  #Should they be used to control the camera directly? firewirevideo.c's feature register
    self.camera_bias = 0.0
    self.camera_gamma = 1.0
    self.camera_framerate = FirewireVideo.DC1394_FRAMERATE_15
    self.video_mode = FirewireVideo.DC1394_VIDEO_MODE_640x480_RGB8
    self.parse_args()
    self.open_source(source_name)
    # display pipeline
    # source -> corrector -> scale_offset (with autocontrast on/off?)
    # calibrate camera ??? (TODO: load calibration data if available?)
    self.calibrator = Calibrator(input=self.source, calibrate_now=calibrate)
    # keep the last ProcessObject in the pipeline
    self.source.updatePlayMode()
    self.calibrator.getOutput().update()
    self.scale_offset = Scale_Offset(input=self.calibrator.getOutput(),
                                     auto_contrast=False)
    self.output = self.scale_offset
    # TODO: manage displays with GUI, only show the final result at end
    if calibrate:
        # Show the dark-current and flat-field calibration images.
        self.add_image_display(input=self.calibrator.dark_current_image,
                               title="dc")
        self.add_image_display(input=self.calibrator.flat_field_image * 255,
                               title="ff")
    self.add_display(input=self.source, title="source")
    self.add_display(input=self.calibrator.getOutput(), title="corrected")
    self.add_display(input=self.output, title="microscope")
    self.main_loop()
def share_create(share_name):
    # Flask endpoint: create a new samba share folder on disk and register it.
    # Returns a JSON object with "success" and an "info" error code.
    # TODO: disable some characters to use http://internet.designcross.jp/2010/02/blog-post.html
    # Reserved/hidden names can never become shares.
    if share_name.startswith(".") or share_name.startswith("_") or share_name == "static":
        return flask.jsonify({"success":False, "info":"INVALIDNAME"})
    params = flask.request.json
    if samba.share_exists(share_name):
        return flask.jsonify({"success":False, "info":"SHAREALREADYEXISTS"})
    share_folder_base = web.app.config["SHARE_FOLDER_BASE"]
    # Python 2: encode unicode names so the filesystem path is bytes.
    share_dir = os.path.join(share_folder_base,
        share_name.encode("utf-8") if isinstance(share_name,unicode) else share_name)
    if os.path.exists(share_dir):
        return flask.jsonify({"success":False, "info":"DIRALREADYEXISTS"})
    # NOTE(review): NOACCESS is reported only when the base is BOTH not a
    # directory AND not writable; an `or` may have been intended — confirm.
    if not os.path.isdir(share_folder_base) and not os.access(share_folder_base, os.W_OK):
        return flask.jsonify({"success":False, "info":"NOACCESS"})
    os.mkdir(share_dir)
    init.init(share_dir)
    rst = samba.register_share(share_name, share_dir,
        comment=params.get(u"comment"), guest_ok=params.get(u"guest_ok"),
        writable=True, force_user=getpass.getuser(), veto_files = ".oscar",
        valid_users=params.get("valid_users"), valid_groups=params.get("valid_groups"))
    if rst:
        # Persist extra options only after a successful registration.
        options = params[u"options"] if u"options" in params else {}
        config.put_all(share_dir, options)
    return flask.jsonify({"success":rst, "info":None})
def list():
    """Return every (key, value) pair from the config table.

    On sqlite errors, logs to stdout and returns whatever was collected so
    far (usually an empty list).  (Shadows the builtin `list`; name kept.)
    """
    rows = []
    try:
        conn = lwdb.init()
        cursor = conn.cursor()
        cursor.execute('SELECT * FROM config')
        for row in cursor.fetchall():
            rows.append((row[0], row[1]))
        conn.close()
    except sqlite3.Error as e:
        print('config::list()', e)
    return rows
def main(self):
    # Application bootstrap: initialise state, the Qt application and the
    # session D-Bus connection, install translations, then authenticate.
    self.running = False
    self.state = init.init()
    self.app = QtWidgets.QApplication(sys.argv)
    # D-Bus must be bound to the Qt main loop before the session bus is used.
    DBusQtMainLoop(set_as_default=True)
    self.session_bus = QtDBus.QDBusConnection.sessionBus()
    self.session_bus.registerService(DBUS_SERVICE_NAME)
    # Load the UI translation matching the current locale, if available.
    qt_translator = QtCore.QTranslator()
    qt_translator.load("xmradio_%s" % QtCore.QLocale().name(), "lang")
    self.app.installTranslator(qt_translator)
    self.authenticate()
def set_mode(id, mode=None, curr=None):
    """Update the mode column of one project_file row.

    When *curr* is supplied the caller owns the connection (and the commit);
    otherwise a connection is opened, committed and closed here.
    Returns True on success, False on sqlite errors.
    """
    try:
        own_connection = curr is None
        if own_connection:
            conn = lwdb.init()
            curr = conn.cursor()
        curr.execute('UPDATE project_file SET mode = ? WHERE rowid = ?', (mode, id))
        if own_connection:
            conn.commit()
            conn.close()
        return True
    except sqlite3.Error as e:
        print('project_file::set_mode', e)
        return False
def set(key=None, value=None):
    """Upsert one (key, value) pair into the config table; True on success."""
    try:
        conn = lwdb.init()
        cursor = conn.cursor()
        try:
            cursor.execute('INSERT INTO config VALUES (?,?)', (key, value))
        except sqlite3.IntegrityError:
            # Key already present: overwrite its value instead.
            cursor.execute('UPDATE config SET value = ? WHERE key = ?', (value, key))
        conn.commit()
        conn.close()
        return True
    except sqlite3.Error as e:
        print('config::set', e)
        return False
def add(file, event):
    """Append *event* (JSON-serialised) to the queue for file *file*.

    Returns the new row's id, or None on sqlite errors.
    """
    try:
        conn = lwdb.init()
        curr = conn.cursor()
        curr.execute('INSERT INTO queue(file_id,json) VALUES (?,?)',
                     (file, json.dumps(event),))
        conn.commit()
        # Improvement: use the cursor's lastrowid instead of an extra
        # SELECT last_insert_rowid() round trip.
        id = curr.lastrowid
        conn.close()
        return id
    except sqlite3.Error as e:
        print('queue::add', e)
        return None
def complete(id,curr=None):
    # Mark one queue row as complete (Python 2: print statement below).
    # When *curr* is supplied the caller owns the connection and the commit;
    # otherwise one is opened, committed and closed here.
    # Returns True on success, False on sqlite errors.
    print "comp {}".format(id)
    try:
        mycurr = False
        if curr is None:
            conn = lwdb.init()
            curr = conn.cursor()
            mycurr = True
        curr.execute('UPDATE queue SET complete = 1 WHERE rowid IN (?)',[id])
        if mycurr:
            conn.commit()
            conn.close()
        return True
    except sqlite3.Error as e:
        print('queue::complete',e)
        return False
def get(key):
    """Return the config row for *key*.

    Raises ConfigValueNotFoundError when the key is absent; on sqlite errors
    logs to stdout and returns (None, None).
    """
    result = (None, None)
    try:
        conn = lwdb.init()
        cursor = conn.cursor()
        cursor.execute('SELECT * FROM config WHERE key IN (?)', [(key)])
        row = cursor.fetchone()
        if row is None:
            raise lwfexcept.ConfigValueNotFoundError(key)
        result = row
        conn.close()
    except sqlite3.Error as e:
        print('config::get()', e)
    return result
def add_file(projectid, fileid, fileid_mode):
    """Link file *fileid* to project file *projectid* with the given mode.

    Returns True on success; raises ProjectFileModeAlreadyTakenError when
    that mode slot is already occupied, and re-raises other sqlite errors.
    """
    u.eprint("PF {} add file {} {}".format(projectid, fileid_mode, fileid))
    conn = lwdb.init()
    cursor = conn.cursor()
    try:
        cursor.execute(
            'INSERT INTO project_file_ref (project_file_id,file_id,mode) VALUES (?,?,?)',
            (projectid, fileid, fileid_mode))
        conn.commit()
        conn.close()
        return True
    except sqlite3.IntegrityError as e:
        # The (project, mode) pair is unique; surface a domain-level error.
        conn.close()
        raise lwfexcept.ProjectFileModeAlreadyTakenError()
    except sqlite3.Error as e:
        conn.close()
        raise e
def get_files(pf):
    """Return the File objects referenced by project file *pf*."""
    conn = lwdb.init()
    cursor = conn.cursor()
    try:
        cursor.execute('SELECT file_id FROM project_file_ref WHERE project_file_id = ?',
                       (pf.id,))
        rows = cursor.fetchall()
        # Resolve each referenced id to its full File record.
        files = [dbfile.get(id=row[0]) for row in rows]
        conn.close()
        return files
    except sqlite3.Error as e:
        conn.close()
        raise e
def add(path):
    """Insert a new file row for *path* and return it as a File object.

    Raises FileAlreadyImportedError when the path exists already, and
    re-raises other sqlite errors.
    """
    conn = lwdb.init()
    curr = conn.cursor()
    try:
        curr.execute('INSERT INTO file (path) VALUES (?)', (path,))
        conn.commit()
        # Improvement: use the cursor's lastrowid instead of a second
        # SELECT last_insert_rowid() query.
        f = File(curr.lastrowid, path)
        conn.close()
        return f
    except sqlite3.IntegrityError as e:
        # UNIQUE(path) violated: this file was imported before.
        conn.close()
        raise lwfexcept.FileAlreadyImportedError(path)
    except sqlite3.Error as e:
        conn.close()
        raise e
def pop():
    """Atomically claim the next incomplete queue entry.

    Wraps a select-then-mark-complete pair in an explicit BEGIN/COMMIT so
    concurrent workers cannot take the same row. Returns the claimed row,
    or None after an sqlite error (the transaction is rolled back).
    """
    conn = lwdb.init()
    # Disable implicit transactions so the explicit BEGIN/COMMIT below
    # controls the transaction boundaries.
    conn.isolation_level = None
    curr = conn.cursor()
    i = None
    try:
        curr.execute("BEGIN")
        # NOTE(review): `list` here shadows the builtin — presumably a
        # sibling queue.list(curr=..., complete=..., limit=...) helper
        # defined elsewhere in this module; confirm. The builtin list()
        # accepts no such keywords.
        l = list(curr=curr, complete=0, limit=1)
        # NOTE(review): an empty queue would make l[0] raise IndexError,
        # which is not caught here — confirm callers expect that.
        i = l[0]
        complete(i[0], curr=curr)
        curr.execute("COMMIT")
    except sqlite3.Error:
        curr.execute("ROLLBACK")
    conn.close()
    return i
def find(fileid):
    """Return the project-file records that reference file *fileid*.

    Raises ProjectFileNotFoundError when no project file references it;
    re-raises any sqlite error after closing the connection.
    """
    data = []
    conn = lwdb.init()
    curr = conn.cursor()
    try:
        curr.execute(
            'SELECT project_file_id FROM project_file_ref WHERE file_id = ?',
            (fileid,))
        file_data = curr.fetchall()
        for d in file_data:
            f = get(id=d[0])
            data.append(f)
        conn.close()
        # Fixed: original used `len(data) is 0`, an identity comparison
        # that only works via CPython's small-int cache; test emptiness
        # directly instead.
        if not data:
            raise lwfexcept.ProjectFileNotFoundError(fileid)
        return data
    except sqlite3.Error as e:
        conn.close()
        raise e
def __init__(self):
    """Set up the game: audio, fonts, sounds, player, asteroids — then run the loop.

    NOTE(review): reads self.screenSize, self.shipImage, self.ammoType,
    self.asteroidImage and self.yoshiImage before assigning them — these are
    presumably class attributes on the enclosing class; confirm.
    """
    self.gameOver = False
    self.screen = init.init(self.screenSize)
    #pygame.mixer.init(frequency=44100, size=16, channels=2, buffer=2000)
    pygame.mixer.init()
    pygame.font.init()
    #Init sounds
    # NOTE(review): backslash relative paths ('..\sounds\...') are
    # Windows-specific and depend on the current working directory.
    self.laserShotSound = pygame.mixer.Sound(os.path.join('..\sounds\lasershot.wav'))
    self.asteroidExplosion = pygame.mixer.Sound(os.path.join('..\sounds\snare003.wav'))
    self.playerDies = pygame.mixer.Sound(os.path.join('..\sounds\Noise001.wav'))
    #Init fonts
    self.fontScoreHeight = 25
    self.fontScoreColor = (0, 210, 30)
    self.fontScorePos = (25, 450)
    self.scoreFont = pygame.font.Font(os.path.join('..\gfx\chintzy.ttf'), self.fontScoreHeight)
    #Projectiles
    self.projectilesList = []
    #Create main player
    self.player = Player(self, self.shipImage, (32,32), (300, 420), self.ammoType, self.screenSize)
    self.asteroidController = AsteroidController(self, (32, 32), self.asteroidImage)
    self.backgroundScroller = BackgroundScroller(self.screenSize)
    #Load animations
    self.animationController = AnimationController()
    self.animationController.addAnimation(self.yoshiImage, 7, (64, 64))
    #Score system
    self.score = 0 #Initial score
    # score increment constant, presumably awarded per asteroid hit
    self.scoreAsteroidHit = 80
    # Starts the main loop — __init__ does not return until the game ends.
    self.startGameLoop()
def tResponse(functionName, IC = None, dim = None, numXo = None, ODESolver = 'Default', simSpecs = None):
    """Plot the time response of the two-state system named *functionName*.

    simSpecs, when provided, is (tInit, tFinal, numT); otherwise the
    defaults (0.0, 15, 1000) are used — similar to a step response.
    IC is passed straight to the integrator as the initial state X0.
    Returns the matplotlib figure holding the y1/y2 traces.
    """
    dxfunction, dx2function, dxic = init.init(functionName)
    if simSpecs is not None:
        tInit, tFinal, numT = simSpecs[0], simSpecs[1], simSpecs[2]
    else:
        # similar to step response
        tInit, tFinal, numT = 0.0, 15, 1000
    X0 = IC
    t = linspace(tInit, tFinal, numT)  # time grid
    X, infodict = integrateFucn(dxfunction, X0, t, ODESolver)
    # infodict['message'] reports e.g. 'Integration successful.'
    y1, y2 = X.T
    # Time response of the system
    f1 = p.figure(figsize=(5, 4), dpi=100)
    p.plot(t, y1, 'r-', label='y1(t)')
    p.plot(t, y2, 'b-', label='y2(t)')
    p.grid()
    p.legend(loc='best')
    p.xlabel('time')
    p.ylabel('outputs')
    p.title('Evolution of y1 and y2')
    return f1
##%run main.py ### reload run data/clusters import init init.init( 'output/Hpy.pkl' ) from globals import * import funcs import floc if False: scores_all = floc.get_scores_all(clusters, iter, all_genes, ratios, string_net) scores_all2 = floc.get_scores_best(scores_all) #%timeit -n1 ord = floc.rnd_bubblesort( scores_all2['combined'].values.copy() ) ##, scores_all2.shape[0]*2 ) #%timeit -n1 ord = floc.rnd_bubblesort2( scores_all2['combined'].values.copy() ) ##, scores_all2.shape[0]*2 ) #%timeit -n1 ord = floc.rnd_bubblesort3( scores_all2['combined'].values.copy() ) ##, scores_all2.shape[0]*2 ) ### testing the parallelized biclusterings from globals import * from params import * import Bicluster tmp = Bicluster.re_meme_all_clusters_par(clusters) ### testing weaved residue funcs import weaved b = clusters[0] print funcs.matrix_residue( ratios.ix[b.rows, b.cols] ) rats = ratios.ix[b.rows, b.cols].values print weaved.fast_resid(rats)
# coding=utf-8
# Entry point: log into reddit, build the lookup dictionaries, then start
# the comment-watching worker thread.
import init
import threading
from reddit import loginreddit
from thread.findposts import findComments

if __name__ == "__main__":
    print('[main] logging into reddit')
    loginreddit.loginReddit()
    print('[main] initialize dictionaries and converters (champion, item, spell)')
    init.init()
    # keep a handle on the authenticated reddit session
    r = loginreddit.r
    print('[main] starting findcomments thread')
    finder = threading.Thread(target=findComments, args=())
    finder.start()
# du/dt = a*u - b*u*v # dv/dt = -c*v + d*b*u*v # We will use X=[u, v] to describe the states from numpy import * import pylab as p import init as init #List of the function names availables FuntionNameList = ["Hyperbolic", "Simple", "Magnetic", "Duffing", "Vanderpol", "Violin"] #Select the function that you want to work with functionName = "Vanderpol" dxfunction, dx2function, dxic = init.init(functionName) print 'Init values (%.3f, %.3f) (%.3f, %.3f)' % (dxic[0][0], dxic[0][1], dxic[1][0], dxic[1][1]) # load initials conditions to test X0 = array([1, 1]) # Before using SciPy to integrate this system, we will have a closer look on # position equilibrium. Equilibrium occurs when the x_dot is equal to 0. #Test initial condition (should be true) def testIC( fun, *args ): result = all(fun(args[0]) == zeros(2) ) and all(fun(args[1]) == zeros(2) ) if not result: print 'Error, equilibrium point should be equal to zero! Check initial conditions!' testIC(dxfunction,dxic[0],dxic[1])
# CLI wrapper: parse three Python-literal command-line arguments, run the
# solver, then coerce numpy arrays inside the result into plain lists so the
# structure is serializable.
import jsonpickle as j
import init as i
import argparse
import ast

parser = argparse.ArgumentParser()
parser.add_argument('names')
parser.add_argument('products')
parser.add_argument('matrix')
args = parser.parse_args()
# Arguments arrive as literal strings, e.g. "['a', 'b']"; literal_eval is
# the safe alternative to eval for untrusted input.
names = ast.literal_eval(args.names)
products = ast.literal_eval(args.products)
matrix = ast.literal_eval(args.matrix)
results = i.init(names, products, matrix)
# results[2]: array -> list
if not isinstance(results[2], list):
    results[2] = results[2].tolist()
# results[6] and results[7] hold per-entry records whose fields may be
# numpy arrays; normalize field by field.
for m in range(6,8):
    for l in range(0, len(results[m])):
        if not isinstance(results[m][l][0], list):
            results[m][l][0] = results[m][l][0].tolist()
        if not isinstance(results[m][l][1], list):
            results[m][l][1] = results[m][l][1].tolist()
        else:
            # already a list — convert any array elements inside it
            for n in range(0,len(results[m][l][1])):
                if not isinstance(results[m][l][1][n], list):
                    results[m][l][1][n] = results[m][l][1][n].tolist()
        # field 3: stringify every element
        for k in range(0, len(results[m][l][3])):
            results[m][l][3][k] = str(results[m][l][3][k])
        # NOTE(review): this guard tests results[2] but assigns
        # results[m][l][4] — looks like it should test results[m][l][4];
        # confirm intent before changing.
        if not isinstance(results[2], float):
            results[m][l][4] = float(results[m][l][4])