def main():
    """Entry point: prepare a page working directory, then run the
    requested mode.

    Command line: ``[pagename] mode`` where mode is one of
    ``design`` (run the admin server), ``export``, or ``help``.
    """
    args = sys.argv[1:]
    if len(args) == 2:
        # Explicit page name given: work in a subdirectory of the cwd.
        pagename = args.pop(0)
        workdir = os.sep.join([os.getcwd(), pagename])
    else:
        if os.path.exists("pagedata.txt"):
            # Already inside a page directory.
            workdir = "."
        else:
            pagename = "www.example.com"
            workdir = os.sep.join([os.getcwd(), pagename])
    try:
        mode = args.pop()
    except IndexError:  # no mode argument: default to the designer
        mode = "design"
    if mode not in ["design", "export", "help"]:
        print("invalid command:", mode)
        return
    print("Working directory:", workdir)
    if not os.path.exists(workdir):
        # Fresh project: copy the full skeleton plus a template page file.
        print("Populating directory with skeleton files")
        shutil.copytree(os.sep.join([mydir, "skeleton"]), workdir)
        shutil.copy(os.sep.join([mydir, "template.txt"]),
                    os.sep.join([workdir, "pagedata.txt"]))
    else:
        # Existing project: refresh skeleton-provided assets only,
        # leaving the user's pagedata.txt untouched.
        for x in ["static", "templates"]:
            shutil.rmtree(os.sep.join([workdir, x]))
            shutil.copytree(os.sep.join([mydir, "skeleton", x]),
                            os.sep.join([workdir, x]))
        shutil.copy(os.sep.join([mydir, "skeleton", "designs", "default.html"]),
                    os.sep.join([workdir, "designs", "default.html"]))
    os.chdir(workdir)
    if mode == "design":
        admin.srv()
    elif mode == "help":
        print(usage)
    else:  # export
        export.export()
def main(system_env, system, setup, ep, outputfn, network_name, batch_size):
    """Load a trained MPNet checkpoint and export it for deployment.

    Args:
        system_env: environment identifier forwarded to export().
        system: dynamical-system name; selects which MPNet class to import.
        setup: training-setup directory name under output/<system>/.
        ep: epoch number of the checkpoint to load.
        outputfn: file name for the exported model.
        network_name: network sub-directory containing the checkpoint.
        batch_size: batch size used when exporting.

    Raises:
        ValueError: if *system* is not a recognized model name.
    """
    if system == 'cartpole_obs':
        from networks.mpnet_cartpole_obs import MPNet
    elif system == 'acrobot_obs':
        from networks.mpnet_acrobot_obs import MPNet
    elif system == 'quadrotor_obs':
        from networks.mpnet_quadrotor_obs import MPNet
    else:
        # The original `print(...); raise` failed with
        # "RuntimeError: No active exception to re-raise" — a bare `raise`
        # is only valid inside an except block. Raise explicitly instead.
        raise ValueError("Unrecognized model name: {}".format(system))
    mpnet = MPNet(ae_input_size=32,
                  ae_output_size=output_size[system],
                  in_channels=in_channel[system],
                  state_size=state_size[system]).cuda()
    mpnet.load_state_dict(
        torch.load('output/{}/{}/{}/ep{}.pth'.format(system, setup,
                                                     network_name, ep)))
    # NOTE(review): exported in train() mode (eval() left commented out) —
    # presumably deliberate; confirm stochastic layers are wanted at export.
    mpnet.train()
    # mpnet.eval()
    Path("exported/output/{}".format(system)).mkdir(exist_ok=True)
    export(mpnet,
           setup=setup,
           system_env=system_env,
           system=system,
           exported_path="exported/output/{}/{}".format(system, outputfn),
           batch_size=batch_size)
def main(system_env, system, setup, ep, outputfn):
    """Build a KMPNet, load a hard-coded checkpoint, and export it.

    NOTE(review): `system`, `setup` and `ep` are accepted but the checkpoint
    path below is hard-coded — confirm this is intentional.
    """
    # mpnet = MPNet(
    #     ae_input_size=32,
    #     ae_output_size=1024,
    #     in_channels=1,
    #     state_size=state_size[system]).cuda()
    mpnet = KMPNet(total_input_size=8,
                   AE_input_size=32,
                   mlp_input_size=136,
                   output_size=4,
                   CAE=Encoder,
                   MLP=MLP)
    mpnet.cuda()
    # Earlier checkpoint candidates, kept for reference:
    # mpnet.load_state_dict(torch.load('/media/arclabdl1/HD1/YLmiao/results/KMPnet_res/cartpole_obs_lr0.001000_SGD_step_100/kmpnet_epoch_9950_direction_0_step_100.pkl'.format(system, setup, ep)))
    # load_func(mpnet, '/media/arclabdl1/HD1/YLmiao/results/KMPnet_res/cartpole_obs_lr0.001000_SGD_step_100/kmpnet_epoch_9950_direction_0_step_100.pkl')
    # load_func(mpnet, '/media/arclabdl1/HD1/YLmiao/results/KMPnet_res/cartpole_obs_3_lr0.001000_Adagrad_step_200/kmpnet_epoch_2600_direction_0_step_200.pkl')
    load_func(
        mpnet,
        '/media/arclabdl1/HD1/YLmiao/results/KMPnet_res/cartpole_obs_3_lr0.001000_Adagrad_loss_l1_smooth_step_200/kmpnet_epoch_9950_direction_0_step_200.pkl'
    )
    # NOTE(review): exported in train() mode (eval() commented out) — confirm.
    mpnet.train()
    # mpnet.eval()
    export(mpnet,
           setup=setup,
           system_env=system_env,
           system=system,
           exported_path="exported/output/{}/{}".format(system, outputfn))
def main(system_env, system, setup, ep, from_exported, network_type, outputfn):
    """Build a CostNet for *system*, load its checkpoint, and export it.

    Args:
        system_env: environment identifier forwarded to export().
        system: dynamical-system name; selects the MPNet backbone.
        setup: training-setup directory under output/<system>/.
        ep: epoch number of the CostNet checkpoint to load.
        from_exported: if True, reuse the encoder of a previously exported
            small KMPNet instead of a freshly constructed MPNet.
        network_type: checkpoint sub-directory name.
        outputfn: file name for the exported model.

    Raises:
        ValueError: if *system* is not a recognized model name.
    """
    if from_exported:
        from exported.export_mpnet_external_small_model import KMPNet, load_func, Encoder, MLP
        mpnet = KMPNet(total_input_size=8,
                       AE_input_size=32,
                       mlp_input_size=40,
                       output_size=4,
                       CAE=Encoder,
                       MLP=MLP,
                       loss_f=None).cuda()
        load_func(mpnet, '/media/arclabdl1/HD1/YLmiao/results/KMPnet_res/cartpole_obs_4_lr0.010000_Adagrad_step_200/kmpnet_epoch_3150_direction_0_step_200.pkl')
        costnet = CostNet(ae_input_size=32,
                          ae_output_size=32,
                          in_channels=1,
                          state_size=state_size[system],
                          encoder=mpnet.encoder.cuda()).cuda()
    else:
        if system == 'quadrotor_obs':
            from networks.mpnet_quadrotor_obs import MPNet
        elif system == 'cartpole_obs':
            from networks.mpnet_cartpole_obs import MPNet
        elif system == 'acrobot_obs':
            from networks.mpnet_acrobot_obs import MPNet
        elif system == 'car_obs':
            from networks.mpnet_car_obs import MPNet
        else:
            # Previously an unknown system fell through and crashed later
            # with a NameError on MPNet; fail fast with a clear message.
            raise ValueError("Unrecognized model name: {}".format(system))
        mpnet = MPNet(
            ae_input_size=32,
            ae_output_size=output_size[system],
            in_channels=in_channel[system],
            state_size=state_size[system]).cuda()
        costnet = CostNet(ae_input_size=32,
                          ae_output_size=output_size[system],
                          in_channels=in_channel[system],
                          state_size=state_size[system],
                          encoder=mpnet.encoder).cuda()
    costnet.load_state_dict(torch.load('output/{}/{}/{}/ep{}.pth'.format(system, setup, network_type, ep)))
    costnet.eval()
    # costnet.mlp.eval()
    # costnet.mlp.dropout = False
    Path("exported/output/{}".format(system)).mkdir(exist_ok=True)
    export(costnet,
           setup=setup,
           system_env=system_env,
           system=system,
           exported_path="exported/output/{}/{}".format(system, outputfn))
def orbibuild_project(prjpath, app_args, buildid, recipes_use=None):
    """Build the project at *prjpath* using its recipes, skipping the build
    when *buildid* was already built.

    Raises:
        ValueError: if *prjpath* is not a directory.
    """
    if not os.path.isdir(prjpath):
        raise ValueError("Incorrect path '" + prjpath + "'")
    prjpath = os.path.abspath(prjpath) + "/"
    # NOTE(review): prjpath already ends with "/", so this yields ".../"
    # followed by "/build" (a doubled slash). Harmless on POSIX, but confirm.
    builddir = prjpath + "/build"
    if not os.path.exists(builddir):
        os.makedirs(builddir)
    # Already built with this id: nothing to do.
    if __get_build_id(builddir) == buildid:
        return
    __set_build_id(builddir, buildid)
    recipes = None
    try:
        recipes = Recipes(prjpath, recipes_use)
    except ValueError as e:
        print(e.args[0])
        # NOTE(review): if build_break() returns instead of terminating the
        # process, `recipes` stays None and the access below raises
        # AttributeError — confirm build_break exits.
        build_break(prjpath)
    if len(recipes.list):
        export(prjpath, recipes.list[0])
    # Build each recipe: report, copy dependency binaries, compile, link,
    # and copy output files.
    for recipe in recipes.list:
        __info_recipe(recipe)
        deps_travel(prjpath, __copy_bins)
        __compile(prjpath, app_args, recipe, buildid)
        __linking(prjpath, recipe)
        __copy_files(prjpath, recipe)
def export_to_excel(settings):
    """Convert the scraper's feed output to an .xlsx file.

    Takes the feed file named in ``settings['FEED_URI']``, exports it as an
    Excel file with the same base name, deletes the raw feed file, and
    returns the Excel file name.
    """
    filename_raw = settings['FEED_URI']
    # Base name without extension; reused for the Excel output so the two
    # files correspond. (Previously the computed base name was unused and
    # the output name was a placeholder literal.)
    filename = filename_raw[0:filename_raw.index('.')]
    excel_filename = f'{filename}.xlsx'
    export(filename_raw, excel_filename)
    # The raw feed is no longer needed once converted.
    os.remove(filename_raw)
    return excel_filename
def main(): """ The main function of the script This function run as a starting point of the program """ # Handle args first import export export.export("./submissions", "./", "rubric-01.md")
def main(system_env, system, setup, ep):
    """Load the 'cost_so_far' CostNet checkpoint for *system* and export it.

    Args:
        system_env: environment identifier forwarded to export().
        system: dynamical-system name used in checkpoint/output paths.
        setup: training-setup directory under output/<system>/.
        ep: epoch number of the checkpoint to load.
    """
    # Backbone MPNet: only its encoder is reused by the cost network below.
    mpnet = MPNet(
        ae_input_size=32,
        ae_output_size=1024,
        in_channels=1,
        state_size=state_size[system]).cuda()
    costnet = CostNet(ae_input_size=32,
                      ae_output_size=1024,
                      in_channels=1,
                      state_size=4,
                      encoder=mpnet.encoder).cuda()
    costnet.load_state_dict(torch.load('output/{}/{}/cost_so_far/ep{}.pth'.format(system, setup, ep)))
    costnet.eval()
    export(costnet,
           setup=setup,
           system_env=system_env,
           system=system,
           exported_path="exported/output/{}/cost_so_far_10k.pt".format(system))
def main():
    """Parse command-line options for the font build and run the exporter."""
    manifest_path = DEF_MANIFEST
    input_path = DEF_INPUT_PATH
    output_path = DEF_OUTPUT_PATH
    output_formats = DEF_OUTPUT_FORMATS
    ttx_output = DEF_TTX_OUTPUT
    dev_ttx_output = DEF_DEV_TTX
    delim_codepoint = DEF_DELIM_CODEPOINT
    no_lig = DEF_NO_LIG
    no_vs16 = DEF_NO_VS16
    nfcc = DEF_NFCC
    try:
        opts, _ = getopt.getopt(
            sys.argv[1:], 'hm:i:o:F:d:',
            ['help', 'ttx', 'dev-ttx', 'no-lig', 'no-vs16', 'nfcc'])
        for opt, arg in opts:
            if opt in ['-h', '--help']:
                print(HELP)
                sys.exit()
            elif opt == '-m':
                manifest_path = arg
            elif opt == '-i':
                input_path = arg
            elif opt == '-o':
                output_path = arg
            elif opt == '-F':
                output_formats = arg.split(',')
            elif opt == '-d':
                delim_codepoint = arg
            elif opt == '--ttx':
                ttx_output = True
            elif opt == '--dev-ttx':
                dev_ttx_output = True
            elif opt == '--no-lig':
                no_lig = True
            elif opt == '--no-vs16':
                no_vs16 = True
            elif opt == '--nfcc':
                nfcc = True
    except Exception:
        # Any option-parsing failure: show usage and exit non-zero.
        print(HELP)
        sys.exit(2)
    try:
        export(manifest_path, input_path, output_path, output_formats,
               delim_codepoint, ttx_output, dev_ttx_output, no_lig, no_vs16,
               nfcc)
    except Exception as e:
        log.out(f'!!! {e}', 31)
        raise e  ######################## TEMP
        # NOTE(review): unreachable after `raise` — debugging leftover.
        sys.exit(1)
    log.out('All done', 35)
def main(): x = numpy.zeros((1, 3, 32, 32), dtype=numpy.float32) # disable rename_tensors version export(Model(use_bn=True), x, filename='output/A.onnx') export(Model(use_bn=False), x, filename='output/B.onnx') # disable check model in onnx_chainer.export checker.check_model = lambda x: None onnx_chainer.export(Model(use_bn=True), x, filename='output/C.onnx') onnx_chainer.export(Model(use_bn=False), x, filename='output/D.onnx')
def main():
    """Entry point for distributed style-transfer training.

    Parameter-server tasks join the cluster and block; worker tasks run
    the optimization, and the chief (task 0) exports the trained model.
    """
    parser = build_parser()
    options = parser.parse_args()
    ps_spec = options.ps_hosts.split(",")
    worker_spec = options.worker_hosts.split(",")
    # Get the number of workers.
    num_workers = len(worker_spec)
    cluster = tf.train.ClusterSpec({"ps": ps_spec, "worker": worker_spec})
    if options.job_name == "ps":
        # Parameter servers serve variables forever; they never train.
        print("Start parameter server %d" % (options.task_index))
        server = tf.train.Server(cluster,
                                 job_name=options.job_name,
                                 task_index=options.task_index)
        server.join()
        return
    check_opts(options)
    style_target = get_img(options.style)
    content_targets = _get_files(options.train_path)
    random.shuffle(content_targets)
    kwargs = {
        "epochs": options.epochs,
        "print_iterations": options.checkpoint_iterations,
        "batch_size": options.batch_size,
        "save_path": options.checkpoint_dir,
        "learning_rate": options.learning_rate,
        "test_image": options.test
    }
    args = [
        cluster, options.task_index, options.num_gpus, options.limit_train,
        content_targets, style_target, options.content_weight,
        options.style_weight, options.tv_weight, options.vgg_path
    ]
    preds, losses, i, epoch = optimize(*args, **kwargs)
    style_loss, content_loss, tv_loss, loss = losses
    print('Epoch %d, Iteration: %d, Loss: %s' % (epoch, i, loss))
    to_print = (style_loss, content_loss, tv_loss)
    print('style: %s, content:%s, tv: %s' % to_print)
    # Only the chief worker exports the model and prints follow-up steps.
    if options.task_index == 0:
        export.export(options.checkpoint_dir, (1, 512, 512, 3))
        ckpt_dir = options.checkpoint_dir
        cmd_text = 'python evaluate.py --checkpoint %s ...' % ckpt_dir
        print("Training complete. For evaluation:\n `%s`" % cmd_text)
def exportCsv(self): """ Export the current model as a CSV file. """ # Check a database is opened. if model.the_engine is None: return name = self.current_url.split('/')[-1].split('.')[0] # First ask the user for the file to save to. fname = str(QtGui.QFileDialog.getSaveFileName(self, 'export as CSV', '%s.csv' % name, '*.csv')) if fname == '': return # Then export the database export(fname, model.the_engine)
def _run(config: Configuration) -> str:
    """Page through the project API and export every batch of results.

    Repeatedly fetches data after ``config.last_id``, exports each batch to
    the configured output file, and stops once a fetch returns no rows.

    Returns:
        The path of the output file written to.
    """
    file_path = config.get_api_file_output_path()
    while True:  # replaces the odd `while 1 == 1`
        project_data = get_project_data(config)
        if not project_data:
            # This occurs when there are no more results "above" the last id.
            break
        export(file_path, project_data)
        # Advance the pagination cursor past the batch just exported.
        config.last_id = project_data[-1]["id"]
    return file_path
def exportmain():
    """Export the current pitch and show confirmation messages in the UI."""
    export.export(values.pitch)
    # The three status lines only differ in text/font/width, so the
    # widget boilerplate lives in one helper instead of three copies.
    _show_message("Your file was succesfully exported", "font, 20", 34)
    _show_message(
        "It should be located under in the exports folder of the program",
        "font, 15", 63)
    _show_message("This is usually C:/Program Files(x86)/StS", "font, 15", 41)


def _show_message(text, font, width):
    """Append a read-only, borderless one-line Text widget to the root window."""
    box = tkinter.Text(root, height=1, font=font, width=width)
    box.insert(tkinter.INSERT, text)
    # Disable editing and blend the widget into the window background.
    box.config(state=tkinter.DISABLED, bg="#f0f0f0", bd=0)
    box.pack()
def rnn(x_data, y_data):
    """Train and evaluate an LSTM regression model on the given data.

    Splits the data head/tail at TRAINING_PERCENTAGE, fits a single-layer
    LSTM with a LeakyReLU output, prints the CRPS score, and prints the
    DataFrame produced by export().
    """
    x_train = np.array(x_data[:int(len(x_data) * TRAINING_PERCENTAGE)])
    y_train = np.array(y_data[:int(len(y_data) * TRAINING_PERCENTAGE)])
    x_test = np.array(x_data[int(len(x_data) * TRAINING_PERCENTAGE):])
    y_test = np.array(y_data[int(len(y_data) * TRAINING_PERCENTAGE):])
    # (unused local `maxFeatures` removed)
    model = Sequential()
    model.add(LSTM(200))
    model.add(Dense(1, activation=LeakyReLU(alpha=0.3)))
    model.compile(loss='mean_squared_error', optimizer='adam', metrics=[crps])
    history = model.fit(x_train,
                        y_train,
                        batch_size=BATCH_SIZE,
                        epochs=EPOCHS,
                        validation_data=(x_test, y_test))
    # graph(history, to_file='images/rnn.png')
    scores = model.evaluate(x_test, y_test)
    print(f'CRPS RNN: {scores[1]}')
    df = export(x_train, x_test, model)
    print(df)
def main(clas):
    """Dispatch budget-tool subcommands based on command-line flags.

    NOTE(review): `help` here is presumably a module-level usage string —
    confirm it is not the builtin being printed.
    """
    # Split the raw argument list into flags and positional arguments.
    opts = [opt for opt in clas if opt.startswith("-")]
    args = [arg for arg in clas if not arg.startswith("-")]
    if "-h" in opts:
        print(help)
    elif "-c" in opts:
        # Recommended budget percentages for a given income.
        import recommendedPercents as rp
        try:
            rp.budgetPercents(float(args[0]))
        except IndexError:
            errMessage()
    elif "-p" in opts:
        # Actual percentages for a given month's file.
        import actualPercents as ap
        try:
            ap.budgetPercents(month2file(args[0]))
        except IndexError:
            errMessage()
    elif "-i" in opts:
        import interestCalc as ic
        try:
            ic.calc(args[0], int(args[1]), int(args[2]), float(args[3]))
        except IndexError:
            errMessage()
    elif "-a" in opts:
        import addEntry as ae
        try:
            ae.add(month2file(args[0]), args[1], args[2], args[3], float(args[4]))
        except IndexError:
            errMessage()
    elif "-e" in opts:
        import export as e
        try:
            e.export(month2file(args[0]))
        except IndexError:
            errMessage()
    elif "-v" in opts:
        import vis as v
        try:
            v.stack(args[0])
        except IndexError:
            errMessage()
    else:
        print(help)
def default(self, *a, **kw):
    """Catch-all cherrypy handler: serves static/files/preview content or
    forwards /api requests to self.api().
    """
    print("params:", a, kw)
    if a[0] in ["static", "files", "preview"]:
        if a[0] == "files":
            # Strip query strings from each path component.
            fname = "files/" + "/".join([x.split("?")[0] for x in a[1:]])
        elif a[0] == "static":
            fname = "static/" + "/".join(a[1:])
        else:
            # preview: regenerate the site, then serve the generated index.
            fname = "index.html"
            export.export()
        mtype = mimetypes.guess_type(fname)[0]
        # .less files aren't in the mimetypes table; serve them as CSS.
        if mtype is None and a[-1].split(".")[-1] == "less":
            mtype = "text/css"
        print(fname, mtype)
        cherrypy.response.headers['Content-Type'] = mtype
        # NOTE(review): the handle is returned open; presumably cherrypy
        # closes it after streaming — confirm.
        return open(fname, "rb")
    elif a[0] == "api":
        return self.api(a[1:], kw)
def exportFunc(self):
    """Validate the target file name, then export the current output."""
    self.input2 = self.textbox2.text()
    # Nothing has been generated yet — nothing to export.
    if self.output2.blockCount() == 1:
        return
    # A target file name is mandatory.
    if self.input2 == "":
        self.errorPop("You must enter a file name!", 1)
        return
    exporter = export.export()
    exporter.runExport(self.theOutput, self.input, self.input2)
def ffnn(x_data, y_data):
    """Train and evaluate a feed-forward regression network.

    Flattens the samples, splits head/tail at TRAINING_PERCENTAGE, fits a
    three-hidden-layer dense network, prints the CRPS score, and prints
    the DataFrame produced by export().
    """
    # Flatten each sample into a 1-D feature vector.
    x_data = flatten(x_data)
    numnodes = len(x_data[0])
    x_train = np.array(x_data[: int(len(x_data) * TRAINING_PERCENTAGE)])
    y_train = np.array(y_data[: int(len(y_data) * TRAINING_PERCENTAGE)])
    x_test = np.array(x_data[int(len(x_data) * TRAINING_PERCENTAGE):])
    y_test = np.array(y_data[int(len(y_data) * TRAINING_PERCENTAGE):])
    model = Sequential([
        Dense(numnodes, input_dim=len(x_train[0])),
        LeakyReLU(alpha=0.3),
        BatchNormalization(),
        Dropout(0.3),
        Dense(numnodes*2),
        LeakyReLU(alpha=0.3),
        BatchNormalization(),
        Dropout(0.3),
        Dense(numnodes*2),
        LeakyReLU(alpha=0.3),
        BatchNormalization(),
        Dropout(0.3),
        Dense(1),
        LeakyReLU(alpha=0.3)
    ])
    model.compile(
        loss='mean_squared_error',
        optimizer='adam',
        metrics=[crps]
    )
    model.summary()
    history = model.fit(
        x_train,
        y_train,
        batch_size=BATCH_SIZE,
        epochs=EPOCHS,
        validation_data=(x_test, y_test),
        verbose=2
    )
    # graph(history, to_file='images/ffnn.png')
    # Evaluating the model
    scores = model.evaluate(x_test, y_test, verbose=2)
    print(f'CRPS FFNN: {scores[1]}')
    df = export(x_train, x_test, model)
    print(df)
def cnn(x_data, y_data):
    """Train and evaluate a convolutional regression network.

    Reshapes samples to 22x38x1, splits head/tail at TRAINING_PERCENTAGE,
    fits a two-stage conv net with L2 regularization, prints the CRPS
    score, and prints the DataFrame produced by export().
    """
    x_train = np.array(x_data[:int(len(x_data) * TRAINING_PERCENTAGE)])
    # Add the trailing channel dimension expected by Conv2D.
    x_train = x_train.reshape(len(x_train), 22, 38, 1)
    y_train = np.array(y_data[:int(len(y_data) * TRAINING_PERCENTAGE)])
    x_test = np.array(x_data[int(len(x_data) * TRAINING_PERCENTAGE):])
    x_test = x_test.reshape(len(x_test), 22, 38, 1)
    y_test = np.array(y_data[int(len(y_data) * TRAINING_PERCENTAGE):])
    # L2 weight-decay coefficient shared by all conv layers.
    weight_decay = 1e-4
    model = Sequential([
        Conv2D(32, (3, 3),
               activation=LeakyReLU(alpha=0.25),
               input_shape=(len(x_data[0]), len(x_data[0][0]), 1),
               padding='same',
               kernel_regularizer=regularizers.l2(weight_decay)),
        BatchNormalization(),
        Conv2D(32, (3, 3),
               activation=LeakyReLU(alpha=0.25),
               kernel_regularizer=regularizers.l2(weight_decay)),
        Dropout(0.3),
        MaxPooling2D((2, 2)),
        Conv2D(64, (3, 3),
               activation=LeakyReLU(alpha=0.25),
               padding='same',
               kernel_regularizer=regularizers.l2(weight_decay)),
        BatchNormalization(),
        Conv2D(64, (3, 3),
               activation=LeakyReLU(alpha=0.25),
               kernel_regularizer=regularizers.l2(weight_decay)),
        Dropout(0.3),
        MaxPooling2D((2, 2)),
        Flatten(),
        Dense(64, activation=LeakyReLU(alpha=0.25)),
        Dense(1, activation=LeakyReLU(alpha=0.25)),
    ])
    model.compile(optimizer='adam', loss='mean_squared_error', metrics=[crps])
    history = model.fit(x_train,
                        y_train,
                        epochs=EPOCHS,
                        validation_data=(x_test, y_test))
    # graph(history, to_file='images/cnn.png')
    # Evaluating the model
    scores = model.evaluate(x_test, y_test, verbose=2)
    print(f'CRPS CNN: {scores[1]}')
    df = export(x_train, x_test, model)
    print(df)
def download(self, file_bool):
    """Export the current database to a temp file and stream it to the
    client as an attachment, deleting the file once fully sent.
    """
    global out_db
    temp_name, mimetype = export.export(out_db, file_bool)
    path = os.path.join(current_app.root_path, temp_name)

    def generate():
        # Stream the file lazily; remove it only after the last chunk.
        # NOTE(review): opened in text mode — if export() can produce a
        # binary format, this should be open(path, 'rb'); confirm.
        with open(path) as f:
            yield from f
        os.remove(path)

    r = current_app.response_class(generate(), mimetype=mimetype)
    r.headers.set('Content-Disposition', 'attachment', filename=temp_name)
    return r
def main(system_env, system, setup, ep, outputfn):
    """Load a trained branch-MPNet checkpoint and export it.

    Args:
        system_env: environment identifier forwarded to export().
        system: dynamical-system name; selects which MPNet class to import.
        setup: training-setup directory under output/<system>/.
        ep: epoch number of the checkpoint to load.
        outputfn: file name for the exported model.

    Raises:
        ValueError: if *system* is not a recognized model name.
    """
    if system == 'cartpole_obs':
        from networks.mpnet_cartpole_obs_branch import MPNet
    elif system == 'acrobot_obs':
        from networks.mpnet_acrobot_obs import MPNet
    else:
        # The original `print(...); raise` failed with
        # "RuntimeError: No active exception to re-raise"; raise explicitly.
        raise ValueError("Unrecognized model name: {}".format(system))
    mpnet = MPNet(ae_input_size=32,
                  ae_output_size=32,
                  in_channels=1,
                  state_size=state_size[system]).cuda()
    mpnet.load_state_dict(
        torch.load('output/{}/{}/mpnet_branch/ep{}.pth'.format(
            system, setup, ep)))
    # NOTE(review): exported in train() mode (eval() commented out) — confirm.
    mpnet.train()
    # mpnet.eval()
    export(mpnet,
           setup=setup,
           system_env=system_env,
           system=system,
           exported_path="exported/output/{}/{}".format(system, outputfn))
def main(system_env, system, setup, ep, from_exported):
    """Build a 'cost_transit' CostNet, load its checkpoint, and export it.

    Args:
        from_exported: if True, reuse the encoder of a previously exported
            small KMPNet; otherwise build a fresh MPNet backbone.
    """
    if from_exported:
        from exported.export_mpnet_external_small_model import KMPNet, load_func, Encoder, MLP
        mpnet = KMPNet(total_input_size=8,
                       AE_input_size=32,
                       mlp_input_size=40,
                       output_size=4,
                       CAE=Encoder,
                       MLP=MLP,
                       loss_f=None).cuda()
        load_func(
            mpnet,
            '/media/arclabdl1/HD1/YLmiao/results/KMPnet_res/cartpole_obs_4_lr0.010000_Adagrad_step_200/kmpnet_epoch_3150_direction_0_step_200.pkl'
        )
        costnet = CostNet(ae_input_size=32,
                          ae_output_size=32,
                          in_channels=1,
                          state_size=4,
                          encoder=mpnet.encoder.cuda()).cuda()
    else:
        # Fresh MPNet backbone; only its encoder is used by the CostNet.
        mpnet = MPNet(ae_input_size=32,
                      ae_output_size=1024,
                      in_channels=1,
                      state_size=state_size[system]).cuda()
        costnet = CostNet(ae_input_size=32,
                          ae_output_size=1024,
                          in_channels=1,
                          state_size=4,
                          encoder=mpnet.encoder).cuda()
    costnet.load_state_dict(
        torch.load('output/{}/{}/cost_transit/ep{}.pth'.format(
            system, setup, ep)))
    costnet.eval()
    export(costnet,
           setup=setup,
           system_env=system_env,
           system=system,
           exported_path="exported/output/{}/cost_10k.pt".format(system))
def delete():
    """Interactively search for a person by NAME or PID, back up the entry
    to deleted-data.csv, and remove it from the database.

    Loops until a deletion succeeds or the user declines to retry.
    """
    deleting = True
    while deleting:
        query = input('Search for NAME or PID: ').lower()
        if query == 'name' or query == 'pid':
            uinput = input('Type the {}: '.format(query)).lower()
            conn = sqlite3.connect('person.db')
            c = conn.cursor()
            # `query` is constrained to 'name'/'pid' by the branch above, so
            # only the column name is interpolated; the user-supplied value
            # is bound as a parameter to prevent SQL injection (the original
            # formatted it straight into the statement).
            c.execute('SELECT * FROM person WHERE {} = ?'.format(query),
                      (uinput,))
            p = c.fetchone()
            conn.close()
            if p:
                person = Person.Person(p[0], p[1], p[2], p[3], p[4])
                print('Please confirm if you want to delete this entry:')
                print('\n')
                print(person)
                print('\n')
                prompt = input('Confirm? y/n: ').lower()
                if prompt == 'y':
                    # Back up the record before removing it.
                    export.export('d', 'deleted-data.csv', person)
                    conn = sqlite3.connect('person.db')
                    c = conn.cursor()
                    c.execute('DELETE FROM person WHERE {} = ?'.format(query),
                              (uinput,))
                    conn.commit()
                    conn.close()
                    print('Entry deleted with success.')
                    deleting = False
            else:
                print('Entry not found in database.')
                prompt = input('Do you want to try again? y/n: ')
                if prompt != 'y':
                    deleting = False
def download_admin_report():
    """Build the admin report (attendees, events, registrator counts) and
    send it to the current user as an attachment.

    Non-admin users get the login manager's unauthorized response.
    """
    if not flask_login.current_user.is_admin:
        return login_manager.unauthorized()
    db = get_db()
    events = list(model.get_events(db))
    attendees = list(model.get_attendees(db))
    sort_attendees_by_name(attendees)
    users = model.get_attendee_count_by_registrators(db)
    sort_users_by_name(users)
    fname = export.export(attendees, events, users)
    # The original called send_file() twice and discarded the first
    # response object; build the response exactly once.
    return send_file(fname, as_attachment=True)
def save(self):
    """Pretty-print this object's state to the page-data file, then
    regenerate the exported site.
    """
    # `with` guarantees the handle is flushed and closed even on error;
    # the original flushed explicitly but never closed the file.
    with open(pagedata, "w") as f:
        pp2 = pprint.PrettyPrinter(stream=f)
        pp2.pprint(self)
    export.export()
def main():
    """Top-level REPL for PersonDB: login/create flow, then the CRUD menu."""
    print('Welcome to PersonDB!\n')
    log = True
    logged = False
    # Authentication loop: prompt until login succeeds or the user exits.
    while log:
        prompt = input(
            'Type LOGIN to log in, CREATE to create a new user or EXIT to quit the program: '
        ).lower()
        if prompt == 'login':
            try:
                logged = login.login()
                if logged:
                    log = False
            except Exception as e:
                error_report(e)
        elif prompt == 'create':
            try:
                login.create()
            except Exception as e:
                error_report(e)
        elif prompt == 'exit':
            log = False
        else:
            print('Wrong input.')
    if logged:
        run = True
        # Main command loop: each command is wrapped so one failure
        # doesn't kill the session.
        while run:
            print('Type CREATE to create a new entry\n' +
                  'Type READ to search the database\n' +
                  'Type UPDATE to update a entry\n' +
                  'Type DELETE to remove a entry\n' +
                  'Type EXPORT to export the db to a csv file\n' +
                  'Type EXIT to exit')
            command = input("Command: ").lower()
            print('\n')
            if command == 'create':
                try:
                    CRUD.create()
                    print('\n')
                except Exception as e:
                    error_report(e)
            elif command == 'read':
                try:
                    CRUD.read()
                    print('\n')
                except Exception as e:
                    error_report(e)
            elif command == 'update':
                try:
                    CRUD.update()
                    print('\n')
                except Exception as e:
                    error_report(e)
            elif command == 'delete':
                try:
                    CRUD.delete()
                    print('\n')
                except Exception as e:
                    error_report(e)
            elif command == 'export':
                try:
                    # 'a' = append/all mode for the exporter.
                    export.export('a', 'exported-data.csv', '')
                    print('\n')
                except Exception as e:
                    error_report(e)
            elif command == 'exit':
                run = False
            else:
                print('Wrong input.')
                prompt = input('Do you want to try again? y/n: ')
                if prompt == 'n':
                    # NOTE(review): `creating` is never read here — looks
                    # like a copy-paste leftover (should this set `run`?).
                    creating = False
def mousePressed(event, data):
    """Dispatch mouse clicks according to the current screen mode and the
    UI region recorded in data.mousePosition.
    """
    # use event.x and event.y
    # splash screen
    if data.screenMode == 1:
        # Click inside the start-button region advances splash -> chat.
        if event.x > 250 and event.x < 450 and event.y > 530 and event.y < 560:
            data.screenMode = 2
            data.myname = data.typingName
            data.name = data.name
    # chating mode
    elif data.screenMode == 2:
        # select line: walk the message list bottom-up and toggle the
        # selection of the line under the cursor.
        for i in range(len(data.text) - 2, -1, -1):
            currText = data.text[i]
            y0 = currText.y0
            y1 = currText.y1
            # Lines scrolled above the top of the view are done.
            if y0 < 0:
                break
            if data.mouseX >= 200 and data.mouseX <= 700 and data.mouseY <= y1 and data.mouseY >= y0:
                if currText.selected == True:
                    currText.selected = False
                else:
                    currText.selected = True
        if data.mouseX > 260 and data.mouseX < 700 and data.mouseY < 700 and data.mouseY > 600:
            data.mousejustchanged = True
        elif data.mousePosition == "filemode0":
            # Download the first shared file next to this script.
            filename, filepath = data.fileList[0]
            dir_path = os.path.dirname(os.path.realpath(__file__))
            dir_path = dir_path + "/" + filename
            updownload.download(filename, dir_path)
        elif data.mousePosition == "filemode1":
            filename, filepath = data.fileList[1]
            dir_path = os.path.dirname(os.path.realpath(__file__))
            dir_path = dir_path + "/" + filename
            updownload.download(filename, dir_path)
        elif data.mousePosition == "filemode2":
            filename, filepath = data.fileList[2]
            dir_path = os.path.dirname(os.path.realpath(__file__))
            dir_path = dir_path + "/" + filename
            updownload.download(filename, dir_path)
        elif data.mousePosition == "textmode0":
            print("trash")
            data.currentMode = 0
            # Re-tag every selected line with the new mode.
            for i in range(len(data.text) - 2, -1, -1):
                currText = data.text[i]
                if currText.selected == True:
                    currText.mode = data.currentMode
        elif data.mousePosition == "textmode1":
            print("report")
            data.currentMode = 1
            for i in range(len(data.text) - 2, -1, -1):
                currText = data.text[i]
                if currText.selected == True:
                    currText.mode = data.currentMode
        elif data.mousePosition == "textmode2":
            print("note")
            data.currentMode = 2
            for i in range(len(data.text) - 2, -1, -1):
                currText = data.text[i]
                if currText.selected == True:
                    currText.mode = data.currentMode
        elif data.mousePosition == "exportNotes":
            print("export")
            # Export all notes to a text file.
            newTxtString = export.export(data.text)
            export.exportToTxt(newTxtString)
        elif data.mousePosition == "uploadFile":
            print("click uploadFile")
            filename = filedialog.askopenfilename(
                initialdir="/",
                title="Select file",
                filetypes=(("jpeg files", "*.jpg"), ("all files", "*.*")))
            print(filename)
            name = filename.split("/")[-1]
            print(name)
            # Empty strings mean the dialog was cancelled.
            if len(filename) > 0 and len(name) > 0:
                data.fileList.append((name, filename))
                updownload.upload(name, filename)
        if data.mousePosition == "mode":
            print("mode")
from datetime import datetime
from scraper import scrape
from export import export
from emails import send_email

# Today's date, used to label the export and the notification email.
today = datetime.today().strftime('%Y-%m-%d')

# url = "https://www.car.gr/classifieds/cars/?offer_type=sale&rg=2&significant_damage=t&st=private"
url = "https://www.car.gr/classifieds/cars/?make=18"  # for testing purposes

if __name__ == "__main__":
    # Guarded so that importing this module no longer triggers a
    # scrape/export/email run as a side effect.
    scrape(url)
    export('cars', today)
    send_email(today, '*****@*****.**')
def Restrict(f_name,gNum,gene,lag,blstInfo, beg,end,cutSites,org,no_selection,debug): geneStr = str(gene.seq).upper() ## geneComp = str(gene.seq.reverse_complement().upper()) geneComp = str(gene.seq.complement().upper()) if lag == 'Ambiguous': # Create one file for coding oligos oligos = dna_parsing.main(geneStr,beg,end,cutSites,lag, org,no_selection,debug) if oligos and set(oligos) != set([False]): export.export(oligos,blstInfo,lag,gene,gNum,f_name,org) else: export.export(oligos,blstInfo,lag,gene,gNum,f_name,org,sp=True) # And one file for the template oligos ##oligos = dna_parsing.main(geneComp,beg,end,cutSites,lag, oligos = dna_parsing.main(geneStr,beg,end,cutSites,lag, org,no_selection,debug,b=True) if oligos and set(oligos) != set([False]): export.export(oligos,blstInfo,lag,gene,gNum,f_name,org,b=True) else: export.export(oligos,blstInfo,lag,gene,gNum,f_name,org,b=True, sp=True) else: # Choose the sequence to use based on the lagging strand ## if lag == 'Coding': ## sequence = geneStr ## else: ## sequence = geneComp sequence = geneStr oligos = dna_parsing.main(sequence,beg,end,cutSites,lag, org,no_selection,debug) if oligos and set(oligos) != set([False]): export.export(oligos,blstInfo,lag,gene,gNum,f_name,org) else: export.export(oligos,blstInfo,lag,gene,gNum,f_name,org,sp=True)
    # Tail of the argument-parsing helper (its `def` lies outside this view):
    # keep only the template wrappers whose operation flag was requested.
    ops, wraps = ['conv','gemm','pool'], [sc.templates.Conv, sc.templates.GEMM, sc.templates.Pool]
    ops = [wrap for operation, wrap in zip(ops, wraps) if getattr(args, operation)]
    # Done
    return (args.database, args.device, ops, args.nsamples)


def cuda_environment(device):
    """Return (device, context, stream) for the *device*-th CUDA device
    across all platforms."""
    platforms = sc.driver.platforms()
    devices = [d for platform in platforms for d in platform.devices]
    device = devices[device]
    context = sc.driver.Context(device)
    stream = sc.driver.Stream(context)
    return device, context, stream


if __name__ == "__main__":
    # Get arguments
    database, device, operations, nsamples = parse_arguments()
    # Initialize CUDA environment lazily so each tuning run creates it fresh.
    init_cuda = lambda: cuda_environment(device)
    # Run the auto-tuning: benchmark, train a performance model, prune
    # kernels, and export the results per operation type.
    for OpType in operations:
        print("----------------")
        print('Now tuning {}:'.format(OpType.id))
        print("----------------")
        X, Y = dataset.benchmarks(OpType, nsamples, init_cuda)
        model = regression.train(OpType, X, Y)
        kernels = regression.prune(OpType, model, init_cuda)
        export(database, kernels, model, OpType.id, init_cuda)
print(dataset_t)

# Bind all model-construction options up front so Tester can build the
# model on whichever device/worker needs it.
the_create_model = functools.partial(create_model,
                                     half_float=args.half,
                                     padding=(args.image_size <= 0),
                                     tile_size=args.tile_size,
                                     tta=args.tta,
                                     tta_threshold=args.tta_threshold)
tester = Tester(create_model=the_create_model,
                device=args.device,
                jobs=args.jobs,
                disable_tqdm=False)
predictions = tester.test(args.model, dataset_t, args.output)

if args.export:
    from export import export
    export(predictions, args.export)

if args.eval:
    # Evaluation needs ground truth, which requires a data root.
    if args.data_root:
        from eval import evaluate
        iou = evaluate(predictions, dataset)
        print(
            json.dumps(iou,
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': '),
                       ensure_ascii=False))
    else:
        warnings.warn("ignore eval arg")
import sys
sys.path.insert(0, './lib/')

import export

# Input FreeCAD document, the body to export, and the output locations.
file_name = u'PipeLid.FCStd'
obj_label = u'Body'
outfile_name = u'./stl/PipeLid.stl'
imgdir = u'./img/'
# Presumably the preview-image dimensions in pixels — confirm against
# export.image().
x = 2000
y = 1500

# Open the document, export the body as STL, render an image, and close.
export.open(file_name)
export.export(obj_label, outfile_name)
export.image(obj_label, x, y, imgdir)
export.close()
# For the testing import sys import os from db_comms import connect, disconnect, exec_read, exec_write from export import export import ConfigParser connect() exec_write("insert into items values ('PC001', 'Descr001','Make1','Model1','Serial1','Date1','Store1')") exec_write("insert into items values ('PC002', 'Descr002','Make2','Model2','Serial2','Date2','Store2')") exec_write("insert into items values ('PC001', 'Descr003','Make3','Model3','Serial3','Date3','Store3')") results = exec_read("select * from items") disconnect() print results for line in results: print line[0], line[1] ''' print "export test..." export(results) '''
# Manually add elmyra's directory to sys.path because
# the params.py script runs from blender context
current_dir = path.dirname(path.realpath(__file__))
sys.path.append(current_dir)

# Own module imports
import export
import render
import version

# Default render budget in seconds.
DEFAULT_RENDER_TIME = 60


def options_from_args(args):
    """Parse elmyra's own options from *args*.

    Blender forwards script arguments after a ``--`` separator; everything
    before it belongs to blender itself.
    """
    parser = ArgumentParser(prog="Elmyra Render Params")
    parser.add_argument("--id", required=True)
    parser.add_argument("--device", default="GPU")
    # Index into *args* itself (the original indexed sys.argv here, which
    # only worked because the caller happened to pass sys.argv).
    custom_args = args[args.index('--') + 1:]
    return parser.parse_args(custom_args)


options = options_from_args(sys.argv)

version.open_latest(options.id)
render.render(DEFAULT_RENDER_TIME, options.device)
export.export(options)
from argparse import ArgumentParser
from os import path

# Manually add elmyra's directory to sys.path because
# this script runs from blender context
sys.path.append(path.dirname(path.realpath(__file__)))

import common
import export
import render
import version


def parse_custom_args():
    """Parse elmyra's own options, which blender forwards after a '--'
    separator in sys.argv."""
    parser = ArgumentParser(prog="Elmyra Render Params")
    parser.add_argument("--id", required=True)
    parser.add_argument("--device", default="CPU")
    parser.add_argument("--target_time", default=60)
    custom_args = sys.argv[sys.argv.index('--') + 1:]
    return parser.parse_args(custom_args)


args = parse_custom_args()

# Open the latest version of the scene, render it, and export the result.
common.ensure_addons()
version.open_latest(args.id)
render.render(args.target_time, args.device)
export.export()
        # Tail of the result parser (its enclosing def/loop lie outside this
        # view): normalize one search-result item into a dict and collect it.
        title = ' '.join(item.h2.text.split())
        pubinfo = ' '.join(item.find(class_ = 'pub').text.split())
        try:
            pl = ' '.join(item.find(class_ = 'pl').text.split())
        except AttributeError:
            # Some result layouts use 'sub-count' instead of 'pl'.
            pl = ' '.join(item.find(class_ = 'sub-count').text.split())
        try:
            rate = float(item.find(class_ = 'rating_nums').text)
        except AttributeError:
            # Unrated books get a rating of 0.
            rate = 0
        dic = {'title': title, 'pub': pubinfo, 'read': pl, 'rate': rate}
        rankList.append(dic)

if __name__ == '__main__':
    # NOTE: Python 2 (raw_input). Fetch the first three result pages
    # (15 items each) for the given keyword, then export the ranking.
    url = "https://book.douban.com/subject_search?search_text="
    key_word = raw_input('key word:')
    tag = quote(key_word)
    url = url + tag
    rankList = []
    index = 0
    while index < 3:
        wurl = url + '&start=' + str(index * 15)
        try:
            getContent(wurl, rankList)
        except Timeout:
            # Retry the same page on timeout.
            continue
        except HTTPError:
            break
        index += 1
    export(rankList, key_word)
def main():
    """Parse orxporter's command-line options and run an export.

    Depending on the flags this writes emoji JSON (-j/-J) or renders
    images, either with an explicit parameters file (-p) or with
    parameters compiled from the individual flags.
    """
    input_path = DEF_INPUT
    manifest_path = DEF_MANIFEST
    output_path = DEF_OUTPUT
    output_naming = DEF_OUTPUT_NAMING
    output_formats = DEF_OUTPUT_FORMATS
    renderer = DEF_RENDERER
    license = DEF_LICENSE
    params_path = None
    emoji_filter = []
    emoji_filter_text = ""  # for error messaging only
    json_out = None
    json_web_out = None
    src_size = None
    num_threads = DEF_NUM_THREADS
    force_desc = False
    max_batch = DEF_MAX_BATCH
    verbose = False
    try:
        opts, _ = getopt.getopt(sys.argv[1:], 'hm:i:o:f:F:ce:j:J:q:t:r:b:p:l:',
                                ['help', 'force-desc', 'verbose'])
        for opt, arg in opts:
            if opt in ['-h', '--help']:
                print(HELP)
                sys.exit()
            # basics
            elif opt == '-m':
                manifest_path = arg
            elif opt == '-i':
                input_path = arg
            elif opt == '-o':
                output_path = arg
            # images
            elif opt == '-F':
                output_formats = arg.split(',')
            elif opt == '-f':
                output_naming = arg
            elif opt == '-r':
                renderer = arg
            # BUG FIX: getopt yields '-l', so the original `opt == 'l'`
            # could never match and the flag was silently ignored.
            elif opt == '-l':
                license = False
            elif opt == '-p':
                params_path = arg
            elif opt == '-t':
                num_threads = int(arg)
                if num_threads <= 0:
                    raise ValueError
            # JSON
            elif opt == '-j':
                json_out = arg
            elif opt == '-J':
                json_web_out = arg
            # other emoji stuff
            elif opt == '-e':
                k, v = arg.split('=')
                v = v.split(',')
                emoji_filter.append((k, v))
                emoji_filter_text = arg
            elif opt == '-q':
                t1, t2 = arg.split('x')
                src_size = int(t1), int(t2)
            elif opt == '-b':
                max_batch = int(arg)
                if max_batch <= 0:
                    raise ValueError
            elif opt == '--force-desc':
                force_desc = True
            # terminal stuff
            elif opt == '-c':
                log.use_color = False
            elif opt == '--verbose':
                verbose = True
    except Exception as e:
        log.out(f'x∆∆x {e}\n', 31)
        sys.exit(2)

    # try to get all of the basic stuff and do the main execution
    # -----------------------------------------------------------
    try:
        log.out(f'o∆∆o', 32)  # hello

        # validate basic input that can't be checked while in progress
        if renderer not in RENDERERS:
            raise Exception(
                f"{renderer} is not a renderer you can use in orxporter.")

        # create a Manifest
        # ie. parse the manifest file and get the information we need from it
        log.out(f'Loading manifest file...', 36)
        m = orx.manifest.Manifest(os.path.dirname(manifest_path),
                                  os.path.basename(manifest_path))
        log.out(f'- {len(m.emoji)} emoji defined.', 32)

        # filter emoji (if any filter is present)
        filtered_emoji = [e for e in m.emoji if emoji.match(e, emoji_filter)]
        if emoji_filter:
            if filtered_emoji:  # if more than 0
                log.out(
                    f'- {len(filtered_emoji)} / {len(m.emoji)} emoji match the filter you gave.',
                    34)
            else:
                raise ValueError(
                    f"Your filter ('{emoji_filter_text}') returned no results."
                )

        # ensure that descriptions are present if --force-desc flag is there
        if force_desc:
            nondesc = [
                e.get('code', str(e)) for e in filtered_emoji
                if 'desc' not in e
            ]
            if nondesc:
                raise ValueError('You have emoji without a description: ' +
                                 ', '.join(nondesc))

        # JSON out or image out
        if json_out:
            jsonutils.write_emoji(filtered_emoji, json_out)
        elif json_web_out:
            jsonutils.write_web(filtered_emoji, json_web_out)
        else:
            if params_path:
                log.out(f'Loading image export parameters...', 36)
                p = orx.params.Parameters(os.path.dirname(params_path),
                                          os.path.basename(params_path))
            else:
                # convert the non-parameter flags into an orx expression to
                # be turned into a parameters object.
                log.out(f'Compiling image export parameters...', 36)
                license_text = "yes" if license else "no"
                makeshift_params = f"dest structure = {output_naming} format = {' '.join(output_formats)} license = {license_text}"
                p = orx.params.Parameters(string=makeshift_params)
            log.out(f'- {len(p.dests)} destination(s) defined.', 32)
            export.export(m, filtered_emoji, input_path, output_formats,
                          os.path.join(output_path, output_naming), src_size,
                          num_threads, renderer, max_batch, verbose)
    except (KeyboardInterrupt, SystemExit) as e:
        log.out(f'>∆∆< Cancelled!\n{e}', 93)
        sys.exit(1)
    # Where all the exceptions eventually go~
    except Exception as e:
        log.out(f'x∆∆x {e}\n', 31)
        raise e  ######################## TEMP, for developer stuff
        sys.exit(1)  # unreachable while the TEMP raise above is active

    # yay! finished!
    log.out('All done! ^∆∆^\n', 32)  # goodbye
def reconstruct_Sco_GEM(model_fn, save_fn = None, write_requirements = True):
    """Rebuild the Sco-GEM model from a base SBML file by applying every curation step in order.

    The pipeline reads the model at ``model_fn``, applies fixes and additions drawn
    from the iKS1317, Sco4 and iAA1259 models plus a series of issue-specific
    curation modules, and finally writes the result via ``export.export``.

    Parameters
    ----------
    model_fn : str
        Path to the SBML file of the base model to reconstruct from.
    save_fn : str, optional
        Defaults to ``model_fn`` when None.
        NOTE(review): ``save_fn`` is computed but never used afterwards —
        ``export.export`` presumably derives its own output paths; confirm.
    write_requirements : bool
        Forwarded to ``export.export``.

    The order of the steps below is significant: later fixes operate on the
    reactions/metabolites introduced by earlier ones.
    """
    # Load the base model and normalise its identity / solver backend.
    Sco_GEM = cobra.io.read_sbml_model(model_fn)
    Sco_GEM.name = "Sco-GEM"
    Sco_GEM.id = "Sco-GEM"
    Sco_GEM.solver = SOLVER
    if save_fn is None:
        save_fn = model_fn

    # Part 1: Fix known issues in models
    ## 1a) Issues in iKS1317
    fix_iKS1317_issues.fix(Sco_GEM)

    ## 1b) Issues in Sco4 v4.00 (loaded separately; fixed before its reactions are merged in Part 2)
    sco4_model = cobra.io.read_sbml_model(SCO4_PATH)
    sco4_model.solver = SOLVER
    fix_sco4_issues.fix(sco4_model)

    ## 1c) Add missing / changed gene annotations in iMK1208 identifed in Sco4 / and by Snorre 21.09.2018
    add_missing_gene_annotations_sco4.add_gene_annotations(Sco_GEM)

    # Part 2: Add reactions from Sco4 (mapping files translate Sco4 ids to Sco-GEM ids)
    Sco_GEM = add_reactions_from_sco4.add_reactions(sco4_model, Sco_GEM, SCO4_REACTION_MAPPING_FN, SCO4_METABOLITE_MAPPING_FN)

    ## 2b) Rename metabolites added from Sco4 to BIGGish Ids
    annotate_new_rxns_and_mets_from_sco4.add_rxn_annotations(Sco_GEM, SCO4_REACTION_ANNOTATION_FN, False)
    annotate_new_rxns_and_mets_from_sco4.add_met_annotations(Sco_GEM, SCO4_METABOLITE_ANNOTATION_FN, False)

    # Part 3: Add and modify reactions according to iAA1259
    iAA1259_model = cobra.io.read_sbml_model(iAA1259_PATH)
    iAA1259_model.solver = SOLVER
    add_and_modify_reactions_according_to_iAA1259.fix_iAA1259(iAA1259_model)
    Sco_GEM = add_and_modify_reactions_according_to_iAA1259.add_reactions(iAA1259_model, Sco_GEM, iAA1259_NEW_REACTIONS_FN)
    Sco_GEM = add_and_modify_reactions_according_to_iAA1259.modify_reactions(Sco_GEM)

    # Change biomass (replace the biomass reaction with the iAA1259 version)
    Sco_GEM = add_and_modify_reactions_according_to_iAA1259.change_biomass(iAA1259_model, Sco_GEM)

    # Part 4: issue-specific fixes — reversibility, annotation bugs, redox
    # pseudometabolites, SBO terms, MetaNetX/ChEBI annotations and biomass data.
    fix_issue12_reversibility.fix(Sco_GEM)
    fix_issue33_annotation_bugs.fix(Sco_GEM)
    redox_pseudometabolite.run(Sco_GEM)
    fix_SBO_terms.add_SBO(Sco_GEM)
    fix_issue33_annotation_bugs.fix_metanetx_metabolite_annotations(Sco_GEM, MET_TO_METANETX_FN)
    fix_biomass.fix_biomass(Sco_GEM, NEW_BIOMASS_DATA_FN)
    fix_issue33_annotation_bugs.apply_new_chebi_annotations(Sco_GEM, MET_TO_CHEBI_FN)
    fix_issue33_annotation_bugs.fix_c_c_in_metabolite_ids(Sco_GEM)
    fix_issue33_annotation_bugs.fix_metanetx_reaction_annotations(Sco_GEM, RXN_TO_METANETX_FN)

    # Part 5: reaction-bound curation based on eQuilibrator data plus CPKS /
    # ATP-driven special cases.
    reversibility.change_bounds_according_to_eQuilibrator(Sco_GEM, EQUILIBRATOR_FN_1, EQUILIBRATOR_FN_2)
    reversibility.change_lower_bound_on_CPKS_reactions(Sco_GEM)
    reversibility.change_bounds_on_ATP_driven_reactions(Sco_GEM, ATP_DRIVEN_REACTIONS_REVERSIBILITY_FN)

    # Additional annotations (DOIs, genes, subsystems) read from data files
    feat_annotations.add_doi_annotations(Sco_GEM, DOI_ANNOTATIONS_FN)
    feat_annotations.add_gene_annotations(Sco_GEM, GENE_ANNOTATIONS_FN)
    feat_subsystem_annotation.update_subsystem_annotations(Sco_GEM, SUBSYSTEM_ANNOTATION_FN)

    # Issue 82 Delete reactions without gene associations
    issue_82_delete_reactions.delete_reactions(Sco_GEM)

    # Issue 85 cpk exchange reaction
    add_and_modify_reactions_according_to_iAA1259.add_exchange_reaction_for_ycpk(Sco_GEM)

    # Part 6 - Add transport reactions
    fix_transporters.fix_transporters(Sco_GEM,MODIFIED_TRANSPORT_REACTIONS_FN, NEW_TRANSPORT_REACTIONS_FN, NEW_TRANSPORT_REACTIONS_TO_NEW_METABOLITES_FN, NEW_METABOLITES_TO_NEW_TRANSPORT_REACTIONS)

    # Save model in both SBML (xml) and yaml formats
    export.export(Sco_GEM, formats = ["xml", "yml"], write_requirements = write_requirements, objective = "BIOMASS_SCO_tRNA")
def writetxt(fname):
    """Run the project's exporter for the given output file name."""
    # Deferred import: only pull in the export machinery when actually writing.
    from export import export as run_export
    run_export(fname=fname)