def train(cfg):
    print(f"Training {cfg.run}")
    run_dir = join(config.get('RESULTS_DIR'), cfg.exp, cfg.run)
    cfg.save(run_dir)
    ModelClass = models.get_model_class(cfg.model)
    model = ModelClass(cfg)
    datasets_dir = config.get('DATASETS_DIR')
    trn_ctx, tst_ctx = build_subsets_contexts(datasets_dir, run_dir, cfg)
    trn_zip = utils.build_tzip(cfg.train_strategy)
    tst_zip = utils.build_tzip('longest')
    loss_fn, opt = build_loss_opt(cfg)
    weights_dir = join(run_dir, 'weights')
    for epoch in trange(cfg.train_epochs, desc=' epochs', ncols=TQDM_NCOLS):
        train_epoch(epoch, trn_zip, model, loss_fn, opt, cfg.opt_alphas, trn_ctx)
        if cfg.eval_tst_freq and ((epoch + 1) % cfg.eval_tst_freq == 0):
            test_epoch(epoch, tst_zip, model, trn_ctx, tst_ctx)
        if cfg.save_freq and ((epoch + 1) % cfg.save_freq == 0):
            model.save_weights(join(weights_dir, f'{epoch:03d}.ckpt'))
def train(cfg):
    print(f"Training {cfg.run}")
    run_dir = join(config.get('RESULTS_DIR'), cfg.exp, cfg.run)
    cfg.save(run_dir)
    extractor = build_extractor()
    ModelClass = models.get_model_class(cfg.model)
    model = ModelClass(cfg)
    datasets_dir = config.get('DATASETS_DIR')
    trn_ctx, tst_ctx = build_subsets_contexts(datasets_dir, run_dir, cfg)
    trn_zip = utils.build_tzip(cfg.train_strategy)
    tst_zip = utils.build_tzip('longest')
    loss_fn, opt = build_loss_opt(cfg)
    weights_dir = join(run_dir, 'weights')
    metrics = defaultdict(list)
    for epoch in trange(cfg.train_epochs, desc=' epochs', ncols=TQDM_NCOLS):
        train_epoch(epoch, trn_zip, extractor, model, loss_fn, opt,
                    cfg.opt_alphas, trn_ctx)
        if cfg.eval_tst_freq and ((epoch + 1) % cfg.eval_tst_freq == 0):
            test_epoch(epoch, tst_zip, extractor, model, trn_ctx, tst_ctx, metrics)
        if cfg.save_freq and ((epoch + 1) % cfg.save_freq == 0):
            model.save_weights(join(weights_dir, f'{epoch:03d}.ckpt'))
    df = pd.DataFrame.from_records(metrics, index='epoch')
    df.to_csv(join(run_dir, 'metrics.csv'))
def __init__(self):
    self.url_prefix = config.get('service', 'data-parser')
    self.dp_mongo_urls = config.get('dp_mongo', 'dp_mongo_urls')
    self.dp_mongo_ports = config.get('dp_mongo', 'dp_mongo_ports')
    self.dp_file_db_name = config.get('dp_mongo', 'dp_mongo_ex_file_db_name')
    self.dp_data_db_name = config.get('dp_mongo', 'dp_mongo_ex_data_db_name')
def __init__(self):
    self.script_ttl = config.getint("script.ttl", 60)  # minutes
    self.scripts_folder = config.get("script.folder", "scripts")
    self.master_host = config.get("script.master_host")
    self.api_user = config.get("script.api_user")
    self.api_password = config.get("script.api_password")
    self.file_db = ScriptFileDB()
def run(func, args):
    config.init('../xmpp.conf')
    client_jid = config.get('users', 'client_jid')
    client_password = config.get('users', 'client_password')
    server_jid = config.get('users', 'server_jid')
    session = Remote.new_session(client_jid, client_password)
    endpoint = session.new_proxy(server_jid + '/rpc', TestRunner)
    job = Future()
    heartbeat = Heartbeat(job, endpoint.ping, session)
    heartbeat.start()
    getattr(endpoint.async(job), func)(*args)
def __init__(self):
    self.workers_count = config.getint("main.workers_count", 1)
    self.amqp_address = config.get("main.amqp_server")
    self.amqp_user = config.get("main.amqp_user", "guest")
    self.amqp_pass = config.get("main.amqp_pass", "guest")
    self.input_queue = config.get("main.input_queue", "spikeTasks")
    self.output_queue = config.get("main.output_queue")
    self.running = True
    self.threads = []
    self.worker = SpikeWorker(ExecutorFactory())
def test_config(self):
    import common.config
    config = common.config.Config('test')
    self.assertIsInstance(config, common.config.Config)
    self.assertEqual(config.data['test2'], "Hi world")
    self.assertEqual(config.get('test2'), "Hi world")
    self.assertEqual(config.get('doesnotexist'), None)
    self.assertEqual(config.get('doesnotexist2', False), False)
def test(ds, subset='train', split=1, batch_size=1, num_batches=1,
         min_seq=16, max_seq=16, verbose=True, print_dropped=True):
    """Simple test function."""
    datasets_dir = config.get('DATASETS_DIR')
    dl = build_dataloader(datasets_dir, ds, split=split, subset=subset,
                          batch_size=batch_size, min_seq=min_seq,
                          max_seq=max_seq, verbose=verbose,
                          print_dropped=print_dropped)
    print('Traversing')
    for x, y in dl.take(num_batches):
        print(f'x.shape {x.shape}')
        print('x[0,0,:10]')
        print(x[0, 0, :10])
        print(f'y.shape {y.shape}')
        print('y[0]')
        print(y[0])
def get_transport_name_from_jid(jid, use_config_setting=True):
    """Returns 'aim', 'gg', 'irc' etc.

    If the JID is not from a transport, returns None.
    """
    # FIXME: jid can be None! One TB I saw had this problem:
    # in the code block
    #   it is a groupchat presence in handle_event_notify
    # jid was None. Yann why?
    if not jid or (use_config_setting and not config.get('use_transports_iconsets')):
        return
    host = get_server_from_jid(jid)
    if host in transport_type:
        return transport_type[host]
    # host is now e.g. icq.foo.org or just icq (sometimes on hacky transports)
    host_splitted = host.split('.')
    if len(host_splitted) != 0:
        # now we support both 'icq.' and 'icq' but not icqsucks.org
        host = host_splitted[0]
    if host in ('aim', 'irc', 'icq', 'msn', 'sms', 'tlen', 'weather', 'yahoo',
                'mrim', 'facebook'):
        return host
    elif host == 'gg':
        return 'gadu-gadu'
    elif host == 'jit':
        return 'icq'
    else:
        return None
def importdata():
    try:
        player_id = session.get("admno")
        if player_id is None:
            flash("Please Login as admin.")
            return redirect(url_for('home'))
        # if db.authorize(player_id, utility.Authorization.TEACHER):
        if not db.isTeacher(player_id):
            flash("Please Login as teacher/admin.")
            return redirect(url_for('home'))
        if request.method == 'GET':
            dbrows = db.getAssessmentToRemove(player_id)
            return render_template("admin.html", dbrows=dbrows)
        elif request.method == 'POST':
            # check if the post request has the file part
            if 'file' not in request.files:
                flash('No file part')
                return redirect(request.url)
            file = request.files['file']
            # if the user does not select a file, the browser may also
            # submit an empty part without a filename
            if file.filename == '':
                flash('No CSV file selected')
                return redirect(request.url)
            if file and utility.allowed_file(file.filename):
                folder_path = config.get("DB_IMPORT", "UPLOAD_FOLDER")
                secfilename = secure_filename(file.filename)
                fnsplit = secfilename.split(".")
                filename = (fnsplit[0]
                            + datetime.datetime.now().strftime("_%Y%m%d_%I-%M-%S_%p")
                            + "." + fnsplit[1])
                print(os.path.join(folder_path, filename))
                file.save(os.path.join(folder_path, filename))
                table_name = request.form.get('tablename')
                isHeader = request.form.get("chkHeader") is not None
                isReplace = request.form.get("chkReplace") is not None
                isCSVImportOK = db.import_csv_data(folder_path + "/" + filename,
                                                   table_name, isHeader)
                if isCSVImportOK:
                    flash("Data imported successfully")
                else:
                    flash("Unable to import data. Please contact your administrator.")
                return redirect(request.url)
            else:
                flash("Please select a CSV file")
                return redirect(request.url)
    except Exception as ex:
        print("Unable to process request..", ex)
        return
def checkapp_activity(self):
    """Check whether the APP stays on the same page; counted once per check."""
    log.info("Check if the APP stays on the same page.")
    activity_key = self.device_key + ':activity'
    activityname = self.device_key + ":activityname"
    activity_name = self.rd.get(activityname)
    log.info("dumpsys window w | %s \/ | %s name=" %
             (self.adb.find_util, self.adb.find_util))
    app_activity = self.adb.getCurrentActivity()
    if activity_name != app_activity:
        log.info("Check that the APP does not stay on the same page, "
                 "and reset the record times.")
        activity_name = app_activity
        self.rd.set(key=activity_key, value=0)
        self.rd.set(key=activityname, value=app_activity)
    if activity_name == app_activity:
        num = int(self.rd.get(activity_key)) + 1
        self.rd.set(activity_key, num)
        log.info("Check that the APP stays on the same page: %s times." % num)
        log.info("Check that the APP stays on the same page: %s" % app_activity)
        if int(self.rd.get(activity_key)) == config.get('ACTIVITY_NUM'):
            log.info("Record 5 times when the APP stays on the same page, "
                     "and close the APP.")
            log.info("am force-stop %s" % self.pname)
            self.adb.quitApp(packageName=self.pname)
            self.rd.set(activity_key, 0)
def train(self, w1, w2, w3, m):
    '''Take three wav files and generate a snowboy .pmdl model.'''
    def get_wave(fname):
        with open(fname, 'rb') as infile:
            return base64.b64encode(infile.read()).decode('utf-8')

    url = 'https://snowboy.kitt.ai/api/v1/train/'
    data = {
        "name": "wukong-robot",
        "language": "zh",
        "token": config.get('snowboy_token', ''),
        "voice_samples": [
            {"wave": get_wave(w1)},
            {"wave": get_wave(w2)},
            {"wave": get_wave(w3)},
        ],
    }
    response = requests.post(url, json=data)
    print(response.ok)
    if response.ok:
        with open(m, "wb") as outfile:
            outfile.write(response.content)
        return 'Snowboy model saved to {}'.format(m)
    else:
        return "Snowboy model generation failed: {}".format(response.text)
def save(self):
    serialized = pickle.dumps(dict(self))
    self.redis.set(self.key, serialized)
    if not self['is_ban']:
        # if the record is not a ban, remove the session record after X minutes
        self.redis.expire(self.key, config.get('session_length', 1 * 60 * 60))
def saveCSVFile(item_name, file_path):
    try:
        bucket_name = app_config.get("IBM_CLOUD", "BUCKET_NAME")
        print("Starting file transfer for {0} to bucket: {1}\n".format(
            item_name, bucket_name))
        # set 5 MB chunks
        part_size = 1024 * 1024 * 5
        # set threshold to 15 MB
        file_threshold = 1024 * 1024 * 15
        # set the transfer threshold and chunk size
        transfer_config = TrsConfig(multipart_threshold=file_threshold,
                                    multipart_chunksize=part_size)
        cos_res = ibm_boto3.resource(
            "s3",
            ibm_api_key_id=app_config.get("IBM_CLOUD", "COS_API_KEY"),
            ibm_service_instance_id=app_config.get("IBM_CLOUD",
                                                   "COS_IAM_SERVICEID_CRN"),
            ibm_auth_endpoint=app_config.get("IBM_CLOUD", "IBM_AUTH_ENDPOINT"),
            config=Config(signature_version="oauth"),
            endpoint_url=app_config.get("IBM_CLOUD", "ENDPOINT_URL"))
        # the upload_fileobj method will automatically execute a multi-part
        # upload in 5 MB chunks for all files over 15 MB
        file_path_name = file_path + "/" + item_name
        fnsplit = item_name.split(".")
        bucket_file_name = fnsplit[0] + str(datetime.datetime.now()) + "." + fnsplit[1]
        with open(file_path_name, "rb") as file_handle:
            cos_res.Object(bucket_name, bucket_file_name).upload_fileobj(
                Fileobj=file_handle, Config=transfer_config)
        os.remove(file_path_name)
        print("Transfer for {0} Complete!\n".format(item_name))
        return "File Saved."
    except ClientError as be:
        print("CLIENT ERROR: {0}\n".format(be))
        return "Unable to Save File.."
    except Exception as e:
        print("Unable to complete multi-part upload: {0}".format(e))
        return "Unable to upload File.."
def train_with_log(cfg):
    mlflow.set_tracking_uri(join(config.get('RESULTS_DIR'), 'mlruns'))
    mlflow.set_experiment(cfg.exp_name)
    with mlflow.start_run(run_name=cfg.run):
        params = {k: v for k, v in cfg.__dict__.items() if k[0] != '_'}
        for k, v in params.items():
            mlflow.log_param(k, v)
        train(cfg)
def add_rhandler():
    handler = RotatingFileHandler(
        filename=cfg.get('db', 'path') + '/ddns.log',
        maxBytes=1 * 1024 * 1024 * 1024,
        backupCount=3)
    handler.setLevel(__level__)
    handler.setFormatter(__formatter__)
    return handler
def get_ws_url(args):
    if args["adb_connect_url"]:
        connected_devices_file_path = "{0}/{1}".format(
            config.get("main", "devices_file_dir"),
            config.get("main", "devices_file_name"))
        args["serial"] = _get_device_serial(args["adb_connect_url"],
                                            connected_devices_file_path)
    if args["serial"]:
        device_props = api.get_device(args["serial"])
        props_json = device_props.json()
        args["ws"] = props_json.get("device").get("display").get("url")
        log.debug("Got websocket url {0} by device serial {1} from stf API".format(
            args["ws"], args["serial"]))
    address = args["ws"].split("ws://")[-1]
    return address
def eval_run(run_dir, batch_size=128, epoch=None, verbose=True, tqdm_leave=True):
    cfg = load_config(run_dir)
    datasets_dir = config.get('DATASETS_DIR')
    trn_dl = build_dataloader(datasets_dir, cfg.ds, cfg.split, 'train',
                              batch_size, shuffle=False)
    tst_dl = build_dataloader(datasets_dir, cfg.ds, cfg.split, 'test',
                              batch_size, shuffle=False)
    if epoch is not None:
        print(f'Evaluating {cfg.run} at epoch {epoch}')
        model = load_model(run_dir, cfg, epoch)
        trn_loss, trn_acc = eval_subset(model, trn_dl)
        tst_loss, tst_acc = eval_subset(model, tst_dl)
        print(f'{cfg.run}'
              f' loss=({trn_loss:.2f},{tst_loss:.2f})'
              f' acc=({trn_acc:.2f},{tst_acc:.2f})')
        return
    trn_dir = join(run_dir, 'etrn')
    tst_dir = join(run_dir, 'etst')
    if isdir(trn_dir):
        shutil.rmtree(trn_dir)
    if isdir(tst_dir):
        shutil.rmtree(tst_dir)
    trn_writer = tf.summary.create_file_writer(trn_dir)
    tst_writer = tf.summary.create_file_writer(tst_dir)
    if verbose:
        print(f'Evaluating {cfg.run}')
    best_acc, best_epoch = 0, 0
    for epoch in trange(cfg.epochs, leave=tqdm_leave):
        model = load_model(run_dir, cfg, epoch)
        trn_loss, trn_acc = eval_subset(model, trn_dl)
        tst_loss, tst_acc = eval_subset(model, tst_dl)
        with trn_writer.as_default():
            tf.summary.scalar(f'loss/{cfg.ds}', trn_loss, epoch)
            tf.summary.scalar(f'acc/{cfg.ds}', trn_acc, epoch)
        with tst_writer.as_default():
            tf.summary.scalar(f'loss/{cfg.ds}', tst_loss, epoch)
            tf.summary.scalar(f'acc/{cfg.ds}', tst_acc, epoch)
        if tst_acc > best_acc:
            best_acc, best_epoch = tst_acc, epoch
    firsts = ['run', 'ds', 'split']
    columns = [k for k in sorted(cfg.keys()) if k not in firsts]
    columns = firsts + ['acc', 'epoch'] + columns
    data = dict(cfg)
    data['acc'] = best_acc
    data['epoch'] = best_epoch
    df = pd.DataFrame(data, columns=columns, index=[0])
    df.to_csv(f'{run_dir}/results.csv')
    if verbose:
        print(df.head())
def init_config(self):
    self.url_prefix = [
        val.strip()
        for val in config.get(self.CONF_SECTION, self.KEY_NAME).split(',')
    ]
    cs = config.get_section(self.CONF_SECTION)
    if "timeout" in cs:
        self.timeout = float(cs["timeout"])
def checkapp_music(self):
    """Check whether any music APP is running in the background; if so, close it."""
    log.info("Check to see if a music APP has started.")
    musicapp_list = self.adb.getRunBackgroundProcess()
    for musicapp in musicapp_list:
        if musicapp in config.get('BLACKLIST_PACKAGE'):
            log.info("Close the application: %s" % musicapp)
            self.adb.quitApp(musicapp)
def load_from_file():
    """Read the structure relationships from the csv file."""
    file_path = config.get(constant.CSV_FILE_FULLPATH)
    diagnose_logger.info('file_path=' + file_path)
    with open(file_path, 'rb') as f:
        reader = CsvReader(f)
        return reader.get_node()
def speech(_str):
    result = aipSpeech.synthesis(_str, 'zh', 1, {
        'vol': 5,
        'per': config.get('/baidu_yuyin/per', 0),
    })
    # on success the API returns the audio binary; on error it returns a dict
    # (see the error codes below)
    if not isinstance(result, dict):
        fileName = 'temp.mp3'
        with open(fileName, 'wb') as f:
            f.write(result)
        os.system('mpg123 ' + fileName)
def sendEmail(email_to, email_data):
    # creates SMTP session
    s = smtplib.SMTP(config.get("EMAIL_SETTINGS", "SERVER"),
                     config.get("EMAIL_SETTINGS", "PORT"))
    # start TLS for security
    s.starttls()
    # Authentication
    senderemail = config.get("EMAIL_SETTINGS", "SENDER_EMAIL_ID")
    senderpwd = config.get("EMAIL_SETTINGS", "SENDER_EMAIL_PWD")
    s.login(senderemail, senderpwd)
    # sending the mail
    s.sendmail(senderemail, email_to, email_data)
    # terminating the session
    s.quit()
    return True
def run(source_id=None):
    '''Run a gather of stats.

    Can specify a source id to just gather for that source.
    '''
    csv_url = config.get('db', 'projects')
    fp = urllib.urlretrieve(csv_url, 'data/projects.csv')[0]
    csv_url = config.get('db', 'sources')
    fp = urllib.urlretrieve(csv_url, 'data/sources.csv')[0]
    fo = open(fp)
    for dict_ in csv.DictReader(fo):
        source = Source(dict_)
        if source_id and source_id != source.id:
            continue
        try:
            log.info('Processing: %s' % source.id)
            assert source.type in TYPES, \
                'No handler for this source of type: %s' % source.type
            func = TYPES[source.type]
            func(database, source)
        except Exception as e:
            log.error(e)
def sendOTP(mobilenumber, otpMessage):
    isSent = False
    try:
        # print("INSIDE TRY")
        url = config.getSMSURL()
        # print("STRING URL: ", str(url))
        usr = config.get("BULKSMS_URL", "user")
        # print("USER AND URL: ", "BULKSMS_URL", usr)
        pwd = config.get("BULKSMS_URL", "password")
        sdr = config.get("BULKSMS_URL", "sender")
        typ = config.get("BULKSMS_URL", "type")
        # print("ALL VARIABLES", url, usr, pwd, sdr, typ)
        myobj = {
            'user': str(usr),
            'password': str(pwd),
            'sender': str(sdr),
            'mobile': str(mobilenumber),
            'type': str(typ),
            'message': str(otpMessage)
        }
        conlength = getDictionaryLength(myobj)
        resp = requests.post(url, data=myobj, headers={
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': str(conlength)
        })
        # print(resp.text)
        isSent = True
    except Exception as ex:
        print("sendOTP:", ex)
    return isSent
def train(cfg):
    model_dir = join(config.get('RESULTS_DIR'), cfg.exp_name, cfg.run)
    print(f"Training {cfg.run}")
    cfg.save_params(model_dir)
    datasets_dir = config.get('DATASETS_DIR')
    trn_dl = build_dataloader(datasets_dir, cfg.ds, cfg.split, 'train', cfg.tbatch_size)
    etrn_dl = build_dataloader(datasets_dir, cfg.ds, cfg.split, 'train', cfg.ebatch_size)
    etst_dl = build_dataloader(datasets_dir, cfg.ds, cfg.split, 'test', cfg.ebatch_size)
    num_classes = 51 if cfg.ds == 'hmdb51' else 101
    ModelClass = models.get_model_class(cfg.model)
    model = ModelClass(cfg, num_classes)
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy()
    optimizer = tf.keras.optimizers.SGD(learning_rate=cfg.lr)
    trn_loss_epoch = tf.keras.metrics.SparseCategoricalCrossentropy()
    trn_acc_epoch = tf.keras.metrics.SparseCategoricalAccuracy()
    tst_loss_epoch = tf.keras.metrics.SparseCategoricalCrossentropy()
    tst_acc_epoch = tf.keras.metrics.SparseCategoricalAccuracy()
    trn_writer = tf.summary.create_file_writer(join(model_dir, 'trn'))
    tst_writer = tf.summary.create_file_writer(join(model_dir, 'tst'))
    trn_eval_step = (etrn_dl, trn_loss_epoch, trn_acc_epoch)
    tst_eval_step = (etst_dl, tst_loss_epoch, tst_acc_epoch)
    trn_eval_epoch = (trn_loss_epoch, trn_acc_epoch, trn_writer)
    tst_eval_epoch = (tst_loss_epoch, tst_acc_epoch, tst_writer)
    weights_dir = join(model_dir, 'weights')
    for epoch in trange(cfg.epochs):
        for x, y_true in trn_dl:
            train_step(x, y_true, model, loss_fn, optimizer)
        eval_step(model, trn_eval_step, tst_eval_step)
        eval_epoch(epoch, cfg.ds, trn_eval_epoch, tst_eval_epoch)
        model.save_weights(join(weights_dir, f'{epoch:03d}.ckpt'))
def top_to_home():
    logger.debug('start:top_to_home')
    # Open the Cloud Timecard top page.
    driver.get('https://cloud-timecard.appspot.com/clw/')
    # Confirm that the title is displayed ('ログインページ' = login page).
    assert 'ログインページ' in driver.title
    # Fill in the credentials and submit.
    input_id = driver.find_element_by_id('UserCorporationId')
    input_id.send_keys(config.get('user_info', 'CorporationId'))
    input_name = driver.find_element_by_id('UserUsername')
    input_name.send_keys(config.get('user_info', 'Username'))
    input_pw = driver.find_element_by_id('UserPassword')
    input_pw.send_keys(config.get('user_info', 'Password'))
    driver.find_element_by_class_name('btn-primary').click()  # submit -> home screen
    logger.debug('end:top_to_home')
def __init__(self):
    import common.config as config
    self.config = config.get()
    from common import Contact, Address, Hash
    self.dir = self.config['general']['config_dir']
    self.bitsize = self.config['kademlia']['keysize']
    print("Using dir: %s" % self.dir)
    from crypto.rsa import KeyPair
    # TODO: Re-enable cert loading
    # Get the cert and the hash for it
    # keyblob = open(self.config['kademlia']['keyfile'], 'r').read()
    # self.keypair = KeyPair(private=keyblob)
    # TODO: Load pub / priv, base hash off of this.
    hash_ = Hash(os.urandom(self.bitsize // 8))
    # TODO: load this from config
    self.dh_group = self.config['kademlia']['group']
    # Set up the networking core
    if self.config['net']['randomize_port']:
        port = self.new_port()
    else:
        port = self.config['net']['port']
    import net.ipfinder
    from net import Stack
    from kademlia import Kademlia
    addr = Address(net.ipfinder.check_in(), port)
    self.contact = Contact(addr, hash_)
    self.net = Stack(port, self.dh_group)
    self.kademlia = Kademlia(self.net, self.contact, self.dir,
                             self.config['kademlia']['bucket_size'],
                             self.bitsize,
                             self.config['kademlia']['paralellism'])
    try:
        from ui.server import UIServer
        self.uiserver = UIServer(self.config['net']['ui_port'],
                                 self.config['net']['max_ui_conns'])
        # Bind all the properties
        # This should probably be moved to a model somewhere
        props = {'contact_table': 'net._contacts'}
        for name, var_path in props.items():
            self.uiserver.add_property(name, var_path, self)
    except socket.error:
        print("UI server port taken. Starting without UI")
def test_multiple_files(self):
    """Test how several config files are merged into one."""
    res = config.read_config("test/resources/*.json")

    def expect(field, condition):
        assert_that(config.get(field), condition)

    expect("folder.tmp", equal_to("/tmp2"))
    expect("folder.subdirs.dir2", equal_to("<_<"))
    expect("folder.subdirs.dir1", equal_to("/o_o"))
    expect("new_one", equal_to("three"))
    assert_that(config.get("not_existing", "default"), equal_to("default"))
def test(ds, split=1, subset='train', transform=False, num_frames=16,
         sampling='fixed', cache=False, batch_size=1, shuffle=False,
         num_workers=0, batches=1, epochs=1, verbose=True,
         print_dropped=True, plot=False):
    """Simple test function."""
    datasets_dir = config.get('DATASETS_DIR')
    dl = build_dataloader(datasets_dir, ds, split=split, subset=subset,
                          transform=transform, num_frames=num_frames,
                          sampling=sampling, cache=cache, batch_size=batch_size,
                          shuffle=shuffle, num_workers=num_workers,
                          verbose=verbose, print_dropped=print_dropped)
    print(f'Number of batches {len(dl)}')
    for epoch in range(epochs):
        print(f'Epoch {epoch}')
        for x, y in islice(dl, batches):
            print(f" x {x.dtype} {x.shape}\n"
                  f" x.flatten()[:5] {x.flatten()[:5]}\n"
                  f" y {y.dtype} {y.shape}\n"
                  f" y[0] {y[0]}")
            if plot:
                for xi, yi in zip(x, y):
                    plot_clip(xi, yi)
def savesnap():
    if request.method == "POST":
        studentid = session.get("admno")
        classid = "class" + request.form.get("classid")
        assessment = request.form.get("assessment")
        assessment = assessment.replace(" ", "_")
        imgdata = request.form['snapimgdata']
        imgdata = imgdata[imgdata.find(',') + 1:]
        file_imgdata = io.BytesIO(base64.b64decode(imgdata))
        student_image_file = ("s_" + studentid + assessment + "_"
                              + datetime.datetime.now().strftime("_%Y%m%d_%I-%M-%S_%p")
                              + ".jpg")
        file = FileStorage(file_imgdata, filename=student_image_file)
        filename = secure_filename(file.filename)
        save_folder = config.get("STUDENT", "UPLOAD_FOLDER") + "/" + classid
        file.save(os.path.join(save_folder, filename))
        # return jsonify(studentid, request.form['snapimgdata'])
        return jsonify(success=True)
def test(ds, subset='train', split=1, batch_size=1, batches=1, epochs=1,
         num_frames=16, shuffle=False, sampling='fixed', cache=False,
         verbose=True, print_dropped=True):
    """Simple test function."""
    datasets_dir = config.get('DATASETS_DIR')
    dl = build_dataloader(datasets_dir, ds, split=split, subset=subset,
                          batch_size=batch_size, num_frames=num_frames,
                          shuffle=shuffle, sampling=sampling, cache=cache,
                          verbose=verbose, print_dropped=print_dropped)
    print('Traversing')
    print(f'Number of batches {len(dl)}')
    for epoch in range(epochs):
        print(f'Epoch {epoch}')
        for batch, (x, y) in enumerate(islice(dl, batches)):
            print(f'batch {batch}')
            print(f'x.shape {x.shape}')
            print('x[0,0,:10]')
            print(x[0, 0, :10])
            print(f'y.shape {y.shape}')
            print('y[0]')
            print(y[0])
def __init__(self):
    self.db_file = config.get("script.db_file", "scriptsdb.sqlite3")
    self.connection = sqlite3.connect(self.db_file)
    self.initialize()
def __init__(self):
    super(monitorDevice, self).__init__()
    self.device_dict = {}
    self.devicelist = []
    self.phone = config.get('MOBLIE')
import sys
import os
import logging
import locale
import gi
import uuid

from common import config
import nbxmpp
from common import defs
from common import ged as ged_module

interface = None  # The actual interface (the gtk one for the moment)
thread_interface = None  # Interface to run a thread and then a callback
config = config.Config()
version = config.get('version')
connections = {}  # 'account name': 'account (connection.Connection) instance'
ipython_window = None

ged = ged_module.GlobalEventsDispatcher()  # Global Events Dispatcher
nec = None  # Network Events Controller
plugin_manager = None  # Plugins Manager

log = logging.getLogger('gajim')
logger = None

from common import configpaths
gajimpaths = configpaths.gajimpaths
VCARD_PATH = gajimpaths['VCARD']
        self.__init(None, rnode, node_stack, reader, last_index)

    def get_col_num(self):
        return self.col_num

    def get_line_num(self):
        return self.line_num

    def get_node(self):
        return self._node


if __name__ == '__main__':
    from common import config
    from config import constant

    file_path = config.get(constant.CSV_FILE_FULLPATH)
    reader = CsvReader(open(file_path, 'rb'))
    node = reader.get_node()
    print(reader)


def print_node(node, c):
    if node is None:
        return
    # print(c)
    print(c + node.text)
    if not node.cnodes:
        return
    for cnode in node.cnodes:
        print_node(cnode, c + '--')
def loadCfg():
    authfilename = config.get('Conf', 'authenticationfile')
    with open(authfilename) as f:
        return json.load(f)
def saveCfg(userdb):
    authfilename = config.get('Conf', 'authenticationfile')
    with open(authfilename, 'w') as f:
        json.dump(userdb, f, indent=4)
def __init__(self):
    self.script_file = None
    self.stats_connected = False
    self.host = config.get("stats.host", "localhost")
    self.port = config.get("stats.port", 8125)
sys.path.append(os.path.abspath(os.path.join(path, '..')))
from common import config

if __name__ == "__main__":
    parser = argparse.ArgumentParser("Add tasks to queue manually")
    parser.add_argument("-amqp", "--amqp-server", dest="amqp_server",
                        help="AMQP server address to publish to")
    parser.add_argument("-s", "--scenario", dest="scenario",
                        help="Scenario to run")
    parser.add_argument("-c", "--count", dest="count",
                        help="Task count", default=1)
    parser.add_argument("-amqp-user", "--amqp-user", dest="amqp_user",
                        help="AMQP server user", default="guest")
    parser.add_argument("-amqp-pass", "--amqp-pass", dest="amqp_pass",
                        help="AMQP server password", default="guest")
    parser.add_argument("-d", "--drain", dest="drain", action="store_true",
                        help="Drain all previous messages")
    config.read_config()
    opts = parser.parse_args()
    amqp_server = opts.amqp_server if opts.amqp_server else config.get("main.amqp_server")
    amqp_user = config.get("main.amqp_user", opts.amqp_user)
    amqp_pass = config.get("main.amqp_pass", opts.amqp_pass)
    queue = config.get("main.input_queue", "spikeTasks")
    if not amqp_server:
        print("FATAL: amqp server is not defined")
        exit(1)
    connection = pika.BlockingConnection(pika.ConnectionParameters(
        host=str(amqp_server),
        credentials=pika_credentials.PlainCredentials(amqp_user, amqp_pass)))
    channel = connection.channel()
    count = 0
    if opts.drain:
        print("Draining all messages from queue")
        message = True
#!/usr/bin/python
# -*- coding: utf-8 -*-
import ConfigParser
import sys

from common import get_input, config, smug_auth, album_select

s = smug_auth()
to_delete = album_select(s, config.get('smugmug', 'username'))

print("Delete albums:")
for a in to_delete:
    print("\t%s" % a['Title'])
print("Is this OK? Type 'delete' to continue")
val = get_input()
if val != 'delete':
    print("Aborting!")
    sys.exit(0)

for a in to_delete:
    sys.stdout.write("Deleting %s..." % (a['Title']))
    resp = s.albums_delete(AlbumID=a['id'])
    if resp['stat'] == 'ok':
        print("done")
    else:
        print("error")
print("All done.")
def expect(field, condition):
    assert_that(config.get(field), condition)
# coding:utf-8
__author__ = 'xinchun.li'

from sqlalchemy import create_engine, MetaData
from sqlalchemy.orm import scoped_session, sessionmaker

from common import config
from common import logger
from config import constant

diagnose_logger = logger.get_logger(logger.DIAGNOSE)

db_location = config.get(constant.SQLITE3_DB_LOCATION)
diagnose_logger.info('db_location=' + db_location)
engine = create_engine(db_location, convert_unicode=True)
metadata = MetaData()
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=True,
                                         bind=engine))


def init_db():
    metadata.create_all(bind=engine)


def shutdown_session():
    db_session.remove()