def _load_authinfo_list(self):
    """Load the persisted auth-info mapping, decrypting each entry."""
    authinfo_data = load(self.FILE_PATH, None)
    self.authinfo_list = {}
    if authinfo_data is not None:
        for k, v in authinfo_data.items():
            userid, password = v
            self.authinfo_list[k] = (self.enc.decrypt(userid),
                                     self.enc.decrypt(password))
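# A minimal, self-contained sketch of the on-disk structure
# _load_authinfo_list expects: a dict mapping a key to an
# (encrypted userid, encrypted password) pair. The `load`/`save` helpers
# and `self.enc` cipher come from this bot framework; the identity cipher
# and the 'github' entry below are hypothetical stand-ins.
class _IdentityCipher(object):
    def encrypt(self, s):  # stand-in for the real cipher
        return s

    def decrypt(self, s):
        return s


enc = _IdentityCipher()
authinfo_data = {u'github': (enc.encrypt(u'alice'), enc.encrypt(u'hunter2'))}
# Same decrypt-into-a-dict pattern as the method above:
authinfo_list = {k: (enc.decrypt(u), enc.decrypt(p))
                 for k, (u, p) in authinfo_data.items()}
assert authinfo_list[u'github'] == (u'alice', u'hunter2')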
import os
import sys
import time

import numpy as np
import pandas as pd
import scipy.linalg
import sklearn.metrics
import pickle_utils as pu

# `flags`, `cut_training` and `mnist_1hot_all` are defined elsewhere in the
# original project; the flags cover seed, path, csv_dir, N_train, N_vali
# and jitter.


def main(_):
    start_time = time.time()
    FLAGS = flags.FLAGS
    seed = FLAGS.seed
    path = FLAGS.path
    # noise = float(sys.argv[2])

    params_file = os.path.join(path, "params_{:02d}.pkl.gz".format(seed))
    gram_file = os.path.join(path, "gram_{:02d}.npy".format(seed))
    Kxvx_file = os.path.join(path, "Kxvx_{:02d}.npy".format(seed))
    Kxtx_file = os.path.join(path, "Kxtx_{:02d}.npy".format(seed))
    Kv_diag_file = os.path.join(path, "Kv_diag_{:02d}.npy".format(seed))
    Kt_diag_file = os.path.join(path, "Kt_diag_{:02d}.npy".format(seed))
    csv_file = os.path.join(path, FLAGS.csv_dir, "{:02d}.csv".format(seed))

    params = pu.load(params_file)
    print("Kernel params:", params)

    # Kxtx = np.load(Kxtx_file)
    # if Kxtx.shape[1] != 10000:
    #     params['test_error'] = -1.
    #     params['validation_error'] = -1.
    #     params['training_error'] = -1.
    #     params['time'] = -1.
    #     pd.DataFrame(data=params, index=pd.Index([0])).to_csv(csv_file)
    #     print("Test is wrong size for seed", seed)
    #     sys.exit(1)

    print("Loading data and kernels")
    Kxx, Kxvx, _, Kxtx, _, X, Y, Xv, Yv, Xt, Yt = cut_training(
        FLAGS.N_train, FLAGS.N_vali, np.load(gram_file), np.load(Kxvx_file),
        None, np.load(Kxtx_file), None, *mnist_1hot_all())
    Y[Y == 0.] = -1  # center the one-hot labels at +/-1

    print("Solving system")
    print("Size of Kxx, Y, Kxvx, Yv, Kxtx, Yt:", Kxx.shape, Y.shape,
          Kxvx.shape, Yv.shape, Kxtx.shape, Yt.shape)
    if FLAGS.jitter > 0.0:
        # Add jitter to the diagonal for numerical stability
        Kxx.flat[::len(Kxx) + 1] += Kxx.mean() * FLAGS.jitter
    # overwrite_a must stay False: Kxx is reused below for the training error
    K_inv_y = scipy.linalg.solve(Kxx, Y, overwrite_a=False, overwrite_b=False,
                                 check_finite=False, assume_a='pos',
                                 lower=False)

    def print_error(K_xt_x, Ytv, dit, key):
        """Store the classification error (in %) of K_xt_x @ K_inv_y in dit[key]."""
        print("Computing metric", key)
        Y_pred = K_xt_x @ K_inv_y
        t = sklearn.metrics.accuracy_score(
            np.argmax(Ytv, 1), np.argmax(Y_pred, 1))
        dit[key] = (1 - t) * 100

    print_error(Kxx, Y, params, "training_error")
    print_error(Kxvx, Yv, params, "validation_error")
    print_error(Kxtx, Yt, params, "test_error")
    params['time'] = time.time() - start_time
    pd.DataFrame(data=params, index=pd.Index([0])).to_csv(csv_file)
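# The `Kxx.flat[::len(Kxx) + 1]` idiom above touches exactly the diagonal:
# stepping a flat view of an (n, n) array by n + 1 lands on entries
# (0, 0), (1, 1), ..., (n-1, n-1). A minimal self-contained sketch:
import numpy as np

K = np.zeros((3, 3))
K.flat[::len(K) + 1] += 0.1  # same in-place jitter trick as above
assert np.allclose(K, 0.1 * np.eye(3))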
def __init__(self, name=None):
    BotHandler.__init__(self, name, hear_on_respond=False)
    # Map command regexes to handlers: list, create, show and delete memos.
    respond_patterns = [
        ([u' lsmemo', ], self.lsmemo),
        ([u' mkmemo ([^ ]+) (.+)$', u' mkmemo', ], self.mkmemo),
        ([u' catmemo ([^ ]+)$', u' catmemo', ], self.catmemo),
        ([u' rmmemo ([^ ]+)$', u' rmmemo', ], self.rmmemo),
    ]
    self.memo_data = load(self.file_path)
    self.set_respond_patterns(respond_patterns)
    self.set_debug(False)
import os

import numpy as np
import tensorflow as tf
import pickle_utils as pu

# `create_kern` and `save_kernels` are defined elsewhere in this project.


def main(_):
    FLAGS = tf.app.flags.FLAGS
    np.random.seed(FLAGS.seed)
    tf.set_random_seed(FLAGS.seed)
    path = FLAGS.path
    if path is None:
        raise ValueError("Please provide a value for `FLAGS.path`")

    def file_for(name, fmt):
        return os.path.join(path, "{}_{:02d}.{}".format(name, FLAGS.seed, fmt))

    params_file = file_for('params', 'pkl.gz')
    gram_file = file_for('gram', 'npy')
    Kxvx_file = file_for('Kxvx', 'npy')
    Kxtx_file = file_for('Kxtx', 'npy')

    # Replicate the parameters the experiments were run with
    if FLAGS.allow_skip:
        LAYERS_MIN = 4
        LAYERS_SPAN = 12
        FILTER_SIZE_SPAN = 4
    else:
        LAYERS_MIN = 2
        LAYERS_SPAN = 7
        FILTER_SIZE_SPAN = 5

    if os.path.isfile(params_file):
        params = pu.load(params_file)
    else:
        # Sample the architecture hyperparameters uniformly at random
        params = dict(
            seed=FLAGS.seed,
            var_weight=np.random.rand() * 8 + 0.5,
            var_bias=np.random.rand() * 8 + 0.2,
            n_layers=LAYERS_MIN + int(np.random.rand() * LAYERS_SPAN),
            filter_sizes=3 + int(np.random.rand() * FILTER_SIZE_SPAN),
            strides=1 + int(np.random.rand() * 3),
            padding=("VALID" if np.random.rand() > 0.5 else "SAME"),
            nlin=("ExReLU" if np.random.rand() > 0.5 else "ExErf"),
            skip_freq=(int(np.random.rand() * 2) + 1
                       if ((np.random.rand() > 0.5 and FLAGS.allow_skip)
                           # Before seed 56, skip_freq was always positive
                           or FLAGS.seed < 56)
                       else -1),
        )
    if params['skip_freq'] > 0:
        # Skip connections need shape-preserving layers
        params['padding'] = 'SAME'
        params['strides'] = 1
    print("Params:", sorted(list(params.items())))
    pu.dump(params, params_file)

    with tf.device("cpu:0"):
        kern = create_kern(params)
        save_kernels(kern, FLAGS.N_train, FLAGS.N_vali, FLAGS.n_gpus,
                     gram_file, Kxvx_file, Kxtx_file, n_max=FLAGS.n_max)
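# A standalone sketch of the naming scheme file_for produces; the path and
# seed values here are hypothetical (POSIX paths assumed):
import os


def file_for(path, seed, name, fmt):
    return os.path.join(path, "{}_{:02d}.{}".format(name, seed, fmt))


assert file_for('/tmp/kernels', 3, 'gram', 'npy') == '/tmp/kernels/gram_03.npy'
assert file_for('/tmp/kernels', 3, 'params', 'pkl.gz') == '/tmp/kernels/params_03.pkl.gz'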
def _on_where_are_we(self, bitslack_obj, event, args):
    help = self._help(bitslack_obj, event, args)
    if help is not None:
        return help
    users = bitslack_obj.get_users()
    whereis_data = load(self.file_path)
    texts = []
    userid = u'<@%s>' % (event['user'])
    for username, user_info in users.items():
        user = u'<@%s>' % (user_info['id'])
        if user in whereis_data:
            # "<user> is at <place>."
            texts.append(u'%sさんは %s にいます。' % (user, whereis_data[user]))
        else:
            # "I don't know where <user> is."
            texts.append(u'%sさんの居場所はわかりません。' % (user))
    return texts
def _on_im_at(self, bitslack_obj, event, args):
    help = self._help(bitslack_obj, event, args)
    if help is not None:
        return help
    where = self._parse_where(event['text'], u'im_at')
    texts = []
    userid = u'<@%s>' % (event['user'])
    whereis_data = load(self.file_path)
    old_where = None
    if userid in whereis_data:
        old_where = whereis_data[userid]
    whereis_data[userid] = where
    save(whereis_data, self.file_path)
    if old_where is not None:
        # "<user>: location changed. (old -> new)"
        texts.append(u'%s: 居場所を変更しました。(%s -> %s)'
                     % (userid, old_where, where))
    else:
        # "<user>: location set. (<place>)"
        texts.append(u'%s: 居場所を設定しました。(%s)' % (userid, where))
    return texts
def _on_where_is(self, bitslack_obj, event, args):
    help = self._help(bitslack_obj, event, args)
    if help is not None:
        return help
    user_list = self._parse_user_list(event['text'])
    not_user_list = self._parse_not_user_list(event['text'])
    whereis_data = load(self.file_path)
    texts = []
    userid = u'<@%s>' % (event['user'])
    texts.append(u'%s:' % userid)
    for user in user_list:
        if user in whereis_data:
            # "<user> is at <place>."
            texts.append(u'%sさんは %s にいます。' % (user, whereis_data[user]))
        else:
            # "I don't know where <user> is."
            texts.append(u'%sさんの居場所はわかりません。' % (user))
    for not_user in not_user_list:
        # "<user> probably isn't a member of this team."
        texts.append(u'%sさんは多分このチームに所属していません。' % (not_user))
    return texts
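# A minimal sketch of the mapping the three handlers above persist through
# `load`/`save`: Slack mention strings (built with the same u'<@%s>' format
# as in the code) map to free-form location strings. The user IDs and
# places below are hypothetical.
whereis_data = {
    u'<@U012345>': u'home office',
    u'<@U678901>': u'meeting room B',
}
print(whereis_data[u'<@U012345>'])  # -> home office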
""" Analysis of MIMIC-III dataset """ import pickle import matplotlib.pyplot as plt import numpy as np import pickle_utils as pu from numpy import cumsum, log, polyfit, sqrt, std, subtract from scipy.stats import pearsonr from smooth import savitzky_golay from scipy.interpolate import interp1d from scipy.io import savemat from helper_function import * timeSeriesCon2, label2, sizes2_2, _ = pu.load('dataset/data.pkl.gz'); N2 = timeSeriesCon2.shape[0] dim2 = timeSeriesCon2[0].shape[1] NB_f1 = 38 # downsample the second dataset timeSeriesCon2_new = np.empty(len(label2),dtype=object) sizes2_2_new = np.zeros(len(label2)) for i in range(len(label2)): temp = timeSeriesCon2[i] indices = np.flip(np.arange(temp.shape[0]-1,-1,-2),axis=0) timeSeriesCon2_new[i] = temp[indices,:] sizes2_2_new[i] = indices.shape[0] # remove std=0 features in the first dataset: [14,16,26] remove_f = np.arange(10,28)
def load_model(cls):
    """load model obj from pickle"""
    cls.model = pickle_utils.load('knn.model.pkl')
def predict(cls):
    """Predict the cut point for the audio clip."""
    file_path = r'%s\c.mp3' % WORK_ROOT
    model = pickle_utils.load('knn.model.pkl')
    sec = cls.get_cut_sce(file_path, model)
    print('sec', sec, cls.get_min(sec))
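# A minimal, self-contained sketch of the persist/load cycle the two
# classmethods above rely on. Assumptions: pickle_utils wraps the standard
# pickle module, and 'knn.model.pkl' holds a scikit-learn
# KNeighborsClassifier; the toy training data below is hypothetical.
import pickle

from sklearn.neighbors import KNeighborsClassifier

model = KNeighborsClassifier(n_neighbors=3)
model.fit([[0.0], [1.0], [2.0]], [0, 1, 1])
with open('knn.model.pkl', 'wb') as f:
    pickle.dump(model, f)  # persist, as the training step presumably does
with open('knn.model.pkl', 'rb') as f:
    restored = pickle.load(f)  # what load_model recovers
print(restored.predict([[1.5]]))  # -> [1]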