import numpy as np
from scipy import signal


def get_peak_latencies(df, co, min_heights, dt, used_inds, win_start=0, win_stop=0.5):
    '''
    Parameters:
        df: pandas dataframe containing experiment data
        co: controller outputs, shape (trials, time, n_outputs)
        min_heights: minimum peak height for each controller output
        dt: time step of the LFADS output, in seconds
        used_inds: trial indices to include
        win_start, win_stop: window (seconds) after target appearance in which
            a peak counts as target-related

    Returns:
        latencies: latencies (seconds) from each in-window peak to the nearest
            preceding target appearance
        all_peak_count: count of all controller peaks
        target_peak_count: count of peaks falling within the window of a target
    '''
    targets = df.loc[used_inds].kinematic.query('hit_target')
    t_lfads = np.arange(co.shape[1]) * dt  # time labels of LFADS output
    all_peak_count = 0  # count of all controller peaks
    target_peak_count = 0  # count of all peaks within window of a target
    latencies = []
    for i in used_inds:
        peaks = []
        for input_idx in range(1):  # range(co.shape[2]) to scan every output
            input_peaks, _ = signal.find_peaks(np.abs(co[i, :, input_idx]),
                                               height=min_heights[input_idx])
            peaks.append(input_peaks)
        peaks = np.concatenate(peaks)
        t_peaks = t_lfads[peaks]
        t_targets = targets.loc[i].index
        all_peak_count += len(t_peaks)
        for tp in t_peaks:
            if any((tp - t_targets >= win_start) & (tp - t_targets < win_stop)):
                diff_targets = tp - t_targets
                # latency to the closest preceding target
                latency = np.min(diff_targets[diff_targets > 0])
                latencies.append(latency)
                target_peak_count += 1

    return latencies, all_peak_count, target_peak_count
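# Minimal usage sketch of the peak-detection step above, on synthetic data.
# The trace, the 2.0 height threshold, and dt = 0.01 s are illustrative
# assumptions, not values from the original experiment.
rng = np.random.default_rng(0)
co_trial = rng.standard_normal(300)                 # one trial of one controller output
peak_inds, _ = signal.find_peaks(np.abs(co_trial), height=2.0)
print('peak times (s):', peak_inds * 0.01)          # sample index -> seconds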
def post(self, request):
    # "空" ("empty" in Chinese) marks a blank form field; normalize it to None.
    args_map = {key: value if value != "空" else None
                for key, value in request.req_args.items()}
    edit_action = args_map.get("action")
    select_sql = """SELECT `id` FROM `tb_order_info` WHERE `mail_pd_id`=?"""
    _id = self.db.get_value(select_sql, (args_map["mail_pd_id"], ))
    if not edit_action and _id:
        # Creating a new order, but one with this mail_pd_id already exists.
        raise APIException(code_msg.CODE_DATA_EXIST)

    insert_sql = """INSERT INTO `tb_order_info`(`mail_pd_id`, `receiver`, `order_status`, """ \
                 """`order_id`, `apply_time`, `wangwang_id`, `goods_id`, `return_pd_company`, """ \
                 """`return_pd_id`) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"""
    if edit_action:
        img_id = _id
    else:
        result = self.db.execute(insert_sql,
                                 (args_map["mail_pd_id"], args_map["receiver"],
                                  args_map["order_status"], args_map["order_id"],
                                  args_map["apply_time"], args_map["wangwang_id"],
                                  args_map["goods_id"], args_map["return_pd_company"],
                                  args_map["return_pd_id"]))
        img_id = result.lastrowid

    file_obj = args_map.get("upload_order_img")
    if file_obj:
        try:
            filename = self._upload_images(file_obj, img_id)
            self.db.execute("""UPDATE `tb_order_info` SET `img_name`=? WHERE `id`=?""",
                            (filename, img_id))
        except Exception:
            # Roll back: drop the order row and any partially written image file.
            self.db.execute("""DELETE FROM `tb_order_info` WHERE `id`=?""", (img_id, ))
            if os.path.exists(self.upload_img_path):
                os.remove(self.upload_img_path)
            raise APIException(code_msg.CODE_UPLOAD_ORDER_ERROR)

    if edit_action:
        update_sql = """UPDATE `tb_order_info` SET `receiver`=?, `order_status`=?, `order_id`=?, """ \
                     """`apply_time`=?, `wangwang_id`=?, `goods_id`=?, `return_pd_company`=?, """ \
                     """`return_pd_id`=?, `comment`=?, `update_time`=? """ \
                     """WHERE `mail_pd_id`=? AND `is_delete`=0"""
        self.db.execute(update_sql,
                        (args_map["receiver"], args_map["order_status"], args_map["order_id"],
                         args_map["apply_time"], args_map["wangwang_id"], args_map["goods_id"],
                         args_map["return_pd_company"], args_map["return_pd_id"],
                         args_map["comment"], get_dt(), args_map["mail_pd_id"]))
def post(self, request):
    dt = get_dt()
    status = request.req_args.get("status")
    mail_pd_id = request.req_args.get("mail_pd_id")
    update_sql = """UPDATE `tb_order_info` SET `is_delete`=?, `update_time`=? WHERE `mail_pd_id`=?"""
    if status == 1:  # restore a soft-deleted order
        self.db.execute(update_sql, (0, dt, mail_pd_id))
    elif status == 2:  # permanent delete: remove the image file, then the row
        row_id, img_name = self.db.get_one_row(
            """SELECT `id`, `img_name` FROM `tb_order_info` WHERE `mail_pd_id`=?""",
            (mail_pd_id, ))
        if img_name:
            filename = str(row_id) + "." + img_name.rsplit('.', 1)[1]
            img_path = os.path.join(current_app.config["UPLOAD_IMG_PATH"], filename)
            if os.path.exists(img_path):
                os.remove(img_path)
        self.db.execute("""DELETE FROM `tb_order_info` WHERE `mail_pd_id`=?""", (mail_pd_id, ))
    elif status == 0:  # soft delete
        self.db.execute(update_sql, (1, dt, mail_pd_id))
    else:
        raise APIException(code_msg.CODE_INVALID_ARGUEMNTS)
import rethinkdb as r
from tornado import gen, ioloop

# The yield-based changefeed loop below requires the Tornado loop type and a
# coroutine decorator; both are assumed from the standard RethinkDB + Tornado setup.
r.set_loop_type("tornado")


@gen.coroutine
def update_row_counts(table):
    conn = yield r.connect(host="localhost", port=28015)
    feed = yield r.db('public').table(table).changes(squash=False).run(conn)
    while (yield feed.fetch_next()):
        change = yield feed.next()
        if table == 'tables':
            # A brand-new table document: spawn a changefeed watcher for it too.
            if not change.get('old_val'):
                ioloop.IOLoop.current().add_callback(update_row_counts,
                                                     change['new_val']['id'])
        if table == 'data_from_socrata' and change.get('new_val'):
            indexes = yield r.db('public').table(table).index_list().run(conn)
            for key in change['new_val']:
                index_name = "dataset_id&" + key
                if index_name not in indexes and key != 'id':
                    yield r.db('public').table(table).index_create(
                        index_name, [r.row["dataset_id"], r.row[key]]
                    ).run(conn)
        print('table', table, changed_keys(change))
        if table != 'changes' and not (table == 'tables' and
                                       changed_keys(change) == 'number_of_rows'):
            c = {'table': table, 'datetime': get_dt(), 'change': change}
            # print('added change', (yield r.db('public').table('changes').insert(c).run(conn)))
            print(table, change)
        if change.get('new_val'):
            indexes = yield r.db('public').table(table).index_list().run(conn)
            table_data = yield r.db('public').table('tables').get(table).run(conn)
            fields = table_data.get('fields')
            for key in change['new_val']:
                if key not in indexes and key != 'id':
                    yield r.db('public').table(table).index_create(key).run(conn)
            if not fields:
                yield r.db('public').table('tables').get(table).update(
                    {'fields': list(change['new_val'].keys())}).run(conn)
            else:
                for key in change['new_val']:
                    if key not in fields:
                        fields.append(key)
                # print(r.db('public').table(table).get(table).update({'fields': fields}).run(conn))
        if not change['new_val'] or not change['old_val']:
            # A row was inserted or deleted: refresh the cached row count.
            number_of_rows = yield r.db('public').table(table).count().run(conn)
            yield r.db('public').table('tables').get(table).update(
                {'number_of_rows': number_of_rows}).run(conn)
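# Hypothetical entry point (an assumption, not shown in the original source):
# start by watching the 'tables' table; the add_callback above then spawns a
# watcher for each table document that appears.
if __name__ == '__main__':
    ioloop.IOLoop.current().add_callback(update_row_counts, 'tables')
    ioloop.IOLoop.current().start()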
for dataset_name, dataset_dict in dataset_dicts.items():
    for lfads_params in dataset_dict['lfads_params']:
        file_root = dataset_dict['file_root']
        data_filename = '../data/intermediate/' + file_root + '.p'
        lfads_filename = '../data/model_output/' + \
            '_'.join([file_root, lfads_params, 'all.h5'])
        inputInfo_filename = '../data/model_output/' + \
            '_'.join([file_root, 'inputInfo.mat'])
        peak_filename = '../data/peaks/' + \
            '_'.join([file_root, lfads_params, 'peaks_train.p'])
        df = pd.read_pickle(data_filename)
        input_info = io.loadmat(inputInfo_filename)
        with h5py.File(lfads_filename, 'r') as h5file:
            co = h5file['controller_outputs'][:]
            dt = utils.get_dt(h5file, input_info)
            trial_len = utils.get_trial_len(h5file, input_info)
        # peak_df = ta.get_peak_df(df, co, trial_len, min_heights, dt=0.01, win_start=win_start, win_stop=win_stop)
        peak_df = pd.read_pickle(peak_filename)
        peak_df = get_endpoint(peak_df, df, dt)
        for pre_param_dict in pre_param_dicts:
            if pre_param_dict.get('align_peaks'):
                X, y = get_inputs_to_model(peak_df, co, trial_len, dt, df=df,
                                           win_start=0.05, win_stop=0.1,
                                           **pre_param_dict)