def R3_resize(R3, E_all):
    """Embed the causal sub-block of the third-order response R3 into an
    enlarged zero-padded array sized by the pulse envelopes in E_all.

    E_all unpacks as (E3, E2, E1); each envelope exposes a ``center`` index.
    When R3 carries tick metadata (MetaArray-like), the result is re-wrapped
    with resized ticks and the original rw_freq; a plain ndarray comes back
    as a plain ndarray.
    """
    E3, E2, E1 = E_all
    resized = np.zeros(resized_shape(R3.shape, E_all), dtype=R3.dtype)
    # Number of trailing axis-0 slots the copied sub-block occupies.
    tail = R3.shape[0] - E3.center
    resized[-tail:, :, :] = R3[
        :end(E3.center),
        (E2.center + E3.center):end(E2.center),
        E1.center:end(E1.center),
    ]
    try:
        # EAFP: only MetaArray inputs have .ticks / .rw_freq.
        return MetaArray(resized, ticks=resize_ticks(R3.ticks, E_all),
                         rw_freq=R3.rw_freq)
    except AttributeError:
        return resized
item.reset_index().to_pickle('../feature/{}/f210_product.p'.format(folder)) #============================================================================== # main #============================================================================== make(0) make(1) make(2) #make(3) #make(4) #make(5) make(-1) utils.end(__file__)
convolution_operator(E2, L2), # convolution_operator(E2, L2)[:end(E1.center), :], convolution_operator(E3, L3, trim_left_boundary=False), ] d1, d2, d3 = (len(conv_matrices[i]) for i in xrange(3)) M1 = L3 + L2 + d1 M2 = L3 + d2 M3 = d3 shapes = [(L3, L2, L1), (L3, L2, M1), (L3, M2, M1), (M3, M2, M1)] slice_maps = [[], [], []] for t3 in xrange(L3): for t2 in xrange(L2): slice_maps[0].append( ( conv_matrices[0][E1.center : end(E1.center)], (t3, t2, slice(t3 + t2 + E1.center, t3 + t2 + d1 - E1.center)), (t3, t2), ) ) # slice_maps[0].append((conv_matrices[0], # (t3, t2, slice(t3 + t2, t3 + t2 + d1)), # (t3, t2))) for t3 in xrange(L3): for sum12 in xrange(M1): slice_maps[1].append((conv_matrices[1], (t3, slice(t3, t3 + d2), sum12), (t3, slice(None), sum12))) for sum23 in xrange(M2): for sum123 in xrange(M1): sl = slice(max(0, sum23 + len(E3) + E2.center - L3), min(M3, sum23 + 1))
    # NOTE(review): last element of a `params` list whose opening `[` is
    # before this chunk — TODO confirm.
    {
        # fixed
        'objective': 'binary',
        'metric': 'auc',
        'learning_rate': 0.2,
        'max_bin': 100,
        'nthread': 64,
        'bagging_freq': 10,
        # optimize
        'max_depth': 3,
        'num_leaves': 2**3 - 1,          # kept in sync with max_depth
        'scale_pos_weight': 100,
        'min_child_weight': 0.001,
        'subsample': 0.1,
        'colsample_bytree': 0.5,
        'lambda_l1': 0,
        'lambda_l2': 5,
        # fixed?
        'min_child_samples': 300,
        # Fresh random seed every run — results are intentionally(?) not
        # reproducible; pin a constant here to reproduce a run.
        'seed': np.random.randint(9999)
    },
]

# Train one model per parameter set.
for param in params:
    do_lgb(param)

#==============================================================================
utils.end(__file__)
convolution_operator(E1, L1), convolution_operator(E2, L2), # convolution_operator(E2, L2)[:end(E1.center), :], convolution_operator(E3, L3, trim_left_boundary=False) ] d1, d2, d3 = (len(conv_matrices[i]) for i in xrange(3)) M1 = L3 + L2 + d1 M2 = L3 + d2 M3 = d3 shapes = [(L3, L2, L1), (L3, L2, M1), (L3, M2, M1), (M3, M2, M1)] slice_maps = [[], [], []] for t3 in xrange(L3): for t2 in xrange(L2): slice_maps[0].append( (conv_matrices[0][E1.center:end(E1.center)], (t3, t2, slice(t3 + t2 + E1.center, t3 + t2 + d1 - E1.center)), (t3, t2))) # slice_maps[0].append((conv_matrices[0], # (t3, t2, slice(t3 + t2, t3 + t2 + d1)), # (t3, t2))) for t3 in xrange(L3): for sum12 in xrange(M1): slice_maps[1].append( (conv_matrices[1], (t3, slice(t3, t3 + d2), sum12), (t3, slice(None), sum12))) for sum23 in xrange(M2): for sum123 in xrange(M1): sl = slice(max(0, sum23 + len(E3) + E2.center - L3),
#============================================================================== # DATA LOAD #==============================================================================''' ) pri = utils.read_df_pickle(path='../input/prior*.p') tra = utils.read_df_pickle(path='../input/train*.p') order = utils.read_df_pickle(path='../input/order*.p') logger.info(f''' #============================================================================== # MAKE EDA TABLE #==============================================================================''' ) pri_eda = eda.df_info(pri) pri_eda.to_csv('../eda/prior_eda.csv') tra_eda = eda.df_info(tra) tra_eda.to_csv('../eda/train_eda.csv') order_eda = eda.df_info(order) order_eda.to_csv('../eda/orders_eda.csv') sys.exit() clean_app(app) clean_prev(pre) # clean_pos(pos) # clean_ins(ins) # clean_ccb(ccb) utils.end(sys.argv[0]) # pre_eda = eda.df_info(pre)
def end(message):
    """Handle the /end message: mirror it to the configured group when it
    originated elsewhere, then run the shared shutdown handler."""
    from_group = message.chat.id == config.group_id
    if not from_group:
        # Forward the triggering message into the admin group.
        config.the_bot.forward_message(
            config.group_id, message.chat.id, message.message_id)
    utils.end(message)