# Hyperparameters for the CNN defined below.
epochs = 3
batch_size = 32
keep_prob = 0.2  # NOTE(review): named like a dropout keep-probability, but 0.2 keeps only 20% of units — confirm intent
kernel_size = 5  # used both as the conv filter size and as the max-pool window below
pool_stride = 2  # NOTE(review): defined but never used in the visible code — was max_pool_2d meant to take it?
hl1_depth = 8    # filter counts for hidden conv layers 1-5
hl2_depth = 16
hl3_depth = 32
hl4_depth = 64   # hl4/hl5 and the fc sizes are presumably consumed by layers beyond this excerpt
hl5_depth = 128
fc1_size = 512
fc2_size = 1024

# Pre process the data: build the dataset from DATA_DIR, cache it to
# 'datasets.npy', then split into train/test/validation (10% test, 10% valid).
features = Features(data_dir=DATA_DIR, image_size=IMAGE_SIZE)
dataset = features.create(save=True, save_file='datasets.npy',
                          gray=False, flatten=False)
data = features.train_test_split(dataset, test_size=0.1, valid_portion=0.1)
X_train, y_train, X_test, y_test, X_val, y_val = data

# Build the network (TFLearn-style layer API; network definition continues
# past this excerpt).
net = input_data(shape=[None, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNEL],
                 name="input")
# Hidden layer 1: 5x5 conv + ReLU, then max-pool with window = kernel_size
net = conv_2d(net, hl1_depth, kernel_size, activation='relu')
net = max_pool_2d(net, kernel_size)
# Hidden layer 2
net = conv_2d(net, hl2_depth, kernel_size, activation='relu')
net = max_pool_2d(net, kernel_size)
# Hidden layer 3
net = conv_2d(net, hl3_depth, kernel_size, activation='relu')
net = max_pool_2d(net, kernel_size)
# Beispiel #2 ("Example #2" — separator left behind by a scraped-example
# dataset; the stray tokens "Beispiel" / "0" are commented out here because
# the bare name would raise NameError at import time)
        
        v_stack = pd.concat([kyc, age, cp_percent, tu_percent, trans_time_count, num_countries, trans_repeats, range_amount, std_amount, trans_min, country_count, outlier_amount, failed_signin], axis=1)
        
        v_stack = self.encode(v_stack)
        v_stack = v_stack.fillna(0)
        
        return v_stack
        
if __name__ == "__main__":
    # Script entry point: connect to the database, derive per-user features,
    # align fraud labels to those features, and persist the combined frame.
    engine = create_engine(conn_str, echo=False)
    # NOTE(review): `session` is a sessionmaker *factory*, not a Session
    # instance; Features(session) presumably instantiates it internally —
    # confirm against the Features implementation.
    session = sessionmaker()
    session.configure(bind=engine)

    from features import Features
    features = Features(session)

    user_df = features.get_users()
    # NOTE(review): trans_df is unused below; kept in case get_transactions()
    # has a caching side effect inside Features — confirm before removing.
    trans_df = features.get_transactions()

    print('Creating Features...')
    # Create features and store them
    feature_df = features.create()
    # is_fraudster is boolean-like; `* 1` coerces it to 0/1 integer labels.
    labels = user_df['is_fraudster'] * 1
    # Keep only labels for users that produced a feature row, reindexed to
    # match feature_df's row order.
    labels = labels[feature_df.index]

    print('Saving Features...')
    all_data = pd.concat([feature_df, labels], axis=1)
    # Fix: use a context manager so the pickle file is always flushed and
    # closed, even if dump() raises (the original left the handle open).
    with open('features.pck', 'wb') as out_file:
        pickle.dump(all_data, out_file)
    print('Done.')