Example #1
def main():
    # load the trained Keras CNN model
    kmodel = load_model('./models/cnn_model_10bar_ohlc.h5')
    # load the GAF-encoded training data
    data = pro.load_pkl('./data/label8_eurusd_10bar_1500_500_val200_gaf.pkl')
    train_x = data['train_ohlc_gaf']
    train_label = data['train_label_onehot']
    # wrap the Keras model as a foolbox model
    fmodel = KerasModel(kmodel, bounds=(-1, 1))
    # create our modified attack model
    MODIFIED_LocalSearchAttack = foolbox.attacks.LocalSearchAttack(model=fmodel)
    # generate adversarial examples from the training data
    generate_adversarial_examples(kmodel=kmodel, fmodel=fmodel,
                                  attacker=MODIFIED_LocalSearchAttack,
                                  x_data=train_x, y_label=train_label)
Example #2
def main(params):
    # load data
    data = pro.load_pkl(params['data'])

    # load our model
    kmodel = load_model(params['model_name'])

    # create foolbox model
    fmodel = KerasModel(kmodel, bounds=(-1, 1))
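    # bounds=(-1, 1) matches GAF-encoded inputs, whose values lie in [-1, 1]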

    # customized LocalSearchAttack
    attack = foolbox.attacks.LocalSearchAttack(model=fmodel)

    # attack all samples
    results = attack_all_samples(data, kmodel, fmodel, attack)

    return results
Example #3
    # training loop: run train_model PARAMS['n_loop'] times and collect the results
    result_dict = {
        'train_result_cm': [],
        'test_result_cm': [],
        'total_acc': []
    }
    for i in range(PARAMS['n_loop']):
        model, hist = train_model(PARAMS)
        # collect testing results
        collect_result(PARAMS, model, hist, result_dict)

    # save all results to pickle file
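    # e.g. './results/merged_results_100.pkl' when PARAMS['n_loop'] is 100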
    filename = './results/merged_results_%s.pkl' % PARAMS['n_loop']
    with open(filename, 'wb') as f:
        pickle.dump(result_dict, f)


if __name__ == "__main__":
    PARAMS = {}
    PARAMS['data'] = pro.load_pkl('./data/merged_examples.pkl')
    PARAMS['classes'] = 9
    PARAMS['lr'] = 0.01
    PARAMS['epochs'] = 1000
    PARAMS['batch_size'] = 64
    PARAMS['patience'] = 80
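    # assumption: 'zero_weight' is the class weight for label 0 ("no_class")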
    PARAMS['zero_weight'] = 2
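    # 'lr' is the legacy Keras argument name; tf.keras 2.x renamed it to 'learning_rate'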
    PARAMS['optimizer'] = optimizers.SGD(lr=PARAMS['lr'])
    PARAMS['n_loop'] = 100
    PARAMS['model_path'] = './models/mergedata_model.h5'

    main(PARAMS)
Example #4
def result_picture(ts_data, predict_result):
    # signature reconstructed from the call at the end of this example
    plt.close()

    fig = plt.figure(figsize=(4, 4))
    ax = plt.subplot2grid((1, 1), (0, 0))
    mpf.candlestick_ohlc(ax, ts_data, width=0.4, alpha=1,
                                colordown='#53c156', colorup='#ff1717')
    plt.title(predict_result)

    plt.show()
    plt.close()


if __name__ == "__main__" :
    PATTERN_LS = ["no_class", "evening", "morning", "bearish_engulfing", "bullish_engulfing",
                    "shooting_star", "inverted_hammer", "bearish_harami", "bullish_harami"]
    PKL_NAME = "./pickle/label8_eurusd_1500_500_val200_gaf_culr.pkl"
    MODEL_PATH = './model/checkpoint_model.h5'

    data = pro.load_pkl(PKL_NAME)
    model = load_model(MODEL_PATH)
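    # 'args' presumably comes from an argparse parser defined earlier in the full script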
    _idx = int(args.i)

    data_gaf = data['test_gaf'][_idx, :, :, :]
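    # reshape the single sample to the model's expected input shape (1, 32, 32, 4)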
    data_gaf = data_gaf.reshape(1, 32, 32, 4)
    ts_data = data['test_data']
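    # prepend an integer time index, since candlestick_ohlc expects (t, o, h, l, c) rows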
    ts_data = np.c_[range(ts_data[_idx, :, :].shape[0]), ts_data[_idx, :, :]]

    predict_result = evaluate(data_gaf, model)
    result_picture(ts_data, predict_result)
Example #5
        # open/high slices reconstructed from the OHLC channel order used below
        candle_open = gen_imgs[:, :, 0]
        candle_high = gen_imgs[:, :, 1]
        candle_low = gen_imgs[:, :, 2]
        candle_close = gen_imgs[:, :, 3]

        max_in_o_c = np.maximum(candle_open, candle_close)
        min_in_o_c = np.minimum(candle_open, candle_close)
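        # enforce OHLC validity: high >= max(open, close), low <= min(open, close)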
        gen_imgs[:, :, 1] = np.maximum(candle_high, max_in_o_c)
        gen_imgs[:, :, 2] = np.minimum(candle_low, min_in_o_c)

        util_plt.plot_vae_result(gen_imgs, filename)

    def save_model(self):
        self.decoder.save('./models/cvae_decoder.h5')
        self.encoder.save('./models/cvae_encoder.h5')


if __name__ == '__main__':
    PARAMS = dict()
    PARAMS['number_of_bars'] = 10
    PARAMS['epochs'] = 10001  # 10000001
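    # 'latent' sets the dimensionality of the CVAE latent space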
    PARAMS['latent'] = 100
    PARAMS['batch_size'] = 100
    PARAMS['sample_interval'] = 10000
    PARAMS['data'] = util_pro.load_pkl(
        './data/label8_eurusd_10bar_1500_500_val200_gaf.pkl')

    cvae = CVAE(PARAMS)
    cvae.train(epochs=PARAMS['epochs'],
               batch_size=PARAMS['batch_size'],
               sample_interval=PARAMS['sample_interval'])
    cvae.save_model()
Example #6
    result_dict = {
        'train_result_cm': [],
        'test_result_cm': [],
        'total_acc': []
    }
    for i in range(params['n_loop']):
        model, hist = train_model(params)
        # collect testing results
        collect_result(params, model, hist, result_dict)

    # save all results to pickle file
    filename = './results/clean_results_%s.pkl' % params['n_loop']
    with open(filename, 'wb') as f:
        pickle.dump(result_dict, f)


if __name__ == "__main__":
    PARAMS = {}
    PARAMS['data'] = pro.load_pkl(
        './data/label8_eurusd_10bar_1500_500_val200_gaf_culr.pkl')
    PARAMS['classes'] = 9
    PARAMS['lr'] = 0.01
    PARAMS['epochs'] = 1000
    PARAMS['batch_size'] = 64
    PARAMS['patience'] = 80
    PARAMS['zero_weight'] = 2
    PARAMS['optimizer'] = optimizers.SGD(lr=PARAMS['lr'])
    PARAMS['n_loop'] = 100
    PARAMS['model_path'] = './models/orgdata_model.h5'

    main(PARAMS)