def run_experiments(isbigram=True, islstm=True, nfolds=10):
    """Runs all experiments"""
    # Each experiment is optional; a disabled one reports None so the
    # caller can always unpack a (bigram, lstm) pair.
    bigram_results = bigram.run(nfolds=nfolds) if isbigram else None
    lstm_results = lstm.run(nfolds=nfolds) if islstm else None
    return bigram_results, lstm_results
def run_experiments(isbigram=True, islstm=True, nfolds=10, savemodel=False):
    """Runs all experiments"""
    # Forward the savemodel flag to each experiment; skipped experiments
    # yield None so the return shape is always a (bigram, lstm) pair.
    bigram_results = (
        bigram.run(nfolds=nfolds, savemodel=savemodel) if isbigram else None
    )
    lstm_results = (
        lstm.run(nfolds=nfolds, savemodel=savemodel) if islstm else None
    )
    return bigram_results, lstm_results
def run_experiments(isbigram=True, islstm=True, nfolds=10):
    """Runs all experiments.

    The per-model epoch caps can be overridden through the
    MAX_BIGRAM_EPOCH and MAX_LSTM_EPOCH environment variables
    (defaults: 50 and 25 respectively).

    Returns a (bigram_results, lstm_results) tuple; a disabled
    experiment contributes None.
    """
    # BUG FIX: `os` was used here but never imported anywhere in this
    # file, which made any call raise NameError. Import it locally so
    # this function is self-contained.
    import os

    bigram_results = None
    lstm_results = None
    if isbigram:
        max_bigram_epoch = int(os.environ.get('MAX_BIGRAM_EPOCH', 50))
        bigram_results = bigram.run(nfolds=nfolds, max_epoch=max_bigram_epoch)
    if islstm:
        max_lstm_epoch = int(os.environ.get('MAX_LSTM_EPOCH', 25))
        lstm_results = lstm.run(nfolds=nfolds, max_epoch=max_lstm_epoch)
    return bigram_results, lstm_results
def run_experiments(nfolds=10): options = { 'nfolds': nfolds, # enable for quick functional testing # 'max_epoch':2 } """Runs all experiments""" print '========== aloha_cnn_lstm ==========' aloha_cnn_lstm_results = aloha_cnn_lstm.run(**options) print '========== aloha_cnn ==========' aloha_cnn_results = aloha_cnn.run(**options) print '========== aloha_bigram ==========' aloha_bigram_results = aloha_bigram.run(**options) print '========== aloha_lstm ==========' aloha_lstm_results = aloha_lstm.run(**options) print '========== cnn_lstm ==========' cnn_lstm_results = cnn_lstm.run(**options) print '========== cnn ==========' cnn_results = cnn.run(**options) print '========== bigram ==========' bigram_results = bigram.run(**options) print '========== lstm ==========' lstm_results = lstm.run(**options) return { 'options': options, 'model_results': { 'aloha_bigram': aloha_bigram_results, 'aloha_lstm': aloha_lstm_results, 'aloha_cnn': aloha_cnn_results, 'aloha_cnn_lstm': aloha_cnn_lstm_results, 'bigram': bigram_results, 'lstm': lstm_results, 'cnn': cnn_results, 'cnn_lstm': cnn_lstm_results, } }
# Standard library
import os  # BUG FIX: os.environ is read by run_experiments but os was never imported

# Third-party
import numpy as np
import pandas as pd
import sklearn
import tldextract
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM
from sklearn.model_selection import train_test_split

# Local
import dga_classifier.bigram as bigram
import dga_classifier.data as data
import dga_classifier.lstm as lstm

if __name__ == "__main__":
    # Quick entry point: a single-fold LSTM run (positional arg is nfolds).
    lstm.run(1)