Example #1
def __init__(self, pkl_pathname: str):
    """
    Create a prediction server
    :param pkl_pathname: path to the Docknet pickle model file to load
    """
    super().__init__()
    # Load the pickled Docknet model once, when the server is constructed
    self.docknet = read_pickle(pkl_pathname)
Example #2
def test_read_pickle_to_json():
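    # Round trip: read a pickled network and check that its JSON
    # serialization matches the expected fixture (data_dir and read_pickle
    # are defined in the surrounding test module)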
    expected_json_path = os.path.join(data_dir, 'docknet1.json')
    pickle_path = os.path.join(data_dir, 'docknet1.pkl')
    with open(expected_json_path, 'rt', encoding='UTF-8') as fp:
        expected_json = fp.read()
    actual_docknet = read_pickle(pickle_path)
    actual_file = io.StringIO()
    actual_docknet.to_json(actual_file, True)
    actual_json = actual_file.getvalue()
    assert actual_json == expected_json
Example #3
def test_to_pickle_read_pickle_to_json(docknet1: Docknet):
    # Set network parameters as for the dummy initializer in order to
    # enforce a specific expected output
    docknet1.initializer.initialize(docknet1.layers)
    pkl_path = os.path.join(temp_dir, 'docknet1.pkl')
    expected_json_path = os.path.join(data_dir, 'docknet1.json')
    with open(expected_json_path, 'rt', encoding='UTF-8') as fp:
        expected_json = fp.read()
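    # Serialize to pickle, read the pickle back, and compare the reloaded
    # network's JSON serialization against the expected fixture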
    docknet1.to_pickle(pkl_path)
    docknet2 = read_pickle(pkl_path)
    actual_file = io.StringIO()
    docknet2.to_json(actual_file, True)
    actual_json = actual_file.getvalue()
    assert actual_json == expected_json
Example #4
def main():
    args = parse_args()
    testset = pd.read_csv(args.testset, header=None)
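    # Samples are stored column-wise: every row but the last holds input
    # features (X); the last row holds the expected labels (Y)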
    X = testset.iloc[0:-1, :].values
    Y = testset.iloc[-1:, :].values

    if args.model_type == '.json':
        docknet = read_json(args.model)
    else:
        docknet = read_pickle(args.model)
    docknet.cost_function = 'cross_entropy'
    Y_predicted = docknet.predict(X)
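    # Round the predicted probabilities to hard 0/1 class labels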
    Y_predicted = np.round(Y_predicted)
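    # Y and Y_predicted are 1-by-n arrays, so score their single rows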
    results = metrics.classification_report(Y[0], Y_predicted[0])
    print(results)
Example #5
def main():
    args = parse_args()
    X = pd.read_csv(args.dataset, header=None)
    if args.ignore_last_row:
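        # Drop the last row (presumably a label row) so that only input
        # features remain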
        X = X.iloc[0:-1, :]
    X = X.values

    if args.model_type == '.json':
        docknet = read_json(args.model)
    else:
        docknet = read_pickle(args.model)
    docknet.cost_function = 'cross_entropy'

    Y = docknet.predict(X)
    Y_df = pd.DataFrame(Y)
    if args.output:
        with open(args.output, 'wt', encoding='UTF-8') as fp:
            Y_df.to_csv(fp, header=False, index=False)
    else:
        # index=False (not index_label=False), so the row index is omitted
        # just as in the file-output branch above
        Y_df.to_csv(sys.stdout, header=False, index=False)
Example #6
def main():
    args = parse_args()
    trainingset = pd.read_csv(args.trainingset, header=None)
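    # As in the evaluation script above, samples are column-wise: feature
    # rows followed by a final row of labels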
    X = trainingset.iloc[0:-1, :].values
    Y = trainingset.iloc[-1:, :].values

    if args.model_in_type == '.json':
        docknet = read_json(args.model_in)
    else:
        docknet = read_pickle(args.model_in)
    initialize = False
    if args.initializer:
        initialize = True
        if args.initializer == 'random':
            docknet.initializer = RandomNormalInitializer()
        else:
            # Exit on an unknown initializer, mirroring the unknown-optimizer
            # handling below
            print('Unknown initializer {}; available initializers: random'
                  .format(args.initializer))
            sys.exit(1)
    if args.optimizer == 'adam':
        docknet.optimizer = AdamOptimizer()
    elif args.optimizer == 'gradient_descent':
        docknet.optimizer = GradientDescentOptimizer()
    else:
        print(
            'Unknown optimizer {}; available optimizers: adam, gradient_descent'
            .format(args.optimizer))
        sys.exit(1)
    docknet.cost_function = 'cross_entropy'
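    # Stopping criteria (inferred from the parameter names): a hard epoch
    # limit, convergence once the error changes by less than error_delta for
    # max_epochs_within_delta epochs, and an external stop file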
    docknet.train(X,
                  Y,
                  args.batch_size,
                  max_number_of_epochs=args.max_number_of_epochs,
                  error_delta=args.error_delta,
                  max_epochs_within_delta=args.max_epochs_within_delta,
                  stop_file_pathname=args.stop_file,
                  initialize=initialize)
    if args.model_out_type == '.json':
        docknet.to_json(args.model_out)
    else:
        docknet.to_pickle(args.model_out)