import pytest
from unittest.mock import patch

import load_dataset


def test_exceptions_send_slack_msg(slack_outbox):
    with patch.object(load_dataset, "load_dataset") as load:
        load.side_effect = Exception("blah")
        with pytest.raises(Exception, match="blah"):
            load_dataset.main(["", "hpd_registrations"])
        load.assert_called_once_with("hpd_registrations")
        assert slack_outbox == [
            "Alas, an error occurred when loading the dataset `hpd_registrations`."
        ]
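The slack_outbox fixture is not shown on this page; a minimal sketch of what it could look like in conftest.py, assuming the app sends notifications through a slack.send_slack_message helper (both the module and the function name are assumptions, not from the original):

import pytest
from unittest.mock import patch

@pytest.fixture
def slack_outbox():
    # Hypothetical fixture: collect outgoing Slack messages in a list
    # instead of actually sending them.
    outbox = []
    with patch('slack.send_slack_message', side_effect=outbox.append):
        yield outbox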
Example #2
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

import load_dataset

# The block below is disabled (its opening triple quote, cut off in the
# original snippet, is restored here). It built a one-hot encoded feature
# matrix by hand from a DataFrame df before load_dataset.main was used.
'''
X = df.select_dtypes(include=numerics)
X['race'] = df['race']
X['sex'] = df['sex']
X = pd.get_dummies(X)
del X['race_Caucasian']
del X['sex_Male']
vars_ = X.columns
print(X.head())
X = np.array(X)
'''

### GERMAN CREDIT
#X, y = load_dataset.main('credit', n_obs=10000)

### ONLINE NEWS POPULARITY
X, y = load_dataset.main('news', n_obs=10000)

# normalization added here; it was missing before, so watch out
X = (X.copy() - X.mean(axis=0)) / X.std(axis=0)
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    train_size=0.7,
                                                    random_state=0)
#clf = xgb.XGBClassifier().fit(X_train, y_train)
clf = RandomForestClassifier(200, random_state=0).fit(X_train, y_train)
#clf = GaussianNB().fit(X_train, y_train)
#clf = SVC(C=1.0, probability=True).fit(X_train, y_train)
#clf = KNeighborsClassifier(n_neighbors=15, metric='manhattan').fit(X_train, y_train)
y_pred = clf.predict(X_test)

print(clf)
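y_pred is not used further in the snippet; a minimal evaluation sketch using scikit-learn's metrics (the choice of metrics is an addition, not part of the original):

from sklearn.metrics import accuracy_score, classification_report

# Score the held-out split; both are standard scikit-learn calls.
print('test accuracy:', accuracy_score(y_test, y_pred))
print(classification_report(y_test, y_pred))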
Example #3
import os
import sys

sys.path.append(os.path.dirname(sys.argv[0]))
import load_dataset
import convert
import separate
import dataset_postprocessing

#load_dataset.main(global_path=os.path.dirname(sys.argv[0]), dataset_name='obj_detection')
#convert.main(global_path=os.path.dirname(sys.argv[0]), dataset_name='obj_detection')
#separate.main(global_path=os.path.dirname(sys.argv[0]))
#execution_path = os.path.dirname(sys.argv[0])
#print "the end!"
#print sys.argv[0]
#print sys.argv[2]

# The elif branches below are commented out, so passing 'labeling' runs the
# whole pipeline (load, convert, separate, postprocess) in sequence.
if sys.argv[2] == 'labeling':
    load_dataset.main(global_path=os.path.dirname(sys.argv[0]), dataset_name=sys.argv[1])
#elif sys.argv[2] == 'convert':
    convert.main(global_path=os.path.dirname(sys.argv[0]), dataset_name=sys.argv[1])
#elif sys.argv[2] == 'separate':
    separate.main(global_path=os.path.dirname(sys.argv[0]))

    dataset_postprocessing.main(global_path=os.path.dirname(sys.argv[0]), dataset_name='obj_detection', batch_size=64, subdivisions=8)
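If the commented-out elif branches were restored, the script would run one stage per invocation instead of the whole pipeline; a sketch under that assumption, reusing the same module interfaces as above:

base = os.path.dirname(sys.argv[0])
stage = sys.argv[2]
if stage == 'labeling':
    load_dataset.main(global_path=base, dataset_name=sys.argv[1])
elif stage == 'convert':
    convert.main(global_path=base, dataset_name=sys.argv[1])
elif stage == 'separate':
    separate.main(global_path=base)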
Example #4
import tensorflow as tf
import numpy as np
import load_dataset
import sklearn.metrics as metrics

n_classes = 5  # 01_BA, 02_EO, 03_LY, 04_MO, 05_NE
f1_y_label = []
f1_y_pred = []

###################################################
# load the dataset

training_data_dir = '/home/ch/workspace/wbc/db/empty/'
test_data_dir = '/home/ch/workspace/wbc/gan/keras/crop_resize/'

training_data_x, training_data_y, test_data_x, test_data_y = load_dataset.main(
    training_data_dir, test_data_dir)
print('mizno, training data x (image data) = ' + str(len(training_data_x)))
print('mizno, training data y (label) = ' + str(len(training_data_y)))
print('mizno, test data x (image data) = ' + str(len(test_data_x)))
print('mizno, test data y (label) = ' + str(len(test_data_y)))

###################################################
# load the model

# TF1-style checkpoint loading; under TensorFlow 2 these calls live in tf.compat.v1.
sess = tf.Session()
saver = tf.train.import_meta_graph('./model_cv_sm/model003/model.meta')
saver.restore(sess, tf.train.latest_checkpoint('./model_cv_sm/model003/'))
print(saver)

###################################################
# load the function of the model
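The snippet ends here; in TF1-style code the usual next step is to look up the restored graph's input and output tensors by name. A sketch under that assumption (the tensor names 'x:0' and 'logits:0' are hypothetical and depend on how the model was originally built):

graph = tf.get_default_graph()
# Hypothetical tensor names; inspect the graph to find the real ones.
x = graph.get_tensor_by_name('x:0')
logits = graph.get_tensor_by_name('logits:0')
predictions = sess.run(tf.argmax(logits, 1), feed_dict={x: test_data_x})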