def test_create_model(app):
    """POST /models creates a Model row and links it to the given searcher."""
    dataset = _add_dataset()
    searcher = _add_searcher()
    payload = {
        'dataset_id': dataset.id,
        'algorithm': 1,
        'algorithm_params': json.dumps({
            'a': 1,
            'script_file_name': '',
            'num_neighbors': 5
        }),
        'batch_size': 10,
        'epoch': 10,
        'searcher_id': searcher.id
    }
    resp = app.post('/api/renom_rg/models', payload)
    assert resp.status_int == 200

    session = db.session()
    created = session.query(db.Model).filter(
        db.Model.dataset_id == dataset.id).one()
    assert created
    assert resp.json['model_id'] == created.id

    # The endpoint must also have recorded the searcher<->model association.
    link = session.query(db.ParamSearcherModel).filter(
        db.ParamSearcherModel.searcher_id == searcher.id).filter(
        db.ParamSearcherModel.model_id == created.id).one()
    assert link
def test_model_deploy(app):
    """Deploying one model marks it deployed and un-deploys the other."""
    model1 = _add_model()
    model2 = _add_model()
    model1.deployed = 1
    model2.deployed = 1
    session = db.session()
    session.add_all([model1, model2])
    session.commit()

    resp = app.post('/api/renom_rg/models/%s/deploy' % model2.id)
    assert resp.status_int == 200
    body = resp.json['model']
    assert body['model_id'] == model2.id
    assert body['deployed'] == 1

    # The endpoint must have cleared the deployed flag on the other model.
    session = db.session()
    assert session.query(db.Model).get(model1.id).deployed == 0
    assert session.query(db.Model).get(model2.id).deployed == 1

    # NOTE(review): no commit after delete — presumably the app shares this
    # scoped session so the pending delete is visible; confirm if flaky.
    session.delete(model2)
    resp = app.post('/api/renom_rg/models/%s/deploy' % model2.id)
    assert resp.status_int == 404
def test_delete_searcher(app):
    """DELETE /searchers removes the searcher-model links, not the models."""
    searcher = _add_searcher()
    model1 = _add_model(searcher=searcher)
    model2 = _add_model(searcher=searcher)

    url = '/api/renom_rg/searchers/%d' % searcher.id
    assert app.delete(url).status_int == 200
    # A second delete of the same searcher must 404.
    assert app.delete(url).status_int == 404

    session = db.session()
    # .one() raises if the model row was (wrongly) cascaded away.
    session.query(db.Model).filter_by(id=model1.id).one()
    links = session.query(
        db.ParamSearcherModel).filter_by(searcher_id=searcher.id).all()
    assert not links
def _add_dataset():
    """Insert and return a DatasetDef row populated with dummy fixture data."""
    histogram = [{
        'train': {'counts': [1, 2], 'bins': [1, 2]},
        'valid': {'counts': [1, 2], 'bins': [1, 2]},
    }]
    dataset = db.DatasetDef(
        # Random name keeps repeated fixture rows from colliding.
        name=str(random.random()),
        description='description',
        explanatory_column_ids=pickle.dumps([1, 2, 3, 4, 5, 6]),
        target_column_ids=pickle.dumps([0]),
        selected_scaling=1,
        labels=pickle.dumps([1, 2, 3]),
        train_ratio=0.1,
        train_index=pickle.dumps(list(range(1, 405))),
        valid_index=pickle.dumps(list(range(405, 500))),
        target_train=pickle.dumps([1, 2]),
        target_valid=pickle.dumps([1, 2]),
        filename_y='none_scaling',
        filename_X='none_scaling',
        true_histogram=pickle.dumps(histogram))
    session = db.session()
    session.add(dataset)
    session.commit()
    return dataset
def test_create_searcher(app):
    """POST /searchers creates a ParamSearcher row and returns its id."""
    resp = app.post('/api/renom_rg/searchers',
                    {'info': json.dumps({'a': 1})})
    assert resp.status_int == 200
    searcher_id = resp.json['id']
    found = db.session().query(db.ParamSearcher).filter(
        db.ParamSearcher.id == searcher_id).one()
    assert found
def test_delete_model(app):
    """DELETE /models removes the row; a repeat delete yields 404."""
    model = _add_model(searcher=_add_searcher())
    url = '/api/renom_rg/models/%d' % model.id

    assert app.delete(url).status_int == 200
    remaining = db.session().query(db.Model).filter(
        db.Model.id == model.id).count()
    assert remaining == 0
    assert app.delete(url).status_int == 404
def test_delete_dataset(app):
    """DELETE /datasets removes the dataset; a repeat delete yields 404."""
    dataset = _add_dataset()
    _add_model(dataset=dataset, searcher=_add_searcher())
    url = '/api/renom_rg/datasets/%d' % dataset.id

    assert app.delete(url).status_int == 200
    remaining = db.session().query(db.DatasetDef).filter(
        db.DatasetDef.id == dataset.id).count()
    assert remaining == 0
    assert app.delete(url).status_int == 404
def test_create_dataset(app):
    """POST /datasets creates a DatasetDef row from the posted form fields."""
    histogram = [{
        'train': {'counts': [1, 2], 'bins': [1, 2]},
        'valid': {'counts': [1, 2], 'bins': [1, 2]},
    }]
    resp = app.post('/api/renom_rg/datasets', {
        'name': 'test_create_dataset_1',
        'description': 'description',
        'explanatory_column_ids': json.dumps([3, 4, 5, 6, 7, 8]),
        'target_column_ids': json.dumps([1, 2]),
        'selected_scaling': 1,
        'filename_y': 'none_scaling',
        'filename_X': 'none_scaling',
        'labels': json.dumps([1, 2, 3, 4]),
        'train_ratio': 0.1,
        'train_index': json.dumps([5, 6]),
        'valid_index': json.dumps([7, 8]),
        'target_train': json.dumps([1, 2]),
        'target_valid': json.dumps([1, 2]),
        'true_histogram': json.dumps(histogram)
    })
    assert resp.status_int == 200

    created = db.session().query(db.DatasetDef).filter(
        db.DatasetDef.name == 'test_create_dataset_1').one()
    assert created
    assert resp.json['dataset']['dataset_id'] == created.id
def _add_model(algorithm=1, algorithm_params=None, dataset=None,
               searcher=None):
    """Insert and return a Model row, creating a dataset if none is given.

    Args:
        algorithm: algorithm id stored on the row.
        algorithm_params: dict of params (pickled into the row); defaults to
            a fresh copy of DEFAULT_ALGORITHM_PARAMS.
        dataset: DatasetDef to attach to; a new one is created when None.
        searcher: optional ParamSearcher; when given, a ParamSearcherModel
            link row is also created.

    Fix: the old default ``algorithm_params=dict(DEFAULT_ALGORITHM_PARAMS)``
    built the dict once at definition time, so a caller mutating it leaked
    into every later call (mutable-default pitfall). A None sentinel copies
    per call instead.
    """
    if algorithm_params is None:
        algorithm_params = dict(DEFAULT_ALGORITHM_PARAMS)
    if dataset is None:
        dataset = _add_dataset()
    model = db.Model(dataset_id=dataset.id,
                     algorithm=algorithm,
                     algorithm_params=pickle.dumps(algorithm_params),
                     batch_size=10,
                     epoch=10)
    session = db.session()
    session.add(model)
    session.commit()
    if searcher:
        session.add(db.ParamSearcherModel(searcher=searcher, model=model))
        session.commit()
    return model
def _add_searcher():
    """Insert and return an empty ParamSearcher row."""
    new_searcher = db.ParamSearcher()
    session = db.session()
    session.add(new_searcher)
    session.commit()
    return new_searcher