def test_model_status(app: Flask, fs: FakeFilesystem) -> None:
    """GET /models reports instances, the preferred instance, and topic stats."""
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    client = app.test_client()
    with app.test_request_context():
        response = client.get('/models?model=test_model',
                              content_type='application/json')
        assert response.status == '200 OK'
        payload = json.loads(response.data)
        assert payload
        assert payload['instances'] == [
            'test_instance', 'test_instance_unreleased'
        ]
        assert payload['preferred'] == 'test_instance'
        # Spot-check an arbitrary topic's reported stats.
        assert payload['topics']['Poverty'] == {
            'name': 'Poverty',
            'quality': 0.81,
            'samples': 178
        }
        # A topic without enough samples for threshold optimization
        # should report zeroed-out stats.
        assert payload['topics']['nan'] == {
            'name': 'nan',
            'samples': 0,
            'quality': 0.0
        }
def test_model_status(app: Flask, fs: FakeFilesystem) -> None:
    """GET /models lists instances and per-topic quality/sample counts."""
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    client = app.test_client()
    with app.test_request_context():
        response = client.get('/models?model=test_model',
                              content_type='application/json')
        assert response.status == '200 OK'
        payload = json.loads(response.data)
        assert payload
        assert payload['instances'] == [
            'test_instance', 'test_instance_unreleased'
        ]
        assert payload['preferred'] == 'test_instance'
        # Spot-check two arbitrary topics from the test fixture data.
        assert payload['topics']['Asylum-seekers - refugees'] == {
            'name': 'Asylum-seekers - refugees',
            'quality': 1.0,
            'samples': 260
        }
        assert payload['topics']['National Human Rights Institution'] == {
            'name': 'National Human Rights Institution',
            'quality': 1.0,
            'samples': 552
        }
def test_missing_labels_file(self, fs: FakeFilesystem) -> None:
    """Classifier construction fails with a clear error when label.vocab
    is missing from the instance directory.

    Fix: ``pytest.raises(match=...)`` interprets the string as a regular
    expression; the interpolated path contains ``.`` metacharacters, so we
    escape it to make the match literal rather than accidentally lenient.
    """
    import re  # local import: only needed to escape the path for the regex

    labels_path = './testdata/test_model/test_instance/label.vocab'
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.remove_object(labels_path)
    with pytest.raises(
            Exception,
            match='Failure to load labels file from {0} with exception'.format(
                re.escape(labels_path))):
        Classifier(self.BASE_CLASSIFIER_PATH, 'test_model')
def test_preferred_instance(self, fs: FakeFilesystem) -> None:
    """The released instance is preferred over the unreleased one."""
    model = 'test_model'
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    status = ModelStatus(self.BASE_CLASSIFIER_PATH, model)
    preferred = status.get_preferred_model_instance()
    assert preferred == 'test_instance'
def test_missing_instance_dir(self, fs: FakeFilesystem) -> None:
    """Classifier refuses to load when no released instance exists."""
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    model_path = os.path.join(self.BASE_CLASSIFIER_PATH, 'test_model')
    # Brackets in the expected instance list are escaped because
    # pytest.raises treats `match` as a regular expression.
    expected = ('No valid instance of model found in %s, ' +
                'instances were %s') % (model_path,
                                        r'\[\'test_instance_unreleased\'\]')
    with pytest.raises(Exception, match=expected):
        Classifier(self.BASE_CLASSIFIER_PATH, 'test_model')
def test_missing_model(self, fs: FakeFilesystem) -> None:
    """Loading fails when the instance directory has no SavedModel file."""
    instance_path = os.path.join(self.BASE_CLASSIFIER_PATH, 'test_model',
                                 'test_instance_missing_model')
    fs.add_real_directory(instance_path)
    expected = 'SavedModel file does not exist at: {0}'.format(instance_path)
    with pytest.raises(Exception, match=expected):
        Classifier(self.BASE_CLASSIFIER_PATH, 'test_model')
def test_missing_variables(self, fs: FakeFilesystem) -> None:
    """Loading fails when the SavedModel's variables directory is absent."""
    instance_path = os.path.join(self.BASE_CLASSIFIER_PATH, 'test_model',
                                 'test_instance_missing_variables')
    fs.add_real_directory(instance_path)
    expected = '{0}/variables; No such file or directory'.format(instance_path)
    with pytest.raises(Exception, match=expected):
        Classifier(self.BASE_CLASSIFIER_PATH, 'test_model')
def test_instances(self, fs: FakeFilesystem) -> None:
    """list_model_instances returns every instance directory, sorted."""
    model = 'test_model'
    fs.add_real_directory(path.join(self.BASE_CLASSIFIER_PATH, model))
    status = ModelStatus(self.BASE_CLASSIFIER_PATH, model_name=model)
    instances = status.list_model_instances()
    assert instances == [
        'test_instance',
        'test_instance_missing_model',
        'test_instance_missing_variables',
        'test_instance_missing_variables_data',
        'test_instance_missing_variables_index',
        'test_instance_unreleased',
    ]
def test_store_and_verify_raiden(
    fs_reload_deployer: FakeFilesystem,
    deployed_raiden_info: DeployedContracts,
    deployer: ContractDeployer,
) -> None:
    """Storing raiden deployment info twice verifies cleanly both times."""
    # Expose the precompiled contracts data to the fake filesystem.
    fs_reload_deployer.add_real_directory(
        contracts_precompiled_path(version=None).parent)
    info = deployed_raiden_info
    # First call stores, second call must still verify against the store.
    deployer.store_and_verify_deployment_info_raiden(
        deployed_contracts_info=info)
    deployer.store_and_verify_deployment_info_raiden(
        deployed_contracts_info=info)
def test_classify(app: Flask, fs: FakeFilesystem) -> None:
    """POST /classify scores a sequence against the released instance."""
    instance_path = os.path.join('./testdata/test_model/test_instance')
    fs.add_real_directory(instance_path)
    client = app.test_client()
    with app.test_request_context():
        body = json.dumps(
            {'seqs': ['take forceful action to improve childrens rights']})
        response = client.post('/classify?model=test_model',
                               data=body,
                               content_type='application/json')
        assert response.status == '200 OK'
        predictions = json.loads(response.data)
        assert predictions
        assert predictions[0]['Rights of the Child'] >= 0.7
def test_models(self, fs: FakeFilesystem) -> None:
    """list_potential_models discovers every model directory present."""
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    fs.add_real_directory('./testdata/test_other_model/test_instance')
    fs.add_real_directory(
        './testdata/test_other_model/test_instance_unreleased')
    status = ModelStatus(self.BASE_CLASSIFIER_PATH)
    models = status.list_potential_models()
    assert models == ['test_model', 'test_other_model']
def root(fs: FakeFilesystem) -> Iterator[Path]:
    """
    Create the blog root directory.
    """
    # Mirror the real template directories into the fake filesystem so
    # builders can load them during tests. Where they live depends on
    # whether we run from a checkout (``src/``) or an installed package
    # (``site-packages``); try both and ignore whichever is absent.
    project_root = get_project_root()
    for location in (
        "src/nefelibata/templates",
        "site-packages/nefelibata/templates",
    ):
        try:
            fs.add_real_directory(project_root / location)
        except FileNotFoundError:
            pass

    blog_root = Path("/path/to/blog")
    fs.create_dir(blog_root)

    yield blog_root
def test_classify(self, fs: FakeFilesystem) -> None:
    """classify loads the released instance and scores known labels."""
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    classifier = Classifier('./testdata', 'test_model')
    predictions = classifier.classify(['Increase access to health care'])
    # The classifier should be fully initialized after construction.
    assert classifier.labels is not None
    assert classifier.embedder is not None
    assert classifier.predictor is not None
    assert classifier.instance == 'test_instance'
    assert predictions
    # predictions ~ [{topic: probability, topic2: probability, ...}, ...]
    for topic in predictions[0]:
        assert topic in classifier.labels
    assert len(predictions) == 1
    assert predictions[0]['Right to health'] >= 0.8
def test_classify(self, fs: FakeFilesystem) -> None:
    """classify scores a free-text query against the released instance.

    Fix: removed a stray ``print(result)`` debugging statement that was
    left in the test body.
    """
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    c = Classifier('./testdata', 'test_model')
    result = c.classify(['Where is my medical book?'])
    # The classifier should be fully initialized after construction.
    assert c.vocab is not None
    assert c.embedder is not None
    assert c.predictor is not None
    assert c.instance == 'test_instance'
    assert result
    # result ~ [{topic: probability, topic2: probability, ...}, ...]
    for topic, _ in result[0].items():
        assert topic in c.vocab
    assert result[0]['Right to education'] >= 0.7
def test_store_and_verify_services(
    fs_reload_deployer: FakeFilesystem,
    deployer: ContractDeployer,
    deployed_service_info: DeployedContracts,
    token_address: HexAddress,
) -> None:
    """Service contract deployment info verifies, then stores and verifies."""
    # Expose the precompiled contracts data to the fake filesystem.
    fs_reload_deployer.add_real_directory(
        contracts_precompiled_path(version=None).parent)
    info = deployed_service_info
    deployer.verify_service_contracts_deployment_data(
        token_address=token_address,
        deployed_contracts_info=info,
        user_deposit_whole_balance_limit=DEPOSIT_LIMIT,
    )
    deployer.store_and_verify_deployment_info_services(
        token_address=token_address,
        deployed_contracts_info=info,
        user_deposit_whole_balance_limit=DEPOSIT_LIMIT,
    )
def fake_filesystem(fs: FakeFilesystem) -> FakeFilesystem:
    """A pytest fixture which mocks the filesystem before each test."""
    # Requesting the "fs" fixture activates pyfakefs, so from here on all
    # filesystem operations hit an in-memory fake filesystem.

    # certifi's CA bundle must be readable from the real filesystem,
    # otherwise HTTP requests made with `requests` cannot verify TLS.
    fs.add_real_file(certifi.where())

    # Package data also needs to come from the real filesystem.
    fs.add_real_directory(os.path.join(os.path.dirname(__file__), "../lean/ssh"))

    # Give each test a fresh, empty fake home directory as its cwd.
    testing_dir = Path.home() / "testing"
    fs.create_dir(testing_dir)
    os.chdir(testing_dir)

    # Path instances are bound to the filesystem active when they were
    # created; resetting singletons forces them to be recreated against
    # the fake filesystem and avoids cross-filesystem weirdness.
    container.reset_singletons()

    return fs
def test_invalid_bert(self, fs: FakeFilesystem) -> None:
    """A config pointing at a nonexistent BERT model fails on first embed."""
    bad_bert_path = './bad/path/to/bert'
    # Replace the real instance config with one whose "bert" path is bogus.
    config = """
        {
            "bert": "%s",
            "labels": "label.vocab",
            "is_released": true,
            "description": "This is the latest model from Sascha.",
            "metadata": {
                "thesaurus": "issues"
            }
        }
        """ % (bad_bert_path)
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.remove_object('./testdata/test_model/test_instance/config.json')
    fs.create_file('./testdata/test_model/test_instance/config.json',
                   contents=config)
    with pytest.raises(Exception, match='SavedModel file does not exist at'):
        classifier = Classifier(self.BASE_CLASSIFIER_PATH, 'test_model')
        # The bad BERT path is only dereferenced on an uncached embed,
        # so the failure surfaces here rather than in the constructor.
        classifier.classify(['some string'])
def test_classify(app: Flask, fs: FakeFilesystem) -> None:
    """POST /classify with the samples API returns labeled predictions."""
    instance_path = os.path.join('./testdata/test_model/test_instance')
    fs.add_real_directory(instance_path)
    client = app.test_client()
    with app.test_request_context():
        request_body = json.dumps({
            'samples': [{
                'seq': 'improve access to health care for children'
            }]
        })
        response = client.post('/classify?model=test_model',
                               data=request_body,
                               content_type='application/json')
        assert response.status == '200 OK'
        payload = json.loads(response.data)
        assert payload == dict(samples=[
            dict(model_version='test_instance',
                 predicted_labels=[dict(quality=1.0, topic='Right to health')],
                 seq='improve access to health care for children',
                 sharedId='')
        ])
def test_list_models(app: Flask, fs: FakeFilesystem) -> None:
    """GET /models/list returns every model directory under testdata."""
    fs.add_real_directory('./testdata/test_model/test_instance')
    fs.add_real_directory('./testdata/test_model/test_instance_unreleased')
    fs.add_real_directory('./testdata/test_other_model/test_instance')
    client = app.test_client()
    with app.test_request_context():
        response = client.get('/models/list',
                              content_type='application/json')
        assert response.status == '200 OK'
        payload = json.loads(response.data)
        assert payload
        # The response contains exactly one key: the model list.
        assert len(payload.keys()) == 1
        assert set(payload['models']) == set(['test_model', 'test_other_model'])
def test_e2e(app: Flask, fs: FakeFilesystem) -> None:
    """End-to-end: train labels, refresh thresholds, then predictions."""
    seq_pattern = (
        'react more swiftly to comply with international instruments %d')
    instance_path = './testdata/test_model/test_instance'
    fs.add_real_directory(instance_path)
    # Delete precomputed quality/threshold data so the test exercises the
    # full RefreshThresholds / RefreshPredictions pipeline from scratch.
    fs.remove_object('./testdata/test_model/test_instance/quality.json')
    fs.remove_object('./testdata/test_model/test_instance/thresholds.json')
    client = app.test_client()
    with app.test_request_context():
        # With thresholds.json deleted, classify yields no topics yet.
        response = client.post('/classify?model=test_model',
                               data=json.dumps({'seqs': [seq_pattern % 1]}),
                               content_type='application/json')
        assert response.status == '200 OK'
        assert len(json.loads(response.data)[0]) == 0

        # Upload 20 training samples all labeled with the same topic.
        training_payload = json.dumps({
            'samples': [{
                'seq': seq_pattern % i,
                'training_labels': [{
                    'topic': 'International instruments'
                }]
            } for i in range(20)]
        })
        assert client.put('/classification_sample?model=test_model',
                          data=training_payload,
                          content_type='application/json'
                          ).status == '200 OK'

        response = client.get('/classification_sample?model=test_model&seq=*')
        assert response.status == '200 OK'
        assert len(json.loads(response.data)) == 20

        # Recompute thresholds from the training samples.
        assert client.post('/task',
                           data=json.dumps({
                               'provider': 'RefreshThresholds',
                               'name': 'thres',
                               'model': 'test_model'
                           }),
                           content_type='application/json'
                           ).status == '200 OK'
        wait_for_task(client, 'thres')

        # Every training sequence should now classify to exactly one topic.
        for i in range(20):
            response = client.post('/classify?model=test_model',
                                   data=json.dumps({'seqs': [seq_pattern % i]}),
                                   content_type='application/json')
            assert response.status == '200 OK'
            assert len(json.loads(response.data)[0]) == 1

        # Backfill predicted labels onto the stored samples.
        assert client.post('/task',
                           data=json.dumps({
                               'provider': 'RefreshPredictions',
                               'name': 'pred',
                               'model': 'test_model'
                           }),
                           content_type='application/json'
                           ).status == '200 OK'
        wait_for_task(client, 'pred')

        response = client.get('/classification_sample?model=test_model&seq=*')
        assert response.status == '200 OK'
        samples = json.loads(response.data)
        assert len(samples) == 20
        first_label = samples[0]['predicted_labels'][0]
        assert first_label['topic'] == 'International instruments'
        assert first_label['quality'] >= 0.5
def test_example(fs: FakeFilesystem):
    """example() writes the expected config files under ~/.aec."""
    fs.add_real_directory(cwd)
    example()
    # Both the main config and the userdata template must be created.
    assert os.path.exists(os.path.expanduser("~/.aec/ec2.toml"))
    assert os.path.exists(
        os.path.expanduser("~/.aec/userdata/amzn-install-docker.yaml"))