def test_predict_sklearn():
    """Tests that we can generate predictions from a scikit-learn ``Bunch``."""
    bunch = datasets.load_iris()
    model = TemplateSKLearnClassifier(num_features=DummyDataset.num_features, num_classes=DummyDataset.num_classes)
    data_pipe = DataPipeline(preprocess=TemplatePreprocess())
    out = model.predict(bunch, data_source="sklearn", data_pipeline=data_pipe)
    assert isinstance(out[0], int)
def test_predict_numpy():
    """Tests that we can generate predictions from a numpy array."""
    row = np.random.rand(1, DummyDataset.num_features)
    model = TemplateSKLearnClassifier(num_features=DummyDataset.num_features, num_classes=DummyDataset.num_classes)
    data_pipe = DataPipeline(preprocess=TemplatePreprocess())
    out = model.predict(row, data_pipeline=data_pipe)
    assert isinstance(out[0], int)
def test_forward(num_classes, shape):
    """Tests that a tensor can be given to the model forward and gives the correct output size."""
    model = TemplateSKLearnClassifier(
        num_features=shape[1],
        num_classes=num_classes,
    )
    model.eval()
    row = torch.rand(*shape)
    out = model(row)
    assert out.shape == (shape[0], num_classes)
def test_jit(tmpdir, jitter, args):
    """Tests that the model can be scripted or traced with TorchScript, saved, reloaded, and used for inference."""
    path = os.path.join(tmpdir, "test.pt")

    model = TemplateSKLearnClassifier(num_features=16, num_classes=10)
    model.eval()

    model = jitter(model, *args)

    torch.jit.save(model, path)
    model = torch.jit.load(path)

    out = model(torch.rand(1, 16))
    assert isinstance(out, torch.Tensor)
    assert out.shape == torch.Size([1, 10])
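# ``num_classes``/``shape`` in ``test_forward`` and ``jitter``/``args`` in ``test_jit``
# are not pytest fixtures, so they would typically be supplied via
# ``pytest.mark.parametrize``. A minimal sketch of that wiring (the parameter values
# below are assumptions for illustration, not taken from the original test file):
import pytest
import torch


@pytest.mark.parametrize(
    "jitter, args",
    [
        (torch.jit.script, ()),                   # scripting needs no example input
        (torch.jit.trace, (torch.rand(1, 16),)),  # tracing needs an example input
    ],
)
def test_jit(tmpdir, jitter, args):
    ...  # body as in ``test_jit`` above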
def test_test(tmpdir):
    """Tests that the model can be tested on our ``DummyDataset``."""
    model = TemplateSKLearnClassifier(num_features=DummyDataset.num_features, num_classes=DummyDataset.num_classes)
    test_dl = torch.utils.data.DataLoader(DummyDataset(), batch_size=4)
    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True)
    trainer.test(model, test_dl)
def test_predict_sklearn():
    """Tests that we can generate predictions from a scikit-learn ``Bunch``."""
    bunch = datasets.load_iris()
    model = TemplateSKLearnClassifier(num_features=DummyDataset.num_features, num_classes=DummyDataset.num_classes)
    datamodule = TemplateData.from_sklearn(predict_bunch=bunch, batch_size=1)
    trainer = Trainer()
    out = trainer.predict(model, datamodule=datamodule, output="classes")
    assert isinstance(out[0][0], int)
def test_predict_numpy():
    """Tests that we can generate predictions from a numpy array."""
    row = np.random.rand(1, DummyDataset.num_features)
    model = TemplateSKLearnClassifier(num_features=DummyDataset.num_features, num_classes=DummyDataset.num_classes)
    datamodule = TemplateData.from_numpy(predict_data=row, batch_size=1)
    trainer = Trainer()
    out = trainer.predict(model, datamodule=datamodule, output="classes")
    assert isinstance(out[0][0], int)
def test_smoke():
    """A simple test that the class can be instantiated."""
    model = TemplateSKLearnClassifier(num_features=1, num_classes=1)
    assert model is not None
from sklearn import datasets

import flash
from flash.core.classification import Labels
from flash.template import TemplateData, TemplateSKLearnClassifier

# 1. Download the data
data_bunch = datasets.load_iris()

# 2. Load the data
datamodule = TemplateData.from_sklearn(
    train_bunch=data_bunch,
    val_split=0.8,
)

# 3. Build the model
model = TemplateSKLearnClassifier(
    num_features=datamodule.num_features,
    num_classes=datamodule.num_classes,
    serializer=Labels(),
)

# 4. Create the trainer.
trainer = flash.Trainer(max_epochs=1, limit_train_batches=1, limit_val_batches=1)

# 5. Train the model
trainer.fit(model, datamodule=datamodule)

# 6. Save it!
trainer.save_checkpoint("template_model.pt")

# 7. Classify a few examples
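# Step 7 is left open in the snippet above. A minimal sketch of how it could be
# completed, assuming the trained ``model`` exposes the same ``predict`` API used
# in the inference example below (the feature rows are copied from that example):
import numpy as np

predictions = model.predict([
    np.array([4.9, 3.0, 1.4, 0.2]),
    np.array([6.9, 3.2, 5.7, 2.3]),
    np.array([7.2, 3.0, 5.8, 1.6]),
])
print(predictions)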
import numpy as np
from sklearn import datasets

from flash import Trainer
from flash.template import TemplateData, TemplateSKLearnClassifier

# 1. Download the data
data_bunch = datasets.load_iris()

# 2. Load the model from a checkpoint
model = TemplateSKLearnClassifier.load_from_checkpoint("https://flash-weights.s3.amazonaws.com/template_model.pt")

# 3. Classify a few examples
predictions = model.predict([
    np.array([4.9, 3.0, 1.4, 0.2]),
    np.array([6.9, 3.2, 5.7, 2.3]),
    np.array([7.2, 3.0, 5.8, 1.6]),
])
print(predictions)

# 4. Or generate predictions from a whole dataset!
datamodule = TemplateData.from_sklearn(predict_bunch=data_bunch)
predictions = Trainer().predict(model, datamodule=datamodule)
print(predictions)
import numpy as np
import torch
from sklearn import datasets

import flash
from flash.template import TemplateData, TemplateSKLearnClassifier

# 1. Create the DataModule
datamodule = TemplateData.from_sklearn(
    train_bunch=datasets.load_iris(),
    val_split=0.1,
    batch_size=4,
)

# 2. Build the task
model = TemplateSKLearnClassifier(num_features=datamodule.num_features, num_classes=datamodule.num_classes)

# 3. Create the trainer and train the model
trainer = flash.Trainer(max_epochs=3, gpus=torch.cuda.device_count())
trainer.fit(model, datamodule=datamodule)

# 4. Classify a few examples
datamodule = TemplateData.from_numpy(
    predict_data=[
        np.array([4.9, 3.0, 1.4, 0.2]),
        np.array([6.9, 3.2, 5.7, 2.3]),
        np.array([7.2, 3.0, 5.8, 1.6]),
    ],
    batch_size=4,
)
predictions = trainer.predict(model, datamodule=datamodule, output="classes")
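# A typical follow-up, mirroring the other snippets above (an assumption, not part
# of the original example): inspect the predictions and save a checkpoint for the
# inference example.
print(predictions)
trainer.save_checkpoint("template_model.pt")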