Example #1
def main(ensemble, tta, output):
    # Read in test data images from the 'data/test' folder
    print("Loading test data.")
    test_imgs = ImageList.from_folder(path=os.path.join(DATA_DIR, TEST_FOLDER))

    # Get predictions
    if ensemble:
        # Load ensemble of learners
        learners = []
        learner_names = ['dpn92', 'inceptionv4', 'se_resnext101']
        for name in learner_names:
            print(f"Loading {name}")
            learn = load_learner(SAVED_DIR, f'{name}.pkl', test=test_imgs)
            learners.append(learn)

        # Init ensemble
        print("Initializing ensemble.")
        ensemble = Ensemble(learners)

        # Get predictions
        print("Performing inference...")
        preds = ensemble.predict(tta)
        print("Predictions done.")

        # Get classes list
        classes = learners[0].data.classes
        # Get image names list
        img_names = [i.name for i in learners[0].data.test_ds.items]

    else:
        learner_name = 'se_resnext101'

        # Initialize Learner
        print(f"Loading {learner_name}")
        learn = load_learner(SAVED_DIR, f'{learner_name}.pkl', test=test_imgs)

        # Get predictions
        print("Performing inference...")
        if tta:
            preds, _ = learn.TTA(ds_type=DatasetType.Test)
        else:
            preds, _ = learn.get_preds(ds_type=DatasetType.Test)
        print("Predictions done.")

        # Get classes list
        classes = learn.data.classes
        # Get image names list
        img_names = [i.name for i in learn.data.test_ds.items]

    # Initialize DataFrame with the predictions
    df = pd.DataFrame(np.array(preds), columns=classes)
    # Insert image names to DataFrame
    df.insert(0, 'img_name', img_names)

    # Save predictions as csv file
    df.to_csv(output, index=False)
    print(f"Predictions saved to {output}")
Example #2
 def __init__(self, root_model_path, fwd_model_file_name, bwd_model_file_name, threshold=0.5):
     # root_model_path = "outputs/experiment_ckpts/ulmfit-dialog_babi_data_model"
     self.fwd_model = load_learner(root_model_path, fwd_model_file_name)
     self.bwd_model = load_learner(root_model_path, bwd_model_file_name)
     self.ds = self.fwd_model.data.single_ds.y
     self.trigger_functions = []
     self.response_functions = [templates.functions.root_concern]
     self.threshold = threshold
     for func in function_groups:
         self.trigger_functions.append(func[0])
         self.response_functions.extend(func[1])
Example #3
 def __init__(self, root_model_path, fwd_model_file_name, bwd_model_file_name, threshold=0.5):
     self.fwd_model_response = load_learner(root_model_path[0], fwd_model_file_name)
     self.bwd_model_response = load_learner(root_model_path[0], bwd_model_file_name)
     self.fwd_model_trigger = load_learner(root_model_path[1], fwd_model_file_name)
     self.bwd_model_trigger = load_learner(root_model_path[1], bwd_model_file_name)
     self.ds_response = self.fwd_model_response.data.single_ds.y
     self.ds_trigger = self.fwd_model_trigger.data.single_ds.y
     self.trigger_functions = []
     self.response_functions = [templates.functions.root_concern]
     self.threshold = threshold
     for func in function_groups:
         self.trigger_functions.append(func[0])
         self.response_functions.extend(func[1])
Example #4
    def __init__(self, fastaimodelpkl, fp16=False, verbose=False):
        """
        :param fastaimodelpkl: full path to the pkl file, e.g. "mystuff/fastaimodel.pkl"
        :param fp16: set to True to use half precision
        """
        dirname = ntpath.dirname(fastaimodelpkl)
        filename = ntpath.basename(fastaimodelpkl)

        if fp16:
            self.learn = load_learner(dirname, file=filename).to_fp16()
        else:
            self.learn = load_learner(dirname, file=filename)
            
        self.verbose=verbose
Example #5
    def load(self, path):
        if load_learner is None:
            raise ImportError("fastai package is required to use "
                              "bentoml.artifacts.FastaiModelArtifact")

        model = load_learner(path, self._file_name)
        return self.pack(model)
Example #6
def setup_learner(url, file_name):
    download_file(url, path / file_name)
    try:
        learner = load_learner(path, file_name)
        return learner
    except RuntimeError:
        logging.exception('Error setting up learner')
Example #7
 def load_model(self, tmp_dir):
     """Load the model in preparation for one or more prediction calls."""
     if self.inf_learner is None:
         model_uri = self.config['model_uri']
         model_path = download_if_needed(model_uri, tmp_dir)
         self.inf_learner = load_learner(dirname(model_path),
                                         basename(model_path))
Example #8
def _get_learner_object(data, layers, emb_szs, ps, emb_drop, pretrained_path):

    if pretrained_path:
        learn = load_learner(
            os.path.dirname(pretrained_path),
            os.path.basename(pretrained_path).split('.')[0] + "_exported.pth")
        if not data._is_empty:
            learn.data = data._databunch
    else:
        databunch = data._databunch
        if not emb_szs or isinstance(emb_szs, dict):
            emb_szs = databunch.get_emb_szs({} if not emb_szs else emb_szs)

        model = TabularModel(emb_szs,
                             len(databunch.cont_names),
                             out_sz=databunch.c,
                             layers=layers,
                             ps=ps,
                             emb_drop=emb_drop,
                             y_range=None,
                             use_bn=False)
        learn = Learner(databunch,
                        model,
                        model_dir=tempfile.TemporaryDirectory().name)

    return learn
Example #9
 def __init__(self,
              model_path='./models',
              od_model='faster-rcnn.pt',
              class_model='class_resnet.pkl',
              ls_model='ls_resnet.pkl',
              gam_model='gam_resnet.pkl',
              cutoffs=[1.5, 2.5]):
     model_path = Path(model_path)
     device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
     self.od_model = torch.load(str(model_path / od_model), device)
     self.od_model.eval()
     self.class_model = load_learner(path=model_path, file=class_model)
     self.ls_model = load_learner(path=model_path, file=ls_model)
     self.gam_model = load_learner(path=model_path, file=gam_model)
     self.cutoffs = cutoffs
Example #10
    def __init__(self, 
                 model_path:PathOrStr,
                 model_file_name:PathOrStr):

        self.learn = load_learner(path=model_path, file=model_file_name)
        self.learn.model.eval()  # turn off dropout, etc.; only needed after loading the model.
        self.encoder = self.learn.model[0]
Example #11
def upload_file():
    """
    retrieve the image uploaded and make sure it is an image file
    """
    file = request.files['file']
    image_extensions = ['jpg', 'jpeg', 'png']

    if file.filename.rsplit('.', 1)[-1].lower() not in image_extensions:
        return jsonify('Please upload an appropriate image file')

    # Load the trained model exported to export.pkl
    learn = load_learner(path=".")

    # Perform prediction
    #image_bytes = file.read()
    #img = Image.open(io.BytesIO(image_bytes))

    img = open_image(file)

    pred_class, pred_idx, outputs = learn.predict(img)
    i = pred_idx.item()
    classes = [
        'Domestic Medium Hair', 'Persian', 'Ragdoll', 'Siamese', 'Snowshoe'
    ]
    prediction = classes[i]

    return jsonify(f'Your cat is a {prediction}')
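
A hypothetical client call for the endpoint above, assuming the view is registered as /upload on a locally running Flask app; the route, port, and file name are assumptions not shown in the snippet.

import requests

with open('my_cat.jpg', 'rb') as f:
    # The 'file' key must match the request.files['file'] lookup above.
    resp = requests.post('http://localhost:5000/upload', files={'file': f})
print(resp.json())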
Example #12
def get_pred_new_data_old_model(
        valid_df: pd.DataFrame,
        path: Path = MODELS_PATH) -> Tuple[Learner, float]:
    """Get a RSMPE score for predictions from the existing best model, with
    new data.

    Input: a pd.DataFrame for the validation data and the path for the model.
    Output: the model ready to save, and the root mean squared percentage error
    for the predicted sales. (If this model is second-best, we'll still want
    to save it to a different file for record-keeping purposes.)
    """
    valid_df = preprocess.preprocess(valid_df)

    # Get the right model to load
    models = [
        file for file in os.listdir(path) if file.startswith('current_best')
    ]
    best_model = sorted(models, reverse=True)[0]
    learn = load_learner(path=path,
                         fname=best_model,
                         test=TabularList.from_df(valid_df, path=path))

    # get log predictions and compare to actual values
    log_preds, _ = learn.get_preds(ds_type=DatasetType.Test)
    valid_preds = np.exp(np.array(log_preds.flatten()))
    valid_reals = valid_df.loc[valid_df.sales != 0, 'sales'].values
    new_rmspe = rmspe(valid_preds, valid_reals)
    return (learn, new_rmspe)
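
The rmspe helper called above is not shown. A minimal sketch, assuming the usual definition of root mean squared percentage error, sqrt(mean(((actual - pred) / actual)^2)) over rows with non-zero actuals:

import numpy as np

def rmspe(preds: np.ndarray, actuals: np.ndarray) -> float:
    # Exclude zero actuals to avoid division by zero (assumed convention,
    # matching the sales != 0 filter applied above).
    mask = actuals != 0
    pct_err = (actuals[mask] - preds[mask]) / actuals[mask]
    return float(np.sqrt(np.mean(pct_err ** 2)))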
Example #13
def get_pred_single_val(data: pd.Series, path: Path) -> float:
    """Get a prediction for a single row of data.

    Input: a pd.Series for the data and the path for the model.
    Output: the predicted sales for that row of data.
    """
    # Get the right model to load
    models = [
        file for file in os.listdir(path) if file.startswith('current_best')
    ]
    best_model = sorted(models, reverse=True)[0]

    # Load the model and get the prediction
    #learn = load_learner(path/best_model)
    learn = load_learner(path=path, fname=best_model)

    log_pred_tens, _, _ = learn.predict(data)

    # The model returns a tensor (Float [x]) so we need to get x
    log_pred = log_pred_tens.data[0]

    # Also it gives log predictions, so we need to exp it
    prediction = math.exp(log_pred)

    return prediction
Example #14
def get_prediction_data(image_filepath, model_path):
    learn = basic_train.load_learner(model_path)
    image_for_fastai = vision.image.open_image(image_filepath)
    prediction = learn.predict(image_for_fastai)
    predicted_class = str(prediction[0]).replace('_', ' ')
    predicted_class_index = prediction[1].item()
    confidence = int(100*prediction[2][predicted_class_index].item())
    return predicted_class, confidence
Example #15
 def get_classified_prob(img):
     learn: Learner = load_learner(path=os.path.join(
         settings.BASE_DIR, 'vandydj/ai_models/'),
                                   file='model_v1.pkl')
     prediction = learn.predict(img)
     label = prediction[0]
     prob = max(prediction[2])
     return prob, label
Example #16
 def load_model(self, tmp_dir):
     """Load the model in preparation for one or more prediction calls."""
     if self.inf_learner is None:
         self.log_options()
         model_uri = self.backend_opts.model_uri
         model_path = download_if_needed(model_uri, tmp_dir)
         self.inf_learner = load_learner(
             dirname(model_path), basename(model_path))
Example #17
def load_model() -> None:
    # Define model
    global model
    try:
        model = load_learner(path='../../models/', file='wm_remove.pkl')
        model.model.eval()
    except Exception:
        print('Error loading model; check the model file.')
Example #18
 def __init__(self, model_folder_name:
              '(str) path to the model folder; do not include a file extension',
              model_name: '(str) the model file name, with .pkl extension',
              min_char_len:
              '(int) ignore input if its length is less than this number'):
     self.learn = load_learner(model_folder_name, model_name)
     self.classes = self.learn.data.train_ds.classes
     self.min_char_len = min_char_len
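
The class above only loads the learner; a hypothetical predict method showing how min_char_len might be used (the method name and return format are assumptions, not part of the original):

 def predict(self, text: str):
     # Skip inputs shorter than the configured minimum length (assumption
     # based on the min_char_len annotation above).
     if len(text) < self.min_char_len:
         return None
     pred_class, pred_idx, probs = self.learn.predict(text)
     return str(pred_class), probs[pred_idx].item()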
Example #19
    def __init__(self, bbz_path, model_name="bnet20191028"):
        self._bbz_path = bbz_path
        model_path = bbz_path / "models" / model_name

        from fastai.basic_train import load_learner

        # see https://docs.fast.ai/tutorial.inference.html
        self._learner = load_learner(model_path)
Example #20
 def load(cls, path: str, reset_paths=True, verbose=True):
     from fastai.basic_train import load_learner
     model = super().load(path, reset_paths=reset_paths, verbose=verbose)
     model.model = load_pkl.load_with_fn(
         f'{model.path}{model.model_internals_file_name}',
         lambda p: load_learner(model.path, p),
         verbose=verbose)
     return model
Example #21
def load_fastai():
    modelpath = Path(__file__).parent
    modelpath = modelpath / 'trained'
    logging.info('loading model from {0}'.format(modelpath))
    defaults.device = device('cpu')
    learn = load_learner(modelpath)
    learn.load('stage-2')
    return learn
Example #22
 def load_model(self, tmp_dir):
     """Load the model in preparation for one or more prediction calls."""
     if self.inf_learner is None:
         self.print_options()
         model_uri = self.backend_opts.model_uri
         model_path = download_if_needed(model_uri, tmp_dir)
         self.inf_learner = load_learner(
             dirname(model_path), basename(model_path))
         self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
Example #23
    def load(self, path):
        try:
            # We need matplotlib and torch for fastai.  Make sure we install those.
            from fastai.basic_train import load_learner
        except ImportError:
            raise ImportError(
                'fastai package is required to use FastaiModelArtifact')

        model = load_learner(path, self._file_name)
        return self.pack(model)
Example #24
    def test_save_models(self, mock_Learner_export, mock_Learner_save):
        """save_models should save the second-best model to an appropriate
        file, and the best model to the current best model file.
        """
        # TODO: this test was edited due to some changes - this test can and
        # should be rewritten to be more specific (ie useful).

        # fake winner/loser models and the names they should be saved with
        winner = load_learner(self.model_path, fname='current_best')
        # fname below could be replaced by any other not-as-good model
        # Even better would be to replace this with a pre-generated model as
        # a pytest funcarg
        loser = load_learner(path=self.model_path, fname='current_best')

        # Call the function
        train_model.save_models(winner, loser)

        # Assertions
        mock_Learner_export.assert_called()
Example #25
def _load_learner(export_file_name, label):
    try:
        from fastai.basic_train import load_learner
        learn = load_learner('notebooks', export_file_name)
        print(f'INFO: running with {label}')
        return learn
    except ModuleNotFoundError as ex:
        print(f'INFO: running without {label} as fastai not found')
    except FileNotFoundError as ex:
        print(f'INFO: running without {label} as {export_file_name} not found')
Example #26
 def load(cls, path: str, file_prefix='', reset_paths=False, verbose=True):
     from fastai.basic_train import load_learner
     obj = super().load(path,
                        file_prefix=file_prefix,
                        reset_paths=reset_paths,
                        verbose=verbose)
     obj.model = load_pkl.load_with_fn(
         f'{obj.path}{obj.model_internals_file_name}',
         lambda p: load_learner(obj.path, p),
         verbose=verbose)
     return obj
Example #27
 def __init__(self, model_path: PathOrStr, model_file_name: PathOrStr):
     """Load the Learner object from model_path/model_file_name.
     Args:
       model_path: The path (directory) of the Learner object.
                   e.g., ./model_files
       model_file_name: The file name of the Learner object.
                        e.g., model.pkl
     """
     self.learn = load_learner(path=model_path, file=model_file_name)
     self.learn.model.eval()  # turn off dropout, etc.; only needed after loading the model.
     self.encoder = self.learn.model[0]
Example #28
async def setup_learner(url, dest):
    await download_file(url, path / dest)
    try:
        learn = load_learner(path, dest)
        return learn
    except RuntimeError as e:
        if len(e.args) > 0 and 'CPU-only machine' in e.args[0]:
            print(e)
            message = "\n\nThis model was trained with an old version of fastai and will not work in a CPU environment.\n\nPlease update the fastai library in your training environment and export your model again.\n\nSee instructions for 'Returning to work' at https://course.fast.ai."
            raise RuntimeError(message)
        else:
            raise
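
The download_file coroutine awaited above is not shown in this snippet. A minimal sketch, assuming aiohttp and a Path destination; skipping the download when the export already exists is an assumption.

import aiohttp

async def download_file(url, dest):
    if dest.exists():
        return  # assume an already-downloaded export can be reused
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            data = await response.read()
    with open(dest, 'wb') as f:
        f.write(data)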
Example #29
 def __init__(self, tokenizer="fastai"):
     """Initalize the ULMFiT model for script generation
     
     Keyword Arguments:
         tokenizer {str} -- Text tokenizer to use for script generation (default: {"fastai"})
                             1. fastai
                             2. sentencepiece
     """
     self.tokenizer = tokenizer
     self.checkModelFile()
     logging.info("Model file present!")
     self.predictor = load_learner(self.modelFolder, self.modelFile)
Example #30
async def setup_learner():
    await download_file(export_file_url, path / export_file_name)
    try:
        learn = load_learner(path, export_file_name)
        return learn
    except RuntimeError as e:
        if len(e.args) > 0 and 'CPU-only machine' in e.args[0]:
            print(e)
            message = "ERROR: This model was trained with an old version of fastai, UPDATE IT!"
            raise RuntimeError(message)
        else:
            raise