Example 1
def model():
    '''Returns a simple test model'''
    def add(x: int, y: int) -> int:
        '''Adds two numbers'''
        return x + y

    def count(strings: List[str]) -> Dict[str, int]:
        return Counter(strings)

    def empty() -> int:
        return 1

    Image = new_type(raw_type=bytes, name="Image", metadata={"test": "this is a test"})

    def rotate_image(img: Image) -> Image:
        return img

    Dictionary = new_type(raw_type=dict, name="Dictionary", metadata={"test": "this is a test"})

    def handle_dict(_dict: Dictionary) -> Dictionary:
        return _dict

    Text = new_type(str, 'Text')

    def count_words(text: Text) -> int:
        '''Counts the number of words in the text'''
        return len(text.split(' '))

    def create_words(n: int) -> Text:
        return " ".join(["éééé"] * n)

    model = Model(add=add, count=count, empty=empty, rotate_image=rotate_image,
                  handle_dict=handle_dict, count_words=count_words,
                  create_words=create_words)
    return model
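
A minimal sketch of exercising the fixture above, assuming the `.inner` accessor that `Model` exposes (demonstrated later in `test_model`):

m = model()
assert m.add.inner(1, 2) == 3                          # plain function call
assert m.count.inner(['a', 'b', 'a']) == {'a': 2, 'b': 1}
assert m.count_words.inner('one two three') == 3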
def test_script_req():
    '''Tests that Python scripts can be included using Requirements'''
    def predict(x: int) -> int:
        return x

    model = Model(predict=predict)
    model_name = 'my-model'

    # tests that individual script and directory of scripts are both gathered
    reqs = Requirements(scripts=_abspath('./user_module.py', './user_package'))

    with _dump_model(model, model_name, reqs) as dump_dir:
        _verify_files(dump_dir,
                      ('scripts/user_provided/user_package_module.py',
                       'scripts/user_provided/__init__.py',
                       'scripts/user_provided/user_module.py'))

    bad_reqs = Requirements(
        scripts=_abspath('./user_module.py', './user_package', 'not_real.py'))

    with pytest.raises(AcumosError, match='does not exist'):
        with _dump_model(model, model_name, bad_reqs) as dump_dir:
            pass

    bad_reqs = Requirements(
        scripts=_abspath('./user_module.py', './user_package', './att.png'))

    with pytest.raises(AcumosError, match='is invalid'):
        with _dump_model(model, model_name, bad_reqs) as dump_dir:
            pass
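
Outside of tests, the same `scripts` option applies when pushing to a server; a hedged sketch with hypothetical paths, assuming `push` accepts a `Requirements` object as elsewhere in these examples (push_api and auth_api being the onboarding URLs):

reqs = Requirements(scripts=['./helpers/preprocess.py'])  # hypothetical script
session = AcumosSession(push_api, auth_api)
session.push(model, 'my-model', reqs)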
Example 3
def test_session_dump_zip(replace: bool):
    '''Tests session dump zip'''
    def my_transform(x: int, y: int) -> int:
        return x + y

    model = Model(transform=my_transform)
    model_name = 'my-model'

    session = AcumosSession()

    with tempfile.TemporaryDirectory() as tdir:
        model_zip_path = Path(tdir) / f"{model_name}.zip"

        session.dump_zip(model, model_name, model_zip_path)
        import zipfile
        with zipfile.ZipFile(model_zip_path, "r") as model_zip:
            assert set(model_zip.namelist()) == set(_REQ_FILES)

        if replace is False:
            with pytest.raises(AcumosError):
                session.dump_zip(model, model_name,
                                 model_zip_path)  # file already exists
        else:
            session.dump_zip(
                model, model_name, model_zip_path,
                replace=replace)  # file already exists but it will be replaced
def test_dump_model():
    '''Tests dump model utility, including generated artifacts'''
    def predict(x: int) -> int:
        return user_function(x)

    model = Model(predict=predict)
    model_name = 'my-model'

    reqs = Requirements(reqs=['wronglib'],
                        req_map={'wronglib': 'scipy'},
                        packages=[_USER_PACKAGE_DIR])

    with _dump_model(model, model_name, reqs) as dump_dir:

        assert set(listdir(dump_dir)) == set(_REQ_FILES)

        metadata = load_artifact(dump_dir,
                                 'metadata.json',
                                 module=json,
                                 mode='r')
        schema = _load_schema(SCHEMA_VERSION)
        validate(metadata, schema)

        # test that a user-provided library was included and correctly mapped
        assert 'scipy' in {
            r['name']
            for r in metadata['runtime']['dependencies']['pip']['requirements']
        }

        # test that custom package was bundled
        _verify_files(
            dump_dir,
            ('scripts/user_provided/user_package/user_package_module.py',
             'scripts/user_provided/user_package/__init__.py',
             'scripts/user_provided/user_module.py'))
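
The `req_map` option above covers cases where an import name differs from its PyPI distribution name; for instance:

# the 'sklearn' import is provided by the 'scikit-learn' distribution on PyPI
reqs = Requirements(reqs=['sklearn'], req_map={'sklearn': 'scikit-learn'})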
def model_create_pipeline(formatter, clf):
    formatter.set_params(classifier=clf)
    tag_type = []
    for item in formatter.output_types_:
        for k in item:
            tag_type.append((k, item[k]))
    name_in = "ImageTag"
    ImageTag = create_namedtuple(name_in, tag_type)
    name_multiple_in = name_in + "s"
    ImageTagSet = create_namedtuple(name_in + "Set",
                                    [(name_multiple_in, List[ImageTag])])

    def predict_class(val_wrapped: ImageTagSet) -> ImageTagSet:
        df = pd.DataFrame(getattr(val_wrapped, name_multiple_in),
                          columns=ImageTag._fields)
        tags_df = formatter.predict(df)
        tags_parts = tags_df.to_dict('split')
        tags_list = [ImageTag(*r) for r in tags_parts['data']]
        print("[{} - {}:{}]: Input {} row(s) ({}), output {} row(s) ({}))".
              format("classify", MODEL_NAME, VERSION, len(df), ImageTagSet,
                     len(tags_df), ImageTagSet))
        return ImageTagSet(tags_list)

    package_path = path.dirname(path.realpath(__file__))
    return Model(classify=predict_class), Requirements(packages=[package_path],
                                                       reqs=[pd, np, sklearn])
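
A sketch of consuming the returned pair, assuming `dump` accepts a `Requirements` argument like the session calls elsewhere in these examples (the model name here is hypothetical):

model, reqs = model_create_pipeline(formatter, clf)
session = AcumosSession()
session.dump(model, 'image-mood-classifier', '.', reqs)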
Example 6
    def generate_model(self, CSV_filename, is_raw_data=False):
        from acumos.modeling import Model, List, create_namedtuple
        from acumos.session import Requirements
        from os import path
        import sklearn

        print(">> %s:  Loading raw features, training model" % CSV_filename)
        model = self.build_model_from_CSV(CSV_filename,
                                          is_raw_data=is_raw_data)
        print(">> %s:  Reload features, push to server" % CSV_filename)
        df = pd.read_csv(CSV_filename)[self.features]
        listVars = [(df.columns[i], df.dtypes[i].type)
                    for i in range(len(df.columns))]
        VmPredictorDataFrame = create_namedtuple('VmPredictorDataFrame',
                                                 listVars)

        def predict_metric(df: VmPredictorDataFrame) -> List[float]:
            '''Returns an array of float predictions'''
            X = np.column_stack(df)
            return model.predict(X)

        # compute path of this package to add it as a dependency
        package_path = path.dirname(path.realpath(__file__))
        return Model(classify=predict_metric), Requirements(
            packages=[package_path], reqs=[matplotlib, sklearn, np, pd])
def test_session_push_sklearn():
    '''Tests basic model pushing functionality with sklearn'''
    clear_jwt()

    with _patch_auth():
        with MockServer() as server:
            iris = load_iris()
            X = iris.data
            y = iris.target

            clf = RandomForestClassifier(random_state=0)
            clf.fit(X, y)

            columns = [
                'sepallength', 'sepalwidth', 'petallength', 'petalwidth'
            ]
            X_df = pd.DataFrame(X, columns=columns)

            DataFrame = create_dataframe('DataFrame', X_df)
            Predictions = create_namedtuple('Predictions',
                                            [('predictions', List[int])])

            def predict(df: DataFrame) -> Predictions:
                '''Predicts the class of iris'''
                X = np.column_stack(df)
                yhat = clf.predict(X)
                preds = Predictions(predictions=yhat)
                return preds

            model = Model(predict=predict)

            model_url, auth_url, _, _ = server.config
            s = AcumosSession(model_url, auth_url)
            s.push(model, name='sklearn_iris_push')
def _push_dummy_model(extra_headers=None):
    '''Generic dummy model push routine'''
    def my_transform(x: int, y: int) -> int:
        return x + y

    model = Model(transform=my_transform)

    with MockServer() as server:
        model_url, auth_url, _, _ = server.config
        s = AcumosSession(model_url, auth_url)
        s.push(model, name='my-model', extra_headers=extra_headers)
def test_custom_script():
    '''Tests that custom modules can be included, wrapped, and loaded'''
    def predict(x: int) -> int:
        return user_function(x)

    model = Model(predict=predict)
    model_name = 'my-model'

    with _dump_model(model, model_name) as dump_dir:
        run_command(
            [sys.executable, _MODEL_LOADER_HELPER, dump_dir, 'user_module'])
Example 10
def model_create_pipeline(formatter, clf):
    from acumos.modeling import Model, List, create_namedtuple
    from acumos.session import Requirements
    from os import path
    from image_mood_classifier._version import MODEL_NAME, __version__ as VERSION

    # add classifier
    formatter.set_params(classifier=clf)

    # create a dataframe and image set
    # ImageSet = create_dataframe("ImageSet", ImageDecoder.generate_input_dataframe())
    # TODO: replace with more friendly dataframe operation when it supports strings...
    tag_type = []
    print("=================formatter.output_types_:%s",formatter.output_types_)
    for item in formatter.output_types_:
        print("++++++++++++++++++++++item:%s",item)
        for k in item:
            print("======================k:%s",k)
            tag_type.append((k, item[k]))
            print("=================tag_type:%s",tag_type)
    name_in = "ImageTag"
    ImageTag = create_namedtuple(name_in, tag_type)
    print("==========================ImageTag:%s",ImageTag)
    name_multiple_in = name_in + "s"
    print("==========================name_multiple_in:%s",name_multiple_in)
    ImageTagSet = create_namedtuple(name_in + "Set", [(name_multiple_in, List[ImageTag])])
    print("========================ImageTagSet:%s",ImageTagSet)
    print("=======================ImageTag._fields:%s",ImageTag._fields)

    def predict_class(val_wrapped: ImageTagSet) -> ImageTagSet:
        '''Returns an array of float predictions'''
        # NOTE: we don't have a named output type, so need to match 'value' to proto output
        # print("-===== input -===== ")
        # print(input_set)
        df = pd.DataFrame(getattr(val_wrapped, name_multiple_in), columns=ImageTag._fields)
        # print("-===== df -===== ")
        # print(df)
        # print("-===== out df -===== ")
        tags_df = formatter.predict(df)
        # print(tags_df)
        tags_parts = tags_df.to_dict('split')
        # print("-===== out list -===== ")
        # print(output_set)
        tags_list = [ImageTag(*r) for r in tags_parts['data']]
        print("[{} - {}:{}]: Input {} row(s) ({}), output {} row(s) ({}))".format(
              "classify", MODEL_NAME, VERSION, len(df), ImageTagSet, len(tags_df), ImageTagSet))
        return ImageTagSet(tags_list)

    # compute path of this package to add it as a dependency
    package_path = path.dirname(path.realpath(__file__))
    return Model(classify=predict_class), Requirements(packages=[package_path], reqs=[pd, np, sklearn])
@contextmanager
def _mock_model(yield_model=True):
    '''Context manager that yields an acumos.wrapped.WrappedModel model for testing purposes'''
    def add(x: int, y: int) -> int:
        return x + y

    def multiply(x: int, y: int) -> int:
        return x * y

    model = Model(add=add, multiply=multiply)

    session = AcumosSession()
    with TemporaryDirectory() as tdir:
        session.dump(model, 'test-model', tdir)
        model_dir = os.path.join(tdir, 'test-model')
        wrapped_model = load_model(model_dir)
        yield wrapped_model if yield_model else model_dir
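
Minimal usage sketch, assuming the helper above is decorated with @contextmanager as its docstring implies:

with _mock_model(yield_model=False) as model_dir:
    wrapped_model = load_model(model_dir)  # same loader the helper uses internally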
Example 12
def test_user_script():
    '''Tests that user scripts are identified as dependencies'''
    def predict(x: int) -> int:
        return user_function(x)

    model = Model(predict=predict)

    with AcumosContextManager() as context:
        model_path = context.build_path('model.pkl')
        with open(model_path, 'wb') as f:
            dump_model(model, f)

            assert 'user_module' in context.script_names

        # unpickling should fail because `user_module` is not available
        with pytest.raises(Exception, match="No module named 'user_module'"):
            run_command([sys.executable, _UNPICKLER_HELPER, context.abspath])
def test_session_dump():
    '''Tests session dump'''
    def my_transform(x: int, y: int) -> int:
        return x + y

    model = Model(transform=my_transform)
    model_name = 'my-model'

    s = AcumosSession()

    with tempfile.TemporaryDirectory() as tdir:

        s.dump(model, model_name, tdir)
        model_dir = path_join(tdir, model_name)
        assert set(listdir(model_dir)) == set(_REQ_FILES)

        with pytest.raises(AcumosError):
            s.dump(model, model_name, tdir)  # file already exists
Example 14
def _push_dummy_model(extra_headers=None,
                      use_model_url=True,
                      use_auth_url=False,
                      options=None):
    '''Generic dummy model push routine'''
    def my_transform(x: int, y: int) -> int:
        return x + y

    model = Model(transform=my_transform)

    with MockServer() as server:
        _model_url, _auth_url, _, _ = server.config
        model_url = _model_url if use_model_url else None
        auth_url = _auth_url if use_auth_url else None

        session = AcumosSession(model_url, auth_url)
        session.push(model,
                     name='my-model',
                     extra_headers=extra_headers,
                     options=options)
def test_custom_package():
    '''Tests that custom packages can be included, wrapped, and loaded'''
    def my_transform(x: int, y: int) -> int:
        return user_package_module.add_numbers(x, y)

    model = Model(transform=my_transform)
    model_name = 'my-model'

    # load should fail without requirements
    with pytest.raises(
            Exception,
            match='Module user_package was detected as a dependency'):
        with _dump_model(model, model_name) as dump_dir:
            pass

    reqs = Requirements(packages=[_USER_PACKAGE_DIR])

    with _dump_model(model, model_name, reqs) as dump_dir:
        run_command(
            [sys.executable, _MODEL_LOADER_HELPER, dump_dir, 'user_package'])
Example 16
    def generate_model(self, file_list, data_out=None):
        from acumos.modeling import Model, List, create_namedtuple
        from acumos.session import Requirements
        from os import path
        import sklearn

        # Note:  all files in the list will be appended
        master_df, VM_list = self.preprocess_files(file_list)
        train_start, range_end = self.find_time_range(
            master_df)  # TBD:  allow user to specify start/stop dates
        train_stop = train_start + self.train_interval
        xmodel, train_data = self.train_timeslice_model(master_df,
                                                        VM_list,
                                                        train_start,
                                                        train_stop,
                                                        featfile=data_out)

        df = train_data[self.features]
        listColumns = list(df.columns)
        listVars = [(df.columns[i], type(df.iloc[0, i]))
                    for i in range(len(listColumns))]
        VmPredictorDataFrame = create_namedtuple('VmPredictorDataFrame',
                                                 listVars)
        VmPredictorDataFrameSet = create_namedtuple(
            'VmPredictorDataFrameSet',
            [('frames', List[VmPredictorDataFrame])])
        type_out = List[float]

        def predict_metric(val_wrapped: VmPredictorDataFrameSet) -> type_out:
            '''Returns an array of float predictions'''
            df = pd.DataFrame(val_wrapped.frames, columns=listColumns)
            # df = pd.DataFrame(np.column_stack(val_wrapped), columns=val_wrapped._fields)  # numpy doesn't like binary
            predict_nd = xmodel.predict(df)  # return here is a nd-array
            predict_list = predict_nd.tolist()  # flatten to tag set
            # predict_list = type_out(list(predict_nd))  # flatten to tag set
            return predict_list

        # compute path of this package to add it as a dependency
        package_path = path.dirname(path.realpath(__file__))
        return Model(classify=predict_metric), Requirements(
            packages=[package_path], reqs=[sklearn, np, pd])
def test_session_push_keras():
    '''Tests basic model pushing functionality with keras'''
    clear_jwt()

    with _patch_auth():
        with MockServer() as server:
            iris = load_iris()
            X = iris.data
            y = pd.get_dummies(iris.target).values

            clf = Sequential()
            clf.add(Dense(3, input_dim=4, activation='relu'))
            clf.add(Dense(3, activation='softmax'))
            clf.compile(loss='categorical_crossentropy',
                        optimizer='adam',
                        metrics=['accuracy'])
            clf.fit(X, y)

            columns = [
                'sepallength', 'sepalwidth', 'petallength', 'petalwidth'
            ]
            X_df = pd.DataFrame(X, columns=columns)

            DataFrame = create_dataframe('DataFrame', X_df)
            Predictions = create_namedtuple('Predictions',
                                            [('predictions', List[int])])

            def predict(df: DataFrame) -> Predictions:
                '''Predicts the class of iris'''
                X = np.column_stack(df)
                yhat = clf.predict(X)
                preds = Predictions(predictions=yhat)
                return preds

            model = Model(predict=predict)

            model_url, auth_url, _, _ = server.config
            s = AcumosSession(model_url, auth_url)
            s.push(model, name='keras_iris_push')
Example 18
def test_session_dump(replace: bool):
    '''Tests session dump'''
    def my_transform(x: int, y: int) -> int:
        return x + y

    model = Model(transform=my_transform)
    model_name = 'my-model'

    session = AcumosSession()

    with tempfile.TemporaryDirectory() as tdir:

        session.dump(model, model_name, tdir)
        model_dir = path_join(tdir, model_name)
        assert set(listdir(model_dir)) == set(_REQ_FILES)
        if replace is False:
            with pytest.raises(AcumosError):
                session.dump(model, model_name, tdir)  # file already exists
        else:
            session.dump(
                model, model_name, tdir,
                replace=replace)  # file already exists but it will be replaced
def test_model():
    '''Tests Model class'''
    def my_transform(x: int, y: int) -> int:
        '''Docstrings also work'''
        return x + y

    def another_transform(x: int, y: int) -> int:
        return x + y

    model = Model(transform=my_transform, another=another_transform)

    input_type = model.transform.input_type
    output_type = model.transform.output_type

    assert input_type.__name__ == 'TransformIn'
    assert output_type.__name__ == 'TransformOut'

    assert model.transform.inner(1, 1) == 2
    assert model.transform.wrapped(input_type(1, 1)) == output_type(2)

    assert model.transform.description == 'Docstrings also work'
    assert model.another.description == ''
Example 20
def test_raw_type(func, f_in, f_out, in_media_type, out_media_type, in_is_raw,
                  out_is_raw):
    '''Tests to make sure that supported raw data type models are working correctly'''
    model = Model(transform=func)
    model_name = 'my-model'

    with TemporaryDirectory() as tdir:
        with _dump_model(model, model_name) as dump_dir:
            _copy_dir(dump_dir, tdir, model_name)

        copied_dump_dir = path_join(tdir, model_name)
        metadata_file_path = path_join(copied_dump_dir, 'metadata.json')

        with open(metadata_file_path) as metadata_file:
            metadata_json = json.load(metadata_file)

            assert metadata_json['methods']['transform']['input'][
                'media_type'] == in_media_type
            assert metadata_json['methods']['transform']['output'][
                'media_type'] == out_media_type

        wrapped_model = load_model(copied_dump_dir)
        if in_is_raw:
            wrapped_return = wrapped_model.transform.from_raw(f_in)
        else:
            arguments = model.transform.input_type(*f_in)
            arguments_pb_msg = _pack_pb_msg(arguments,
                                            wrapped_model.transform._module)
            wrapped_return = wrapped_model.transform.from_pb_msg(
                arguments_pb_msg)

        if out_is_raw:
            ret = wrapped_return.as_raw()
        else:
            ret_pb_msg = wrapped_return.as_pb_msg()
            ret = _unpack_pb_msg(model.transform.output_type, ret_pb_msg).value

        assert ret == f_out
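
For reference, parameters annotated as `bytes` are treated as raw types; a hypothetical transform that such a parametrized test could receive:

def echo(data: bytes) -> bytes:  # hypothetical raw-in/raw-out transform
    return data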
Example 21
def test_model2proto():
    '''Tests the generation of protobuf messages from a Model'''
    T1 = NamedTuple('T1', [('x', int), ('y', int)])
    T2 = NamedTuple('T2', [('data', int)])

    Thing = Enum('Thing', 'a b c d e')

    def f1(x: int, y: int) -> int:
        return x + y

    def f2(data: T1) -> T2:
        return T2(data.x + data.y)

    def f3(data: List[Thing]) -> Thing:
        return data[0]

    def f4(data: List[T1]) -> None:
        pass

    def f5(x: List[np.int32]) -> np.int32:
        return np.sum(x)

    df = pd.DataFrame({'x': [1, 2, 3], 'y': [4, 5, 6]})
    TestDataFrame = create_dataframe('TestDataFrame', df)

    def f6(in_: TestDataFrame) -> None:
        pass

    model = Model(f1=f1, f2=f2, f3=f3, f4=f4, f5=f5, f6=f6)
    module = 'model'
    package = 'pkg'
    protostr = model2proto(model, package)

    # main test is to make sure that compilation doesn't fail
    with tempfile.TemporaryDirectory() as tdir:
        compile_protostr(protostr, package, module, tdir)
Example 22
assert (res.f < 1.0)

print('Test square')
ci = SquareMessage(2.0)
res = square(ci)
assert (res.d >= ci.d * ci.d)

print('Test subtract')
ci = ComputeInput(1.0, 2.0, "string")
res = subtract(ci)
assert (res.f < ci.f1 and res.f < ci.f2)

# Dump and on-board the methods as models
# This relies on the user entering a password on the command line
push = 'http://cognita-dev1-vm01-core.eastus.cloudapp.azure.com:8090/onboarding-app/v2/models'
auth = 'http://cognita-dev1-vm01-core.eastus.cloudapp.azure.com:8090/onboarding-app/v2/auth'
session = AcumosSession(push_api=push, auth_api=auth)

for f in (add, average, concatenate, classify, ingest, manipulate, multiply,
          output, padd, paverage, pmultiply, poutput, predict, psubtract,
          square, subtract):
    d = {f.__name__: f}
    model = Model(**d)
    subdir = 'dump_' + f.__name__
    # if the dump dir exists, assume it was pushed also
    if os.path.isdir(subdir):
        print('Found dump of model {}, skipping'.format(f.__name__))
    else:
        print('Dumping model {}'.format(f.__name__))
        session.dump(model, subdir, '.')
        print('Pushing model {}'.format(f.__name__))
        session.push(model, f.__name__)
Example 23
    with mlflow.start_run():
        lr = ElasticNet(alpha=alpha, l1_ratio=l1_ratio, random_state=42)
        lr.fit(train_x, train_y)

        predicted_qualities = lr.predict(test_x)

        (rmse, mae, r2) = eval_metrics(test_y, predicted_qualities)

        print("Elasticnet model (alpha=%f, l1_ratio=%f):" % (alpha, l1_ratio))
        print("  RMSE: %s" % rmse)
        print("  MAE: %s" % mae)
        print("  R2: %s" % r2)

        mlflow.log_param("alpha", alpha)
        mlflow.log_param("l1_ratio", l1_ratio)
        mlflow.log_metric("rmse", rmse)
        mlflow.log_metric("r2", r2)
        mlflow.log_metric("mae", mae)

        mlflow.sklearn.log_model(lr, "model")

        # Acumos part
        from acumos.modeling import Model, List, Dict, create_namedtuple, create_dataframe
        from acumos.session import AcumosSession, Requirements

        model = Model(eval=eval_metrics)
        # Export the model
        session = AcumosSession()
        session.dump(model, 'SK model with Gaia', '.')
                           y: target_onehot
                       })
    print("Epoch {} | Loss {}".format(epoch, loss))

prediction = tf.argmax(logits, 1)
yhat = sess.run([prediction], {x: data})[0]

# note: this predicts on the training set for illustration purposes only
print(classification_report(target, yhat))

# =============================================================================
# create a acumos model from the tensorflow model
# =============================================================================

X_df = pd.DataFrame(
    data,
    columns=['sepal_length', 'sepal_width', 'petal_length', 'petal_width'])
IrisDataFrame = create_dataframe('IrisDataFrame', X_df)


def classify_iris(df: IrisDataFrame) -> List[int]:
    '''Returns an array of iris classifications'''
    X = np.column_stack(df)
    return prediction.eval({x: X}, sess)


model = Model(classify=classify_iris)

session = AcumosSession()
session.dump(model, 'model', '.')  # creates ./model
Example 25
def model_create_pipeline(path_model, path_label, top_n):
    from sklearn.pipeline import Pipeline
    import keras
    from image_classifier.keras_model.prediction_formatter import Formatter
    from image_classifier.keras_model.evaluate_image import Predictor
    from image_classifier.keras_model.image_decoder import ImageDecoder
    from acumos.modeling import Model, List, create_namedtuple
    from acumos.session import Requirements
    from os import path
    from _version import __version__

    # read dictionary to pass along to formatter class
    dict_classes = eval(open(path_label, 'r').read()) if path_label else None

    # we will create a hybrid keras/scikit pipeline because we need some preprocessing done
    #   within scikit that is not easily possible with keras
    #
    # stages are as follows (the quoted section is the scikit pipeline name)
    #   #1 'decode' - input+reshape - decode incoming image with MIME+BINARY as inputs
    #   #2 'predict' - prediction - input the transformed image to the prediction method
    #   #3 'format' - predict transform - post-process the predictions into sorted prediction classes
    # see this page for hints about what happens...
    #   https://stackoverflow.com/questions/37984304/how-to-save-a-scikit-learn-pipline-with-keras-regressor-inside-to-disk
    #
    # NOTE: the last object is an "estimator" type so that we can call "predict", as required by the
    #       acumos-based wrapper functionality
    pipeline = Pipeline([
        ('decode', ImageDecoder()),
        ('predict', Predictor(path_model=path_model)),
        ('format', Formatter(dict_classes, top_n))
    ])

    # create a dataframe and image set
    # ImageSet = create_dataframe("ImageSet", ImageDecoder.generate_input_dataframe())
    # TODO: replace with more friendly dataframe operation when it supports strings...
    df = ImageDecoder.generate_input_dataframe()
    image_type = tuple(zip(df.columns, ImageDecoder.generate_input_types()))
    name_in = "Image"
    input_image = create_namedtuple(name_in, image_type)

    # output of classifier, list of tags
    df = Formatter.generate_output_dataframe()
    tag_result = tuple(zip(df.columns, Formatter.generate_output_types()))
    name_out = "ImageTag"
    ImageTag = create_namedtuple(name_out, tag_result)
    output_set = create_namedtuple(name_out + "Set", [(name_out + "s", List[ImageTag])])

    def predict_class(val_wrapped: input_image) -> output_set:
        '''Returns an array of float predictions'''
        # NOTE: we don't have a named output type, so need to match 'value' to proto output
        # print("-===== input -===== ")
        # print(input_set)
        df = pd.DataFrame([val_wrapped], columns=input_image._fields)
        # print("-===== df -===== ")
        # print(df)
        # print("-===== out df -===== ")
        tags_df = pipeline.predict(df)
        # print(tags_df)
        tags_parts = tags_df.to_dict('split')
        # print("-===== out list -===== ")
        # print(output_set)
        tags_list = [ImageTag(*r) for r in tags_parts['data']]
        print("[{} - {}:{}]: Input {} row(s) ({}), output {} row(s) ({}))".format(
              "classify", MODEL_NAME, __version__, len(df), input_image, len(tags_df), output_set))
        return output_set(tags_list)

    # compute path of this package to add it as a dependency
    package_path = path.dirname(path.realpath(__file__))
    return Model(classify=predict_class), Requirements(packages=[package_path], reqs=[pd, np, keras, 'tensorflow', 'Pillow'])
# This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============LICENSE_END=========================================================
'''
Dumps an example model for illustrating acumos_model_runner usage
'''
from collections import Counter

from acumos.session import AcumosSession
from acumos.modeling import Model, List, Dict


def add(x: int, y: int) -> int:
    '''Adds two numbers'''
    return x + y


def count(strings: List[str]) -> Dict[str, int]:
    '''Counts the occurrences of words in `strings`'''
    return Counter(strings)


if __name__ == '__main__':
    '''Main'''
    model = Model(add=add, count=count)

    session = AcumosSession()
    session.dump(model, 'example-model', '.')
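
The dumped directory can then be served; a hedged sketch, assuming the acumos_model_runner package installs a console script that takes the dumped model directory as its argument:

$ acumos_model_runner ./example-model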
from acumos.session import AcumosSession

if __name__ == '__main__':
    '''Main'''

    iris = load_iris()
    X = iris.data
    y = iris.target

    clf = RandomForestClassifier(random_state=0)
    clf.fit(X, y)

    columns = ['sepallength', 'sepalwidth', 'petallength', 'petalwidth']
    X_df = pd.DataFrame(X, columns=columns)

    DataFrame = create_dataframe('DataFrame', X_df)
    Predictions = create_namedtuple('Predictions',
                                    [('predictions', List[int])])

    def predict(df: DataFrame) -> Predictions:
        '''Predicts the class of iris'''
        X = np.column_stack(df)
        yhat = clf.predict(X)
        preds = Predictions(predictions=yhat)
        return preds

    model = Model(transform=predict)

    s = AcumosSession(None)
    s.dump(model, 'model', '.')
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============LICENSE_END=========================================================
"""
Creates an example Acumos model for DCAE on-boarding testing
"""
from acumos.modeling import Model, NamedTuple
from acumos.session import AcumosSession


class NumbersIn(NamedTuple):
    x: int
    y: int


class NumberOut(NamedTuple):
    result: int


def add(numbers: NumbersIn) -> NumberOut:
    '''Adds two integers'''
    x, y = numbers
    return NumberOut(x + y)


model = Model(add=add)

session = AcumosSession()
session.dump(model, 'example-model', '.')
Example 29
    test_df = redfin.match_train(test_df)
    redfin.train_model()
    # print('Using %d Calculated Features for Valuation' % len(test_df.columns.values))
    return redfin.model.predict(test_df)


def appraise_multiple(data: List[HouseDataFrame]) -> List[float]:
    res = pd.DataFrame(data, columns=HouseDataFrame._fields)
    return predict(res)


def appraise(data: HouseDataFrame) -> List[float]:
    return appraise_multiple([data])


acumos_model = Model(appraise=appraise)
# session.push(model, MODEL_PATH) # usable with active credentials
print('Acumos %s created' % MODEL_PATH)


@app.route('/hello')
def hello_world():
    return 'Hello, World!'


# Listen for conversation state change events.
@app.route('/predict', methods=['POST'])
def events():
    raw_data = request.data
    data = json.loads(raw_data)
    print('payload', data)
Example 30
"""
import io

import PIL

from acumos.modeling import Model, create_namedtuple
from acumos.session import AcumosSession

ImageShape = create_namedtuple('ImageShape', [('width', int), ('height', int)])


def get_format(data: bytes) -> str:
    '''Returns the format of an image'''
    buffer = io.BytesIO(data)
    img = PIL.Image.open(buffer)
    return img.format


def get_shape(data: bytes) -> ImageShape:
    '''Returns the width and height of an image'''
    buffer = io.BytesIO(data)
    img = PIL.Image.open(buffer)
    shape = ImageShape(width=img.width, height=img.height)
    return shape


model = Model(get_format=get_format, get_shape=get_shape)

session = AcumosSession()
session.dump(model, 'image-model', '.')  # creates ./image-model
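
A quick local check of the two methods before wrapping, using a hypothetical image file on disk:

with open('photo.png', 'rb') as f:  # hypothetical image file
    data = f.read()
print(get_format(data))  # e.g. 'PNG'
print(get_shape(data))   # ImageShape(width=..., height=...)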