Example #1
def main(export_dir,
         train_config_path=TRAINING_PARAMS_PATH,
         params_config_path=RUN_PARAMS_PATH):

    params = do.load(params_config_path)
    train_params = do.load(train_config_path)
    train = train_params.params

    sio = socketio.Server()
    app = Flask(__name__)

    controller = SimplePIController(params.kp, params.ki)
    controller.set_desired(params.speed)

    # model = ti.estimator.SavedModelPredictor(export_dir)
    model = tf.contrib.predictor.from_saved_model(export_dir)

    car = Car(sio, model, controller, train, params)
    car.register()

    # wrap Flask application with engineio's middleware
    app = socketio.Middleware(sio, app)

    # deploy as an eventlet WSGI server
    eventlet.wsgi.server(eventlet.listen(('', 4567)), app)
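In TensorFlow 1.x, tf.contrib.predictor.from_saved_model returns a callable that maps a feed dict of input names to a dict of output arrays. A minimal sketch of a per-frame prediction as the Car object might issue it, assuming the model was exported with an "image" input and a "predictions" output (both key names, and the decoded image_array, are assumptions here; see the telemetry handler in Example #3):

# Sketch only: the input/output keys depend on how the model was exported.
outputs = model({"image": image_array[None, ...]})
steering_angle = float(outputs["predictions"][0])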
Example #2
def main(model_path: str, speed: float = 22):
    global app
    global model
    global params
    global components

    params = dicto.load(os.path.join(model_path, "params.yml"))
    components = params["components"]

    model_obj = tf.saved_model.load(model_path)
    model = model_obj.signatures["serving_default"]

    controller.set_desired(speed)

    # wrap Flask application with engineio's middleware
    app = socketio.Middleware(sio, app)

    # deploy as an eventlet WSGI server
    eventlet.wsgi.server(eventlet.listen(("", 4567)), app)
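In TensorFlow 2.x, the serving signature obtained above is a concrete function that takes keyword tensor arguments and returns a dict of output tensors. A sketch of calling it on one decoded camera frame; the "image" keyword and the single-output assumption are guesses (inspect model.structured_input_signature for the real names):

import tensorflow as tf

# Sketch only: argument and output names come from the exported signature.
image_batch = tf.constant(image_array[None, ...], dtype=tf.float32)
outputs = model(image=image_batch)
steering = list(outputs.values())[0].numpy().squeeze()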
Example #3
        self.error = self.set_point - measurement

        # accumulate the integral error only while it has not exploded
        if abs(self.Ki * self.integral) < 100:
            self.integral += self.error

        return self.Kp * self.error + self.Ki * self.integral


sio = socketio.Server()
app = Flask(__name__)
model = None

controller = SimplePIController(0.1, 0.002)

params = dicto.load("training/params.yml")


@sio.on("telemetry")
def telemetry(sid, data):
    if data:
        # The current steering angle of the car
        steering_angle = data["steering_angle"]
        # The current throttle of the car
        throttle = data["throttle"]
        # The current speed of the car
        speed = data["speed"]
        # The current image from the center camera of the car
        imgString = data["image"]
        image = Image.open(BytesIO(base64.b64decode(imgString)))
        image_array = np.asarray(image)
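The update body shown at the top of this example implies a controller along the following lines. This is a reconstruction from the visible fragment and the SimplePIController(0.1, 0.002) / set_desired calls, not necessarily the repository's exact code:

class SimplePIController:
    def __init__(self, Kp, Ki):
        self.Kp = Kp          # proportional gain
        self.Ki = Ki          # integral gain
        self.set_point = 0.0  # desired speed
        self.error = 0.0
        self.integral = 0.0

    def set_desired(self, desired):
        self.set_point = desired

    def update(self, measurement):
        self.error = self.set_point - measurement

        # accumulate the integral error only while it has not exploded
        if abs(self.Ki * self.integral) < 100:
            self.integral += self.error

        return self.Kp * self.error + self.Ki * self.integral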
Example #4
def main(
    params_path: Path = Path("training/params.yml"),
    cache: bool = False,
    viz: bool = False,
    debug: bool = False,
):
    if debug:
        import debugpy

        print("Waiting debuger....")
        debugpy.listen(("localhost", 5678))
        debugpy.wait_for_client()

    params = dicto.load(params_path)

    train_cache_path = Path("cache") / "train.feather"
    test_cache_path = Path("cache") / "test.feather"

    if cache and train_cache_path.exists() and test_cache_path.exists():
        print("Using cache...")

        df_train = pd.read_feather(train_cache_path)
        df_test = pd.read_feather(test_cache_path)

    else:
        if params["dataset"] == "udacity_simulator":
            df = dataget.image.udacity_simulator().get()
            df_train, df_test = estimator.split(df, params)
            df_train = estimator.preprocess(df_train, params, "train")
            df_test = estimator.preprocess(df_test, params, "test")
        else:
            header = [
                "center",
                "left",
                "right",
                "steering",
                "throttle",
                "break",
                "speed",
            ]
            df = pd.read_csv(
                os.path.join(params["dataset"], "driving_log.csv"), names=header
            )
            df_train, df_test = estimator.split(df, params)
            df_train = estimator.preprocess(
                df_train, params, "train", params["dataset"]
            )
            df_test = estimator.preprocess(df_test, params, "test", params["dataset"])

        # cache data
        df_train = df_train.reset_index(drop=True)
        df_test = df_test.reset_index(drop=True)

        train_cache_path.parent.mkdir(exist_ok=True)

        df_train.to_feather(train_cache_path)
        df_test.to_feather(test_cache_path)

    ds_train = estimator.get_dataset(df_train, params, "train")
    ds_test = estimator.get_dataset(df_test, params, "test")

    # Visualize the dataset for debugging
    if viz:
        import matplotlib.pyplot as plt

        iterator = iter(ds_train)
        image_batch, steer_batch, weights = next(iterator)
        for image, steer, weight in zip(image_batch, steer_batch, weights):
            plt.imshow(image.numpy())
            plt.title(f"Steering angle: {steer} weight {weight}")
            plt.show()

        return

    components = params["components"]

    def gnll_loss(y, parameter_vector):
        """ Computes the mean negative log-likelihood loss of y given the mixture parameters.
        """
        alpha, mu, sigma = slice_parameter_vectors(
            parameter_vector, components
        )  # Unpack parameter vectors

        gm = tfd.MixtureSameFamily(
            mixture_distribution=tfd.Categorical(probs=alpha),
            components_distribution=tfd.Normal(loc=mu, scale=sigma),
        )

        log_likelihood = gm.log_prob(tf.transpose(y))  # Evaluate log-probability of y

        return -tf.reduce_mean(log_likelihood, axis=-1)

    model = estimator.get_model(params, components)
    # model = estimator.get_simclr(params)
    loss = gnll_loss if components is not None else "mse"
    metrics = None if components is not None else ["mae"]
    model.compile(
        optimizer=tf.keras.optimizers.Adam(params.lr), loss=loss, metrics=metrics,
    )

    model.summary()
    # exit()

    model.fit(
        ds_train,
        epochs=params.epochs,
        steps_per_epoch=params.steps_per_epoch,
        validation_data=ds_test,
        callbacks=[
            tf.keras.callbacks.TensorBoard(
                log_dir=str(Path("summaries") / Path(model.name)), profile_batch=0
            )
        ],
    )

    # Export to saved model
    save_path = os.path.join("models", model.name)
    model.save(save_path)

    # Save also yml with configs
    dicto.dump(params, os.path.join(save_path, "params.yml"))
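gnll_loss above depends on a slice_parameter_vectors helper that is not shown. In a typical mixture-density-network setup the final layer emits 3 * components values per sample, which are split into mixture weights, means, and scales; a sketch under that assumption (the layout is a guess):

def slice_parameter_vectors(parameter_vector, components):
    # Assumed layout: [alpha_1..k, mu_1..k, sigma_1..k]; alpha should
    # already be normalized and sigma positive (e.g. via the model's
    # output activations) for the mixture to be valid.
    alpha, mu, sigma = (
        parameter_vector[..., i * components : (i + 1) * components]
        for i in range(3)
    )
    return alpha, mu, sigma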
Example #5
        noise = tf.random_normal([], mean=0.0, stddev=params.angle_noise_std)
        row["steering"] = tf.cast(row["steering"], tf.float32) + noise

    else:
        row["steering"] = tf.cast(row["steering"], tf.float32)

    image = (image / 255.0) * 2.0 - 1.0

    row["image"] = image

    return row


def get_crop_window(params):

    final_height = params.image_height - (params.crop_up + params.crop_down)
    final_width = params.image_width

    return [
        params.crop_up,
        0,
        final_height,
        final_width,
    ]


if __name__ == "__main__":
    module_path = os.path.dirname(__file__)
    configs_path = os.path.join(module_path, "configs", "train.yml")
    params = do.load(configs_path)
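The [offset_height, offset_width, target_height, target_width] list returned by get_crop_window matches the crop_window argument of tf.io.decode_and_crop_jpeg, which is presumably how the input pipeline consumes it; a sketch of that usage (image_path is hypothetical, and the real pipeline likely lives in dataset-mapping code not shown here):

# Sketch only: decode a JPEG and crop it in a single op.
contents = tf.io.read_file(image_path)
image = tf.io.decode_and_crop_jpeg(contents, get_crop_window(params))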
Example #6
def main(
    params_path: Path = "training/params.yml",
    viz: bool = False,
    toy: bool = False,
    model: str = "torch",
) -> None:

    torch.autograd.set_detect_anomaly(True)

    params = dicto.load(params_path)

    df_train, df_test = dataget.toy.spirals().get()

    X_train = df_train[["x0", "x1"]].to_numpy()
    y_train = df_train["y"].to_numpy()
    X_test = df_test[["x0", "x1"]].to_numpy()
    y_test = df_test["y"].to_numpy()

    transform = MinMaxScaler()
    X_train = transform.fit_transform(X_train)
    X_test = transform.transform(X_test)

    ds_train = ContrastiveDataset(
        X_train,
        y_train,
        batch_size=params.batch_size,
        steps_per_epoch=params.steps_per_epoch,
        noise_std=params.noise_std,
        n_neighbors=params.n_neighbors,
        n_hops=params.n_hops,
        transform=torch.tensor,
        viz=viz,
    )

    ds_test = ContrastiveDataset(
        X_test,
        y_test,
        batch_size=32,
        steps_per_epoch=1,
        noise_std=params.noise_std,
        n_neighbors=params.n_neighbors,
        n_hops=params.n_hops,
        transform=torch.tensor,
        viz=False,
    )

    if viz:
        visualize(ds_train)

    # pytorch
    model = ContrastiveNet(
        batch_size=params.batch_size * 2,
        n_layers=params.n_layers,
        n_units=params.n_units,
        embedding_size=params.embedding_size,
    )

    net = skorch.NeuralNet(
        model,
        criterion=criterion,
        batch_size=None,
        max_epochs=params.epochs,
        lr=params.lr,
        optimizer=torch.optim.Adam,
        # train_split=lambda X, y: (X, ds_test),
        train_split=None,
        device="cuda",
    )

    net.fit(ds_train, y=None)

    net.module.eval()
    h = (net.module(
        torch.tensor(X_train, dtype=torch.float32, device="cuda"),
        return_embeddings=True,
    ).cpu().detach().numpy())

    h = PCA(1).fit_transform(h)

    px.scatter(x=X_train[:, 0], y=X_train[:, 1], color=h[:, 0]).show()
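ContrastiveNet and criterion are defined elsewhere in the project. From the constructor arguments and the net.module(x, return_embeddings=True) call, the module is an MLP that can return either the projection used by the contrastive loss or the raw embeddings; a sketch consistent with that interface (the layer sizes, activation, and normalization are assumptions):

import torch
import torch.nn as nn
import torch.nn.functional as F

class ContrastiveNet(nn.Module):
    # Sketch only, reconstructed from the call sites above.
    def __init__(self, batch_size, n_layers, n_units, embedding_size):
        super().__init__()
        self.batch_size = batch_size  # used by the real pairing logic (assumption)
        layers, in_features = [], 2   # the spirals dataset has two features
        for _ in range(n_layers):
            layers += [nn.Linear(in_features, n_units), nn.ReLU()]
            in_features = n_units
        self.body = nn.Sequential(*layers)
        self.head = nn.Linear(in_features, embedding_size)

    def forward(self, x, return_embeddings=False):
        h = self.head(self.body(x))
        # The loss typically consumes L2-normalized projections, while the
        # PCA visualization above asks for the embeddings directly.
        return h if return_embeddings else F.normalize(h, dim=-1)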
Example #7
def main(
    params_path: Path = Path("training/params.yml"),
    cache: bool = False,
    viz: bool = False,
    debug: bool = False,
    toy: bool = False,
):
    if debug:
        import debugpy

        print("Waiting debuger....")
        debugpy.listen(("localhost", 5678))
        debugpy.wait_for_client()

    params = dicto.load(params_path)

    train_cache = Path("cache/train.csv")
    test_cache = Path("cache/test.csv")
    transformer_cache = Path("cache/transformer.pkl")

    if cache and train_cache.exists() and test_cache.exists() and transformer_cache.exists():
        df_train = pd.read_csv(train_cache)
        df_test = pd.read_csv(test_cache)
        transformer = pickle.load(transformer_cache.open("rb"))
    else:
        df, df_real = dataget.kaggle(competition="cat-in-the-dat-ii").get(
            files=["train.csv", "test.csv"])

        df.drop(columns=["id"], inplace=True)

        df_train, df_test = estimator.split(df, params)

        if toy:
            df_train = df_train.sample(n=1000)
            df_test = df_test.sample(n=1000)

        transformer = GenericTransformer(
            categorical=params.categorical,
            numerical=params.numerical,
        )

        df_train = transformer.fit_transform(df_train)
        df_test = transformer.transform(df_test)

        train_cache.parent.mkdir(exist_ok=True)

        df_train.to_csv(train_cache, index=False)
        df_test.to_csv(test_cache, index=False)
        pickle.dump(transformer, transformer_cache.open("wb"))

    print(df_train)
    print(df_test)

    ds_train = estimator.get_dataset(df_train, params, "train")
    ds_test = estimator.get_dataset(df_test, params, "test")

    print(ds_train[:10])
    print(ds_test[:10])

    model = estimator.get_model(params,
                                n_categories=transformer.n_categories,
                                numerical=[])
    print(model)
    exit()

    net = skorch.NeuralNet(model)

    model.summary()

    print(ds_train)

    model.fit(
        ds_train,
        epochs=params.epochs,
        steps_per_epoch=params.steps_per_epoch,
        validation_data=ds_test,
        callbacks=[
            tf.keras.callbacks.TensorBoard(log_dir=str(
                Path("summaries") / Path(model.name)),
                                           profile_batch=0)
        ],
    )

    # Export to saved model
    save_path = f"models/{model.name}"
    model.save(save_path)

    print(f"{save_path=}")

    vizualize(df_train, df_test, model)
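GenericTransformer is also project code that is not shown. From its usage it ordinal-encodes the categorical columns and exposes per-column cardinalities as n_categories so the embedding tables can be sized; a sketch of that contract built on sklearn's OrdinalEncoder (the real class may also handle unknown categories, missing values, and numerical scaling):

from sklearn.preprocessing import OrdinalEncoder

class GenericTransformer:
    # Sketch only, reconstructed from the fit_transform / transform /
    # n_categories usage above.
    def __init__(self, categorical, numerical):
        self.categorical = categorical
        self.numerical = numerical
        self.encoder = OrdinalEncoder()

    def fit_transform(self, df):
        df = df.copy()
        df[self.categorical] = self.encoder.fit_transform(
            df[self.categorical].astype(str)
        )
        # distinct values per categorical column, e.g. for nn.Embedding sizes
        self.n_categories = [len(c) for c in self.encoder.categories_]
        return df

    def transform(self, df):
        df = df.copy()
        df[self.categorical] = self.encoder.transform(
            df[self.categorical].astype(str)
        )
        return df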