Example #1
def load_data(dataset):
    # Build the ETL pipeline with the four Fourier window sizes used throughout,
    # a 3-frame simple moving average and a 0.75 minimal-movement threshold.
    etl = ETL(DATA_PATH, [128, 256, 512, 1024],
              sma_window=3,
              minimal_movement=0.75)
    etl.load(dataset)
    etl.preprocess_pooled()
    etl.generate_fourier_dataset(window_overlap=1)
Example #2
def generate_fourier(data_path, window_sizes, size, params):
    etl = ETL(
        data_path=data_path,
        window_sizes=window_sizes,
        sma_window=params["sma"],
        minimal_movement=params["minimal_movement"],
        size=size
    )
    etl.load("CIMA")
    print("\nPreprocessing data.")
    etl.preprocess_pooled()
    print("\nGenerating fourier data.")
    etl.generate_fourier_dataset(window_overlap=params["window_overlap"])
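
A minimal call sketch for the helper above, with a placeholder data path; the params keys simply mirror the ones the function reads, and the example values match those used in the other examples on this page:

params = {"sma": 3, "minimal_movement": 0.75, "window_overlap": 1}
generate_fourier("/path/to/datasets", [128, 256, 512, 1024], size=16, params=params)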
Example #3
def cv(model_name):
    kf = StratifiedKFold(n_splits=5, shuffle=True, random_state=42)
    angles = [
        "right_shoulder", "left_shoulder", "right_elbow", "left_elbow",
        "right_hip", "left_hip", "right_knee", "left_knee"
    ]
    window_sizes = [128, 256, 512, 1024]

    etl = ETL(DATA_PATH, window_sizes,
              sma_window=3,
              minimal_movement=0.75)

    etl.load("CIMA")

    infants = np.array(list(etl.cima.keys()))
    labels = np.array([etl.cima[infant]["label"] for infant in infants])

    etl.preprocess_pooled()
    etl.generate_fourier_dataset(window_overlap=1)

    scores = []
    for train_index, test_index in kf.split(infants, labels):
        ids = infants[train_index]

        # Cache one model set per training split, keyed by a short hash of the ids.
        id_hash = f"{model_name}_{sha1(ids).hexdigest()[:5]}"
        model_path = f"saved_models/{id_hash}.joblib"

        if os.path.exists(model_path):
            models = joblib.load(model_path)
        else:
            models = {}
            for window_size in window_sizes:
                # Collect the Fourier features of every angle for this window size.
                frames = []
                for angle in angles:
                    fourier_path = os.path.join(DATA_PATH, str(window_size),
                                                angle + ".json")
                    frames.append(pd.read_json(fourier_path))
                window_data = pd.concat(frames, ignore_index=True)

                # Keep only the training infants of this fold.
                window_data = window_data[window_data.id.isin(ids)]
                y = window_data["label"]
                X = pd.DataFrame(window_data.data.tolist())

                models[window_size] = train_model(model_name, X, y, save=False)
            joblib.dump(models, model_path)

        x_test = infants[test_index]
        y_test = labels[test_index]

        scores.append(evaluate_model(id_hash, models, x_test, y_test))

    return scores
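
A hypothetical driver for the cross-validation routine above; the model identifier string is a placeholder for whatever names train_model accepts:

fold_scores = cv("example_model")  # "example_model" is a placeholder model name
print(fold_scores)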
Example #4
    def predict(self, data_path, infant_id):
        if self.verbose:
            print(
                f"Predicting infant {infant_id} - {strftime('%H:%M:%S', gmtime())}"
            )
        window_sizes = [128, 256, 512, 1024]
        etl = ETL(data_path,
                  window_sizes,
                  pooling="mean",
                  sma_window=3,
                  bandwidth=0,
                  minimal_movement=0.75)
        etl.load_infant(infant_id)
        if self.verbose:
            print(f"Preprocessing the data - {strftime('%H:%M:%S', gmtime())}")
        etl.preprocess_pooled()

        angles = [
            "right_shoulder", "left_shoulder", "right_elbow", "left_elbow",
            "right_hip", "left_hip", "right_knee", "left_knee"
        ]
        video_length = len(etl.cima[infant_id]["data"])
        prediction = Prediction(video_length)

        if self.verbose:
            print(
                f"Generating fourier data - {strftime('%H:%M:%S', gmtime())}")
        for window_size in window_sizes:
            for angle in angles:
                # Fourier features for this angle and window size.
                dataframe = etl.generate_fourier_data(angle, window_size,
                                                      window_size // 4)
                data_features = pd.DataFrame(dataframe.data.tolist())
                if not data_features.empty:
                    # Reduce the features with the fitted PLS transform, then
                    # predict class probabilities for every window.
                    pls = self.model[window_size]["pls"]
                    model = self.model[window_size]["model"]
                    data_transformed = pls.transform(data_features)
                    dataframe["label"] = model.predict_proba(data_transformed)
                else:
                    dataframe["label"] = pd.Series(dtype=float)
                prediction.set_window_data(window_size, angle, dataframe)

        infant = etl.cima[infant_id]
        infant["predictions"] = prediction

        return infant, prediction
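
Assuming the method above lives on a predictor class (called Predictor below purely for illustration) whose self.model maps each window size to a dict with fitted "pls" and "model" entries, a call might look like:

predictor = Predictor(model=models, verbose=True)  # Predictor is a stand-in name
infant, prediction = predictor.predict("/path/to/datasets", "077")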
Example #5
from etl.etl import ETL
from matplotlib import pyplot as plt


etl = ETL("/home/erlend/datasets", [128, 256, 512, 1024], size=16, random_seed=42)
etl.cache = False
etl.load("CIMA")


infant = etl.cima["077"]
infant = etl.resample(infant)
before_sma = infant["data"]["right_wrist_x"][:250]

etl.preprocess_pooled()

after_sma = etl.cima["077"]["data"]["right_wrist_x"][:250]

plt.figure()

# Overlay the raw signal and the smoothed signal for the first 250 frames.
plt.plot(before_sma, color="red", alpha=0.5)
plt.plot(after_sma, color="green", alpha=0.5)

plt.xlabel("Frame")
plt.ylabel("right_wrist_x")
plt.legend(["Raw data", "SMA=3"])

plt.savefig("sma.png")

Example #6
def load_validation_set(data_path):
    etl = ETL(data_path, [128, 256, 512, 1024])
    etl.load("CIMA", validation=True)
    etl.preprocess_pooled()
    return etl.cima
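
A minimal call sketch, with a placeholder data path:

validation_set = load_validation_set("/path/to/datasets")
print(f"Loaded {len(validation_set)} validation infants")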