import os.path
#os.chdir('OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt')

import preprocessing

import pandas as pd
import time

from dev_convnet import conv_selection_parallel
import multiprocessing as mp
import itertools
import torch.nn as nn
#%% Load Data
datadir = r"OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt"

# Fit/validation splits for the single site "le_bray" from the PROFOUND data.
X, Y = preprocessing.get_splits(
    sites=["le_bray"],
    datadir=os.path.join(datadir, "data"),
    dataset="profound",
    simulations=None,
)

#%% Grid search of hparams
# Candidate value lists for the convnet hyper-parameter search.
hiddensize = [16, 64, 128, 256, 512]
batchsize = [16, 64, 128, 256, 512]
learningrate = [1e-4, 1e-3, 5e-3, 1e-2, 5e-2]
history = [5, 10, 15, 20]
channels = [[7, 14], [10, 20], [14, 28]]
kernelsize = [2, 3, 4]
activation = [nn.Sigmoid, nn.ReLU]

# Bundle the grids in the order conv_selection_parallel presumably expects
# -- NOTE(review): confirm the expected ordering in dev_convnet.
hp_list = [hiddensize, batchsize, learningrate, history,
           channels, kernelsize, activation]
import os.path
import preprocessing

import pandas as pd
import time

from dev_lstm import lstm_selection_parallel
import multiprocessing as mp
import itertools
import torch
import torch.nn.functional as F
#%% Load Data
data_dir = r"OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt"

# Training data: le_bray site, PROFOUND dataset, years 2001-2008.
X, Y = preprocessing.get_splits(sites=["le_bray"],
                                years=list(range(2001, 2009)),
                                datadir=os.path.join(data_dir, "data"),
                                dataset="profound",
                                simulations=None)

#%% Grid search of hparams
# Candidate value lists for the LSTM hyper-parameter search.
hiddensize = [16, 64, 128, 256, 512]
batchsize = [16, 64, 128, 256, 512]
learningrate = [1e-4, 1e-3, 5e-3, 1e-2, 5e-2]
history = [5, 10, 15, 20]
activation = [torch.sigmoid, F.relu]
hp_list = [hiddensize, batchsize, learningrate, history, activation]

epochs = 3000    # training epochs per candidate
splits = 6       # NOTE(review): presumably CV folds -- confirm in dev_lstm
searchsize = 50  # NOTE(review): presumably number of sampled configs -- confirm
# Beispiel #3 (0 votes) -- separator line from the scraped example listing,
# commented out so it is not executed as Python code.
import dev_lstm
import torch
import torch.optim as optim
import torch.nn as nn
import preprocessing
import visualizations
import models
from sklearn import metrics
import utils
import numpy as np
#%%

# Raw string: the Windows path contains backslash sequences ("\D", "\S", ...)
# that a plain literal only preserved by accident (none happen to be
# recognized escapes) and that modern Python flags with a SyntaxWarning.
data_dir = r"OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt"
X, Y = preprocessing.get_splits(sites=['hyytiala'],
                                years=[2001, 2003, 2004],
                                datadir=os.path.join(data_dir, "data"),
                                dataset="profound",
                                simulations=None)

# Rescale the features to a common range.
X = utils.minmax_scaler(X)
#%%

# One shared linear layer embedding a single scalar feature into 32 dims.
fc = nn.Linear(1, 32)

# Embed the first three features independently with the shared layer.
# NOTE(review): assumes X is a 2-D torch tensor (samples, features) after
# minmax_scaler -- confirm, since fc() requires a tensor input.
latent = []
for feature in range(3):
    latent.append(fc(X.unsqueeze(1)[:, :, feature]).unsqueeze(2))

# Stack per-feature embeddings -> (samples, 32, 3) ...
latent = torch.stack(latent, dim=2).squeeze(3)
# ... and average over the three features -> (samples, 32).
# (Removed a leftover bare `latent.shape` expression: a no-op outside a REPL.)
latent = torch.mean(latent, dim=2)
import preprocessing

import pandas as pd
import time

from dev_rf import rf_selection_parallel
import multiprocessing as mp
import itertools
import utils

#%% Load Data
data_dir = r"/home/fr/fr_fr/fr_mw263/scripts"

# Training data pooled over three PROFOUND sites, years 2001-2008.
X, Y = preprocessing.get_splits(sites=["bily_kriz", "collelongo", "soro"],
                                years=list(range(2001, 2009)),
                                datadir=os.path.join(data_dir, "data"),
                                dataset="profound",
                                simulations=None)

#%%
# Candidate value lists for the random-forest grid search.
cv_splits = [5]
shuffled = [False]
n_trees = [200, 300, 400, 500]
depth = [4, 5, 6, 7]

# NOTE(review): presumably the cartesian product of the parameter lists,
# column-wise -- confirm against utils.expandgrid.
p_list = utils.expandgrid(cv_splits, shuffled, n_trees, depth)

searchsize = len(p_list[0])
if __name__ == '__main__':
    #freeze_support()
    pass  # NOTE(review): the scrape cut off the body of this __main__ guard
# Beispiel #5 (0 votes) -- separator line from the scraped example listing,
# commented out so it is not executed as Python code.
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 14 12:01:09 2020

@author: marie
"""

import sys

# Make the project directory importable. Raw string: the path contains
# backslash sequences ("\D", "\S", ...) that a plain literal only preserved
# because none are recognized escapes; modern Python emits a SyntaxWarning
# for them.
sys.path.append(r'OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt')

import preprocessing
import numpy as np

import matplotlib.pyplot as plt

#%%
# Daily PRELES inputs for hyytiala, kept as a DataFrame (to_numpy=False)
# so the column names are available for the axis labels below.
X, Y = preprocessing.get_splits(
    sites=["hyytiala"],
    # raw string: avoid invalid backslash escape sequences in the Windows path
    datadir=r"OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt\data",
    dataset="profound",
    to_numpy=False)
#%%
# One panel per feature for the first year (365 days), shared x-axis.
fig, ax = plt.subplots(5, figsize=(8, 9), sharex='col')
fig.suptitle("Preles input data")
first_year = X.to_numpy()[:365]  # convert once instead of per panel
for i in range(5):
    ax[i].plot(first_year[:, i])
    ax[i].set_ylabel(X.columns[i])
fig.text(0.5, 0.04, "Day of Year")
# Beispiel #6 (0 votes) -- separator line from the scraped example listing,
# commented out so it is not executed as Python code.
import os.path
import preprocessing

import pandas as pd
import time

from dev_lstm import lstm_selection_parallel
import multiprocessing as mp
import itertools
import torch
import torch.nn.functional as F
#%% Load Data
data_dir = r"/home/fr/fr_fr/fr_mw263"

# Training split: le_bray, years 2001 and 2003-2006.
X, Y = preprocessing.get_splits(sites=["le_bray"],
                                years=[2001, 2003, 2004, 2005, 2006],
                                datadir=os.path.join(data_dir, "scripts/data"),
                                dataset="profound",
                                simulations=None)

# Held-out test split: same site, year 2008 only.
X_test, Y_test = preprocessing.get_splits(sites=["le_bray"],
                                          years=[2008],
                                          datadir=os.path.join(data_dir, "scripts/data"),
                                          dataset="profound",
                                          simulations=None)

#%% Grid search of hparams
# Candidate value lists for the LSTM hyper-parameter search.
hiddensize = [16, 64, 128, 256, 512]
batchsize = [16, 64, 128, 256, 512]
learningrate = [1e-4, 1e-3, 5e-3, 1e-2, 5e-2]
history = [5, 10, 15, 20]
activation = [torch.sigmoid, F.relu]
# Beispiel #7 (0 votes) -- separator line from the scraped example listing,
# commented out so it is not executed as Python code.
#%% Set working directory
import os.path
import preprocessing

import pandas as pd
import time

from dev_mlp import mlp_selection_parallel
import multiprocessing as mp
import itertools
#%% Load Data
data_dir = r"OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt\python"

# Training split: three sites, years 2001 and 2003-2006.
X, Y = preprocessing.get_splits(
    sites=['le_bray', 'bily_kriz', 'collelongo'],
    years=[2001, 2003, 2004, 2005, 2006],
    datadir=os.path.join(data_dir, "data"),
    dataset="profound",
    simulations=None)

# Held-out test split: same three sites, year 2008 only.
X_test, Y_test = preprocessing.get_splits(
    sites=['le_bray', 'bily_kriz', 'collelongo'],
    years=[2008],
    datadir=os.path.join(data_dir, "data"),
    dataset="profound",
    simulations=None)

#%% Grid search of hparams
# Candidate value lists for the MLP hyper-parameter search.
hiddensize = [16, 64, 128, 256, 512]
batchsize = [8, 64, 128, 256, 512]
learningrate = [1e-4, 1e-3, 5e-3, 1e-2, 5e-2]
history = [0, 1, 2]
# Beispiel #8 (0 votes) -- separator line from the scraped example listing,
# commented out so it is not executed as Python code.
import torch
import torch.nn as nn
import torch.nn.functional as F

import numpy as np
import random

import preprocessing
import utils
import models
import torch.nn.functional as F
#%% Load Data
# Raw string: the Windows path contains backslash sequences ("\D", "\S", ...)
# that a plain literal only preserved by accident (none are recognized
# escapes) and that modern Python flags with a SyntaxWarning.
datadir = r"OneDrive\Dokumente\Sc_Master\Masterthesis\Project\DomAdapt"
X, Y = preprocessing.get_splits(sites=["hyytiala"],
                                years=[2001, 2002, 2003, 2004, 2005, 2006, 2007],
                                datadir=os.path.join(datadir, "data"),
                                dataset="profound")

#x = torch.tensor(np.transpose(sims['sim1'][0])).type(dtype=torch.float)
#y = torch.tensor(np.transpose(sims['sim1'][1])).type(dtype=torch.float)

#%% Normalize features
# Rescale the features to a common range.
X = utils.minmax_scaler(X)


#%% Prep data
N = 50  # minibatch size
# Draw N distinct row indices at random to form one batch.
# NOTE(review): assumes X and Y support fancy indexing with a list
# (numpy arrays / torch tensors) -- confirm what get_splits returns.
subset = random.sample(range(X.shape[0]), N)
X_batch, y_batch = X[subset], Y[subset]

x = torch.tensor(X_batch).type(dtype=torch.float)