Example #1
import random

# standardize_all_data, get_trajectory and trim_data are assumed to be
# provided by the project's data_processor module.


def get_random_example(n, trim_length, trimmed=True, standardize=True):
    # Load either the standardized data set or the raw trajectories.
    if standardize:
        data = standardize_all_data()
    else:
        data = get_trajectory()

    # Optionally trim every trajectory to a fixed length.
    if trimmed:
        data = trim_data(data, length=trim_length)

    # Draw n examples at random (with replacement).
    examples = random.choices(data, k=n)

    return examples
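A minimal usage sketch, assuming the data_processor helpers above are importable and a trim length of 300 (both assumptions, not part of the original example):

examples = get_random_example(n=8, trim_length=300)   # 8 randomly chosen trimmed trajectories
print(len(examples))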
Example #2
import os
import random
import numpy as np

# standardize_all_data, trim_data, istandardize_data and write_one_file are
# assumed to come from the project's data_processor / data_reader modules;
# CMD, PAR1, PAR2 and TRANSLATION are module-level command-string constants.


def play_real(length, translation=True):
    # Standardize, trim, then undo the standardization so the sample is
    # back on its original scale.
    data = standardize_all_data()
    data = trim_data(data, length)
    data = istandardize_data(data)

    # Pick one trajectory at random and convert it to a NumPy array.
    x = random.choice(data)
    print(type(x))
    print(x.shape)
    x = np.array(x)

    # Write it to disk and replay it with the external command.
    write_one_file('example', x, format='rov')

    cmd = CMD + PAR1 + 'example' + PAR2
    if translation:
        cmd += TRANSLATION

    os.system(cmd)

    return x
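A call sketch, assuming the external player command referenced by CMD, PAR1, PAR2 and TRANSLATION is available on the system:

x = play_real(length=300, translation=True)   # writes 'example' to disk and replays it
print(x.shape)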
Example #3
from data_processor import time_stamp
from data_processor import trim_data
from dynamic_reporter import init_dynamic_report
from dynamic_reporter import stop_dynamic_report
from dynamic_reporter import report
from data_reader import write_one_file
from multiprocessing import set_start_method
import random
import os
import torch.nn as nn

# config, set_trim_length, standardize_all_data, window, fft_data,
# lpf_dimension_reduction and flatten_complex_data are assumed to be imported
# elsewhere in the original module (e.g. from config and data_processor).

# time.sleep(13500)
# Prepare the training set for this model
print('Preparing the training set...')
if config.TRIM_LENGTH is None:
    set_trim_length(300)
train_set = trim_data(standardize_all_data())
# Apply a Hann window, move to the frequency domain, keep only the
# low-frequency components, then flatten the complex spectra to real vectors.
train_set, WIN = window(train_set, 'hann')
print(WIN.shape)
train_set = fft_data(train_set)
train_set, dim = lpf_dimension_reduction(train_set, frequency=10)
train_set = flatten_complex_data(train_set)
print(dim)
print('Training set is ready!')


class Complex_Fully_Connected_Linear_Discriminator_LPF(nn.Module):
    def __init__(self, dimension):
        super(Complex_Fully_Connected_Linear_Discriminator_LPF,
              self).__init__()
        self.n_in = dimension * 2 * 6  # real and imaginary parts are separated
Example #4
from data_processor import iflatten_complex_data
from data_processor import time_stamp
from data_processor import trim_data
from dynamic_reporter import init_dynamic_report
from dynamic_reporter import stop_dynamic_report
from dynamic_reporter import report
from data_reader import write_one_file
from multiprocessing import set_start_method
import random
import os
import torch.nn as nn

# config, set_trim_length, standardize_all_data, fft_all_data and
# flatten_complex_data are assumed to be imported elsewhere in the
# original module.

# Prepare the training set for this model
print('Preparing the training set...')
if config.TRIM_LENGTH is None:
    set_trim_length(300)
# Standardize and trim the data, then FFT it and flatten the complex
# spectra into real-valued training vectors.
origin = trim_data(standardize_all_data())
data = fft_all_data()
train_set = flatten_complex_data(data)
print('Training set is ready!')

class Complex_Fully_Connected_Linear_Discriminator(nn.Module):
    def __init__(self, dimension):
        super(Complex_Fully_Connected_Linear_Discriminator, self).__init__()
        self.n_in = dimension * (config.TRIM_LENGTH // 2 + 1) * 2   # real and imaginary parts are separated

        # hidden linear layers
        self.linear1 = nn.Linear(self.n_in, self.n_in)
        self.linear2 = nn.Linear(self.n_in, self.n_in)
        self.linear3 = nn.Linear(self.n_in, self.n_in)
        self.linear4 = nn.Linear(self.n_in, 1)
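The example stops before the forward pass. Below is a self-contained sketch of how these four linear layers might be chained and exercised; the class name, the absence of activations, and the dimension/TRIM_LENGTH values are assumptions, not part of the original code.

import torch
import torch.nn as nn

class LinearDiscriminatorSketch(nn.Module):
    # Hypothetical stand-in mirroring the layer stack defined above.
    def __init__(self, n_in):
        super().__init__()
        self.linear1 = nn.Linear(n_in, n_in)
        self.linear2 = nn.Linear(n_in, n_in)
        self.linear3 = nn.Linear(n_in, n_in)
        self.linear4 = nn.Linear(n_in, 1)

    def forward(self, x):
        # Chain the layers; the original forward method is not shown, so
        # leaving out nonlinearities (a purely linear map) is an assumption
        # suggested by the class name.
        x = self.linear1(x)
        x = self.linear2(x)
        x = self.linear3(x)
        return self.linear4(x)

# Smoke test with a batch of 4 flattened spectra.
n_in = 6 * (300 // 2 + 1) * 2             # dimension=6, TRIM_LENGTH=300 (assumed)
disc = LinearDiscriminatorSketch(n_in)
print(disc(torch.randn(4, n_in)).shape)   # torch.Size([4, 1])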
Example #5
from data_processor import time_stamp
from data_processor import trim_data
from dynamic_reporter import init_dynamic_report
from dynamic_reporter import stop_dynamic_report
from dynamic_reporter import report
from data_reader import write_one_file
from multiprocessing import set_start_method
import random
import os
import torch.nn as nn

# config, set_trim_length, standardize_all_data, fft_all_data and
# flatten_real_data are assumed to be imported elsewhere in the
# original module.

# Prepare the training set for this model
print('Preparing the training set...')
if config.TRIM_LENGTH is None:
    set_trim_length(300)
fft_all_data()
# Standardize and trim the data, then flatten it into real-valued
# training vectors (the return value of fft_all_data() is not used here).
data = trim_data(standardize_all_data())
train_set = flatten_real_data(data)
print('Training set is ready!')


class Complex_Fully_Connected_Discriminator(nn.Module):
    def __init__(self, dimension):
        super(Complex_Fully_Connected_Discriminator, self).__init__()
        self.n_in = dimension * config.TRIM_LENGTH

        # hidden linear layers
        self.linear1 = nn.Linear(self.n_in, self.n_in)
        self.linear2 = nn.Linear(self.n_in, self.n_in)
        self.linear3 = nn.Linear(self.n_in, 1)
        self.drop_layer = nn.Dropout(0.87)
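This snippet also ends before the forward pass. A self-contained sketch of how the layers and the heavy dropout might be combined; the activation choice, the dropout placement, and the dimension/TRIM_LENGTH values are assumptions.

import torch
import torch.nn as nn

class DropoutDiscriminatorSketch(nn.Module):
    # Hypothetical stand-in mirroring the layers and Dropout(0.87) above.
    def __init__(self, n_in):
        super().__init__()
        self.linear1 = nn.Linear(n_in, n_in)
        self.linear2 = nn.Linear(n_in, n_in)
        self.linear3 = nn.Linear(n_in, 1)
        self.drop_layer = nn.Dropout(0.87)

    def forward(self, x):
        # Activations and dropout placement are assumed; the original
        # forward method is not part of the snippet.
        x = self.drop_layer(torch.relu(self.linear1(x)))
        x = self.drop_layer(torch.relu(self.linear2(x)))
        return self.linear3(x)

n_in = 6 * 300                            # dimension=6, TRIM_LENGTH=300 (assumed)
disc = DropoutDiscriminatorSketch(n_in)
print(disc(torch.randn(4, n_in)).shape)   # torch.Size([4, 1])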