Example #1
    def __init__(self, dataparser_obj, start, size):
        self.dp = dataparser_obj
        self.hyp = Hyperparameters()

        self.labels = self.dp.get_meta()

        self.size_of_sample = size
        self.chunkIdentifier = str(size)

        self.validation_start = 0
        self.test_start = self.hyp.VALIDATION_NUMBER + self.size_of_sample - 1
        self.train_start = self.test_start + self.hyp.TEST_NUMBER + self.size_of_sample - 1

        self.train_matrix_data = list()
        self.train_matrix_labels = list()
        self.train_count = 0

        self.valid_count = 0
        self.test_count = 0
        print("making validation set")
        self.make_valid_set(start, size)
        print("making test set")
        self.make_test_set(start, size)
        print("making train set")
        self.make_train_set(start, size)
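For reference, the three start indices above are plain offset arithmetic: validation begins at row 0, the test region begins VALIDATION_NUMBER + size - 1 rows in, and the train region follows TEST_NUMBER + size - 1 rows after that (the extra size - 1 presumably leaves room for the last window of length size). A quick check with hypothetical hyperparameter values, VALIDATION_NUMBER = 1000, TEST_NUMBER = 500, and a window size of 10:

validation_start = 0
test_start = 1000 + 10 - 1           # 1009
train_start = 1009 + 500 + 10 - 1    # 1518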
Example #2
    def __init__(self, dataparser_obj):
        self.tool = Utility()
        self.dp = dataparser_obj
        self.hyp = Hyperparameters()

        self.labels = self.dp.get_meta()

        self.size_of_sample = self.dp.return_size_name(
            self.hyp.MODE_OF_LEARNING)
        self.chunkIdentifier = self.dp.return_chunkIdent_name(
            self.hyp.MODE_OF_LEARNING)

        self.validation_start = 0
        self.test_start = self.hyp.VALIDATION_NUMBER + self.size_of_sample - 1
        self.train_start = self.test_start + self.hyp.TEST_NUMBER + self.size_of_sample - 1

        self.train_matrix_data = list()
        self.train_matrix_labels = list()
        self.train_count = 0

        self.valid_count = 0
        self.test_count = 0
        print("making validation set")
        self.make_valid_set()
        print("making test set")
        self.make_test_set()
        print("making train set")
        self.make_train_set()
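This variant builds the same splits as Example #1, except that the window size and chunk identifier are resolved from the parser via self.hyp.MODE_OF_LEARNING (return_size_name / return_chunkIdent_name) instead of being passed in, which is why the make_*_set calls here take no arguments.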
Example #3
    def __init__(self, *argv):
        self.hyp = Hyperparameters()

        if len(argv) == 2:
            print("I have registered a cookie-cutter chunk mode")
            assert isinstance(argv[0], DataParser_Universal), "you did not give me a DataParser object!"
            assert isinstance(argv[1], str), "you didn't give a proper keyword"
            self.chunkIdentifier = argv[1]
            self.size = self.hyp.sizedict[self.chunkIdentifier]
            self.mode = 1

        elif len(argv) == 3:
            print("I have registered arbitrary mode")
            assert isinstance(argv[0], DataParser_Universal), "you did not give me a DataParser object!"
            assert isinstance(argv[1], int), "you didn't give a proper start"
            assert isinstance(argv[2], int), "you didn't give a proper size"
            self.start = argv[1]
            self.size = argv[2]
            self.mode = 0

        else:
            raise TypeError("invalid number of arguments")

        self.dp = argv[0]

        self.labels = self.dp.get_meta()

        self.validation_start = 0
        self.test_start = self.hyp.VALIDATION_NUMBER + self.size - 1
        self.train_start = self.test_start + self.hyp.TEST_NUMBER + self.size - 1

        self.train_matrix_data = list()
        self.train_matrix_labels = list()

        self.train_count = 0
        self.valid_count = 0
        self.test_count = 0


        print("making validation set")
        self.make_valid_set()
        print("making test set")
        self.make_test_set()
        print("making train set")
        self.make_train_set()
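Since the *argv dispatch above is easy to get wrong at the call site, here is a minimal usage sketch of the two supported signatures, assuming the constructor belongs to the DatasetMaker class imported in Example #6; the DataParser_Universal construction and the "small" sizedict key are hypothetical, not shown in the example:

dp = DataParser_Universal()  # hypothetical construction; real arguments not shown above

maker_chunk = DatasetMaker(dp, "small")      # cookie-cutter mode: size looked up in hyp.sizedict["small"]
maker_arbitrary = DatasetMaker(dp, 0, 256)   # arbitrary mode: explicit start index and window size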
Example #4
    def __init__(self, dataparser_obj, arbiflag):
        self.dp = dataparser_obj
        self.hyp = Hyperparameters()

        self.labels = self.dp.get_meta()
        if not arbiflag:
            self.size_of_sample = self.dp.return_size_name(
                self.hyp.MODE_OF_LEARNING)
        # NOTE: when arbiflag is truthy, size_of_sample is never assigned here, so
        # the offset arithmetic below raises AttributeError unless the caller (or a
        # subclass) sets it first.

        self.chunkIdentifier = self.dp.return_chunkIdent_name(
            self.hyp.MODE_OF_LEARNING)

        self.validation_start = 0
        self.test_start = self.hyp.VALIDATION_NUMBER + self.size_of_sample - 1
        self.train_start = self.test_start + self.hyp.TEST_NUMBER + self.size_of_sample - 1

        self.test_count = 0

        print("making test set")
        self.make_test_set()
Example #5
    def __init__(self):
        self.hyp = Hyperparameters()
        self.datasetList = list()
        self.amparr = list()
        self.superList = list()  # this will contain dictionaries for each directory

        for largeDirectory in self.hyp.data_to_include:
            masteramparr = {}
            files = sorted(listdir(largeDirectory))

            for file in files:
                if "." not in file:  # skip names that carry an extension
                    try:
                        masteramparr[file] = self.getAmpArr(
                            file, largeDirectory)
                        print(largeDirectory + "/" + file)
                        self.datasetList.append(file)
                    except Exception:  # a bare except would also swallow KeyboardInterrupt
                        print(file + " was empty. I skipped it.")
            self.superList.append(masteramparr)

        self.datasetList = list(dict.fromkeys(
            self.datasetList))  # removes duplicates
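The list(dict.fromkeys(...)) idiom on the last line removes duplicates while preserving first-seen order, since dicts keep insertion order in Python 3.7+; list(set(...)) would deduplicate too, but without any order guarantee:

names = ["a", "b", "a", "c", "b"]
print(list(dict.fromkeys(names)))  # ['a', 'b', 'c'] -- first-seen order kept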
Example #6
import tensorflow as tf
import numpy as np
import csv
import os
from pipeline.ProjectUtility import Utility
import shutil
import pickle

from pipeline.MyCNNLibrary import *  # this is my own "keras"-style extension on top of tensorflow
from pipeline.Hyperparameters import Hyperparameters
from pipeline.DatasetMaker_Single_Test import DatasetMaker
from pipeline.DataParser_Single import DataParser
from housekeeping.csv_to_mat import ConfusionMatrixVisualizer
HYP = Hyperparameters()
DP = DataParser()

name = "Vanilla"
Cross = "test"
version = "AllDataCNN_test" + HYP.MODE_OF_LEARNING

weight_bias_list = list()  # holds the weight and bias matrices

base_directory = "../Graphs_and_Results/" + name + "/" + version + "/"
try:
    os.mkdir(base_directory)  # os.mkdir can only create one directory level deep
    print("made directory {}".format(base_directory))
except FileExistsError:  # a bare except would also hide real failures such as a missing parent
    print("directory exists!")
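If the parent directories might not exist yet, a simpler alternative to the try/except above is os.makedirs with exist_ok=True (standard library, Python 3.2+), which creates every missing level and is a no-op when the directory is already there:

import os

os.makedirs(base_directory, exist_ok=True)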