def __init__(self):
    """Build the ASL game window: a data pane holding home/mode buttons.

    Blocks in tkinter's mainloop() until the window is closed.
    """
    Frame.__init__(self)
    self.master.title("ASL Game")
    self.master.minsize(width=500, height=500)
    self.grid()

    # Top-level panes; only the data pane is gridded at start-up.
    self.__dataPane = Frame(self)
    self.__dataPane.grid(row=0, column=0)
    self.__mode1Frame = Frame(self)
    self.__mode2Frame = Frame(self)

    # Lookup tables built once from the data layer.
    self.__dp = DataProcessing()
    self.__whole = self.__dp.getWholeSignDict()
    self.__NumLtr = self.__dp.getNumLtrDict()

    # Navigation widgets on the data pane.
    self.__homeButton = Button(self.__dataPane, text="Home",
                               command=self.__home)
    self.__welcome = Label(self.__dataPane, text="Choose a mode")
    self.__mode1Button = Button(self.__dataPane, text="Mode 1",
                                command=self.__mode1)
    self.__mode2Button = Button(self.__dataPane, text="Mode 2",
                                command=self.__mode2)

    self.__homeButton.grid(row=1, column=1)
    self.__welcome.grid(row=2, column=2)
    self.__mode1Button.grid(row=3, column=1)
    self.__mode2Button.grid(row=3, column=2)

    mainloop()
def SaveDataToFile():
    """Persist the current customer list to "CustomerData.txt".

    Failures are reported on stdout rather than propagated.
    """
    try:
        writer = DataProcessing.DataProcessing()
        writer.FileName = "CustomerData.txt"
        writer.TextData = Customers.CustomerList.ToString()
        writer.SaveData()
    except Exception as err:
        print(err)
def Operate(Specs):
    """Run a realtime ("operative") forecast for the given configuration.

    Loads the model described by ``Specs``, predicts ``Specs.horizont`` steps
    ahead, writes the forecast to a csv file and prints an error analysis of
    the forecast against the last observed values.

    Parameters:
        Specs: project configuration object; this function reads its
            ``Mode``, ``AnzahlPrognosen`` and ``horizont`` attributes.
    """
    dataObj = DataProcessing.DataProcessing(Specs)
    # TBD make sure that input data is valid etc.
    # TBD back test results against real data
    # TBD Error handling
    # TBD fix model paths

    if not hasattr(dataObj, "data"):
        print("no valid data could be extracted.")
        return
    if Specs.Mode == "2":
        print("No Neural Network support at the moment.")
        return

    # BUG FIX: test the falsy case first — int() on a missing/empty value
    # would raise before the original's second clause could short-circuit.
    if not Specs.AnzahlPrognosen or int(Specs.AnzahlPrognosen) != 1:
        print(
            "corrected wrong configuration: Only realtime forecast is considered"
        )
        Specs.AnzahlPrognosen = 1

    print("input data: ", dataObj.datapath)

    # load model; without a model nothing further can be done.
    try:
        mdlObj = generateModel(Specs, dataObj)
    except Exception as e:
        # BUG FIX: str(e) — "str" + Exception raises TypeError.
        print("problems while loading model: " + str(e))
        # BUG FIX: bail out; continuing would hit NameError on mdlObj.
        return

    try:
        ForecastDyn = mdlObj.predictOperative(Specs.horizont)
        timeseriesNF = dataObj.data["NoFilter"]
    except Exception as e:
        print("problems while prediction:\n" + str(e))
        # BUG FIX: ForecastDyn/timeseriesNF are undefined past this point.
        return

    # csv export is best-effort: the error analysis below still runs.
    try:
        print("creating csv.")
        createCsv(ForecastDyn, Specs)
        print("done")
    except Exception as e:
        print("no csv could be created:\n" + str(e))

    try:
        DataDyn = timeseriesNF[-Specs.horizont:]
        multi = ("\n##### " + str(Specs.horizont)
                 + "-step Prediction against last values ##### \n")
        print(multi)
        sys.stdout.flush()
        FDyn = ErrorAnalysis.ErrorAnalysis(DataDyn, ForecastDyn)
        print(FDyn.criterias)
        sys.stdout.flush()
    except Exception as e:
        print(e)
def SaveDataToFile():
    """Persist the current employee list to "EmployeeData.txt".

    Failures are reported on stdout rather than propagated.
    """
    try:
        objF = DataProcessing.DataProcessing()
        objF.FileName = "EmployeeData.txt"
        objF.TextData = Employees.EmployeeList.ToString()
        # Removed leftover debug output ('print("Reached here")').
        objF.SaveData()
    except Exception as e:
        print(e)
import os
import re
# import pandas as pd
# import sys
import null  # NOTE(review): "null" is not a stdlib module — confirm this dependency really exists
import numpy as np
from requests.packages import chardet
# reload(sys)

import DataPreprocessing as dp
import CreateDictionary as cd
import DataProcessing as dproc

# Helper objects for the extraction pipeline below.
a1 = dp.DataPreprocessing()
a2 = cd.CreateDictionary()
a3 = dproc.DataProcessing()

# Example usage kept for reference (header extraction from one xml file):
# f = r'D:\scientific work\InformationExtracting\processeddataex\qword-20151019144240604.xml'
# text = a1.find_all_occurrences_of_header(f, u'...')   # e.g. the epidemiological-anamnesis header
# sent = a2.sentences_list(text)
# colloc = a2.collocations_list(sent)
# words = a2.words_list(colloc)

# frequency lists for stationary diagnosis
# BUG FIX: raw strings — "\s", "\d", "\A" in the originals were invalid
# escape sequences (kept literally only by CPython leniency).
path = r"D:\scientific work\data\All_Without_Tags"
# path = r"D:\scientific work\data\All_Part_WithoutTags"
def BackTesting(Specs):
    """Backtest the model described by ``Specs`` against historical data.

    Mode "1" (statistical model): 1-step and multi-step predictions with an
    error analysis of each, plus an optional two-panel plot.
    Any other mode (neural network): train/test predictions with an error
    analysis of each, plus an optional two-panel plot.
    """
    # warning: Format YYYY-MM-DD (~) > 1000x faster than DD.MM.YYYY
    dataObj = DataProcessing.DataProcessing(Specs)
    modelObj = generateModel(Specs, dataObj)
    timeseriesNF = dataObj.data["NoFilter"]
    print("\nprediction with generated model" + str(Specs.order) + "x" +
          str(Specs.sorder) + "\n\n")

    if (Specs.Mode == "1"):
        ##### -1-Step Prediction- #####
        pred1 = modelObj.predict1Step()
        F1 = ErrorAnalysis.ErrorAnalysis(timeseriesNF,
                                         modelObj.predicted.predicted_mean)
        one = "##### 1-step Prediction ##### \n"
        print(one)
        sys.stdout.flush()
        print(F1.criterias)
        sys.stdout.flush()

        ##### -Multi-Step Prediction- #####
        ForecastDyn = modelObj.predictDyn(nstep=Specs.horizont,
                                          n=Specs.sorder[0],
                                          delay=Specs.delay,
                                          anzahl=Specs.AnzahlPrognosen)
        DataDyn = timeseriesNF.loc[ForecastDyn.index]
        FDyn = ErrorAnalysis.ErrorAnalysis(DataDyn, ForecastDyn)
        multi = "\n##### " + str(Specs.horizont) + "-step Prediction ##### \n"
        print(multi)
        sys.stdout.flush()
        print(FDyn.criterias)
        sys.stdout.flush()

        # Visualization is best-effort (matplotlib may be unavailable).
        try:
            import matplotlib.pyplot as plt
            ax = plt.subplot(2, 1, 1)
            dataObj.visualize(ax, '1-step Forecast', timeseriesNF,
                              pred1.predicted_mean)
            ax1 = plt.subplot(2, 1, 2)
            dataObj.visualize(ax1, str(Specs.horizont) + '-step Forecast',
                              DataDyn, ForecastDyn)
            plt.show()
        except Exception as e:
            print(e)
            print("No visualization possible.")
    else:
        print("\npredictions with generated Neural Network")
        trainPred, trainOut, testPred, testOut = modelObj.predict()
        Ftrain = ErrorAnalysis.ErrorAnalysis(trainOut.flatten(),
                                             trainPred.flatten())
        Ftest = ErrorAnalysis.ErrorAnalysis(testOut.flatten(),
                                            testPred.flatten())
        try:
            import matplotlib.pyplot as plt
            ax = plt.subplot(2, 1, 1)
            dataObj.visualize(ax, str(Specs.horizont) + '-step Forecast',
                              trainOut.flatten(), trainPred.flatten())
            ax1 = plt.subplot(2, 1, 2)
            # BUG FIX: the test panel was drawn on the first axes (ax),
            # overwriting the training panel — draw on ax1.
            dataObj.visualize(ax1, str(Specs.horizont) + '-step Forecast',
                              testOut.flatten(), testPred.flatten())
        except Exception as e:
            # BUG FIX: was a bare "except:" that silently hid the cause.
            print(e)
            print("No visualization possible.")
        print("Training:\n")
        print(Ftrain.criterias)
        sys.stdout.flush()
        print("\nTest:\n")
        print(Ftest.criterias)
        sys.stdout.flush()
        # NOTE(review): if the matplotlib import above failed, plt is unbound
        # here and this raises NameError — present in the original as well.
        plt.show()
# Protection: the training script needs the dataset path as first argument.
if len(sys.argv) < 2:
    print("1 argument needed: train_dataset")
    exit(1)

# Option EarlyStopping: minimal loss improvement between epochs.
delta_min_loss = 0.00001
for i, arg in enumerate(sys.argv):
    if "-earlystopping" in arg.lower() and i + 1 < len(sys.argv):
        # BUG FIX: argv values are strings; the loss delta must be numeric
        # or every later comparison against the loss misbehaves.
        try:
            delta_min_loss = float(sys.argv[i + 1])
        except ValueError:
            print(f"invalid -earlystopping value: {sys.argv[i + 1]}")
            exit(1)

# Parsing
train_dataset, targets = parse(sys.argv[1])
dataProcessing = DataProcessing(train_dataset, targets, columns=columns_name)
dataProcessing.normalize()
train_dataset, targets = dataProcessing.get_data(
    data_type="2d_array",
    shuffle=True,
)
if len(train_dataset) != len(targets):
    print(f"len(train_dataset) = {len(train_dataset)}")
    print(f"len(targets) = {len(targets)}")
    # BUG FIX: this is an error path — exit with a failure status (was 0).
    exit(1)

# Create model with random weights: one classifier per house.
models = [Logreg(len(columns_name), name=name) for name in house_matrix]
model = [[float(x) if x else 0 for x in neuron.split(',')] for neuron in model[:-1].split("\n")] return features, model # Protection if len(sys.argv) != 3: print("2 arguments needed: dataset weights") exit(1) # Parsing test_dataset, model = parse(sys.argv[1], sys.argv[2]) dataProcessing = DataProcessing(test_dataset, columns=columns_name) dataProcessing.normalize() test_dataset = dataProcessing.get_data(data_type="2d_array") print(f"features ({len(test_dataset)} features) : {test_dataset}") print(f"Weights: {model}") # Create model with weights already trainned model = [Logreg(len(weights), weights=weights) for weights in model] # Create answers file "houses" houses_file = open("houses.csv", 'w') houses_file.write("Index,Hogwarts House\n") for i, features in enumerate(test_dataset):
#!/usr/local/bin/python3
# BUG FIX: the shebang must start with "#!" (no space after "#") for the
# kernel to honor it when the script is executed directly.

# Smoke-test script: exercises the Person, Customer and DataProcessing
# classes. Imports are kept interleaved as in the original in case the
# project modules have import-time side effects.

import Persons

person = Persons.Person()
person.FirstName = "James"
person.LastName = "Cameron"
print(person)
print(person.GetObjectCount())

import Customers

customer = Customers.Customer()
customer.Id = 1
customer.FirstName = "Alisha"
customer.LastName = "Banner"
print("This is the Customer Object: ", customer)

import DataProcessing

# Write a small test file via the DataProcessing helper.
# Renamed from "file" to avoid shadowing the builtin.
data_file = DataProcessing.DataProcessing()
data_file.FileName = "Testfile.txt"
data_file.TextData = "Blah"
data_file.SaveData()