def simulateSynch():
    totalResults = []

    for file in files:
        print "Working on: " + file.split('/')[-2]

        # Initial data -------------------------------------------------------
        importer = Importer()
        importedData = importer.getInputData(file, samplingInterval)

        # Predicted data -----------------------------------------------------
        predictor = DRPredictor()
        predictedData = predictor.getPredictedData(importedData, predictionInterval,
                                                   samplingInterval)

        for txRate in txRates:
            # Transmitted packets and data -----------------------------------
            transmitter = SynchTransmitter()
            transmittedPackets = transmitter.getTransmittedPackets(int(txRate), predictedData)

            reconstructor = Reconstructor()
            transmittedData = reconstructor.getReconstructedData(transmittedPackets)

            result = PartBResult()
            result.movementType = findMovementType(file)
            result.threshold = 0
            result.txRate = txRate
            result.txError = calculateError(importedData, transmittedData)
            totalResults.append(result)

    return totalResults
def runSimulation():
    totalResults = []

    for file in files:
        print "Working on: " + file.split('/')[-2]

        # Initial data -------------------------------------------------------
        importer = Importer()
        importedData = importer.getInputData(file, samplingInterval)

        # Predicted data -----------------------------------------------------
        predictor = DRPredictor()
        predictedData = predictor.getPredictedData(importedData, predictionInterval,
                                                   samplingInterval)

        for threshold in thresholds:
            # Transmitted packets and data -----------------------------------
            transmitter = DRTransmitter(heartbeat)
            transmittedPackets = transmitter.getTransmittedPackets(threshold, predictedData)

            reconstructor = Reconstructor()
            transmittedData = reconstructor.getReconstructedData(transmittedPackets)

            result = PartAResult()
            result.movementType = findMovementType(file)
            result.threshold = threshold
            result.txRate = calculateTransmissionRate(importedData, transmittedPackets)
            result.txError = calculateError(importedData, transmittedData)
            totalResults.append(result)

    return totalResults
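# --- Usage sketch (not part of the original sources) -------------------------
# A minimal driver for the two simulation entry points above. The module-level
# globals they rely on (files, samplingInterval, predictionInterval, heartbeat,
# thresholds, txRates) are assumed to be defined by the surrounding script; the
# values below are illustrative placeholders, not the original configuration.
if __name__ == '__main__':
    files = ["/data/Stacking/Simulation1/positionLog.txt"]  # hypothetical path
    samplingInterval = 10      # ms
    predictionInterval = 100   # ms
    heartbeat = 5000           # ms
    thresholds = [0.01, 0.02, 0.05]
    txRates = [10, 20, 50]

    for result in runSimulation() + simulateSynch():
        print result.movementType, result.txRate, result.txError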
def getStartingPitcher(self, index, pitcherFile, gameNumber, year, team):
    if (self.getShowFunctions()):
        print("LineupCreator.getStartingPitcher")
    else:
        pass

    def getGame(gameNumber, index, year, team):
        if (self.getShowFunctions()):
            print("LineupCreator.getStartingPitcher.getGame")
        else:
            pass

        #add up the team's wins and losses from that year to determine how many games they played.
        yearFile = open(File.teamYearInfo(year))
        for line in yearFile:
            if team in line:
                tempLine = line.split(',')
                seasonGames = int(tempLine[3]) + int(tempLine[4])  #this is how many games they played that year.
                break

        #if the gameNumber is higher than the number of games that team played that year,
        #subtract the number of games from that season until the number is <= games played that year.
        while (gameNumber > seasonGames):
            gameNumber -= seasonGames

        return (gameNumber)

    if not (team == "CTM"):
        thisGame = str(getGame(gameNumber, index, year, team))
        file = open(pitcherFile)
        for line in file:
            if (thisGame in line):
                tempLine = line
                break
        file.close()
        startingPitcher = tempLine.split(',')[1].split(';')[0]
    else:
        spList = []
        for i in range(len(self.__addedPlayers[index])):
            positionList = []
            playerTeams = Importer.findPlayerTeam(
                self.__addedPlayers[index][i],
                self.__additionalPlayers_YearList[index][i].get(),
                False, self.getShowFunctions())
            for team in playerTeams:
                positionList += Importer.getPositionIncrementally(
                    self.__addedPlayers[index][i],
                    File.teamFielding(team, self.__additionalPlayers_YearList[index][i].get()),
                    1, "American League", self.getShowFunctions())
            if (Utility.removeDuplicates(positionList, self.getShowFunctions())[0] == "SP"):
                spList.append(self.__addedPlayers[index][i])
            else:
                continue

        #gameNumber -= 1  #the first game should be zero so it starts with the first starting pitcher in the list
        seasonGames = len(spList)
        while (gameNumber > seasonGames):
            gameNumber -= seasonGames
        startingPitcher = spList[gameNumber - 1]

    return (startingPitcher)
def main():
    im = Importer()
    train = im.get_training_set()
    test = im.get_test_set()
    logis = linear_model.LogisticRegression()

    for i in range(100):
        mu1 = train['mu1'][:, i]
        mu0 = train['mu0'][:, i]
        yf = train['yf'][:, i]
        ycf = train['ycf'][:, i]
        t = train['t'][:, i]
        x = train['x'][:, :, i]
        #x = x[:, 0][:, np.newaxis]
        logis.fit(x, t)

        mu1_test = test['mu1'][:, i]
        mu0_test = test['mu0'][:, i]
        yf_test = test['yf'][:, i]
        ycf_test = test['ycf'][:, i]
        t_test = test['t'][:, i]
        x_test = test['x'][:, :, i]

        mask = []
        for num in t_test:
            if num == 0:
                mask.append(False)
            else:
                mask.append(True)

        #x_test = x_test[:, 0][:, np.newaxis]
        ptx = logis.predict_proba(x_test)
        pt1 = np.sum(t_test) / len(t_test)
        pt0 = 1 - pt1
        ptx1 = ptx[mask]
        ptx0 = ptx[np.invert(mask)]
        #print('pt', pt1, pt0)
        #print('ptx', ptx1, ptx0)

        w1 = pt1 / ptx1[:, 1]
        w0 = pt0 / ptx0[:, 0]
        #print('w', w1, w0)

        yw1 = yf_test[mask] * w1
        yw0 = yf_test[np.invert(mask)] * w0
        #print('yf', yf_test)
        #print('yw', yw1, yw0)

        avg_yw1 = np.sum(yw1) / len(yw1)
        avg_yw0 = np.sum(yw0) / len(yw0)
        #print(avg_yw1, avg_yw0)
        print(avg_yw1 - avg_yw0)
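# --- Note (not part of the original sources) ---------------------------------
# main() above is an inverse-propensity-weighting (IPW) estimate of the average
# treatment effect: the logistic model estimates e(x) = P(t=1|x), each factual
# outcome is weighted by P(t) / P(t|x), and the weighted group means are
# differenced. The hand-built mask loop can be expressed directly in numpy; a
# minimal equivalent sketch with a hypothetical treatment column:
import numpy as np

t_test = np.array([0, 1, 1, 0])  # hypothetical treatment indicator column
mask = t_test.astype(bool)       # replaces the element-wise append loop
not_mask = ~mask                 # replaces np.invert(mask)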
def IIR_str_to_SDFG(iir: str):
    stencilInstantiation = IIR_pb2.StencilInstantiation()
    stencilInstantiation.ParseFromString(iir)

    metadata = stencilInstantiation.metadata
    id_resolver = IdResolver(metadata.accessIDToName,
                             metadata.APIFieldIDs,
                             metadata.temporaryFieldIDs,
                             metadata.globalVariableIDs,
                             metadata.fieldIDtoDimensions)

    imp = Importer(id_resolver)
    stencils = imp.Import_Stencils(stencilInstantiation.internalIR.stencils)

    UnparseCode(stencils, id_resolver)
    AddRegisters(stencils, id_resolver)
    SplitMultiStages(stencils)
    AddMsMemlets(stencils, id_resolver)
    AddDoMethodMemlets(stencils, id_resolver)

    exp = Exporter(id_resolver, name=metadata.stencilName)
    exp.Export_ApiFields(metadata.APIFieldIDs)
    exp.Export_TemporaryFields(metadata.temporaryFieldIDs)
    exp.Export_Globals({
        id: stencilInstantiation.internalIR.globalVariableToValue[id_resolver.GetName(id)].value
        for id in metadata.globalVariableIDs
    })
    exp.Export_Stencils(stencils)

    exp.sdfg.fill_scope_connectors()
    return exp.sdfg
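# --- Usage sketch (not part of the original sources) -------------------------
# IIR_str_to_SDFG expects the bytes of a serialized StencilInstantiation
# protobuf. The file name below is hypothetical; saving via SDFG.save() is the
# standard dace serialization call, assuming the returned object is a dace.SDFG.
with open("stencil.iir", "rb") as f:
    sdfg = IIR_str_to_SDFG(f.read())
sdfg.save("stencil.sdfg")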
def importFile(self, path, importFormat):
    total = 0
    importer = Importer(path, self.referenceManager)

    if importFormat == settings.ImportFormat.BIBTEX:
        total = importer.bibtexImport()
    elif importFormat == settings.ImportFormat.CSV:
        total = importer.csvImport()

    return total > 0
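# --- Usage sketch (not part of the original sources) -------------------------
# A hypothetical call site for importFile, assuming `manager` is the object
# that owns it and settings.ImportFormat is the enum checked above:
ok = manager.importFile("references.bib", settings.ImportFormat.BIBTEX)
if not ok:
    print("No entries were imported")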
def transmitData(inputFile, logDir, predictionInterval, samplingInterval,
                 heartbeat, drThreshold, delay, jitter, packetLoss):
    # Import data
    print "Importing data..."
    importer = Importer()
    rawInputData = importer.getInputData(inputFile, samplingInterval)
    exportData(logDir + "RawInputData.txt", rawInputData)

    # Filtering input data
    print "Filtering data..."
    samplingFreq = int(1e3 / samplingInterval)
    taps = 80
    bands = [0.0, 10, 11, 50.0]
    weights = [1, 0]
    coefficients = scipy.signal.remez(taps, bands, weights, Hz=samplingFreq)
    gain = 1.0 / sum(coefficients)
    filteredInputData = filterData(rawInputData, logDir, "cc", samplingInterval,
                                   coefficients)[0]
    filteredInputData = amplifyData(filteredInputData, gain)
    exportData(logDir + "FilteredInputData.txt", filteredInputData)

    # Create the prediction vectors
    print "Creating the prediction vectors..."
    predictor = DRPredictor()
    predictedData = predictor.getPredictedData(filteredInputData,
                                               predictionInterval,
                                               samplingInterval)
    exportData(logDir + "PredictionData.txt", predictedData)

    # Run the transmission algorithm
    print "Simulating the transmission algorithm..."
    transmitter = DRTransmitter(heartbeat)
    drTxPackets = transmitter.getTransmittedPackets(drThreshold, predictedData)
    exportData(logDir + "DRTxPackets.txt", drTxPackets)

    # Simulate the transmission of the packets
    print "Simulating the network..."
    network = Network()
    drRxPackets = network.getReceivedPackets(drTxPackets, delay, jitter,
                                             packetLoss)
    exportData(logDir + "DRRxPackets.txt", drRxPackets)

    # Receive the packets
    print "Receiving the packets..."
    receiver = Receiver()
    drRxFilteredPackets = receiver.getFilteredData(drRxPackets)
    exportData(logDir + "DRRxData.txt", drRxFilteredPackets)

    return [rawInputData, filteredInputData, predictedData, drTxPackets,
            drRxPackets, drRxFilteredPackets]
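# --- Usage sketch (not part of the original sources) -------------------------
# A hypothetical invocation of the transmitData() pipeline above; the path,
# intervals, and network parameters are illustrative, not the original
# experiment settings.
results = transmitData(inputFile="/data/Stacking/Simulation1/positionLog.txt",
                       logDir="./log/",
                       predictionInterval=100,  # ms
                       samplingInterval=10,     # ms
                       heartbeat=5000,          # ms
                       drThreshold=0.02,        # dead-reckoning error bound
                       delay=150, jitter=80, packetLoss=0)
rawInputData, filteredInputData = results[0], results[1]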
def __init__(self, fileName="", defaultFileExtension=""):
    self._settings = None
    self.fileSystem = FileSystem()
    self._fileComponents = MirroredDirectory()
    self.importer = Importer()
    self._templateDir = None
    self.set(fileName, defaultFileExtension)
def predict(self):
    # Let the user choose a file.
    # Only allows mat files to be read in.
    filename = QFileDialog.getOpenFileName(self, 'Open File', filter="*.mat")[0]
    if filename:
        # Make sure the user wants to run prediction.
        choice = QMessageBox.question(
            self, 'Extract!',
            "Are you sure you want to run prediction? If you click yes, the prediction might take a long time to run",
            QMessageBox.Yes | QMessageBox.No)
        if choice == QMessageBox.Yes:
            # User wants to run prediction.
            raw_data = Importer.mat(filename)

            # Make raw_data smaller. (Should be removed in future versions.)
            for row in raw_data:
                raw_data[row] = raw_data[row][:1]

            prediction = self.wrapper.predict(raw_data, verbose=True)
            print(list(prediction))
        else:
            # User doesn't want to run prediction.
            pass
def import_result(args, vars, objs, actual_time_steps, url, session_id):
    write_progress(1, steps)
    imp = Importer(vars, objs, actual_time_steps, url, session_id)

    write_progress(2, steps)
    imp.load_network(args.network, args.scenario)

    write_progress(3, steps)
    imp.import_res()

    write_progress(4, steps)
    imp.save()
def __init__(self, in_file, out_file):
    self.tm = Text_manipulator()
    self.f = Formatter()
    self.i = Importer()
    self.tm.silent = True
    self.out_file = out_file
    self.in_file = in_file
    self.links = dict()
    self.links["mail_client.py"] = "bash"
    #self.links["bash"] = "mail_client.py"
    self.links["tg_client.py"] = "bash"
    self.links["bash"] = "tg_client.py"
def import_result(args, vars, objs, actual_time_steps):
    write_progress(9, steps)
    imp = Importer(vars, objs, actual_time_steps, args.server_url,
                   args.session_id)

    write_progress(10, steps)
    imp.load_network(args.network, args.scenario)

    write_progress(11, steps)
    #imp.set_network(network)
    imp.import_res()

    write_progress(12, steps)
    imp.save()
def development(filename):
    # Import mat file.
    data_raw = Importer.mat(filename)

    # Make data_raw smaller.
    for row in data_raw:
        data_raw[row] = data_raw[row][:1]

    # Train.
    wrapper.development(data_raw)
def __init__(self):
    self.version = "0.5.4"
    self.utils = Utilities()
    self.save_path = "./controllers/nn/" + self.utils.getTimestamp() + "/"

    self.importer = Importer()
    self.exporter = Exporter(self.version)
    self.exporter.setSaveLocation(self.save_path)

    self.nnm = NeuralNetworkManager()
    self.nnm.setSaveLocation(self.save_path)
    self.staticController = StaticController()

    self.debug_mode = False
    self.importer.setDebugMode(False)
    self.nnm.setDebugMode(self.debug_mode)

    print("COTONN v" + self.version + "\n")
def __init__(self):
    space_files = ['2010_DGD.txt', '2011_DGD.txt', '2012_DGD.txt',
                   '2013_DGD.txt', '2014_DGD.txt', '2015_DGD.txt',
                   '2016_DGD.txt']  #, '2017_DGD.txt', '2018_DGD.txt']
    weather_files = ['Inari Nellim.csv', 'Rovaniemi Lentoasema.csv',
                     'Ranua lentokentta.csv', 'Vantaa Lentoasema.csv']

    self.Importer = Importer()
    while (not self.Importer.completed):
        self.Importer.import_all(weather_files, space_files)
    self.Importer.to_json('Datafile.json')
    #self.Importer.df = pd.read_json("Datafile.json")

    self.df_split = self.split_sets()
    print("Import done")

    self.result = self.create_output(self.Importer.df)
    self.RunAll(self.result)
    self.to_json()
    self.to_database()
def simulateADR():
    totalResults = []

    for file in files:
        print "Working on: " + file.split('/')[-2]

        # Initial data -------------------------------------------------------
        importer = Importer()
        importedData = importer.getInputData(file, samplingInterval)

        # Predicted data -----------------------------------------------------
        predictor = DRPredictor()
        predictedData = predictor.getPredictedData(importedData, predictionInterval,
                                                   samplingInterval)

        for index, delay in enumerate(delays):
            delayList = [delay] * len(predictedData)
            jitter = jitters[index]
            jitterList = [jitter] * len(predictedData)

            transmitter = ADRTransmitter(heartbeat)
            transmittedPackets = transmitter.getTransmittedPackets(
                minThreshold, maxThreshold, maxDelay, maxJitter,
                delayList, jitterList, predictedData)

            reconstructor = Reconstructor()
            transmittedData = reconstructor.getReconstructedData(transmittedPackets)

            result = PartBResult()
            result.movementType = findMovementType(file)
            result.threshold = 0
            result.txRate = calculateTransmissionRate(importedData, transmittedPackets)
            result.txError = calculateError(importedData, transmittedData)
            result.delay = delay
            result.jitter = jitter
            result.packetLoss = 0
            totalResults.append(result)

    return totalResults
def __init__(self):
    self.version = "2.0"
    self.utils = Utilities()
    self.save_path = "./nn/" + self.utils.getTimestamp() + "/"

    self.importer = Importer()
    self.exporter = Exporter(self.version)
    self.exporter.setSaveLocation(self.save_path)

    self.nnm = NeuralNetworkManager()
    self.nnm.setSaveLocation(self.save_path)
    self.staticController = StaticController()

    self.debug_mode = False
    self.importer.setDebugMode(False)
    self.nnm.setDebugMode(self.debug_mode)

    print("COTONN v" + self.version + "\n")

    self.encode = EncodeTypes.Boolean
    self.var_order = Ordering.PerCoordinate
    self.filename = ""
def train(self):
    # Let the user choose a file.
    # Only allows mat files to be read in.
    filename = QFileDialog.getOpenFileName(self, 'Open File', filter="*.mat")[0]
    if filename:
        # Make sure the user wants to train.
        choice = QMessageBox.question(
            self, 'Train',
            "Are you sure you want to train? If you click yes, the training might take long periods to train.",
            QMessageBox.Yes | QMessageBox.No)
        if choice == QMessageBox.Yes:
            # User wants to train.
            raw_data = Importer.mat(filename)

            # Make raw_data smaller. (Should be removed in future versions.)
            for row in raw_data:
                raw_data[row] = raw_data[row][:1]

            self.wrapper.train(data_raw=raw_data, shrink_percent=0.0, verbose=True)

            # Statistics obtained from the data
            global raw_signal
            global filtered_signal
            global chunked_X
            global chunked_Y
            global accuracy
            raw_signal = self.wrapper.bciObject.preprocessor.preprocess_statistics.raw_signal
            filtered_signal = self.wrapper.bciObject.preprocessor.preprocess_statistics.filtered_signal
            chunked_X = self.wrapper.bciObject.preprocessor.preprocess_statistics.chunked_X
            chunked_Y = self.wrapper.bciObject.preprocessor.preprocess_statistics.chunked_Y
            accuracy = self.wrapper.bciObject.prediction_model.model_statistics.accuracy
            #print(accuracy)

            msg = QMessageBox(self)
            msg.resize(500, 400)
            msg.setText("The training accuracy is %.3f" % accuracy)
            msg.show()
        else:
            # User doesn't want to train.
            pass
def __init__(self, mail, fichier):
    Tk.__init__(self)
    self.mail = mail
    self.fichier = fichier
    self.geometry("350x400")
    #self.liste_mails = ['mail1', 'mail2']
    #self.fichier.addListeMails(self.liste_mails)

    ## TOP ##
    top = PanedWindow(self)
    top_top = PanedWindow(top, orient=HORIZONTAL)
    top_bot = PanedWindow(top, orient=HORIZONTAL)

    dedoublonner = Button(top_top, text="Dédoublonner",
                          command=self.actionListener_dedoublonner).pack(side=LEFT)
    valider = Button(top_top, text="Valider",
                     command=self.actionListener_valider).pack(side=RIGHT)
    importer = Button(top_top, text="Importer",
                      command=lambda: Importer(self.fichier)).pack(side=LEFT)
    #import_csv = Button(top_bot, text="Import CSV", command=lambda: Import_CSV(self.fichier)).pack(side=LEFT)
    #import_url = Button(top_bot, text="Import URL", command=lambda: Import_URL(self.fichier)).pack(side=RIGHT)

    top_top.pack(side=TOP)
    top_bot.pack(side=BOTTOM)
    top.pack(side=TOP)

    ## BOT ##
    self.bot = PanedWindow(self)
    self.bottom = self.paint(self.bot, 0)
    self.bottom.pack(side=TOP)
    """
    for mail in mail.getDestinataires():
        panel = PanedWindow(bot, orient=HORIZONTAL)
        adresse_mail = Label(panel, text=mail).pack(side=LEFT)
        adresse_valide = Label(panel, text=" OK" if TestMail.isMail(mail) else "PAS OK").pack(side=LEFT)
        supprimer = Button(panel, text="X").pack(side=RIGHT)
        panel.pack(anchor="e")
    """
    self.bot.pack(side=TOP)

    suite = Button(self, text="Suite",
                   command=self.actionListener_suite).pack(side=BOTTOM)
def controller(self, index, team, year, gameNumber, league, players, starts,
               startingPitcher):
    if (self.getShowFunctions()):
        print("LineupCreator.controller")
    else:
        pass

    place = 0
    batters = []
    positions = []
    while place <= 8:
        batter = self.getPlayerRandomly(players[place], starts[place])
        tempBatters = self.ensurePlayerUniqueness(place, batter, batters, players)
        if (tempBatters != "restart"):
            batters = tempBatters
        else:
            #print ("restart")
            return (self.controller(index, team, year, gameNumber, league,
                                    players, starts, startingPitcher))

        if (place == 8 and "SP" not in positions
                and (self.__homeYear < 1973 or league == "National League")):
            #if it has come to the last spot in the order of a lineup under
            #National League rules and the pitcher has not yet been added,
            position = "SP"  #the next player must be a pitcher
        else:
            #print ("is " + batters[place] + " in")
            #print (self.__addedPlayers[index])
            if not (batters[place] in self.__addedPlayers[index]):  #if this is not an added player to the team
                #print ("no\n")
                position = Importer.getPositionIncrementally(
                    batters[place], self.getFieldingFile(), 1, league,
                    self.getShowFunctions())  #get their primary position
            else:  #if this is an added player to the team
                #print ("yes\n")
                for t in range(len(self.__addedPlayers[index])):  #loop through the added players list for this team
                    if (self.__addedPlayers[index][t] == batters[place]):  #if this player is the player in question
                        fieldingFile = File.teamFielding(
                            Importer.findPlayerTeam(
                                batters[place],
                                self.__additionalPlayers_YearList[index][t].get(),
                                False, self.getShowFunctions())[0],
                            self.__additionalPlayers_YearList[index][t].get()
                        )  #get the first team returned for purposes of finding their position
                        break
                    else:  #if this player is not the player in question
                        continue  #increment
                position = Importer.getPositionIncrementally(
                    batters[place], fieldingFile, 1, league,
                    self.getShowFunctions())

        batters, positions = self.ensurePositionUniqueness(
            index, 2, position, place, players, self.getBattingFile(),
            self.getFieldingFile(), league, batters, positions, year)
        if not (batters == "restart" and positions == "restart"):
            place += 1
        else:
            #print ("restart")
            return (self.controller(index, team, year, gameNumber, league,
                                    players, starts, startingPitcher))

    if (self.__homeYear < 1973 or league == "National League"):
        batters = self.addStartingPitcherToLineup(batters, positions, startingPitcher)
    else:
        pass

    output = []
    output.append(batters)
    output.append(positions)
    return (output)
def getPlayers(self, index, team, battingFile):
    if (self.getShowFunctions()):
        print("LineupCreator.getPlayers")
    else:
        pass

    hitters = []
    starts = []
    if not (team == "CTM"):  #if this team is not a custom team
        file = open(battingFile)
        for line in file:  #there's only one "line" in the batting order file, so this is a way to get a hold of it.
            batters = line
        file.close()

        battingList = batters.split(';')
        del battingList[-1]  #get rid of the last element, which is just a newline character returned by the split method.
        for i in range(len(battingList)):  #loop through the 9 spots in the order.
            hitters.append([])
            starts.append([])
            tempBattingList = battingList[i].split(',')  #split each spot in the order by its commas.
            for j in range(len(tempBattingList)):  #loop through the list that was created from the split in the line above.
                if (j % 2 == 0):  #even numbers will return players.
                    hitters[i].append(tempBattingList[j])
                else:  #odd numbers will return starts.
                    starts[i].append(tempBattingList[j])
    else:  #if this team is a custom team
        for s in range(9):  #add sublists for the part below to use
            hitters.append([])
            starts.append([])
        if (len(self.__addedPlayers[index]) == 0):  #if there are no added players
            pass  #do nothing
        else:  #if there are added players
            for i in range(len(self.__addedPlayers[index])):  #loop through all the added players for this team
                player = self.__addedPlayers[index][i]  #nail down the player we're looking at
                teams = Importer.findPlayerTeam(
                    player, self.__additionalPlayers_YearList[index][i].get(),
                    False, self.getShowFunctions())  #determine which teams they played on during this year
                for team in teams:  #loop through those teams
                    file = open(File.teamBatting(
                        team, self.__additionalPlayers_YearList[index][i].get()))  #open this team's batting file
                    spots = file.read().split(';')[:-1]  #split the whole file into individual spots in the lineup
                    for j in range(len(spots)):  #loop through each spot in the lineup
                        spot = spots[j].split(',')  #split this spot in the lineup in order to search through it for this player
                        for k in range(len(spot) // 2):  #loop through each player in this spot in the lineup
                            if (spot[k * 2] == player):  #if there's a match
                                higher = True  #initiate the while loop
                                counter = 0  #initiate a counter
                                while (higher):  #loop through the starts list
                                    if not (counter == len(starts[j])):  #if the loop hasn't traversed the entire list yet
                                        if (int(starts[j][counter]) <= int(spot[(k * 2) + 1])):  #if this player's starts at this spot in the lineup are >= the listed player's starts
                                            hitters[j].insert(counter, spot[k * 2])  #place the added player in this part of the lineup
                                            starts[j].insert(counter, spot[(k * 2) + 1])  #place the added player's number of starts into the starts list
                                            higher = False  #break the loop
                                        else:  #otherwise
                                            counter += 1  #increment
                                    else:  #if the loop has traversed the entire list
                                        hitters[j].append(spot[k * 2])  #append the player to the end of the hitters list at this spot in the lineup
                                        starts[j].append(spot[(k * 2) + 1])  #append the player's starts to the end of the starts list at this spot in the lineup
                                        higher = False  #break the loop
                            else:  #if there's not a match
                                continue  #go back to the top
                    file.close()

    hitters, starts = self.evenOutBattingOrder(hitters, starts)
    return (hitters, starts)
class COTONN:
    def __init__(self):
        self.version = "0.5.4"
        self.utils = Utilities()
        self.save_path = "./controllers/nn/" + self.utils.getTimestamp() + "/"

        self.importer = Importer()
        self.exporter = Exporter(self.version)
        self.exporter.setSaveLocation(self.save_path)

        self.nnm = NeuralNetworkManager()
        self.nnm.setSaveLocation(self.save_path)
        self.staticController = StaticController()

        self.debug_mode = False
        self.importer.setDebugMode(False)
        self.nnm.setDebugMode(self.debug_mode)

        print("COTONN v" + self.version + "\n")

    # Clean memory function
    def cleanMemory(self):
        del self.nnm.nn
        del self.nnm
        del self.staticController
        del self.exporter
        del self.importer

    # Generate MLP from fullset
    def fullSetMLP(self, filename, layer_width, layer_height, learning_rate,
                   dropout_rate, fitness_threshold, batch_size, display_step,
                   save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(fullSet)
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step, -1, 5000)
        self.nnm.getDataSize()

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)

        self.nnm.close()
        self.cleanMemory()

    # Generate MLP from subset
    def subSetMLP(self, filename, percentage, layer_width, layer_height,
                  learning_rate, dropout_rate, fitness_threshold, batch_size,
                  display_step, save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        subSet = DataSet()
        subSet.readSubsetFromController(self.staticController, percentage)
        subSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(subSet)
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step, -1, 5000)
        self.nnm.getDataSize()

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)

        self.nnm.close()
        self.cleanMemory()

    # Scout learning rate convergence
    def scoutLearningRateConvergence(self, filename, layer_width, layer_height,
                                     epoch_threshold, rates, batch_size,
                                     display_step):
        self.staticController = self.importer.readStaticController(filename)

        dataSet = DataSet()
        dataSet.readSetFromController(self.staticController)
        dataSet.formatToBinary()

        self.nnm.setDebugMode(False)

        fitnesses = []
        for r in rates:
            print("\nLearning rate: " + str(r))
            self.nnm.setType(NNTypes.MLP)
            self.nnm.setTrainingMethod(NNOptimizer.Adam)
            self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
            self.nnm.setDataSet(dataSet)
            self.nnm.rectangularHiddenLayers(layer_width, layer_height)
            self.nnm.initializeNeuralNetwork()
            self.nnm.initializeTraining(r, 1.0, batch_size, display_step,
                                        epoch_threshold)
            self.nnm.train()

            fitness, wrong_states = self.nnm.checkFitness(dataSet)
            fitnesses.append(fitness)

            self.nnm.close()

        # Plot
        plt.semilogx(rates, fitnesses, 'r-')
        plt.xlabel("Rates")
        plt.ylabel("Fitness")
        plt.grid()
        (x1, x2, y1, y2) = plt.axis()
        plt.axis((min(rates), max(rates), 0.0, y2 + 0.1))
        plt.show()

        self.cleanMemory()

    # Import a saved MLP and continue training it on the fullset
    def importMLP(self, import_path, filename, layer_width, layer_height,
                  learning_rate, dropout_rate, fitness_threshold, batch_size,
                  display_step, save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(fullSet)

        # Option to adjust parameters for new training session
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.rectangularHiddenLayers(layer_width, layer_height)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step)
        self.nnm.getDataSize()

        # Restore Network from saved file:
        self.importer.restoreNetwork(self.nnm, import_path)

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        # Save Network or Variables
        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)

        self.nnm.close()

    # Generate MLP from fullset with custom hidden layers
    def customFullSetMLP(self, filename, layer, learning_rate, dropout_rate,
                         fitness_threshold, batch_size, display_step,
                         save_option=True):
        self.staticController = self.importer.readStaticController(filename)

        fullSet = DataSet()
        fullSet.readSetFromController(self.staticController)
        fullSet.formatToBinary()

        self.nnm.setDebugMode(True)
        self.nnm.setType(NNTypes.MLP)
        self.nnm.setTrainingMethod(NNOptimizer.Adam)
        self.nnm.setActivationFunction(NNActivationFunction.Sigmoid)
        self.nnm.setDataSet(fullSet)
        self.nnm.setDropoutRate(dropout_rate)
        self.nnm.customHiddenLayers(layer)
        self.nnm.initialize(learning_rate, fitness_threshold, batch_size,
                            display_step, -1, 5000)
        self.nnm.getDataSize()

        # Train model and visualize performance
        self.nnm.train()
        self.nnm.plot()

        fitness, wrong_states = self.nnm.checkFitness(fullSet)
        self.nnm.randomCheck(fullSet)

        if (save_option):
            self.exporter.saveNetwork(self.nnm)
            self.exporter.saveWrongStates(wrong_states)
            self.exporter.saveMatlabMLP(self.staticController, self.nnm)
            self.exporter.saveBinary(self.nnm)

        self.nnm.close()
        self.cleanMemory()
from Importer import Importer
from sklearn import linear_model
import numpy as np

im = Importer()
train = im.get_training_set()
test = im.get_test_set()

for i in range(1):
    mu1 = train['mu1'][:, i]
    mu0 = train['mu0'][:, i]
    yf = train['yf'][:, i]
    ycf = train['ycf'][:, i]
    t = train['t'][:, i]
    x = train['x'][:, :, i]

    mask = []
    for num in t:
        if num == 0:
            mask.append(False)
        else:
            mask.append(True)

    lm_t = linear_model.LinearRegression()
    model_t = lm_t.fit(x[mask], yf[mask])
    lm_nt = linear_model.LinearRegression()
    model_nt = lm_nt.fit(x[np.invert(mask)], yf[np.invert(mask)])

    te = sum(lm_t.predict(test['x'][:, :, i])) / len(test['x'][:, :, i])
    nte = sum(lm_nt.predict(test['x'][:, :, i])) / len(test['x'][:, :, i])
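# --- Note (not part of the original sources) ---------------------------------
# The loop above fits one regression on treated units and one on untreated
# units (a T-learner) but never reports the estimate it computes. Under that
# reading, te - nte is the estimated average treatment effect for replication
# i, so a plausible final line (not in the original) is:
print('Estimated ATE:', te - nte)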
def sync(uuid):
    Importer(uuid)
    return "ok"
import sys
import os
import shelve
import dbm

sys.path.append("/root/develop/suidev/server/")
sys.path.append("/root/develop/suidev/server/BlenderImporter/")

print("Blender Script run at %s" % sys.executable)
print("Script File path is %s" % sys.path[0])

from Importer import Importer
from Config import Config

#render_task = shelve.Shelf(dbm.open(Config.data_path + "temp/render", 'r'))
render_task = shelve.open(Config.data_path + "temp/render", protocol=2,
                          writeback=True)
kind = render_task["kind"]
render_task.close()

importer = Importer()
importer.Render(kind)
from Importer import Importer

im = Importer()
for t in im.get_test_set()['t']:
    print(t)
delay = 150
jitter = 80
packetLoss = 0

# Viewing parameters
plotTimeDomain = True
plotFreqDomain = True
plotPhaseResponse = True
useDb = True
lowerBoundTime = 17000  #17000
upperBoundTime = 22000  #22000
lowerBoundFreq = 0
upperBoundFreq = 400

for fileName in files:
    # Import data
    print "Importing data..."
    importer = Importer()
    inputData = importer.getInputData(fileName, samplingInterval)
    print "First: " + str(inputData[0])
    print "Last: " + str(inputData[-1])
    print

# ---Code unfinished---
# Not sure how to accomplish this task
# a) Stitching together different data sets seems like it would cause high frequency noise
# TODO -> Look into this problem and solve if applicable to the paper
class ML():
    def __init__(self):
        space_files = ['2010_DGD.txt', '2011_DGD.txt', '2012_DGD.txt',
                       '2013_DGD.txt', '2014_DGD.txt', '2015_DGD.txt',
                       '2016_DGD.txt']  #, '2017_DGD.txt', '2018_DGD.txt']
        weather_files = ['Inari Nellim.csv', 'Rovaniemi Lentoasema.csv',
                         'Ranua lentokentta.csv', 'Vantaa Lentoasema.csv']

        self.Importer = Importer()
        while (not self.Importer.completed):
            self.Importer.import_all(weather_files, space_files)
        self.Importer.to_json('Datafile.json')
        #self.Importer.df = pd.read_json("Datafile.json")

        self.df_split = self.split_sets()
        print("Import done")

        self.result = self.create_output(self.Importer.df)
        self.RunAll(self.result)
        self.to_json()
        self.to_database()

    ### Linear Regression ###
    def RunAll(self, result):
        res = []
        for i in range(len(self.Importer.stations)):
            df = result.iloc[:, [0, 1, 2, (i * 2 + 3), (i * 2 + 4), -1]]
            # + result.iloc[:, [(i*2+3),(i*2+4)]] + result.iloc[:, [-1]]
            #print(df.head())
            #self.LinReg(self.df_split[i][0], self.df_split[i][1])
            x = self.RandForest(self.df_split[i][0], self.df_split[i][1], df)
            result[self.Importer.stations[i] + ' Proba'] = x
            print(x)
        print(result.head())
        print(result.columns)
        return res

    def split_sets(self, weather=False):
        self.Importer.df = self.Importer.df.reset_index()
        complete_data = []
        for name in self.Importer.stations:
            data = []
            target = []
            for index, row in self.Importer.df.iterrows():
                if weather:  #only weather prediction
                    target.append(row[name])
                    data.append(np.array([
                        row['Inari Nellim_m'],
                        row['Inari Nellim_d'],
                        row['Inari Nellim_Time'],
                        row[name + '_Horizontal visibility (m)'],
                        row[name + '_Cloud amount (1/8)']
                    ]))
                else:
                    target.append(row[name + ' Overall'])
                    data.append(np.array([
                        row['Inari Nellim_m'],
                        row['Inari Nellim_d'],
                        row['Inari Nellim_Time'],
                        row[name + '_Horizontal visibility (m)'],
                        row[name + '_Cloud amount (1/8)'],
                        row['College']
                    ]))
            complete_data.append((data, target))
        return complete_data

    def LinReg(self, data, target):
        x_train, x_test, y_train, y_test = train_test_split(
            data, target, test_size=0.2, random_state=0)
        print(x_train[0], y_train[0])
        model = LogisticRegression()
        model.fit(x_train, y_train)
        score = model.score(x_test, y_test)
        predictions = model.predict(x_test[:1])
        print('LinReg: ', score)
        print('Pred: ', predictions)

    def RandForest(self, data, target, result):
        x_train, x_test, y_train, y_test = train_test_split(
            data, target, test_size=0.2, random_state=0)
        #print(np.any(np.isnan(x_train)), np.all(np.isfinite(x_train)))
        clf = RandomForestClassifier(n_estimators=200)
        clf.fit(x_train, y_train)
        score = clf.score(x_test, y_test)
        #proba = clf.predict_proba(result)
        proba = clf.predict_proba(result)[:, 1]
        print('Predict ', clf.predict(result))
        print('Rand', score)
        return proba

    def to_database(self):
        conn = sqlite3.connect(self.Importer.db_name)
        self.result.to_sql("data", conn, if_exists="replace")
        conn.close()
        print("Modelcreation complete")

    def to_json(self):
        self.result.to_json("result.json")
        print("Result written")

    def create_output(self, df):
        #df[self.Importer.df['Inari Nellim_Cloud amount (1/8)'].isnull()]
        data = []
        for month in range(1, 13):
            month_df = df[df['Inari Nellim_m'] == month]
            for day in range(1, 32):
                day_df = month_df[month_df['Inari Nellim_d'] == day]
                for time in range(0, 22, 3):
                    temp_df = day_df[day_df['Inari Nellim_Time'] == time]
                    if not temp_df.empty:
                        row = {
                            'Month': month,
                            'Day': day,
                            'Time': time,
                            'Inari Nellim_Cloud amount (1/8)':
                                temp_df['Inari Nellim_Cloud amount (1/8)'].mode()[0],
                            'Inari Nellim_Horizontal visibility (m)':
                                int(temp_df['Inari Nellim_Horizontal visibility (m)'].mean()),
                            'Rovaniemi Lentoasema_Cloud amount (1/8)':
                                temp_df['Rovaniemi Lentoasema_Cloud amount (1/8)'].mode()[0],
                            'Rovaniemi Lentoasema_Horizontal visibility (m)':
                                int(temp_df['Rovaniemi Lentoasema_Horizontal visibility (m)'].mean()),
                            'Ranua lentokentta_Cloud amount (1/8)':
                                temp_df['Ranua lentokentta_Cloud amount (1/8)'].mode()[0],
                            'Ranua lentokentta_Horizontal visibility (m)':
                                int(temp_df['Ranua lentokentta_Horizontal visibility (m)'].mean()),
                            'Vantaa Lentoasema_Cloud amount (1/8)':
                                temp_df['Vantaa Lentoasema_Cloud amount (1/8)'].mode()[0],
                            'Vantaa Lentoasema_Horizontal visibility (m)':
                                int(temp_df['Vantaa Lentoasema_Horizontal visibility (m)'].mean()),
                            'College': temp_df['College'].mode()[0]
                        }
                        data.append(row)
            print('One Month done, %s remaining' % (12 - month))
        result = pd.DataFrame(data)
        print(result.head())
        return result
__author__ = 'pascal'

from Importer import Importer
from NaiveBayes import NaiveBayes

importer = Importer()
print('Loading stop words')
importer.add_stop_words('data/stopwords/german/')

# Importing training sets
training_data = []
print('Loading training data')
training_data.append(importer.extract_training_data('data/politik/', label='politik'))
training_data.append(importer.extract_training_data('data/wirtschaft/', label='wirtschaft'))
training_data.append(importer.extract_training_data('data/sport/', label='sport'))

nb = NaiveBayes()
print('Training')
nb.train(training_data)

# Importing test sets
test_data = []
print('Loading test data')
test_data.append(importer.extract_test_data('data/politik/', label='politik'))
test_data.append(importer.extract_test_data('data/sport/', label='sport'))
test_data.append(importer.extract_test_data('data/wirtschaft/', label='wirtschaft'))

print('Testing')
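# --- Usage sketch (not part of the original sources) -------------------------
# The script ends before the actual evaluation. A minimal sketch of the test
# loop, assuming each test set is an iterable of (document, label) pairs and
# that NaiveBayes exposes a classify() method -- both are assumptions about the
# API, not confirmed by the source.
correct = 0
total = 0
for dataset in test_data:
    for document, label in dataset:
        total += 1
        if nb.classify(document) == label:  # hypothetical method name
            correct += 1
print('Accuracy: %.3f' % (correct / float(total)))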
def startup(args: argparse.Namespace, **kwargs: Dict[str, Any]) -> None:
    global announce, dispatcher, group, httpServer, notification, validator
    global registration, remote, request, security, statistics, storage, event
    global rootDirectory
    global aeStatistics
    global supportedReleaseVersions, cseType, defaultSerialization, cseCsi, cseRi, cseRn
    global cseOriginator
    global isHeadless

    rootDirectory = os.getcwd()  # get the root directory
    os.environ["FLASK_ENV"] = "development"  # get rid of the warning message from flask.
    # Hopefully it is clear at this point that this is not a production CSE

    # Handle command line arguments and load the configuration
    if args is None:
        args = argparse.Namespace()  # In case args is None create a new args object and populate it
        args.configfile = None
        args.resetdb = False
        args.loglevel = None
        args.headless = False
        for key, value in kwargs.items():
            args.__setattr__(key, value)
    isHeadless = args.headless

    if not Configuration.init(args):
        return

    # Initialize configurable constants
    supportedReleaseVersions = Configuration.get('cse.supportedReleaseVersions')
    cseType = Configuration.get('cse.type')
    cseCsi = Configuration.get('cse.csi')
    cseRi = Configuration.get('cse.ri')
    cseRn = Configuration.get('cse.rn')
    cseOriginator = Configuration.get('cse.originator')

    defaultSerialization = Configuration.get('cse.defaultSerialization')

    # init Logging
    Logging.init()
    if not args.headless:
        Logging.console('Press ? for help')
    Logging.log('============')
    Logging.log('Starting CSE')
    Logging.log(f'CSE-Type: {cseType.name}')
    Logging.log('Configuration:')
    Logging.log(Configuration.print())

    # Initialize the resource storage
    storage = Storage()

    # Initialize the event manager
    event = EventManager()

    # Initialize the statistics system
    statistics = Statistics()

    # Initialize the registration manager
    registration = RegistrationManager()

    # Initialize the resource validator
    validator = Validator()

    # Initialize the resource dispatcher
    dispatcher = Dispatcher()

    # Initialize the request manager
    request = RequestManager()

    # Initialize the security manager
    security = SecurityManager()

    # Initialize the HTTP server
    httpServer = HttpServer()

    # Initialize the notification manager
    notification = NotificationManager()

    # Initialize the group manager
    group = GroupManager()

    # Import a default set of resources, e.g. the CSE, first ACP or resource structure
    # Import extra attribute policies for specializations first
    importer = Importer()
    if not importer.importAttributePolicies() or not importer.importResources():
        return

    # Initialize the remote CSE manager
    remote = RemoteCSEManager()

    # Initialize the announcement manager
    announce = AnnouncementManager()

    # Start AEs
    startAppsDelayed()  # the Apps are actually started after the CSE finished the startup

    # Start the HTTP server
    event.cseStartup()  # type: ignore
    httpServer.run()  # This does return (!)

    Logging.log('CSE started')
    if isHeadless:
        # when in headless mode give the CSE a moment (2s) to experience fatal errors before printing the start message
        BackgroundWorkerPool.newActor(
            delay=2,
            workerCallback=lambda: Logging.console('CSE started')
            if not shuttingDown else None).start()

    #
    # Enter an endless loop.
    # Execute keyboard commands in the keyboardHandler's loop() function.
    #
    commands = {
        '?': _keyHelp,
        'h': _keyHelp,
        '\n': lambda c: print(),  # 1 empty line
        '\x03': _keyShutdownCSE,  # See handler below
        'c': _keyConfiguration,
        'C': _keyClearScreen,
        'D': _keyDeleteResource,
        'i': _keyInspectResource,
        'l': _keyToggleLogging,
        'Q': _keyShutdownCSE,  # See handler below
        'r': _keyCSERegistrations,
        's': _keyStatistics,
        't': _keyResourceTree,
        'T': _keyChildResourceTree,
        'w': _keyWorkers,
    }

    # Endless runtime loop. This handles key input & commands
    # The CSE's shutdown happens in one of the key handlers below
    loop(commands, catchKeyboardInterrupt=True, headless=args.headless)
    shutdown()
def ensurePositionUniqueness(self, index, attempt, position, place, players,
                             battingFile, positionFile, league, batters,
                             positions, year):
    if (self.getShowFunctions()):
        print("LineupCreator.ensurePositionUniqueness")
    else:
        pass

    #print (batters)
    #print (positions)
    if ((position in positions) or (position in {"none", "RP"})
            or (self.__homeYear >= 1973 and league == "American League"
                and position == "SP")):
        #print ("is " + batters[place] + " in")
        #print (self.__addedPlayers[index])
        if not (batters[place] in self.__addedPlayers[index]):  #if this is not an added player to the team
            #print ("no\n")
            position = Importer.getPositionIncrementally(
                batters[place], self.getFieldingFile(), attempt, league,
                self.getShowFunctions())
        else:  #if this is an added player to the team
            #print ("yes\n")
            for t in range(len(self.__addedPlayers[index])):  #loop through the added players list for this team
                if (self.__addedPlayers[index][t] == batters[place]):  #if this player is the player in question
                    fieldingFile = File.teamFielding(
                        Importer.findPlayerTeam(
                            batters[place],
                            self.__additionalPlayers_YearList[index][t].get(),
                            False, self.getShowFunctions())[0],
                        self.__additionalPlayers_YearList[index][t].get()
                    )  #get the first team returned for purposes of finding their position
                    break
                else:  #if this player is not the player in question
                    continue  #increment
            position = Importer.getPositionIncrementally(
                batters[place], fieldingFile, attempt, league,
                self.getShowFunctions())

        if (position == "none"):
            newBatter = self.getPlayerIncrementally(players[place], batters[place])
            if (newBatter != "restart"):
                batters = self.ensurePlayerUniqueness(place, newBatter, batters, players)
                if (batters != "restart"):
                    attempt = 1
                    #print ("is " + batters[place] + " in")
                    #print (self.__addedPlayers[index])
                    if not (batters[place] in self.__addedPlayers[index]):  #if this is not an added player to the team
                        #print ("no\n")
                        newPlayerPosition = Importer.getPositionIncrementally(
                            batters[place], self.getFieldingFile(), attempt,
                            league, self.getShowFunctions())
                    else:  #if this is an added player to the team
                        #print ("yes\n")
                        for t in range(len(self.__addedPlayers[index])):  #loop through the added players list for this team
                            if (self.__addedPlayers[index][t] == batters[place]):  #if this player is the player in question
                                fieldingFile = File.teamFielding(
                                    Importer.findPlayerTeam(
                                        batters[place],
                                        self.__additionalPlayers_YearList[index][t].get(),
                                        False, self.getShowFunctions())[0],
                                    self.__additionalPlayers_YearList[index][t].get()
                                )  #get the first team returned for purposes of finding their position
                                break
                            else:  #if this player is not the player in question
                                continue  #increment
                        newPlayerPosition = Importer.getPositionIncrementally(
                            batters[place], fieldingFile, attempt, league,
                            self.getShowFunctions())
                    return (self.ensurePositionUniqueness(
                        index, 2, newPlayerPosition, place, players,
                        battingFile, positionFile, league, batters, positions,
                        year))
                else:
                    return ("restart", "restart")
            else:
                return ("restart", "restart")
        else:
            return (self.ensurePositionUniqueness(
                index, attempt + 1, position, place, players, battingFile,
                positionFile, league, batters, positions, year))
    else:
        if (place == len(positions)):
            positions.append(position)
        else:
            positions[place] = position
        return (batters, positions)
        # Test predictions (the leading arguments of this call are truncated in
        # the source; xtest_tfidf and self.y_test are assumed by analogy with
        # the cross-validation call below)
        self.check_model(classifier, xtest_tfidf, self.y_test, model_name,
                         features, 'tfidf_char', 'test')
        # Cross Validation predictions
        self.check_model(classifier, xcross_tfidf, self.y_cross, model_name,
                         features, 'tfidf_char', 'cross')

    def get_and_print_all_scores(self):
        print('Running for count_vectors')
        for i in range(500, 5000, 500):
            self.count_vectors(i)
            self.tfidf_words(i)
            self.tfidf_ngram(i)
            self.tfidf_char(i)


imp = Importer()
trainDF = imp.ImportFuncs.read_csv_into_dataframe(
    'csv_classification/Multi-class/classified_sentences_all.csv')
prePro = PreProcessor()
trainDF = prePro.clean_dataframe_for_training(trainDF)
print(trainDF.head())

a = MultiClassifier(trainDF)
a.get_and_print_all_scores()
print(a.all_scores)

exp = Exporter()
exp.create_csv_scores(a.all_scores, 'all_scores_cleaned')
class Processor():
    def __init__(self, in_file, out_file):
        self.tm = Text_manipulator()
        self.f = Formatter()
        self.i = Importer()
        self.tm.silent = True
        self.out_file = out_file
        self.in_file = in_file
        self.links = dict()
        self.links["mail_client.py"] = "bash"
        #self.links["bash"] = "mail_client.py"
        self.links["tg_client.py"] = "bash"
        self.links["bash"] = "tg_client.py"

    def simple_spawn(self, f, arg):
        #simple because it could also be implemented as a multicore/multimachine
        #distributed computation technology using multiprocessing.Manager
        try:
            from multiprocessing import Process
            p = Process(target=f, args=(arg, ))
            p.start()
        except:
            raise

    def inParse(self, raw):
        #print "\n"
        #print "[p] parsing..."
        raw_list = list()
        listified = self.f.listfy(raw, "\n")
        for line in listified:
            meta_data, data = self.f.filter_in(line)
            if meta_data == None:
                if raw_list != list():
                    raw_list[-1][1] = raw_list[-1][1] + "\n" + data
                else:
                    #print "-databug in tg_client_API at ln 58 in inParse()-"
                    #print data
                    #print type(data)
                    pass
            else:
                raw_list.append([self.metaInParse(meta_data), data])
        return raw_list

    def outParse(self, meta, data):
        return self.f.filter_out(data, meta)  #switch?

    def metaInParse(self, meta):
        #meta: [i/o] [n] [program_name]
        print  #"\n"
        print  #"[p] meta-parsing [%s ]" % (str(meta))
        return meta.split(" ")

    def metaOutParse(self, meta):
        return self.f.stringfy(meta, " ")

    def identyParse(self, data):
        #maybe it can be an API
        pass

    def metaIdentyParse(self, meta):
        data_type, istance, prog_name = meta
        if data_type == "o":
            return True
        else:
            return False

    def dataParse(self, data, meta):
        #parsing prog_data oriented to op_data oriented
        try:
            data_type, istance, prog_name = meta
            Parser = self.i._get(prog_name.replace(".", "_") + "_API.py")
            return Parser.P2oParser.Parse(data, meta)
        except:
            raise

    def linker(self, meta):
        data_type, istance, prog_name = meta
        if data_type == "i":
            data_type = "o"
        else:
            data_type = "i"
        prog_name = self.links[prog_name]
        return data_type, istance, prog_name

    def compute(self, raw):
        #bot part:
        #  -[status]> coded but not implemented as compatible script
        #AI part: <- AI stands for artificial intelligence, but can stand for anarchy/ist intelligence
        #  -[status]> not coded and not implemented, suggest: use pybrain neural networks
        #
        #print "\n"
        #print "[c] computing [%s ]" % (str(raw))
        meta = raw[0]
        data = raw[1]
        #if not self.IdentyParse(data) and or self.metaIdentyParse(meta): return
        if not self.metaIdentyParse(meta):
            return
        data = self.dataParse(data, meta)
        meta = self.linker(meta)  #that does the trick
        data = self.dataParse(data, meta)
        data = self.outParse(self.metaOutParse(meta), data)
        self.output(data)
        print data

    def output(self, data):
        try:
            #print "\n"
            #print "[o] %s" % (str(data))
            self.tm.Write_as_output(self.out_file, data)
        except:
            raise

    def server(self):
        print "[!] running..."
        while True:
            try:
                raw = self.tm.Read_as_input(self.in_file)
                #print "\n"
                #print "[i] %s" % (str(raw))
                raw_list = self.inParse(raw)
                for raw in raw_list:
                    self.simple_spawn(self.compute, raw)
            except KeyboardInterrupt:
                print "\n"
                print "[!] server stopping..."
                break
            except:
                raise
        print "[!] server stopped"
        exit()
# Filesystem parameters
dataRoot = "/Users/fstakem/Data/Movements_5_1_08/"
root = "/Users/fstakem/Research/PhD/2010_Research/OptimalFiltering/"
logDir = root + "code/log/"
outputDir = root + "code/output/"
movement = "Stacking"
inputFile = dataRoot + movement + "/Simulation" + str(1) + "/positionLog.txt"

# Parameters for all algorithms
samplingInterval = 10
numPoints = 131072

# Importing the raw data
print "Importing data..."
importer = Importer()
rawInputData = importer.getInputData(inputFile, samplingInterval)
s.exportData(logDir + "RawInputData.txt", rawInputData)

# Filtering input data
print "Filtering data..."
samplingFreq = int(1e3 / samplingInterval)
taps = 80
bands = [0.0, 10, 11, 50.0]
weights = [1, 0]
coefficients = scipy.signal.remez(taps, bands, weights, Hz=samplingFreq)
gain = 1.0 / sum(coefficients)
filteredInputData = s.filterData(rawInputData, logDir, "cc", samplingInterval,
                                 coefficients)[0]
filteredInputData = s.amplifyData(filteredInputData, gain)
s.exportData(logDir + "FilteredInputData.txt", filteredInputData)
import os

from Configuration import Configuration
from Importer import Importer

os.environ['PYSPARK_PYTHON'] = '/usr/bin/python3.5'
os.environ['PYSPARK_DRIVER_PYTHON'] = '/usr/bin/python3.5'

cfg = Configuration.configuration('config.yml')
importer = Importer(cfg)
importer.process(cfg['dates']['startDate'], cfg['dates']['endDate'])
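# --- Note (not part of the original sources) ---------------------------------
# The schema of config.yml is not shown; the two keys accessed above imply at
# least the following structure (illustrative values only):
cfg_example = {
    'dates': {
        'startDate': '2018-01-01',
        'endDate': '2018-01-31',
    },
}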
def startup(args: argparse.Namespace, **kwargs: Dict[str, Any]) -> None:
    global announce, dispatcher, group, httpServer, notification, validator
    global registration, remote, security, statistics, storage, event
    global rootDirectory
    global aeStatistics

    rootDirectory = os.getcwd()  # get the root directory
    os.environ["FLASK_ENV"] = "development"  # get rid of the warning message from flask.
    # Hopefully it is clear at this point that this is not a production CSE

    # Handle command line arguments and load the configuration
    if args is None:
        args = argparse.Namespace()  # In case args is None create a new args object and populate it
        args.configfile = None
        args.resetdb = False
        args.loglevel = None
        for key, value in kwargs.items():
            args.__setattr__(key, value)
    if not Configuration.init(args):
        return

    # init Logging
    Logging.init()
    Logging.log('============')
    Logging.log('Starting CSE')
    Logging.log('CSE-Type: %s' % C.cseTypes[Configuration.get('cse.type')])
    Logging.log(Configuration.print())

    # Initialize the resource storage
    storage = Storage()

    # Initialize the event manager
    event = EventManager()

    # Initialize the statistics system
    statistics = Statistics()

    # Initialize the registration manager
    registration = RegistrationManager()

    # Initialize the resource validator
    validator = Validator()

    # Initialize the resource dispatcher
    dispatcher = Dispatcher()

    # Initialize the security manager
    security = SecurityManager()

    # Initialize the HTTP server
    httpServer = HttpServer()

    # Initialize the notification manager
    notification = NotificationManager()

    # Initialize the announcement manager
    announce = AnnouncementManager()

    # Initialize the group manager
    group = GroupManager()

    # Import a default set of resources, e.g. the CSE, first ACP or resource structure
    importer = Importer()
    if not importer.importResources():
        return

    # Initialize the remote CSE manager
    remote = RemoteCSEManager()
    remote.start()

    # Start AEs
    startAppsDelayed()  # the Apps are actually started after the CSE finished the startup

    # Start the HTTP server
    event.cseStartup()  # type: ignore
    Logging.log('CSE started')
    httpServer.run()  # This does NOT return
movement = "Stacking" simulationNumber = 1 inputFile = dataRoot + movement + "/Simulation" + str(simulationNumber) + "/positionLog.txt" # Test parameters samplingInterval = 1 # Viewing parameters plotTimeDomain = True plotFreqDomain = True plotPhaseResponse = True lowerFreqBound = -30 upperFreqBound = 30 # Import data importer = Importer() data = importer.getInputData(inputFile, samplingInterval) # Split data into components time = range(0, len(data) * samplingInterval, samplingInterval) x = [] y = [] z = [] for sample in data: x.append(sample.position.x) y.append(sample.position.y) z.append(sample.position.z) # Do the fft on the signals -> output is complex number fftX = scipy.fft(x)
class TemplateFileCreator:
    def __init__(self, fileName="", defaultFileExtension=""):
        self._settings = None
        self.fileSystem = FileSystem()
        self._fileComponents = MirroredDirectory()
        self.importer = Importer()
        self._templateDir = None
        self.set(fileName, defaultFileExtension)

    def set(self, fileName, defaultFileExtension=""):
        if DEBUG:
            print("TemplateFileCreator: setting dir to: '" + fileName + "'")
        self._fileComponents.set(fileName)
        if DEBUG:
            print("TemplateFileCreator: dir set to: '" +
                  str(self._fileComponents.getOriginalFileName()) + "'")
        self._cursors = []

    def createFromTemplate(self):
        templatePath = os.path.join(self.getTemplateDir(),
                                    self._fileComponents.getExtension(),
                                    self.classifyKind() + ".template")
        variablePath = os.path.join(self.getTemplateDir(),
                                    self._fileComponents.getExtension(),
                                    self.classifyKind() + ".variables")
        functionPath = os.path.join(self.getTemplateDir(),
                                    self._fileComponents.getExtension(),
                                    "functions.py")
        templateContent = self.fileSystem.getFileContent(templatePath)
        variableContent = self.fileSystem.getFileContent(variablePath)
        functionCollectionObject = self.importer.getObjectInstance(functionPath, "FunctionCollection")()
        content = self.getReplacementContent(templateContent, variableContent,
                                             functionCollectionObject)
        if DEBUG:
            print("TemplateFileCreator: creating file: " +
                  self._fileComponents.getOriginalFileName())
        return self.fileSystem.createFile(self._fileComponents.getOriginalFileName(), content)

    def setBasePath(self, basePath):
        self._fileComponents.setBasePath(basePath)

    def setSettings(self, settings):
        self._settings = settings

    def setTemplateDir(self, templateDir):
        self._templateDir = templateDir

    def getCursors(self):
        return self._cursors

    def getFileName(self):
        return self._fileComponents.getOriginalFileName()

    def setDefaultExtension(self, fileExtension):
        self._fileComponents.setDefaultExtension(fileExtension)

    def getArgsDictFromVarContent(self, VarContent):
        result = dict()
        try:
            varDictionary = ast.literal_eval(VarContent)
        except:
            raise TypeError("the content of VarContent could not be converted to a dict.")
        for templateVar in varDictionary:
            variableName = templateVar["variable"]
            settingsValues = dict()
            if "fromSettings" in templateVar:
                for settingsVariable in templateVar["fromSettings"]:
                    settingsValues[settingsVariable] = self._settings.get(settingsVariable)
            args = dict()
            args["settings"] = str(settingsValues)
            args["name"] = variableName
            args["dir"] = self._fileComponents.getFileName()
            #args["basePath"] = ""
            args["command"] = templateVar["command"]
            result[variableName] = args
        return result

    def getReplacements(self, args, functionCollectionObject):
        # TODO: this check has loopholes...
        if isinstance(functionCollectionObject, (int, float, complex, str)) or functionCollectionObject is None:
            raise Exception("The functionCollectionObject argument must be an instance of an object, " +
                            str(type(functionCollectionObject)) + " passed instead.")
        result = dict()
        for name, arg in Std.getIterItems(args):
            function = getattr(functionCollectionObject, arg["command"])
            result["/* @" + name + " */"] = function(arg)
        return result

    def getCursorsFromContent(self, templateContent):
        lines = templateContent.splitlines()
        cursorString = "/* @cursor */"
        lineNbr = 0
        cursors = []
        for line in lines:
            while cursorString in line:
                row = line.find(cursorString)
                line = line[:row] + line[row + len(cursorString):]
                cursors.append((lineNbr, row))
            lineNbr += 1
        return cursors

    def getSearchStringForNone(self, templateContent, searchString):
        regexSearchString = searchString.replace("/", "\\/")
        regexSearchString = regexSearchString.replace("*", "\\*")
        regexString = ".*(" + regexSearchString + ").*\\n?\\r?"
        match = re.search(regexString, templateContent)
        if match:
            line = match.group()
            lineRemoved1 = line.replace(searchString, "")
            lineRemoved2 = lineRemoved1.replace("*", "")
            lineRemoved3 = lineRemoved2.replace("/", "")
            lineRemoved4 = lineRemoved3.replace("#", "")
            lineRemoved5 = lineRemoved4.replace("\"\"\"", "")
            lineRemoved6 = lineRemoved5.replace("'''", "")
            lineRemoved7 = lineRemoved6.strip(' \n\r\t')
            if len(lineRemoved7) < 1:
                searchString = line
        return searchString

    def getReplacementContent(self, templateContent, variableContent, functionCollectionObject):
        args = self.getArgsDictFromVarContent(variableContent)
        replacements = self.getReplacements(args, functionCollectionObject)
        for searchString, replacement in Std.getIterItems(replacements):
            if replacement is None:
                replacement = ""
                searchString = self.getSearchStringForNone(templateContent, searchString)
            templateContent = templateContent.replace(searchString, replacement)
        self._cursors = self.getCursorsFromContent(templateContent)
        templateContent = templateContent.replace("/* @cursor */", "")
        return templateContent

    def getTemplateDir(self):
        return self._templateDir

    def classifyKind(self):
        return self._fileComponents.getKind()

    def setKind(self, kind):
        self._fileComponents.setKind(kind)