def run(self):
    # Read file
    file_reader = FileReader(self.file_path)
    file_reader.parse()
    # Store output in data object
    data = Data(raw_data=file_reader.result)
    # Sort data
    sorter = Sorter(raw_data=file_reader.result)
    sorter.create_time_employee()
    sorter.create_time_sorted()
    # Store sorted data in data object
    data.time_employee = sorter.time_employee
    data.time_sorted = sorter.time_sorted
    # Create statistics
    stats = StatsGenerator(data)
    stats.calculate_average_working_time()
    # Write data to files
    file_writer = FileWriter(data, stats.get_statistics)
    file_writer.create_stats_output()
    file_writer.create_data_output()
    file_writer.write2json()
    file_writer.write2cvs()
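# Hedged usage sketch for the run() pipeline above. The enclosing class name
# (Pipeline) and the input file name are assumptions for illustration only;
# the original snippet only shows run() and self.file_path.
if __name__ == "__main__":
    pipeline = Pipeline(file_path="working_times.txt")  # hypothetical class and input file
    pipeline.run()  # parses, sorts, computes statistics, and writes the JSON/CSV output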
def save_data(data, out_format, destination):
    '''
    Saves the hotels list to a file
    :param data: hotels list
    :param out_format: json, csv or excel
    :param destination: output file location
    :return:
    '''
    writer = FileWriter(data, out_format, destination)
    file = writer.output_file()
    print('All accommodations are saved. You can find them in', file, 'file')
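# Minimal call sketch for save_data(). The shape of the hotel records and the
# destination path are assumptions made up for illustration; only the
# parameter names and the json/csv/excel formats come from the snippet above.
hotels = [{"name": "Hotel Astoria", "city": "Vienna", "price": 120}]
save_data(hotels, out_format="json", destination="hotels_output")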
def __decodeImage__(self, frames):
    """
    decode ZMQ image frames and save as .tif
    """
    header = json.loads(frames[0].bytes)
    data = FileWriter.__decodeImage__(self, frames)  # read back image data via the parent class
    if self.dtype:
        data = data.astype(self.dtype)
    if len(frames) == 5:
        self.metadata["appendix"] = frames[4].bytes
        self.metadata["real_time"] = json.loads(frames[3].bytes)["real_time"]
    threading.Thread(target=self.saveImage,
                     args=(data, header["series"], header["frame"], self.metadata)).start()
    return data
def __init__(self, basename, path, verbose=False):
    self.basename = basename
    self.path = path
    self.__verbose__ = verbose
    self.ftype = ".raw"
    FileWriter.__init__(self, basename, path, self.ftype, verbose)
def __init__(self, basename, path, verbose=False): self.basename = basename self.path = path self._verbose = verbose self.ftype = ".cbf" self.series = 0 self.metadata = {} FileWriter().__init__(basename, path, self.ftype, verbose)
def __init__(self, basename, path, verbose=False): self.basename = basename self.path = path self._verbose = verbose self.ftype = ".tiff" #for pixelmask and flatfield self.series = 0 self.metadata = {} self.data = np.zeros((1000,1000)) FileWriter().__init__(basename, path, self.ftype, verbose) threading.Thread(target=self.openViewer()).start()
def __init__(self, basename, path, verbose=False): self.basename = basename self.path = path self._verbose = verbose self.ftype = ".tiff" #for pixelmask and flatfield self.series = 0 self.metadata = {} self.openViewer() FileWriter().__init__(basename, path, self.ftype, verbose)
def __init__(self, basename, path, verbose=False):
    self.basename = basename                       # file basename
    self.path = path                               # file path
    self.__verbose__ = verbose                     # verbosity
    self.ftype = ".bytes"                          # file extension
    FileWriter.__init__(self, basename, path, self.ftype, verbose)  # FileWriter init routine
    self.filename = os.path.join(path, basename)   # create filename
    self.index = 0                                 # file index
def run(self, exit):
    """Initialization of every subprocess. Inside a loop it is checked whether
    every subprocess is still running. If not, every subprocess is closed and
    the program closes itself.

    Parameters:
    exit: Used to enable this method to tell gui.py whether the whole program
          should be closed, and vice versa"""

    def stopAllProcesses():
        """Closes every subprocess"""
        self.exit.set()  # The exit flag checked by every subprocess's run loop is now set; the subprocesses begin to stop
        print("Handler1: Starting to terminate all subprocesses")
        sleep(0.4)
        for process in self.processes.values():
            process.join()

    print("Handler1: starting every subprocess...")
    socketClientObj = Client_socket()
    self.processes["client_socket"] = multiprocessing.Process(
        target=socketClientObj.run, args=(self.pipes[1][0], self.exit))
    dump1090ToPipeObj = Dump1090ToPipe()
    self.processes["dump1090ToPipe"] = multiprocessing.Process(
        target=dump1090ToPipeObj.run, args=(self.pipes[0][0], self.exit))
    telegramProcessingObj = TelegramProcessing()
    self.processes["telegramProcessing"] = multiprocessing.Process(
        target=telegramProcessingObj.run,
        args=(self.pipes[1][1], self.pipes[0][1], self.pipes[2][0], self.exit))
    fileWriterObj = FileWriter()
    self.processes["fileWriter"] = multiprocessing.Process(
        target=fileWriterObj.run, args=(self.pipes[2][1], self.exit))
    for process in self.processes.values():
        process.start()  # starts every subprocess
    print("Handler1: All subprocesses started")
    sleep(1)
    while not exit.is_set():  # Loop until gui.py sets exit, or it is set because a subprocess failed
        allAlive = True
        for key in self.processes.keys():
            if not self.processes[key].is_alive():
                outputQueue.put("Handler1: A subprocess stopped running: " + key)
                allAlive = False
        if not allAlive:
            exit.set()  # Stopping the while loop
        sleep(0.1)
    stopAllProcesses()
    print("Handler1: Stopped")
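# Hedged sketch of how the handler above might be driven from gui.py. The
# Handler class name, its internal pipe setup, and the use of
# multiprocessing.Event are assumptions; only run(exit) and the
# exit.set()/is_set() protocol appear in the original snippet.
import multiprocessing

exit_event = multiprocessing.Event()          # shared shutdown flag
handler = Handler()                           # hypothetical class containing run()
handler_process = multiprocessing.Process(target=handler.run, args=(exit_event,))
handler_process.start()
# ... later, e.g. when the GUI window is closed:
exit_event.set()                              # tells run() to stop all subprocesses
handler_process.join()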
def __decodeEndOfSeries__(self, frames):
    """
    Decode end of series message and write down image buffer.
    args:
        frames, ZMQ EndOfSeries Frame(s)
    return:
        True
    """
    FileWriter.__decodeEndOfSeries__(self, frames)
    self.__writeData__(self.__dataBuffer__)  # write image buffer
    self.__calcAngles__()                    # calculate and write goniometer angles
    print(self.__getStatistics__())          # print series statistics
    self.__initParams__()                    # reset variables
    return True
def main():
    quizGen = QuizGenerator()
    fileWriter = FileWriter()
    questions = quizGen.getQuestions()
    fileWriter.saveQuestions(questions)
    quizGen.generateQuiz(questions, fileWriter.jsonData)
    fileWriter.dumpJsonData()
def __init__(self, basename, path, verbose=False): """ create Stream2Hdf instance. args: basename: file basename path: file path verbose: verbosity """ self.basename = basename # file basename self.path = path # file path self.ftype = ".h5" # file extension self._verbose = verbose # verbosity FileWriter().__init__(self.basename, path, self.ftype, verbose) # FileWrite init procedure self.nimagesPerFile = NIMAGESPERFILE # images per h5 container. adapt this value according to memory and cpu capaciy. self.__initParams__()
def __decodeImage__(self, frames): """ decode ZMQ image frames and pass np array to the write function. args: frames, image info and data blob frames return: np data array """ data = FileWriter().__decodeImage__(frames) # read back image data if len(frames)==5 and self._verbose: # image appendix. # TODO: maybe append to nexus tree. Discuss with AndF. print "[*] appendix: %s\n" %frames[4].bytes header = json.loads(frames[0].bytes) if not self.__series__: # if series id not given e.g. if arm was not detected self.__series__ = header["series"] self.__frameID__.append(header["frame"]) self.__appendData__(data=np.array(data,ndmin=3)) # handle data, must be 3 dim return data
if arguments.Args.gpsoff == True:
    try:
        gpsWorker = GpsReader()
        gpsWorker.start()
    except:  # not exiting correctly
        exit(0)
    # gpsWorker.currentLapTimeValue.connect(dash.currentLapTimeGauge.currentLapTime_update)
    gpsWorker.latValue.connect(dash.debugGps.gpsGauge.lat_update)
    gpsWorker.longValue.connect(dash.debugGps.gpsGauge.long_update)
    gpsWorker.rollValue.connect(dash.debugGps.gpsGauge.roll_update)
    gpsWorker.pitchValue.connect(dash.debugGps.gpsGauge.pitch_update)
    gpsWorker.gForceValue.connect(dash.debugGps.gpsGauge.gForce_update)

if arguments.Args.log:
    fileWriter = FileWriter()
    fileWriter.start()
    if arguments.Args.canoff == True:
        canWorker.rpmUpdateValue.connect(fileWriter.rpm_write)
        canWorker.socUpdateValue.connect(fileWriter.soc_write)
        canWorker.mcTempUpdateValue.connect(fileWriter.mcTemp_write)
        canWorker.motorTempUpdateValue.connect(fileWriter.motorTemp_write)
        canWorker.highMotorTempUpdateValue.connect(fileWriter.highMotorTemp_write)
        canWorker.highCellTempUpdateValue.connect(fileWriter.highCellTemp_write)
        canWorker.lowCellTempUpdateValue.connect(fileWriter.lowCellTemp_write)
        canWorker.DCLUpdateValue.connect(fileWriter.DCL_write)
        canWorker.errorSignal.connect(fileWriter.error_write)
    if arguments.Args.gpsoff == True:
def geneticSearch(data, labels, populationSize=1000, replacePerGenerationPercentage=0.2,
                  tournamentPercentage=0.05, mutationChance=0.1, epsilon=0.65):
    data = [row + CONSTANTS for row in data]
    fileWriter = FileWriter.getFileWriter()
    seed(None)
    fileWriter.write("Generating everything, might take a while :'(")
    individuals = [Chromosome(MAX_DEPTH, TERMINALS, FUNCTIONS, CONSTANTS)
                   for i in range(populationSize)]
    for individual in individuals:
        individual.computeFitness(data=data, labels=labels)
    alphaIndividual = None
    tournamentSize = int(floor(populationSize * tournamentPercentage))
    populationReplacement = int(floor(populationSize * replacePerGenerationPercentage))
    fitter = lambda pretender1, pretender2: \
        pretender1 if pretender1.getFitness() < pretender2.getFitness() else pretender2
    currentEpoch = 0
    while alphaIndividual is None or alphaIndividual.getAccuracy() < epsilon:
        fileWriter.write("START EPOCH " + str(currentEpoch))
        try:
            probabilityDistribution = [
                max(individual.getFitness() for individual in individuals) - individual.getFitness()
                for individual in individuals
            ]
            probabilityDistribution = [p / sum(probabilityDistribution)
                                       for p in probabilityDistribution]
        except ZeroDivisionError:
            fileWriter.write("STUCK WITH LOCAL OPTIMUM:\n\tfitness:" + str(alphaIndividual.getFitness()) +
                             "\n\taccuracy:" + str(alphaIndividual.getAccuracy()))
            assert False
        children = []
        for i in range(populationReplacement):
            selected = list(numpy.random.choice(individuals, size=tournamentSize,
                                                replace=False, p=probabilityDistribution))
            selected += list(numpy.random.choice(individuals, size=tournamentSize,
                                                 replace=False, p=probabilityDistribution))
            mother = reduce(fitter, selected[:tournamentSize])
            father = reduce(fitter, selected[tournamentSize:])
            child = mother + father
            if numpy.random.random() < mutationChance:
                child = child.mutate()
            children.append(child)
        fileWriter.write("GENERATED OFFSPRINGS")
        for child in children:
            child.computeFitness(data=data, labels=labels)
        individuals += children
        fileWriter.write("COMPUTED FITNESS FOR OFFSPRINGS")
        individuals.sort(key=lambda individual: individual.getFitness())
        individuals = individuals[:populationSize]
        if alphaIndividual is None or alphaIndividual.getAccuracy() < individuals[0].getAccuracy():
            alphaIndividual = individuals[0]
        fileWriter.write("BEST INDIVIDUAL:\n\tfitness:" + str(alphaIndividual.getFitness()) +
                         "\n\taccuracy:" + str(alphaIndividual.getAccuracy()))
        fileWriter.write("==============================================")
        currentEpoch += 1
    tmp = "POPULATION SIZE: " + str(populationSize)
    tmp += " MUTATION: " + str(mutationChance)
    tmp += " REPLACE: " + str(replacePerGenerationPercentage)
    tmp += " TOURNAMENT: " + str(tournamentPercentage)
    fileWriter.write(tmp)
    fileWriter.write(str(alphaIndividual))
    fileWriter.write("fitness: " + str(alphaIndividual.getFitness()) +
                     " accuracy: " + str(alphaIndividual.getAccuracy()))
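# Hedged call sketch for geneticSearch() above. The toy dataset and labels are
# made up, and the call assumes the surrounding module's Chromosome, TERMINALS,
# FUNCTIONS, CONSTANTS, and FileWriter.getFileWriter() are importable; none of
# these values come from the original snippet.
toy_data = [[0.1, 0.7], [0.9, 0.3], [0.4, 0.4]]
toy_labels = [0, 1, 0]
geneticSearch(toy_data, toy_labels, populationSize=50, epsilon=0.6)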
def test_empty_stats(self):
    empty_data = FileWriter({})
    self.assertDictEqual({}, empty_data._stats)
def setUpClass(cls):
    data = Data({'alex': 1120, 'andre': -130, 'bernhard': 3000,
                 'david': 5460, 'petra': 1120, 'olga': 1120},
                {1120: ['alex', 'olga', 'petra'], 3000: ['bernhard'],
                 5460: ['david'], -130: ['andre']},
                [5460, 3000, 1120, -130])
    cls.fileWriter = FileWriter(data, {'average_working_time': 1948})