def diffDumps(fileName0, fileName1, allowedRelativeDifference, thresholdValue, out):
    """Checks to see if the two dump files differ

    Headers are compared first; a fatal header mismatch makes a
    variable-by-variable comparison impossible and aborts early.
    Returns True when the files are numerically identical, False
    otherwise. All progress/diagnostic text is written to *out*.
    """
    firstDump = dump.Dump(fileName0)
    secondDump = dump.Dump(fileName1)

    # check headers
    headersMatch, fatal = diffHeader(firstDump, secondDump,
                                     allowedRelativeDifference,
                                     thresholdValue, out)
    if fatal:
        out.write("Differences do not allow comparison of variables, stopping!\n")
        return False

    # check variables; build the full list first (no short-circuit) so that
    # diffVar reports EVERY differing variable to *out*, not just the first.
    perVarResults = [diffVar(firstDump, secondDump, varIndex,
                             allowedRelativeDifference, thresholdValue, out)
                     for varIndex in range(firstDump.numVars)]
    varsMatch = all(perVarResults)

    if not varsMatch or not headersMatch:
        out.write("files are not numerically identical\n")
        return False
    out.write("files are numerically identical\n")
    return True
def makeVTKFile(fileName, options, nCount, nNumFiles):
    """Just a wrapper to writeRadialProfile member function of snapshot

    This allows different fileMakingFunctions to be specified. For example
    to create VTK files.

    Parameters:
      fileName:  combined binary dump file to convert (".vts" is appended
                 for the output file name)
      options:   parsed options object; uses remakeVTK, eosFile, curvature
      nCount:    index of this file, used only for progress reporting
      nNumFiles: total number of files, used only for progress reporting
    """

    # if profile not already made for this combined binary file
    # (precedence is: (not exists) or remakeVTK — a forced remake wins)
    if not os.path.exists(fileName + ".vts") or options.remakeVTK:
        # make vts file
        print(__name__ + ":" + makeVTKFile.__name__ + ": creating vts file from \""
              + fileName + "\" " + str(nCount) + "/" + str(nNumFiles) + " ...")

        # create object to hold model
        model = dump.Dump()

        # read in model
        model.read(fileName, eosFile=options.eosFile)

        # write out a vtk file from the model
        model.writeVTKFile(fileName, curvature=options.curvature,
                           eosFile=options.eosFile,
                           includeScalars=["T", "e", "rho", "kappa", "c", "cv",
                                           "vr_con_cen"])
    else:
        # Fixed: this branch used a Python 2-only print STATEMENT while the
        # branch above already used the parenthesized form; with a single
        # concatenated-string argument, print(...) behaves identically on
        # Python 2 and 3.
        print(__name__ + ":" + makeVTKFile.__name__ + ": vts file \"" + fileName
              + ".vts\" already exists, not remaking")
def makeHDFFiles(self, options):
    """Makes HDF files specified by settings"""

    # Make sure every combined binary file in [start, end) has been produced.
    rangePattern = self.baseFileName + "[" + str(self.start) + "-" + str(self.end) + "]"
    failedFiles = combine_bins.combine_bin_files(options.keep, rangePattern,
                                                 options.remakeBins)

    # Pick out the 8-digit-suffixed combined binary files whose numeric
    # suffix lies in [start, end), in ascending order.
    candidates = glob.glob(
        self.baseFileName + "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]")
    selected = sorted(
        name for name in candidates
        if self.start <= int(name[len(self.baseFileName):]) < self.end)

    if not selected:
        warnings.warn("no files found in \"" + rangePattern + "\"")

    # Convert each dump file to HDF and write it out.
    for path in selected:
        hdfFile = self.convertDumpToHDF(dump.Dump(path))
        hdfFile.write()
def read_data(serial): ''' Read data from one serial port and store it into the database We need one mongo client per connection ''' # Extract metadata serial_num = serial.readline().decode("utf-8")[:-1] temp_cal_30 = int(serial.readline().decode("utf-8")[:-1]) temp_cal_110 = int(serial.readline().decode("utf-8")[:-1]) vrefint_cal = int(serial.readline().decode("utf-8")[:-1]) for _ in range(args['size']): mem_address = serial.readline().decode("utf-8")[:-1] vdd_raw = int(serial.readline().decode("utf-8")[:-1]) temp_raw = int(serial.readline().decode("utf-8")[:-1]) raw_data = serial.readline().decode("utf-8") raw_data = [int(b) for b in raw_data.split(' ')[:-1]] # There are some problems with some boards which don't have the calibration values. # In those cases just store the raw value and the calibration values will be # calculated from the rest of the boards if vrefint_cal == 0: vdd = vdd_raw else: vdd = (3300 * vrefint_cal / vdd_raw) * 0.001 if temp_cal_30 == 0 or temp_cal_110 == 0: temp = temp_raw else: temp = ((110 - 30) / (temp_cal_110 - temp_cal_30)) \ * (temp_raw - temp_cal_30) + 30.0 dump_data = dump.Dump(serial_num, raw_data, mem_address, temp, vdd, temp_cal_30, temp_cal_110, vrefint_cal) if args['more_verbose']: print(f'{serial_num} [{mem_address}] at {dump_data.Timestamp}') print(f'Temp: {temp:.6f} C, Vdd: {vdd:.8f} V') store_data(dump_data.__dict__, args['csv']) else: print(f'[DISCONNECTED] Board on port {serial.port}') serial.close()
def makeHDFFiles(self, options):
    """Makes HDF files specified by settings

    Combines binary dumps in [self.start, self.end), converts every
    self.frequency-th file to HDF via convertDumpToHDF, and finally writes
    the collected dump times to self.timeFile as a float64 "time" dataset.
    """
    #check to make sure that all combined binary files are made
    fileName = self.baseFileName + "[" + str(self.start) + "-" + str(
        self.end) + "]"
    failedFiles = combine_bins.combine_bin_files(options.keep, fileName,
                                                 options.remakeBins)

    #get a list of files in our range
    filesExistCombBin = glob.glob(
        self.baseFileName + "[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]")
    files = []
    for file in filesExistCombBin:
        intOfFile = int(file[len(self.baseFileName):len(file)])
        if intOfFile >= self.start and intOfFile < self.end:
            files.append(file)
    files.sort()

    #loop over files to make HDF files
    if len(files) == 0:
        warnings.warn("no files found in \"" + fileName + "\"")
    times = []
    numFiles = len(files) - 1
    for i in range(0, len(files), self.frequency):
        file = files[i]
        # Fixed: was a Python 2-only print STATEMENT (a syntax error under
        # Python 3); with a single concatenated-string argument, print(...)
        # behaves identically on Python 2 and 3.
        print("reading file \"" + file + "\" " + str(i) + "/" + str(
            numFiles) + " ...")
        tmp = dump.Dump(file)
        times.append(tmp.time)
        self.convertDumpToHDF(tmp)

    #write out time
    hdf.open(self.timeFile)
    hdf.openData("time", hdf.DFNT_FLOAT64, [len(times)])
    hdf.writeData(times)
    hdf.closeData()
    hdf.close()
#!/usr/bin/python
# Author: Timothy Stranex <*****@*****.**>
# Copyright 2013 Timothy Stranex
# Fixed: removed a duplicated "#!/usr/bin/python" shebang line that
# appeared mid-file (only the first line of a script is a shebang).

from carpcomm.pb import stream_pb2

import dump

# Presumably dumps all pb.Contact records from the 'test-contacts' store
# (semantics of dump.Dump are defined in the local `dump` module).
dump.Dump('test-contacts', 'pb.Contact', stream_pb2.Contact)
#!/usr/bin/python
# Author: Timothy Stranex <*****@*****.**>
# Copyright 2013 Timothy Stranex
# Fixed: removed a duplicated "#!/usr/bin/python" shebang line that
# appeared mid-file (only the first line of a script is a shebang).

from carpcomm.pb import user_pb2

import dump

# Presumably dumps all pb.User records from the 'r1-users' store
# (semantics of dump.Dump are defined in the local `dump` module).
dump.Dump('r1-users', 'pb.User', user_pb2.User)
def __init__(self, conn):
    """Initialize the DB connection and dump helper from a config mapping.

    conn: mapping with a 'CONN' entry (MySQL connection settings) and a
    'DUMP' entry (dump configuration); each is passed straight through to
    the corresponding helper's constructor.
    """
    # Keep the original lookup/construct interleaving so error behaviour on
    # a missing key is unchanged.
    db_settings = conn['CONN']
    self.__DB = mySQLConnect.myConn(db_settings)
    dump_settings = conn['DUMP']
    self.__Dump = dump.Dump(dump_settings)
#!/usr/bin/python
# Author: Timothy Stranex <*****@*****.**>
# Copyright 2013 Timothy Stranex
# Fixed: removed a duplicated "#!/usr/bin/python" shebang line that
# appeared mid-file (only the first line of a script is a shebang).

from carpcomm.pb import station_pb2

import dump

# Presumably dumps all pb.Station records from the 'r1-stations' store
# (semantics of dump.Dump are defined in the local `dump` module).
dump.Dump('r1-stations', 'pb.Station', station_pb2.Station)
def main():
    """Entry point: serve the dump named on the command line over HTTP.

    Expects exactly one command-line argument (the dump path) and listens
    on TCP port 8080 until the reactor is stopped.
    """
    # Single-element unpack raises if argc != 2, enforcing the one-arg usage.
    (dump_path,) = sys.argv[1:]
    web_site = Site(dump.Dump(dump_path))
    reactor.listenTCP(8080, web_site)
    reactor.run()