Code Example #1
File: testcase.py Project: ronzohan/CSC-181
	def test_01_testPasswordMatch(self):
		my = DataFile("bundesliga","mourinho","win1win2","chelsea")
		my.setPassword("samplePassword123")
		fileOperation = FileOperations()
		fileOperation.SaveFileInfo(my,"sample.p")
		my = fileOperation.LoadFileInfo("sample.p","samplePassword123")
		self.failIf(my.getPassword() != "samplePassword123","Fail")
Code Example #2
File: DoForce.py Project: kheaactua/cmaq_forcing
	def loadDims(filename):
		""" Load the dimensions from a netcdf file, then close the file.
		    Validator initializer also does this.

		Keyword Arguments:

		filename:*string*
		   Netcdf file name

		Returns:
		   dict of ni,nj,nk,nt
		"""

		conc = DataFile(filename, mode='r', open=True)
		dims = {'ni': conc.dimensions['COL'],
		        'nj': conc.dimensions['ROW'],
		        'nk': conc.dimensions['LAY']}

		# TSTEP is unlimited, so python has problems reading it
		# So instead we'll examine the shape of a variable
		# Let's assume TFLAG exists
		shape = conc.variables['TFLAG'].shape
		# This first element is TSTEP
		dims['nt'] = shape[0]

		conc.close()

		return dims
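
A minimal usage sketch for loadDims, assuming DataFile is the project's NetCDF wrapper used throughout these examples and that loadDims is callable as a plain function/static method (it takes no self); the file name is a placeholder.

# Hypothetical call; "conc.nc" stands in for a real CMAQ concentration file
dims = loadDims("conc.nc")
print("ni=%s nj=%s nk=%s nt=%s" % (dims['ni'], dims['nj'], dims['nk'], dims['nt']))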
Code Example #3
class ProcessSentence:
    """This class to process the user sentence"""

    def __init__(self):
        self.username = USERNAME
        self.botname  = BOTNAME
        self.db = DataFile()

    def get_user_name(self):
        return self.username

    def get_bot_name(self):
        return self.botname

    def set_user_name(self, name):
        self.username = name

    def set_bot_name(self, name):
        self.botname = name

    def check_for_greeting(self, sentence):
        """Check for greeting setence and respond"""
        words = sentence.split()
        for word in words:
            greeting = self.db.check_for_greeting(word.lower())
            if greeting is not None:
                return self.db.get_greeting_response()

    def process_sentence(self, sentence):
        """Process the user sentence"""
Code Example #4
File: DoForce.py Project: kheaactua/cmaq_forcing
	def maskSpace(self, maskf, variable, value=1):
		""" Set a grid mask

		Keyword arguments:

		maskf:*string*
		   NetCDF file containing the masking field

		variable:*string*
		   Variable in the NetCDF file that is the mask

		value:*int*
		   The masking value of the mask
		"""

		c = bc()
		if maskf is not None:
			try:
				f=DataFile(maskf, mode='r', open=True)
				#print "\n%sOpened spacial mask file %s, variable=%s, val=%s%s\n"%(c.yellow, maskf, variable, value, c.clear)
				mask = f.variables[variable][0][0]
				#print "type(mask)=%s, mask.shape=%s"%(type(mask), mask.shape)
				self.space = mask==value
				if type(self.space) != type(mask):
					print "%sError! self.space's type changed to %s%s"%(c.red, type(self.space), c.clear)
				f.close()
			except Exception as ex:
				print "%sSomething went wrong masking space with mask file %s (exists?: %r) %s%s"%(c.red, maskf, os.path.isfile(maskf), ex, c.clear)
				raise
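
A hedged call sketch for maskSpace, assuming frc is an instance of the (not shown) forcing class that defines it; the mask file and variable names are placeholders.

# Hypothetical mask file and variable name
frc.maskSpace('domain_mask.nc', 'REGION', value=1)
# frc.space now holds a boolean grid that is True wherever REGION == 1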
Code Example #5
File: testcase.py Project: ronzohan/CSC-181
	def test_02_testLoadFromFileSucceed(self):
		my = DataFile("bundesliga","mourinho","win1win2","chelsea")
		my.setPassword("samplePassword123")
		fileOperation = FileOperations()
		fileOperation.SaveFileInfo(my,"sample.p")
		my.league =  "barclays premier league"
		my = fileOperation.LoadFileInfo("sample.p","samplePassword123")
		self.failIf(my.league != "bundesliga","Fail")
Code Example #6
File: Data.py Project: kamilszewc/smoothie
 def __init__(self, filename):
     self.data = DataFile(filename, "float")
     self.N = self.data.rawnum()
     self.para = DataFile(
         os.path.split(filename)[0] + '/' + 'parameters.dat')
     self.__read_data__()
     self.__read_param__()
     self.__set_linked_list__()
     if filename.split('.')[-1] == "bz2":
         self.t = os.path.split(filename)[-1].split('.')[0] + '.'
         self.t = self.t + os.path.split(filename)[-1].split('.')[1]
         self.t = float(self.t)
     else:
         self.t = float(os.path.splitext(os.path.split(filename)[-1])[0])
Code Example #7
def main():
    cpybook_path = 'test.cpy'
    data_file_path = 'test.txt'

    # Create object model to represent the Copybook
    cpybook_parser = CopyBookParser.CopyBookParser(cpybook_path,
                                                   data_file_path)
    cpybook = cpybook_parser.create_copybook()

    # Create object model to represent the Data File
    data_file = DataFile(data_file_path)

    cpybook_comparer = CopyBookCompare(cpybook, data_file)
    cpybook_comparer.compare_copybook_with_data()
Code Example #8
File: Validator.py Project: kheaactua/cmaq_forcing
	def __init__(self,filename):
		self.conc=DataFile(filename, mode='r', open=True)

		self.ni = self.conc.dimensions['COL']
		self.nj = self.conc.dimensions['ROW']
		self.nk = self.conc.dimensions['LAY']

		# TSTEP is unlimited, so python has problems reading it
		# So instead we'll examine the shape of a variable
		# Let's assume TFLAG exists
		shape = self.conc.variables['TFLAG'].shape
		# This first element is TSTEP
		self.nt = shape[0]

		self.ns = len(self.conc.variables.keys())
Code Example #9
def getDataFiles(args, config):
    dataFiles = des_filesearcher.getDataFiles(
        config.getOption("data_location"))
    filtered = des_filesearcher.filterDataFiles(
        dataFiles,
        PARSER_CONF,
        config,
        verboseFilter=(args.verbose > 1),
        verbosePass=(args.verbose > 2))

    if args.include_files:
        for f in args.include_files.split(","):
            if f not in [x.getDataPath() for x in filtered]:
                filtered.append(
                    DataFile(config.getOption("data_location"),
                             os.path.normpath(f)))

    return (dataFiles, filtered)
Code Example #10
 def __init__(self, name, address, adapter):
     self.name = name
     self.address = address
     self.adapter = adapter
     self.battery = 0.0
     self.temperature = 0.0
     self.velocity = 0.0
     self.data_to_log = bytearray()
     self.dataFileTemperature = DataFile(self.name, 'Temperature')
     self.dataFileVelocity = DataFile(self.name, 'Velocity')
     self.dataFileBatteryLevel = DataFile(self.name, 'BatteryLevel')
     
     print "Create testo device with name: ", self.name, " address: ", self.address
     self.connect()
Code Example #11
File: Validator.py Project: kheaactua/cmaq_forcing
	def changeFile(self, newfile):
		self.conc.close();
		self.conc=DataFile(newfile, mode='r', open=True)
Code Example #12
from FileNames import FileNames
from FormatFile import FormatFile
from DataFile import DataFile
from Query import Query

dataFolder = "data"
specFolder = "specs"

specFile = "testformat1.csv"
dataFile = "testformat1_2015-06-28.txt"

specPath = specFolder + "/" + specFile
dataPath = dataFolder + "/" + dataFile

name = FileNames(dataFile)
print(name.dataFileName, name.specFileName, name.Date)

spec = FormatFile(specFolder + "/" + name.specFileName)
print(spec.getFormat())

data = DataFile(dataPath, spec.getFormat())
print(data.data)

q = Query()
for row in data.data:
    print(
        q.insert_query(name.tableName, [col[0] for col in spec.getFormat()],
                       row))
Code Example #13
class TestoDevice(object):
    ##################################################################################################################
    # variables
    ##################################################################################################################
    name = None
    address = None
    adapter = None
    device = None
    
    ##################################################################################################################
    # FUNCTIONS DEFINITION
    ##################################################################################################################
    # constructor        
    def __init__(self, name, address, adapter):
        self.name = name
        self.address = address
        self.adapter = adapter
        self.battery = 0.0
        self.temperature = 0.0
        self.velocity = 0.0
        self.data_to_log = bytearray()
        self.dataFileTemperature = DataFile(self.name, 'Temperature')
        self.dataFileVelocity = DataFile(self.name, 'Velocity')
        self.dataFileBatteryLevel = DataFile(self.name, 'BatteryLevel')
        
        print "Create testo device with name: ", self.name, " address: ", self.address
        self.connect()
        
    def connect(self):
        print "Trying to connect with: ", self.name
#        self.adapter.start()
        self.device = self.adapter.connect(self.address, 10.0)
        
        if self.device._connected:
            print "Connected with: ", self.name
            aa = self.device.discover_characteristics()
            print "Characteristic from with: ", self.name
            for bb in aa:
                #print aa[bb].uuid, aa[bb].handle, aa[bb].descriptors,
                try:
                    if aa[bb].handle == 3 or aa[bb].handle ==24:
                        value = self.device.char_read(bb, 1)
                        print "Characterostic handle:\t", aa[bb].handle, "value\t", value
                    #value2 = self.device.char_read_handle(aa[bb].handle, 1)
                    #print aa[bb].handle, 'value2:\t ", "%02X'% value2

                    #for ch in value:
                    #    print ch, '\t', chr(ch),         
                    if aa[bb].handle == 40:
                        print "Subscrobe to handle: ", aa[bb].handle
                        self.device.subscribe(bb, callback=self.callback_fun)
                        time.sleep(1)                       
                except pygatt.exceptions.NotificationTimeout:
                    print "TIMEOUT"                    
                except:
                    print "Error:", sys.exc_info()
            
            self.device.char_write_handle(37, self.convert_str_bytearray('5600030000000c69023e81'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('200000000000077b'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('04001500000005930f0000004669726d77617265'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('56657273696f6e304f'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('04001500000005930f0000004669726d77617265'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('56657273696f6e304f'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('04001600000005d7100000004d6561737572656d'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('656e744379636c656161'), True, 5)
            self.device.char_write_handle(37, self.convert_str_bytearray('110000000000035a'), True, 5)
                                
            #print self.device.get_rssi()
            #print self.device._connected
    def callback_fun(self, handle, data):
        if data[0]==16:
            self.data_to_log = bytearray()
            self.data_to_log = self.data_to_log + data
        else:
            self.data_to_log = self.data_to_log + data

            print (datetime.datetime.now().strftime("%H:%M:%S>") + ' ' + self.name + '> '),
            if len(self.data_to_log) > 12:
                if self.data_to_log[12]==0x42: #B > BatteryLevel
                    print (self.data_to_log[12:24] + ' '),
                    print (str(struct.unpack('f', self.data_to_log[24:28])[0]))
                    self.battery = struct.unpack('f', self.data_to_log[24:28])[0] 
                    self.dataFileBatteryLevel.addRow(datetime.datetime.now().strftime("%H:%M:%S") ,self.battery)
                if self.data_to_log[12]==0x54: #T > Temperature
                    print (self.data_to_log[12:23] + ' '),
                    print (str(struct.unpack('f', self.data_to_log[23:27])[0]))
                    self.temperature = struct.unpack('f', self.data_to_log[23:27])[0]
                    self.dataFileTemperature.addRow(datetime.datetime.now().strftime("%H:%M:%S") ,self.temperature) 
                if self.data_to_log[12]==0x56: #V > Velocity
                    print (self.data_to_log[12:20] + ' '),
                    print (str(struct.unpack('f', self.data_to_log[20:24])[0]))
                    self.velocity = struct.unpack('f', self.data_to_log[20:24])[0]
                    self.dataFileVelocity.addRow(datetime.datetime.now().strftime("%H:%M:%S") ,self.velocity)
        
    def convert_str_bytearray(self, s):
        split_string = lambda x, n: [x[i:i+n] for i in range(0, len(x), n)]
        a = split_string(s,2)
        ba = bytearray()
        for z in a:
            ba.append(int(z,16))
    
        return ba
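
A standalone sketch of the hex-string conversion performed by convert_str_bytearray above; hex_str_to_bytearray is a hypothetical name used only for illustration.

def hex_str_to_bytearray(s):
    # Split the string into 2-character chunks and parse each chunk as a hex byte
    return bytearray(int(s[i:i + 2], 16) for i in range(0, len(s), 2))

print(hex_str_to_bytearray('110000000000035a'))  # bytearray(b'\x11\x00\x00\x00\x00\x00\x03Z')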
Code Example #14
	def loadScalarField(self):
		""" Open up the mortality and population files and read
		their values.  Generate a field to multiply forcing by.

		Forcing = F * Pop * Mortality * VSL
		"""

		if self.beta is None:
			raise ForcingException("Must supply concentration response factor")

		if self._mortality_fname is None or self._mortality_var is None:
			raise ForcingException("Must supply mortality file")

		if self._pop_fname is None or self._pop_var is None:
			raise ForcingException("Must supply population file")

		# This is optional
		#if self.vsl is None:
		#	raise ForcingException("Must specify statistical value of life (in millions)")

		# Open the mortality file
		try:
			mortality = DataFile(self._mortality_fname, mode='r', open=True)
		except IOError as ex:
			Forcing.error("Error!  Cannot open mortality file %s.  File exists? %r"%(self._mortality_fname, os.path.isfile(self._mortality_fname)))
			raise

		# Check dimensions
		if not (mortality.dimensions['COL'] == self.ni and mortality.dimensions['ROW'] == self.nj):
			raise ValueError("Error, dimensions in mortality file %s do not match domain."%self._mortality_fname)

		# Read the field
		try:
			# dims are TSTEP, LAY, ROW, COL.. so skip TSTEP and LAY
			# this should be made more general, or the file should be made better.
			mfld = mortality.variables[self._mortality_var][0][0]
		except IOError as e:
			raise e
		except IndexError as e:
			raise ForcingFileDimensionException("Mortality NetCDF file seems to have incompatible dimensions.  Currently require shape (TSTEP, LAY, ROW, COL).  This is marked to be improved, as the data does not vary with time or layer.")

		# If the population field lives in a different file, close the mortality file
		if self._pop_fname != self._mortality_fname:
			mortality.close()

			# Open the population file
			try:
				pop = DataFile(self._pop_fname, mode='r', open=True)
			except IOError as ex:
				Forcing.error("Error!  Cannot open population file %s"%(self._pop_fname))
				raise

			# Check dimensions
			if not (pop.dimensions['COL'] == self.ni and pop.dimensions['ROW'] == self.nj):
				raise ValueError("Error, dimensions in population file %s do not match domain."%self._pop_fname)
		else:
			# Same file?
			pop = mortality

		# Read the field
		try:
			# dims are TSTEP, LAY, ROW, COL.. so skip TSTEP and LAY
			pfld = pop.variables[self._pop_var][0][0]
		except IOError as e:
			raise e
		except IndexError as e:
			raise ForcingFileDimensionException("Population NetCDF file seems to have incompatible dimensions.  Currently require shape (TSTEP, LAY, ROW, COL).  This is marked to be improved, as the data does not vary with time or layer.")


		pop.close()

		# Debug, remember, when debugging this against plotted data or fortran
		# code: values like (70,70) started at index 1 whereas we started at
		# index 0, so (70,70)=(69,69)
		#print "[j=%d,i=%d] = mfld * mfld_scale * pfld * self.beta / 365 = %e %e %e %e %e = %e"%(self.debug_j, self.debug_i, mfld[self.debug_j,self.debug_i], (10.**-4), pfld[self.debug_j,self.debug_i], self.beta, 365.0, mfld[self.debug_j,self.debug_i]*(10.**-4)*pfld[self.debug_j,self.debug_i]*self.beta/365.0)

		# (mfld * pfld) is element wise multiplication, not matrix multiplication
		# Take leap years into account?
		Forcing.debug("[TODO]: Leap years are not yet accounted for.")
		self.timeInvariantScalarMultiplcativeFld = mfld * self.mort_scale / 365.0 * pfld * self.beta
		if self.vsl is not None:
			self.timeInvariantScalarMultiplcativeFld = self.timeInvariantScalarMultiplcativeFld * self.vsl
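
A hedged numpy sketch of the element-wise scaling done at the end of loadScalarField; the field shapes and the mort_scale/beta/vsl values are purely illustrative.

import numpy as np

mfld = np.full((2, 2), 8.0e-3)              # mortality field (illustrative)
pfld = np.full((2, 2), 1.0e4)               # population field (illustrative)
mort_scale, beta, vsl = 1.0e-4, 0.004, 6.3  # illustrative scalars

# Element-wise product; mortality is converted to a daily rate by dividing by 365
fld = mfld * mort_scale / 365.0 * pfld * beta
fld = fld * vsl                             # optional value-of-statistical-life scaling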
Code Example #15
File: Data.py Project: kamilszewc/smoothie
class Data:
    """Data - this class reads particles information from file"""
    def __init__(self, filename):
        self.data = DataFile(filename, "float")
        self.N = self.data.rawnum()
        self.para = DataFile(
            os.path.split(filename)[0] + '/' + 'parameters.dat')
        self.__read_data__()
        self.__read_param__()
        self.__set_linked_list__()
        if filename.split('.')[-1] == "bz2":
            self.t = os.path.split(filename)[-1].split('.')[0] + '.'
            self.t = self.t + os.path.split(filename)[-1].split('.')[1]
            self.t = float(self.t)
        else:
            self.t = float(os.path.splitext(os.path.split(filename)[-1])[0])

    def __read_data__(self):
        self.id = self.data.getcol(1)
        self.phaseId = self.data.getcol(2)
        self.x = self.data.getcol(3)
        self.y = self.data.getcol(4)
        self.u = self.data.getcol(5)
        self.v = self.data.getcol(6)
        self.m = self.data.getcol(7)
        self.p = self.data.getcol(8)
        self.d = self.data.getcol(9)
        self.di = self.data.getcol(10)
        self.o = self.data.getcol(11)
        self.nu = self.data.getcol(12)
        self.mi = self.data.getcol(13)
        self.gamma = self.data.getcol(14)
        self.s = self.data.getcol(15)
        self.b = self.data.getcol(16)
        self.c = self.data.getcol(17)
        self.nx = self.data.getcol(18)
        self.ny = self.data.getcol(19)
        self.na = self.data.getcol(20)
        self.cu = self.data.getcol(21)
        self.stx = self.data.getcol(22)
        self.sty = self.data.getcol(23)
        self.tp = self.data.getcol(24)

    def __read_param__(self):
        i = 1
        for raw in self.para.getcol(1):
            value = self.para.get(2, i)
            if (raw == "H"): self.H = float(value)
            if (raw == "I_H"): self.I_H = float(value)
            if (raw == "DH"): self.DH = float(value)
            if (raw == "DR"): self.DR = float(value)
            if (raw == "XCV"): self.XCV = float(value)
            if (raw == "YCV"): self.YCV = float(value)
            #if (raw == "NX"): self.NX = int(value)
            #if (raw == "NY"): self.NY = int(value)
            if (raw == "NCX"): self.NXC = int(value)
            if (raw == "NCY"): self.NYC = int(value)
            if (raw == "T_BOUNDARY_PERIODICITY"): self.B_PER = int(value)
            if (raw == "V_N"): self.V_N = float(value)
            if (raw == "V_S"): self.V_S = float(value)
            if (raw == "V_E"): self.V_E = float(value)
            if (raw == "V_W"): self.V_W = float(value)
            if (raw == "N"): self.N0 = int(value)
            if (raw == "NC"): self.NC = int(value)
            if (raw == "GAMMA"): self.GAMMA = float(value)
            if (raw == "SOUND"): self.SOUND = float(value)
            if (raw == "NU"): self.NU = float(value)
            if (raw == "G_X"): self.G_X = float(value)
            if (raw == "G_Y"): self.G_Y = float(value)
            if (raw == "KNORM"): self.KNORM = float(value)
            if (raw == "GKNORM"): self.GKNORM = float(value)
            if (raw == "TIME"): self.TIME = float(value)
            if (raw == "DT"): self.DT = float(value)
            if (raw == "NOUT"): self.NOUT = int(value)
            if (raw == "SIGMA"): self.SIGMA = float(value)
            if (raw == "T_SHARP"): self.T_SHARP = int(value)
            if (raw == "SHARP"): self.SHARP = float(value)
            if (raw == "T_SURF_TENS"): self.T_SURF_TENS = int(value)
            if (raw == "T_XSPH"): self.T_XSPH = int(value)
            if (raw == "XSPH"): self.XSPH = float(value)
            i += 1
        self.NX = int(self.XCV / self.DR)
        self.NY = int(self.YCV / self.DR)

    def __set_linked_list__(self):
        self.h = [-1 for i in range(self.NC)]
        self.l = [-1 for i in range(self.N)]

        for i in range(self.N):
            if KERNEL_TYPE <= 3:
                xc = long(0.5 * self.x[i] / self.H)
                yc = long(0.5 * self.y[i] / self.H)
            else:
                xc = long(self.x[i] / (self.H * 3.0))
                yc = long(self.y[i] / (self.H * 3.0))
            c = xc + yc * self.NXC
            if (xc < self.NXC) and (yc < self.NYC) and (xc >= 0) and (yc >= 0):
                self.l[i] = self.h[c]
                self.h[c] = i
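
A short traversal sketch for the cell linked list built in __set_linked_list__, assuming d is a loaded Data instance; the cell index is illustrative.

# d.h[c] is the head particle of cell c, d.l[i] points to the next particle
# in the same cell, and -1 terminates the list
c = 0                      # illustrative cell index
i = d.h[c]
while i != -1:
    print(d.x[i], d.y[i])  # particle position
    i = d.l[i]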
Code Example #16
                label_object_array = volume_map.get(sub_folder_key)
                sub_folder_full_path = (os.path.join(root, sub_folder))
                data_file_list = os.listdir(sub_folder_full_path)
                for each_file in data_file_list:
                    print(each_file)
                    suffix_index = each_file.rfind('.')
                    suffix = each_file[suffix_index + 1:len(each_file)]
                    file_name = each_file[0:suffix_index]
                    if (suffix != 'dcm'):
                        continue
                    split_name_array = file_name.split('_')
                    data_slice_index = int(split_name_array[-3]) - 1
                    for each_label_object in label_object_array:
                        if (each_label_object.slice_num == data_slice_index):
                            data_object = DataFile(each_file,
                                                   sub_folder_full_path, 0,
                                                   volume_key)
                            pair_list.append([
                                each_label_object.loading_path,
                                data_object.loading_path
                            ])
                            file.write(each_label_object.loading_path +
                                       '@@@@' + data_object.loading_path +
                                       '\n')
                            print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
                            file_count = file_count + 1
                            print(file_count)
                            print(each_label_object.loading_path + '@@@@' +
                                  data_object.loading_path)
file.close()
print('Finished')
Code Example #17
File: cli.py Project: azeemshaikh38/CloverHealth
import os
from FileNames import FileNames
from FormatFile import FormatFile
from DataFile import DataFile
from Query import Query
from Db import Db

dataFolder = "data"
specFolder = "specs"

db = Db()
q = Query()

for filename in os.listdir(dataFolder):
    name = FileNames(filename)
    spec = FormatFile(specFolder+"/"+name.specFileName)
    data = DataFile(dataFolder+"/"+filename, spec.getFormat())
    for row in data.getData():
        query, params = q.insert_query(name.tableName, spec.getCols(), row)
        print(query, params)
        db.insert(query, params) 
Code Example #18
File: DoForce.py Project: kheaactua/cmaq_forcing
	def initForceFile(self, conc, fpath, species = None):
		""" Initialize a forcing file.
			This method opens the NetCDF file in read/write mode, copies the
			dimensions, copies I/O Api attributes over, and any other common
		    initialization that should be applied to all files

		Keyword Arguments:

		conc:*DataFile*
		   Concentration file to use as a template

		fpath:*string*
		   Path (dir and name) of the forcing file to initialize

		species:*string[]*
		   List of species to create

		Returns:

		NetCDFFile
		   Writable NetCDF File
		"""

		if not isinstance(conc, DataFile) or conc is None:
			raise ValueError("Concentration file must be a DataFile")

		# Make sure the concentration file is open
		conc.open()

		# Debug
		c=bc()

		# fpath should not exist
		if os.path.exists(fpath):
			# TEMP, remove
			os.remove(fpath)
			Forcing.debug("Deleted %s!"%(fpath))
			#raise IOError("%s already exists."%fpath)

		#print "Opening %s for writing"%fpath
		force = DataFile(fpath, mode='w', open=True)

		# Exceptions, so we don't needlessly create huge forcing files
		exceptions={'LAY': self.nk_f}
		Forcing.copyDims(conc, force, exceptions=exceptions)
		Forcing.copyIoapiProps(conc, force)

		if species is None:
			species = self.species

		# First, check the sample concentration file vs the concentration file
		try:
			#var = conc.variables['TFLAG'].getValue()
			var = conc.variables['TFLAG']
		except IOError as e:
			# Pointless try loop for now, but I'll add to it later if needed.
			raise

		if var.shape[0] != self.nt:
			#print "conc.shape = %s, sample.shape = %s"%(str(var.shape), str((self.nt, self.nk, self.nj, self.ni)))
			raise BadSampleConcException("Input file's dimensions (time steps) do not match those of the sample concentration file!  Cannot continue.")

		# Create the variables we'll be writing to
		#print "Initializing %s"%fpath
		for s in species:
			try:
				var = force.createVariable(s, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'))
				z=np.zeros((self.nt,self.nk_f,self.nj,self.ni), dtype=np.float32)
				var[:,:,:,:] = z
				#Forcing.debug("Created zero variable %s in %s"%(s, force.basename))
			except (IOError, ValueError) as ex:
				print "%sWriting error %s%s when trying to create variable %s (%sTSTEP=%d, LAY=%d, ROW=%d, COL=%d%s)=%s%s%s in today's file.\n"%(c.red, type(ex), c.clear, s, c.blue, self.nt, self.nk_f, self.nj, self.ni, c.clear, c.orange, str(z.shape), c.clear), ex
				print "Current variable names: %s\n"%(" ".join(map(str, force.variables.keys())))

		# Copy over TFLAG
		vsrc = conc.variables['TFLAG'][:]

		force.createVariable('TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME'))
		vdest = force.variables['TFLAG']
		try:
			vdest[:]=vsrc[:]
		except (IOError, ValueError) as ex:
			print "%sWriting error %s%s when trying to write TFLAG variable"%(c.red, type(ex), c.clear)
			print "%sshape(vsrc)=%s, shape(vdest)=%s%s"%(c.cyan, str(vsrc.shape), str(vdest.shape), c.clear)
			raise


		## Fix geocode data
		## http://svn.asilika.com/svn/school/GEOG%205804%20-%20Introduction%20to%20GIS/Project/webservice/fixIoapiProjection.py
		## fixIoapiSpatialInfo

		# Sync the file before sending it off
		force.sync()

		# Close the files
		conc.close()
		force.close()

		# From now on, force will be read and written to, so change the mode
		force.mode='a'

		return force
Code Example #19
 def __init__(self):
     """Instantitae user Session"""
     self.db = DataFile()
Code Example #20
File: cli.py Project: azeemshaikh38/CloverHealth
import os
from FileNames import FileNames
from FormatFile import FormatFile
from DataFile import DataFile
from Query import Query
from Db import Db

dataFolder = "data"
specFolder = "specs"

db = Db()
q = Query()

for filename in os.listdir(dataFolder):
    name = FileNames(filename)
    spec = FormatFile(specFolder + "/" + name.specFileName)
    data = DataFile(dataFolder + "/" + filename, spec.getFormat())
    for row in data.getData():
        query, params = q.insert_query(name.tableName, spec.getCols(), row)
        print(query, params)
        db.insert(query, params)
Code Example #21
File: DoForce.py Project: kheaactua/cmaq_forcing
	def FindFiles(file_format, path=None, date_min=None, date_max=None):
		""" Find the concentration files that match the pattern/format provided

		Keyword Arguments:

		path:*string*
			Path to look for files

		file_format:*string*
		   A format containing wildcards (*) and date indicators,
		   i.e. YYYY, YY, MM, DD or JJJ for Julian day

		date_min:*Datetime*
		   If set, this is the minimum accepted date

		date_max:*Datetime*
		   If set, this is the maximum accepted date

		Returns:

		*list[DataFile]*
		   Returns a list of Datafiles
		"""

		if path == None:
			path = self.inputPath
		if path == None:
			raise ValueError("Must provide a path to search")
		if path[-1] != "/":
			path=path+"/"

		#files=os.listdir( "/mnt/mediasonic/opt/output/morteza/frc-8h-US/" ) # Obviously change this..
		files=os.listdir(path)


		if date_min!=None and not isinstance(date_min, date):
			raise TypeError("Minimum date may either be None or a DateTime")
		if date_max!=None and not isinstance(date_max, date):
			raise TypeError("Maximum date may either be None or a DateTime")

		# Backup
		reg=file_format

		# Year
		reg=re.sub(r'YYYY', '\\d{4}', reg) 
		reg=re.sub(r'MM', '\\d{2}', reg) 
		reg=re.sub(r'DD', '\\d{2}', reg) 
		reg=re.sub(r'JJJ', '\\d{3}', reg) 
		reg=re.sub(r'\*', '.*', reg) 

		#print "RE: %s"% reg
		cfiles=[]

		for f in files:
			#print "Does %s match?"%f
			if re.search(reg, f):
				#print "%s matches"%f
				df=DataFile(f, path=path, file_format=file_format)
				df.loadDate()

				#is_between_date = df.date>=date_min and df.date<=date_max
				#print "df.date=%s, between [%s %s]?=%r type(df.date)=%s, type(date_min)=%s"%(df.date, date_min, date_max, is_between_date, type(df.date), type(date_min))

				if (date_min == None and date_max == None) or ( (date_min != None and df.date >= date_min) and (date_max != None and df.date <= date_max) ):
					#print "File added"
					cfiles.append(df)

		#return sorted(cfiles, key=lambda student: .age)
		return sorted(cfiles)
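
A hedged usage sketch for FindFiles; the path and pattern are placeholders, and the method is assumed to be callable as a static/utility function (it takes no self). The path is passed explicitly so the self.inputPath fallback is never reached.

from datetime import date

# Hypothetical pattern and path
conc_files = FindFiles('CCTM_CONC.YYYYMMDD', path='/data/conc/',
                       date_min=date(2007, 1, 1), date_max=date(2007, 12, 31))
for df in conc_files:
    print(df.date)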
Code Example #22
File: Validator.py Project: kheaactua/cmaq_forcing
class ForcingValidator:

	LAY_SURFACE_NAME='Surface'

	ni=None
	nj=None
	nk=None
	ns=None
	nt=None

	conc = None

	def __init__(self,filename):
		self.conc=DataFile(filename, mode='r', open=True)

		self.ni = self.conc.dimensions['COL']
		self.nj = self.conc.dimensions['ROW']
		self.nk = self.conc.dimensions['LAY']

		# TSTEP is unlimited, so python has problems reading it
		# So instead we'll examine the shape of a variable
		# Let's assume TFLAG exists
		shape = self.conc.variables['TFLAG'].shape
		# This first element is TSTEP
		self.nt = shape[0]

		self.ns = len(self.conc.variables.keys())


	def close(self):
		try:
			self.conc.close()
		except IOError:
			# nothing.. it's closed.
			self.conc = None

	def __del__(self):
		self.close()

	def changeFile(self, newfile):
		self.conc.close();
		self.conc=DataFile(newfile, mode='r', open=True)

	def getDate(self):
		""" Again, not a validator just a getter.  Useful to know the date
		    of the concentration file being used.  Since we're using an
		    I/O Api file, we'll look at the SDATE attribute.

		Returns:

		datetime
		"""

		self.conc.loadDate()
		return self.conc.date

#		# Get the sdate, in the format YYYYJJJ
#		if not hasattr(self.conc, 'SDATE'):
#			raise IOError("Sample concentration file does not seem to be a proper I/O Api file.")
#
#		sdate=str(getattr(self.conc, 'SDATE'))
#		# Sometimes sdate has brackets around it
#		if sdate[0] == "[" and sdate[-1] == "]":
#			sdate=sdate[1:-1]
#		year=int(sdate[:4])
#		jday=int(sdate[4:])
#
#		date = datetime.date(year, 1, 1)
#		days = datetime.timedelta(days=jday-1) # -1 because we started at day 1
#		date=date+days
#
#		return date

	def getLayers(self):
		"""Return a list of layers.  This isn't really a validator, but
		it shares a lot of the functionality.  Assumes that there's
		always a ground layer.

		Returns:
		list of layers
		"""

		num_layers = self.conc.dimensions['LAY']
		layers=[self.LAY_SURFACE_NAME]
		for l in range(2, num_layers):
			layers.append(str(l))
		return layers

	def getTimes(self):
		"""Return a list of times(hours).  This isn't really a validator, but
		it shares a lot of the functionality.  Assumes that there's
		always a ground layer.

		Returns:
		list of times
		"""

		shape = self.conc.variables['TFLAG'].shape
		nt = shape[0]

		times=list(xrange(nt))
		# Cut off the 25th time
		for t in range(0, nt-1):
			times[t]=str(t)
		return times

	def getSpecies(self):
		"""Return a list of species.  This isn't really a validator, but
		it shares a lot of the functionality

		Returns:
		list of species
		"""
		vars = self.conc.variables.keys()
		for i, var in enumerate(vars):
			vars[i]=var.upper()

		vars=sorted(vars)

		pri_vars = []
		normal_vars = []

		# Put some of the important ones up top
		for var in vars:
			# Select case basically
			if var in ['O3', 'NO', 'NO2']:
				pri_vars.append(var)
			elif var == "TFLAG":
				continue
			else:
				normal_vars.append(var)

		return pri_vars+normal_vars


	# Check to ensure all the chosen species are available 
	# Species is a string vector
	def validateSpecies(self, species):
		"""Validate species against a sample datafile variables

		Keyword Arguments:
		species -- Vector of species names to use

		Raises:
		ValidationError -  if invalid species is input

		Returns:
		TRUE if valid, false otherwise
		"""

		#print "Got species", '[%s]' % ', '.join(map(str, species))

		vars = self.conc.variables.keys()
		for i in range(0, len(vars)):
			vars[i]=vars[i].upper()

		notFound=[]

		for s in species:
			found=False
			for v in vars:
				if v == s:
					found=True
					break
			if found == False:
				notFound.append(s)

		if len(notFound) > 0:
			raise ValidationError("Invalid species: ", '[%s]' % ', '.join(map(str, notFound)))
		return True


	def validateLayers(self,layers):
		"""Validate layers against a sample datafile file

		Keyword Arguments:
		layers -- Vector of layers to use

		Raises:
		ValidationError -  if invalid layer is input

		Returns:
		TRUE if valid, false otherwise
		"""

		num_layers = self.conc.dimensions['LAY']
		for l in layers:
			if l > num_layers:
				raise ValidationError("Invalid layer: ", l)
				return False
		return True

	def validateTimes(self,times):
		"""Validate times against a sample datafile file

		Keyword Arguments:
		times -- Vector of times to use

		Raises:
		ValidationError -- if invalid times step is input

		Returns:
		TRUE if valid, false otherwise
		"""

		# Not yet implemented
		return True

	def ValidateDataFileSurface(self, filename):
		""" Validates a datafile by checking if it's 2D surface domani
			(ni,nj) matches the sample file """

		datafile=DataFile(filename, mode='r', open=True)

		#print "COL %d, self.ni: %d   -  ROW: %d, self.nj: %d"%(datafile.dimensions['COL'], self.ni, datafile.dimensions['ROW'], self.nj)
		return datafile.dimensions['COL'] == self.ni and datafile.dimensions['ROW'] == self.nj
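
A minimal usage sketch for ForcingValidator, assuming a sample I/O Api concentration file; the file name and species list are placeholders.

# Hypothetical file name
v = ForcingValidator('conc_sample.nc')
print(v.getSpecies())             # species names, with O3/NO/NO2 listed first
v.validateSpecies(['O3', 'NO2'])  # raises ValidationError for unknown species
print(v.getDate())                # date derived from the file's SDATE attribute
v.close()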
Code Example #23
 def __init__(self):
     self.username = USERNAME
     self.botname  = BOTNAME
     self.db = DataFile()
Code Example #24
            _collectParsed(args, config)
        elif args.mode == "collect-parsed-div":
            _collectParsedDiv(args, config)
        elif args.mode == "compare-collected-div":
            _compareCollectedDiv(args, config)
        elif args.mode == "collected-div-count-first":
            _collectedDivCountFirst(args, config)
        elif args.mode == "collected-div-device-overlap":
            _collectedDivDeviceOverlap(args, config)
        elif args.mode == "plot":
            _plot(args, config)

        else:
            if args.specific_files:
                df = [
                    DataFile(config.getOption("data_location"),
                             os.path.normpath(x))
                    for x in args.specific_files.split(",")
                ]
                files, filtered = df, df
            else:
                files, filtered = getDataFiles(args, config)

            if args.mode == "parse": _parse(files, filtered, args, config)
            elif args.mode == "send": _send(files, filtered, args, config)
            elif args.mode == "parse-send" or args.mode == "send-parse":
                _parseSend(files, filtered, args, config)
            elif args.mode == "list":
                _list(files, filtered, args, config)
            elif args.mode == "impossible-timestamps":
                _impossibleTimestamps(files, filtered, args, config)
            elif args.mode == "output":