def testReadWriteVariousTypes(self):
    """Write scalar attributes of several numeric types, then re-read the
    file and verify each attribute reports the expected HDF5 format."""
    out = _pyhl.nodelist()
    out.addNode(_pyhl.node(_pyhl.GROUP_ID, "/info"))
    # (path, value, hdf5 type) triples written as scalar attributes
    for path, value, fmt in [("/info/xscale", 0.85, "double"),
                             ("/info/yscale", 1.0, "float"),
                             ("/info/xsize", 240, "int"),
                             ("/info/ysize", 480, "long")]:
        attr = _pyhl.node(_pyhl.ATTRIBUTE_ID, path)
        attr.setScalarValue(-1, value, fmt, -1)
        out.addNode(attr)
    out.write(self.TESTFILE)
    # verify
    result = _pyhl.read_nodelist(self.TESTFILE)
    self.assertEqual("double", result.fetchNode("/info/xscale").format())
    self.assertEqual("float", result.fetchNode("/info/yscale").format())
    self.assertEqual("int", result.fetchNode("/info/xsize").format())
    self.assertEqual("long", result.fetchNode("/info/ysize").format())
def writeHac(self, fstr, compression=0):
    """Write the accumulated hit-count data in self.hac to HDF5 file *fstr*.

    Creates /accumulation_count, /validity_time_of_last_update and the
    /hit_accum dataset, then writes the nodelist with explicit file
    creation properties.

    Parameters:
      fstr        -- target file path; missing parent directories are created
      compression -- compression level forwarded to nodelist.write (default 0)
    """
    nodelist = _pyhl.nodelist()

    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/accumulation_count")
    node.setScalarValue(-1, self.hac.getAttribute("how/count"), "long", -1)
    nodelist.addNode(node)

    # Stamp the file with the current wall-clock time (seconds since epoch).
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/validity_time_of_last_update")
    node.setScalarValue(-1, int(time.time()), "long", -1)
    nodelist.addNode(node)

    node = _pyhl.node(_pyhl.DATASET_ID, "/hit_accum")
    node.setArrayValue(-1, [self.hac.ysize, self.hac.xsize],
                       self.hac.getData(), "uint", -1)
    nodelist.addNode(node)

    fcp = _pyhl.filecreationproperty()
    fcp.userblock = 0
    fcp.sizes = (4, 4)
    fcp.sym_k = (1, 1)
    fcp.istore_k = 1
    fcp.meta_block_size = 0

    # Bug fix: when fstr has no directory component, os.path.split()[0] is ""
    # and os.makedirs("") raises OSError.  Also tolerate a concurrent mkdir
    # instead of racing between isdir() and makedirs().
    path = os.path.split(fstr)[0]
    if path and not os.path.isdir(path):
        try:
            os.makedirs(path)
        except OSError:
            if not os.path.isdir(path):
                raise
    nodelist.write(fstr, compression, fcp)
def add_H5IM_attributes(a, IDA):
    """Attach the H5IM CLASS and IMAGE_VERSION string attributes to the
    dataset at path *IDA* in nodelist *a*."""
    for suffix, text in (("/CLASS", "IMAGE"), ("/IMAGE_VERSION", "1.2")):
        attr = _pyhl.node(_pyhl.ATTRIBUTE_ID, IDA + suffix)
        attr.setScalarValue(-1, text, "string", -1)
        a.addNode(attr)
def testAddDuplicate_GroupNode(self):
    """Adding a group whose path already exists in the nodelist must raise IOError."""
    nodelist = _pyhl.nodelist()
    for path in ("/root", "/root/group1", "/root/group2"):
        nodelist.addNode(_pyhl.node(_pyhl.GROUP_ID, path))
    try:
        nodelist.addNode(_pyhl.node(_pyhl.GROUP_ID, "/root/group1"))
    except IOError:
        pass
    else:
        self.fail("Expected IOError")
def _copyAttribute(self, nodelist, name, ntype=None, oname=None):
    """Copy the scalar attribute *name* from self._nodelist into *nodelist*.

    ntype -- explicit HDF5 type string; when None the type is derived
             from the value via self._convertValueTypeToStr.
    oname -- optional new path for the attribute; defaults to *name*.
    """
    value = self._nodelist.getNode(name).data()
    target = _pyhl.node(_pyhl.ATTRIBUTE_ID, oname if oname else name)
    typestr = ntype if ntype else self._convertValueTypeToStr(value)
    target.setScalarValue(-1, value, typestr, -1)
    nodelist.addNode(target)
def Array2Tempfile(value):
    """Write array *value* to a temporary HDF5 file as /dataset1/data and
    return the temporary file's path."""
    import rave_tempfile
    _, fstr = rave_tempfile.mktemp()
    out = _pyhl.nodelist()
    out.addNode(_pyhl.node(_pyhl.GROUP_ID, "/dataset1"))
    dset = _pyhl.node(_pyhl.DATASET_ID, "/dataset1/data")
    # HDF5 type string is looked up from the array's dtype character code.
    dset.setArrayValue(-1, list(value.shape), value,
                       ARRAYTYPES[value.dtype.char], -1)
    out.addNode(dset)
    out.write(fstr)
    return fstr
def _copyData(self, nodelist, name, oname=None):
    """Copy the dataset *name* from self._nodelist into *nodelist*,
    compressed with zlib level 6; *oname* optionally renames it.

    Raises Exception for dtypes missing from the DATATYPES mapping.
    """
    data = self._nodelist.getNode(name).data()
    datatype = str(data.dtype)
    if datatype not in DATATYPES:
        raise Exception("Unsupported datatype %s" % datatype)
    hdftype = DATATYPES[datatype]
    zlib = _pyhl.compression(_pyhl.COMPRESSION_ZLIB)
    zlib.level = 6
    target = _pyhl.node(_pyhl.DATASET_ID, oname if oname else name, zlib)
    target.setArrayValue(-1, list(data.shape), data, hdftype, -1)
    nodelist.addNode(target)
def make_directory(self, name):
    """make_directory(name) -- create a new HDF directory, return status"""
    self.check_open()
    group = _pyhl.node(_pyhl.GROUP_ID, self.new_directory_name(name))
    self.inquire_nodelist().addNode(group)
    return 1
def make_directory(self, name):
    """make_directory(name) -- create a new HDF directory, return status"""
    self.check_open()
    nodelist = self.inquire_nodelist()
    new_path = self.new_directory_name(name)
    nodelist.addNode(_pyhl.node(_pyhl.GROUP_ID, new_path))
    return 1
def testReadWriteSameFile(self):
    """A freshly written file must be readable and writable under a new name."""
    writer = _pyhl.nodelist()
    writer.addNode(_pyhl.node(_pyhl.GROUP_ID, "/slask"))
    writer.write(self.TESTFILE)
    reader = _pyhl.read_nodelist(self.TESTFILE)
    reader.write(self.TESTFILE2)
def testWriteOnlyRootGroup(self):
    """Writing a nodelist that only contains the root group must raise IOError."""
    nodelist = _pyhl.nodelist()
    nodelist.addNode(_pyhl.node(_pyhl.GROUP_ID, "/"))
    try:
        nodelist.write(self.TESTFILE)
    except IOError:
        pass
    else:
        self.fail("Expected IOError")
def traverse_save(e, a, ID, datadict):
    # Recursively serialize ElementTree element *e* into pyhl nodelist *a*.
    # *ID* is the HDF5 path accumulated so far; *datadict* supplies the actual
    # values resolved by geth5attr.  Relies on module-level COMPRESSION,
    # COMPRESSION_ZLIB_LEVEL and ARRAYTYPES.
    for i in list(e):
        if list(i):
            # Element has children: map it to an HDF5 group and recurse.
            IDA = ID + "/" + i.tag
            b = _pyhl.node(_pyhl.GROUP_ID, IDA)
            a.addNode(b)
            traverse_save(i, a, IDA, datadict)
        else:
            # Leaf element: the "type" XML attribute decides how it is stored
            # (defaults to a plain string scalar attribute).
            typ = i.get("type", "string")
            value = geth5attr(i, datadict)
            IDA = ID + "/" + i.tag
            h5typ = None
            if typ == "dataset":
                # Pick the compression backend from the module-level setting.
                if COMPRESSION == "zlib":
                    comp = _pyhl.compression(_pyhl.COMPRESSION_ZLIB)
                    comp.level = COMPRESSION_ZLIB_LEVEL
                elif COMPRESSION == "szip":
                    comp = _pyhl.compression(_pyhl.COMPRESSION_SZLIB)
                    comp.szlib_px_per_block = 10
                else:
                    comp = None
                if comp is not None:
                    b = _pyhl.node(_pyhl.DATASET_ID, IDA, comp)
                else:
                    b = _pyhl.node(_pyhl.DATASET_ID, IDA)
                h5typ = ARRAYTYPES[value.dtype.char]
                #h5typ = ARRAYTYPES[value.typecode()] # relic from Numeric
                b.setArrayValue(-1, list(value.shape), value, h5typ, -1)
            elif typ == "sequence":
                b = _pyhl.node(_pyhl.ATTRIBUTE_ID, IDA)
                # Numeric sequences are stored as arrays of strings.
                if type(value[0]) in [int, float]:
                    v = []
                    for val in value:
                        v.append(str(val))
                    value = v
                b.setArrayValue(-1, [len(value)], value, "string", -1)
            else:
                # Scalar attribute of the declared type.
                b = _pyhl.node(_pyhl.ATTRIBUTE_ID, IDA)
                b.setScalarValue(-1, value, typ, -1)
            a.addNode(b)
            # Workaround. For 8-bit uchar datasets, add H5IM attributes.
            if typ == "dataset" and h5typ == 'uchar':
                add_H5IM_attributes(a, IDA)
def writeFile():
    """Demo: write compound_test.hdf containing a committed compound datatype,
    plus an attribute and a dataset that use it."""
    # Create the rave info HDF5 type
    typedef = _rave_info_type.type()
    # Create the rave info HDF5 object
    obj = _rave_info_type.object()
    # Set the values
    obj.xsize = 10
    obj.ysize = 10
    obj.xscale = 150.0
    obj.yscale = 150.0
    aList = _pyhl.nodelist()
    # Create a datatype node
    aNode = _pyhl.node(_pyhl.TYPE_ID, "/MyDatatype")
    # Make the datatype named
    aNode.commit(typedef.hid())
    aList.addNode(aNode)
    # Create an attribute containing the compound type
    aNode = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/myCompoundAttribute")
    # Note that I use both itemSize and lhid
    # Also note how I translate the compound object to a string
    aNode.setScalarValue(typedef.size(), obj.tostring(), "compound", typedef.hid())
    aList.addNode(aNode)
    # Better create a dataset also with the compound type
    obj.xsize = 1
    obj.ysize = 1
    aNode = _pyhl.node(_pyhl.DATASET_ID, "/myCompoundDataset")
    # I use setArrayValue instead
    aNode.setArrayValue(typedef.size(), [1], obj.tostring(), "compound", typedef.hid())
    aList.addNode(aNode)
    # And finally write the HDF5 file.
    aList.write("compound_test.hdf")
def write(self, name, quantity, record=0, indx=None):
    """Write quantity to file as 'name'

    Arrays become HDF5 datasets under the current directory (self._pwd);
    scalars and strings become attributes.  *record* and *indx* are only
    used for the verbose progress message here.
    """
    self.check_open()
    if self.inquire_verbosity() > 1:
        if record == 0:
            print "PW::write writing", name
        else:
            print "PW::write writing record", record, \
                "of", name
    if isinstance(quantity, ndarray):
        # NOTE(review): typecode() is the old Numeric array API; numpy
        # ndarrays expose dtype.char instead -- confirm which array
        # package backs `ndarray` in this module.
        anode = _pyhl.node(_pyhl.DATASET_ID, self._pwd + '/' + name)
        anode.setArrayValue(-1, shape(quantity), quantity,
                            self.type_dict[quantity.typecode()], -1)
        self.inquire_nodelist().addNode(anode)
    elif len(shape(quantity)) == 0 or isinstance(quantity, str):
        # Zero-rank values and strings are stored as scalar attributes.
        anode = _pyhl.node(_pyhl.ATTRIBUTE_ID, self._pwd + '/' + name)
        anode.setScalarValue(-1, quantity,
                             self.type_dict[type(quantity)], -1)
        self.inquire_nodelist().addNode(anode)
    else:
        # TODO: write as pickled object as a string
        raise Exception("type unsupported")
def write(self, name, quantity, record=0, indx=None):
    """Write quantity to file as 'name'

    Arrays become HDF5 datasets under the current directory (self._pwd);
    scalars and strings become attributes.  *record* and *indx* are only
    used for the verbose progress message here.
    """
    self.check_open()
    if self.inquire_verbosity() > 1:
        if record == 0:
            print "PW::write writing", name
        else:
            print "PW::write writing record", record, \
                "of", name
    if isinstance(quantity, ndarray):
        # NOTE(review): typecode() is the old Numeric array API; numpy
        # ndarrays expose dtype.char instead -- confirm the array package.
        anode = _pyhl.node(_pyhl.DATASET_ID, self._pwd + '/' + name)
        anode.setArrayValue(-1, shape(quantity), quantity,
                            self.type_dict[quantity.typecode()], -1)
        self.inquire_nodelist().addNode(anode)
    elif len(shape(quantity)) == 0 or isinstance(quantity, str):
        # Zero-rank values and strings are stored as scalar attributes.
        anode = _pyhl.node(_pyhl.ATTRIBUTE_ID, self._pwd + '/' + name)
        anode.setScalarValue(-1, quantity,
                             self.type_dict[type(quantity)], -1)
        self.inquire_nodelist().addNode(anode)
    else:
        # TODO: write as pickled object as a string
        raise Exception("type unsupported")
def create_test_image(): a = _pyhl.nodelist() # First create the palette} b = _pyhl.node(_pyhl.DATASET_ID, "/PALETTE") c = createPalette() b.setArrayValue(-1, [256, 3], c, "uchar", -1) a.addNode(b) b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/PALETTE/CLASS") b.setScalarValue(-1, "PALETTE", "string", -1) a.addNode(b) b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/PALETTE/PAL_VERSION") b.setScalarValue(-1, "1.2", "string", -1) a.addNode(b) b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/PALETTE/PAL_COLORMODEL") b.setScalarValue(-1, "RGB", "string", -1) a.addNode(b) b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/PALETTE/PAL_TYPE") b.setScalarValue(-1, "STANDARD8", "string", -1) a.addNode(b) # Now create the image to display} b = _pyhl.node(_pyhl.DATASET_ID, "/IMAGE1") c = createImage() b.setArrayValue(1, [256, 256], c, "uchar", -1) a.addNode(b) b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/IMAGE1/CLASS") b.setScalarValue(-1, "IMAGE", "string", -1) a.addNode(b) b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/IMAGE1/IMAGE_VERSION") b.setScalarValue(-1, "1.2", "string", -1) a.addNode(b) # Finally insert the reference} b = _pyhl.node(_pyhl.REFERENCE_ID, "/IMAGE1/PALETTE") b.setScalarValue(-1, "/PALETTE", "string", -1) a.addNode(b) a.write("test_image.hdf")
def writeFile():
    """Demo: write simple_test.hdf with a /info group of scalar attributes
    and a 10x10 int dataset at /data, using compression level 9."""
    # Create an empty node list instance
    aList = _pyhl.nodelist()
    # Create an group called info
    aNode = _pyhl.node(_pyhl.GROUP_ID, "/info")
    # Add the node to the nodelist
    # Remember that the nodelist takes responsibility
    aList.addNode(aNode)
    # Insert the attribute xscale in the group "/info"
    aNode = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/info/xscale")
    # Set the value to a double with value 10.0
    # Note the -1's that has been used since the data not is compound
    aNode.setScalarValue(-1, 10.0, "double", -1)
    aList.addNode(aNode)
    # Similar for yscale, xsize and ysize
    aNode = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/info/yscale")
    aNode.setScalarValue(-1, 20.0, "double", -1)
    aList.addNode(aNode)
    aNode = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/info/xsize")
    aNode.setScalarValue(-1, 10, "int", -1)
    aList.addNode(aNode)
    aNode = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/info/ysize")
    aNode.setScalarValue(-1, 10, "int", -1)
    aList.addNode(aNode)
    # Add a description
    aNode = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/info/description")
    aNode.setScalarValue(-1, "This is a simple example", "string", -1)
    aList.addNode(aNode)
    # Add an array of data
    myArray = arange(100)
    myArray = array(myArray.astype('i'), 'i')
    myArray = reshape(myArray, (10, 10))
    aNode = _pyhl.node(_pyhl.DATASET_ID, "/data")
    # Set the data as an array, note the list with [10,10] which
    # Indicates that it is an array of 10x10 items
    aNode.setArrayValue(-1, [10, 10], myArray, "int", -1)
    aList.addNode(aNode)
    # And now just write the file as "simple_test.hdf" with
    # Compression level 9 (highest compression)
    aList.write("simple_test.hdf", 9)
def save(self, filename):
    """Save the current instance to nordrad hdf format.

    Writes the ODIM-style /what, /where, /how and /image1 groups built
    from self.ctype (an MSG cloud-type product) and self.datestr
    (YYYYMMDDhhmm), then writes *filename* with COMPRESS_LVL compression.
    Returns 1 (the unconditional status value set below).
    """
    import _pyhl
    status = 1
    msgctype = self.ctype
    node_list = _pyhl.nodelist()
    # What
    node = _pyhl.node(_pyhl.GROUP_ID, "/what")
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/what/object")
    node.setScalarValue(-1, "IMAGE", "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/what/sets")
    node.setScalarValue(-1, 1, "int", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/what/version")
    node.setScalarValue(-1, "H5rad 1.2", "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/what/date")
    # datestr is assumed to be YYYYMMDDhhmm; seconds are forced to "00".
    yyyymmdd = self.datestr[0:8]
    hourminsec = self.datestr[8:12] + '00'
    node.setScalarValue(-1, yyyymmdd, "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/what/time")
    node.setScalarValue(-1, hourminsec, "string", -1)
    node_list.addNode(node)
    # Where
    node = _pyhl.node(_pyhl.GROUP_ID, "/where")
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/projdef")
    node.setScalarValue(-1, msgctype.area.proj4_string, "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/xsize")
    node.setScalarValue(-1, msgctype.num_of_columns, "int", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/ysize")
    node.setScalarValue(-1, msgctype.num_of_lines, "int", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/xscale")
    node.setScalarValue(-1, msgctype.xscale, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/yscale")
    node.setScalarValue(-1, msgctype.yscale, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/LL_lon")
    node.setScalarValue(-1, msgctype.ll_lon, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/LL_lat")
    node.setScalarValue(-1, msgctype.ll_lat, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/UR_lon")
    node.setScalarValue(-1, msgctype.ur_lon, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/where/UR_lat")
    node.setScalarValue(-1, msgctype.ur_lat, "float", -1)
    node_list.addNode(node)
    # How
    node = _pyhl.node(_pyhl.GROUP_ID, "/how")
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/how/area")
    node.setScalarValue(-1, msgctype.region_name, "string", -1)
    node_list.addNode(node)
    # image1
    node = _pyhl.node(_pyhl.GROUP_ID, "/image1")
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.DATASET_ID, "/image1/data")
    # NOTE(review): dims are given as [columns, lines]; verify against the
    # actual array layout of msgctype.cloudtype (rows usually come first).
    node.setArrayValue(1, [msgctype.num_of_columns, msgctype.num_of_lines],
                       msgctype.cloudtype.astype('B'), "uchar", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.GROUP_ID, "/image1/what")
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/product")
    #We should eventually try to use the msg-parameters "package",
    #"product_algorithm_version", and "product_name":
    node.setScalarValue(1, 'MSGCT', "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/prodpar")
    node.setScalarValue(1, 0.0, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/quantity")
    node.setScalarValue(1, "ct", "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/startdate")
    node.setScalarValue(-1, yyyymmdd, "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/starttime")
    node.setScalarValue(-1, hourminsec, "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/enddate")
    node.setScalarValue(-1, yyyymmdd, "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/endtime")
    node.setScalarValue(-1, hourminsec, "string", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/gain")
    node.setScalarValue(-1, 1.0, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/offset")
    node.setScalarValue(-1, 0.0, "float", -1)
    node_list.addNode(node)
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/nodata")
    node.setScalarValue(-1, 0.0, "float", -1)
    node_list.addNode(node)
    # What we call missingdata in PPS:
    node = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/image1/what/undetect")
    node.setScalarValue(-1, 20.0, "float", -1)
    node_list.addNode(node)
    node_list.write(filename, COMPRESS_LVL)
    return status
def createVHLHDF_READ_DATAFILE(self):
    """Build the read-test fixture file self.VHLHDF_READ_DATAFILE containing
    scalar attributes of every supported type, typed datasets, arrays,
    references and compound-type nodes."""
    a = _pyhl.nodelist()
    # Scalar attributes, one per supported HDF5 scalar type.
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/stringvalue")
    b.setScalarValue(-1, "My String", "string", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/charvalue")
    b.setScalarValue(-1, 123, "char", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/scharvalue")
    b.setScalarValue(-1, 45, "schar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/ucharvalue")
    b.setScalarValue(-1, 99, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/shortvalue")
    b.setScalarValue(-1, 4321, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/ushortvalue")
    b.setScalarValue(-1, 9999, "ushort", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/intvalue")
    b.setScalarValue(-1, 989898, "int", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/uintvalue")
    b.setScalarValue(-1, 987654, "uint", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/longvalue")
    b.setScalarValue(-1, -123456789, "long", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/ulongvalue")
    b.setScalarValue(-1, 123456789, "ulong", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/llongvalue")
    b.setScalarValue(-1, -123456789012, "llong", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/ullongvalue")
    b.setScalarValue(-1, 123456789012, "ullong", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/floatvalue")
    b.setScalarValue(-1, 12.65, "float", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/doublevalue")
    b.setScalarValue(-1, 12999.8989, "double", -1)
    a.addNode(b)
    #Not supported yet
    #b=_pyhl.node(_pyhl.ATTRIBUTE_ID,"/ldoublevalue")
    #b.setScalarValue(-1,65765.762525,"ldouble",-1)
    #a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/hsizevalue")
    b.setScalarValue(-1, 65765, "hsize", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/herrvalue")
    b.setScalarValue(-1, 12, "herr", -1)
    a.addNode(b)
    # NOTE(review): /hboolvalue is written with type "herr", not "hbool" --
    # confirm whether this is intentional (cf. the unsupported ldouble above).
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/hboolvalue")
    b.setScalarValue(-1, 0, "herr", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.GROUP_ID, "/group1")
    a.addNode(b)
    b = _pyhl.node(_pyhl.GROUP_ID, "/group1/group11")
    a.addNode(b)
    #
    #DATASETS FOR char, schar, uchar, short, ushort, int, uint, long, float, double
    #
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/chardset")
    c = self.createDataset([5, 5], numpy.character)
    b.setArrayValue(1, [5, 5], c, "char", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/schardset")
    c = self.createDataset([5, 5], numpy.int8)
    b.setArrayValue(1, [5, 5], c, "schar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/uchardset")
    c = self.createDataset([5, 5], numpy.uint8)
    b.setArrayValue(1, [5, 5], c, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/shortdset")
    c = self.createDataset([5, 5], numpy.int16)
    b.setArrayValue(1, [5, 5], c, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/ushortdset")
    c = self.createDataset([5, 5], numpy.uint16)
    b.setArrayValue(1, [5, 5], c, "ushort", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/intdset")
    c = self.createDataset([5, 5], numpy.int32)
    b.setArrayValue(1, [5, 5], c, "int", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/uintdset")
    c = self.createDataset([5, 5], numpy.uint32)
    b.setArrayValue(1, [5, 5], c, "uint", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/longdset")
    c = self.createDataset([5, 5], numpy.int64)
    b.setArrayValue(1, [5, 5], c, "long", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/floatdset")
    c = self.createDataset([5, 5], numpy.float32)
    b.setArrayValue(1, [5, 5], c, "float", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/group1/doubledset")
    c = self.createDataset([5, 5], numpy.float64)
    b.setArrayValue(1, [5, 5], c, "double", -1)
    a.addNode(b)
    # ARRAYS OF TYPE string, double, float, int and long
    b = _pyhl.node(_pyhl.DATASET_ID, "/stringarray")
    c = ["ABC", "def", "EFG"]
    b.setArrayValue(1, [len(c)], c, "string", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/doublearray")
    c = [1.0, 2.1, 3.2]
    b.setArrayValue(1, [len(c)], c, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/floatarray")
    c = [1.1, 2.2, 3.3]
    b.setArrayValue(1, [len(c)], c, "float", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/intarray")
    c = [1, 2, 3, 4]
    b.setArrayValue(1, [len(c)], c, "int", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/longarray")
    c = [2, 3, 4, 5, 6]
    b.setArrayValue(1, [len(c)], c, "long", -1)
    a.addNode(b)
    # Create a dataset with an attribute and a reference in it
    b = _pyhl.node(_pyhl.DATASET_ID, "/dataset1")
    c = [1, 2, 3, 4]
    b.setArrayValue(1, [len(c)], c, "int", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/dataset1/attribute1")
    b.setScalarValue(-1, 989898, "int", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.REFERENCE_ID, "/dataset1/doublearray")
    b.setScalarValue(-1, "/doublearray", "string", -1)
    a.addNode(b)
    # REFERENCES POINTING AT DIFFERENT PLACES
    b = _pyhl.node(_pyhl.GROUP_ID, "/references")
    a.addNode(b)
    b = _pyhl.node(_pyhl.REFERENCE_ID, "/references/doublearray")
    b.setScalarValue(-1, "/doublearray", "string", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.REFERENCE_ID, "/references/floatdset")
    b.setScalarValue(-1, "/group1/floatdset", "string", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.REFERENCE_ID, "/references/group1")
    b.setScalarValue(-1, "/group1", "string", -1)
    a.addNode(b)
    #b=_pyhl.node(_pyhl.REFERENCE_ID,"/references/tosomethingdefinedafterthis")
    #b.setScalarValue(-1,"/groupdefinedafterreferences","string",-1)
    #a.addNode(b)
    #b=_pyhl.node(_pyhl.GROUP_ID, "/groupdefinedafterreferences")
    #a.addNode(b)
    b = _pyhl.node(_pyhl.REFERENCE_ID, "/rootreferencetolongarray")
    b.setScalarValue(-1, "/longarray", "string", -1)
    a.addNode(b)
    #Add a compound type as well
    rinfo_type = _rave_info_type.type()
    rinfo_obj = _rave_info_type.object()
    rinfo_obj.xsize = 10
    rinfo_obj.ysize = 10
    rinfo_obj.xscale = 150.0
    rinfo_obj.yscale = 150.0
    b = _pyhl.node(_pyhl.TYPE_ID, "/RaveDatatype")
    b.commit(rinfo_type.hid())
    a.addNode(b)
    b = _pyhl.node(_pyhl.GROUP_ID, "/compoundgroup")
    a.addNode(b)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/compoundgroup/attribute")
    b.setScalarValue(rinfo_type.size(), rinfo_obj.tostring(), "compound", rinfo_type.hid())
    a.addNode(b)
    rinfo_obj.xsize = 99
    rinfo_obj.ysize = 109
    rinfo_obj.area_extent = (10.0, 20.0, 30.0, 40.0)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/compoundgroup/attribute2")
    b.setScalarValue(rinfo_type.size(), rinfo_obj.tostring(), "compound", rinfo_type.hid())
    a.addNode(b)
    rinfo_obj.xscale = 170.0
    rinfo_obj.area_extent = (10.0, 20.0, 35.0, 40.0)
    b = _pyhl.node(_pyhl.DATASET_ID, "/compoundgroup/dataset")
    b.setArrayValue(rinfo_type.size(), [1], rinfo_obj.tostring(), "compound", rinfo_type.hid())
    a.addNode(b)
    # Create a compund array containing 2x2 items
    rinfo_obj2 = _rave_info_type.object()
    rinfo_obj2.xsize = 98
    rinfo_obj2.ysize = 97
    rinfo_obj2.xscale = 120.0
    rinfo_obj2.yscale = 130.0
    rinfo_obj2.area_extent = (33.0, 32.0, 31.0, 30.0)
    rinfo_obj3 = _rave_info_type.object()
    rinfo_obj3.xsize = 88
    rinfo_obj3.ysize = 87
    rinfo_obj3.xscale = 100.0
    rinfo_obj3.yscale = 110.0
    rinfo_obj3.area_extent = (43.0, 42.0, 41.0, 40.0)
    rinfo_obj4 = _rave_info_type.object()
    rinfo_obj4.xsize = 78
    rinfo_obj4.ysize = 77
    rinfo_obj4.xscale = 90.0
    rinfo_obj4.yscale = 91.0
    rinfo_obj4.area_extent = (53.0, 52.0, 51.0, 50.0)
    b = _pyhl.node(_pyhl.DATASET_ID, "/compoundgroup/dataset2")
    # NOTE(review): the local name `str` shadows the builtin for the rest
    # of this function.
    str = rinfo_obj.tostring() + rinfo_obj2.tostring() + rinfo_obj3.tostring() + rinfo_obj4.tostring()
    b.setArrayValue(rinfo_type.size(), [2, 2], str, "compound", rinfo_type.hid())
    a.addNode(b)
    # Create an unamed compound value
    rinfo_unnamed_type = _rave_info_type.type()
    rinfo_unnamed_obj = _rave_info_type.object()
    rinfo_unnamed_obj.xsize = 1
    rinfo_unnamed_obj.ysize = 2
    rinfo_unnamed_obj.xscale = 10.0
    rinfo_unnamed_obj.yscale = 20.0
    rinfo_unnamed_obj.area_extent = (1.0, 2.0, 3.0, 4.0)
    b = _pyhl.node(_pyhl.ATTRIBUTE_ID, "/compoundgroup/unnamed_type_attribute")
    b.setScalarValue(rinfo_unnamed_type.size(), rinfo_unnamed_obj.tostring(), "compound", rinfo_unnamed_type.hid())
    a.addNode(b)
    a.write(self.VHLHDF_READ_DATAFILE)
def writeCloudsatCwcAvhrrMatchObj(filename, ca_obj, compress_lvl):
    """Write a CloudSat CWC-RVOD / AVHRR match-up object to HDF5.

    Stores the time-difference dataset, the /avhrr group and the
    /cloudsatcwc group from *ca_obj* into *filename* with compression
    level *compress_lvl*.  Returns the status from nodelist.write().
    """
    import _pyhl
    status = -1
    a = _pyhl.nodelist()
    shape = [ca_obj.cloudsatcwc.longitude.shape[0]]
    # Match-Up - time difference:
    # ====
    b = _pyhl.node(_pyhl.DATASET_ID, "/diff_sec_1970")
    b.setArrayValue(1, shape, ca_obj.diff_sec_1970, "double", -1)
    a.addNode(b)
    # AVHRR
    # ====
    b = _pyhl.node(_pyhl.GROUP_ID, "/avhrr")
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/longitude")
    # `shape` is rebound here to the AVHRR geometry and reused for all
    # /avhrr datasets below (and later for avhrr_linnum/avhrr_pixnum).
    shape = ca_obj.avhrr.longitude.shape
    b.setArrayValue(1, shape, ca_obj.avhrr.longitude, "float", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/latitude")
    b.setArrayValue(1, shape, ca_obj.avhrr.latitude, "float", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/sec_1970")
    b.setArrayValue(1, shape, ca_obj.avhrr.sec_1970, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/ctth_pressure")
    b.setArrayValue(1, shape, ca_obj.avhrr.ctth_pressure, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/ctth_temperature")
    b.setArrayValue(1, shape, ca_obj.avhrr.ctth_temperature, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/ctth_height")
    b.setArrayValue(1, shape, ca_obj.avhrr.ctth_height, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/cloudtype")
    b.setArrayValue(1, shape, ca_obj.avhrr.cloudtype, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/bt11micron")
    b.setArrayValue(1, shape, ca_obj.avhrr.bt11micron, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/bt12micron")
    b.setArrayValue(1, shape, ca_obj.avhrr.bt12micron, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/surftemp")
    b.setArrayValue(1, shape, ca_obj.avhrr.surftemp, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/avhrr/satz")
    b.setArrayValue(1, shape, ca_obj.avhrr.satz, "double", -1)
    a.addNode(b)
    # CWC-RVOD
    # ====
    shapecwc = [ca_obj.cloudsatcwc.longitude.shape[0]]
    shape2dcwc = ca_obj.cloudsatcwc.Height.shape
    #shapeTAIcwc = [ca_obj.cloudsatcwc.TAI_start.shape[0]]
    shapeTAIcwc = [ca_obj.cloudsatcwc.Temp_min_mixph_K.shape[0]]
    shapecwclong = [ca_obj.cloudsatcwc.Profile_time.shape[0]]
    # Geolocation
    b = _pyhl.node(_pyhl.GROUP_ID, "/cloudsatcwc")
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/longitude")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.longitude, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/latitude")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.latitude, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/elevation")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.elevation, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/Height")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.Height, "short", -1)
    a.addNode(b)
    # NOTE(review): the two datasets below use `shape` (the AVHRR shape set
    # above), not `shapecwc` -- confirm this is intentional.
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/avhrr_linnum")
    b.setArrayValue(1, shape, ca_obj.cloudsatcwc.avhrr_linnum, "int", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/avhrr_pixnum")
    b.setArrayValue(1, shape, ca_obj.cloudsatcwc.avhrr_pixnum, "int", -1)
    a.addNode(b)
    # International Atomic Time (TAI) seconds from Jan 1, 1993:
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/Profile_time")
    b.setArrayValue(1, shapecwclong, ca_obj.cloudsatcwc.Profile_time, "double", -1)
    a.addNode(b)
    #b=_pyhl.node(_pyhl.DATASET_ID,"/cloudsatcwc/TAI_start")
    #b.setArrayValue(1,shapeTAIcwc,ca_obj.cloudsatcwc.TAI_start,"double",-1)
    #a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/sec_1970")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.sec_1970, "double", -1)
    a.addNode(b)
    # The data
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/Data_quality")
    b.setArrayValue(1, shapecwclong, ca_obj.cloudsatcwc.Data_quality, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/Data_targetID")
    b.setArrayValue(1, shapecwclong, ca_obj.cloudsatcwc.Data_targetID, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_liq_water_path")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.RVOD_liq_water_path, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_liq_water_path_uncertainty")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.RVOD_liq_water_path_uncertainty, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_ice_water_path")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.RVOD_ice_water_path, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_ice_water_path_uncertainty")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.RVOD_ice_water_path_uncertainty, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/LO_RVOD_liquid_water_path")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.LO_RVOD_liquid_water_path, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/LO_RVOD_liquid_water_path_uncertainty")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.LO_RVOD_liquid_water_path_uncertainty, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/IO_RVOD_ice_water_path")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.IO_RVOD_ice_water_path, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/IO_RVOD_ice_water_path_uncertainty")
    b.setArrayValue(1, shapecwc, ca_obj.cloudsatcwc.IO_RVOD_ice_water_path_uncertainty, "uchar", -1)
    a.addNode(b)
    ##########################################################################################################################################
    #pdb.set_trace()
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_liq_water_content")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.RVOD_liq_water_content, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_liq_water_content_uncertainty")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.RVOD_liq_water_content_uncertainty, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_ice_water_content")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.RVOD_ice_water_content, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/RVOD_ice_water_content_uncertainty")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.RVOD_ice_water_content_uncertainty, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/LO_RVOD_liquid_water_content")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.LO_RVOD_liquid_water_content, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/LO_RVOD_liquid_water_content_uncertainty")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.LO_RVOD_liquid_water_content_uncertainty, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/IO_RVOD_ice_water_content")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.IO_RVOD_ice_water_content, "short", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/IO_RVOD_ice_water_content_uncertainty")
    b.setArrayValue(1, shape2dcwc, ca_obj.cloudsatcwc.IO_RVOD_ice_water_content_uncertainty, "uchar", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/Temp_min_mixph_K")
    b.setArrayValue(1, shapeTAIcwc, ca_obj.cloudsatcwc.Temp_min_mixph_K, "double", -1)
    a.addNode(b)
    b = _pyhl.node(_pyhl.DATASET_ID, "/cloudsatcwc/Temp_max_mixph_K")
    b.setArrayValue(1, shapeTAIcwc, ca_obj.cloudsatcwc.Temp_max_mixph_K, "double", -1)
    a.addNode(b)
    status = a.write(filename, compress_lvl)
    return status
def _addGroup(self, nodelist, name):
    """Append an HDF5 group node named *name* to *nodelist*."""
    nodelist.addNode(_pyhl.node(_pyhl.GROUP_ID, name))
def _addAttribute(self, nodelist, name, value):
    """Append the scalar attribute *name* = *value* to *nodelist*; the HDF5
    type string is derived via self._convertValueTypeToStr."""
    attr = _pyhl.node(_pyhl.ATTRIBUTE_ID, name)
    typestr = self._convertValueTypeToStr(value)
    attr.setScalarValue(-1, value, typestr, -1)
    nodelist.addNode(attr)
def addScalarValueNode(self, nodelist, type, name, sz, value, hltype, hid):
    """Create a node of *type* at *name*, set a scalar value on it, add it
    to *nodelist* and return the node."""
    node = _pyhl.node(type, name)
    node.setScalarValue(sz, value, hltype, hid)
    nodelist.addNode(node)
    return node
def addTypeNode(self, nodelist, name, hid):
    """Create a TYPE_ID node at *name*, commit datatype handle *hid* on it,
    add it to *nodelist* and return the node."""
    node = _pyhl.node(_pyhl.TYPE_ID, name)
    node.commit(hid)
    nodelist.addNode(node)
    return node
def addArrayValueNode(self, nodelist, type, name, sz, dims, value, hltype, hid):
    """Create a node of *type* at *name*, set an array value with shape
    *dims* on it, add it to *nodelist* and return the node."""
    node = _pyhl.node(type, name)
    node.setArrayValue(sz, dims, value, hltype, hid)
    nodelist.addNode(node)
    return node
def addReference(self, nodelist, name, ref):
    """Create a reference node at *name* pointing at path *ref*, add it to
    *nodelist* and return the node."""
    node = _pyhl.node(_pyhl.REFERENCE_ID, name)
    node.setScalarValue(-1, ref, "string", -1)
    nodelist.addNode(node)
    return node
def addGroupNode(self, nodelist, name):
    """Create a group node at *name*, add it to *nodelist* and return it."""
    node = _pyhl.node(_pyhl.GROUP_ID, name)
    nodelist.addNode(node)
    return node