Code example #1
  def testReadWriteVariousTypes(self):
    a=_pyhl.nodelist()
    
    b=_pyhl.node(_pyhl.GROUP_ID,"/info")
    a.addNode(b)

    b=_pyhl.node(_pyhl.ATTRIBUTE_ID,"/info/xscale")
    b.setScalarValue(-1,0.85,"double",-1)
    a.addNode(b)

    b=_pyhl.node(_pyhl.ATTRIBUTE_ID,"/info/yscale")
    b.setScalarValue(-1,1.0,"float",-1)
    a.addNode(b)

    b=_pyhl.node(_pyhl.ATTRIBUTE_ID,"/info/xsize")
    b.setScalarValue(-1, 240, "int", -1)
    a.addNode(b)

    b=_pyhl.node(_pyhl.ATTRIBUTE_ID,"/info/ysize")
    b.setScalarValue(-1,480,"long",-1)
    a.addNode(b)

    a.write(self.TESTFILE)
    
    #verify
    a=_pyhl.read_nodelist(self.TESTFILE)
    self.assertEqual("double", a.fetchNode("/info/xscale").format())
    self.assertEqual("float", a.fetchNode("/info/yscale").format())
    self.assertEqual("int", a.fetchNode("/info/xsize").format())
    self.assertEqual("long", a.fetchNode("/info/ysize").format())
Code example #2
File: rave_ctfilter.py  Project: aperezhortal/rave
def readCT(filename):
    ct, cp = _cartesian.new(), _cartesianparam.new()

    nodelist = _pyhl.read_nodelist(filename)
    for n in NODENAMES:
        nodelist.selectNode(n)
    nodelist.fetch()

    ct.defaultParameter = "CT"
    ct.projection = _projection.new("MSG", "no description",
                                    nodelist.getNode("/PROJECTION").data())

    cp.setData(nodelist.getNode("/CT").data())

    ysize = nodelist.getNode("/CT/N_LINES").data()
    xsize = nodelist.getNode("/CT/N_COLS").data()
    ULx = nodelist.getNode("/XGEO_UP_LEFT").data()
    ULy = nodelist.getNode("/YGEO_UP_LEFT").data()
    LRx = nodelist.getNode("/XGEO_LOW_RIGHT").data()
    LRy = nodelist.getNode("/YGEO_LOW_RIGHT").data()
    yscale = (ULy - LRy) / ysize
    xscale = (LRx - ULx) / xsize
    xoffset, yoffset = xscale / 2, yscale / 2  # Offsets to adjust LL and UR corners
    LLx = LRx - (xsize * xscale) - xoffset
    LLy = ULy - (ysize * yscale) - yoffset
    URx = LRx + xoffset
    URy = ULy + yoffset
    ct.areaextent = (LLx, LLy, URx, URy)  # Differs ~5 cm from PPS and PyTROLL
    ct.xscale, ct.yscale = xscale, yscale
    cp.quantity = "CT"
    cp.gain, cp.offset = 1.0, 0.0
    cp.nodata, cp.undetect = -1.0, 0.0
    ct.addParameter(cp)

    return ct
Code example #3
 def testReadErroneouslyNullterminatedString(self):
     nodelist = _pyhl.read_nodelist(self.STRINGSFIXTURE)
     node = nodelist.fetchNode("/erroneously_nullterminated")
     self.assertEqual("this is a null terminated string", node.data())
     self.assertEqual("this is a null terminated string\x00",
                      node.rawdata())
     nodelist = None
Code example #4
  def load_file(self, filename, quantity):
    nodelist = _pyhl.read_nodelist(filename)
    names = nodelist.getNodeNames()

    index = 1
    finished = False

    vol = None
    if "/Conventions" in names:
      conv = nodelist.fetchNode("/Conventions").data()
      if conv == "ODIM_H5/V2_0":
        vol = _rave.volume()

    if vol is None:
      return None
    
    vol.longitude = nodelist.fetchNode("/where/lon").data() * math.pi / 180.0
    vol.latitude = nodelist.fetchNode("/where/lat").data() * math.pi / 180.0
    vol.height = nodelist.fetchNode("/where/height").data()
      
    while not finished:
      scanname = "/dataset%d"%index
      if scanname in names:
        scan = self.load_scan_from_file(scanname, nodelist, names, quantity)
        if scan is not None:
          vol.addScan(scan)
      else:
        finished = True
        
      index = index + 1

    #vol.sortByElevations(1) # Ascending sort
    
    return vol
Code example #5
 def testWriteNamedCompoundAttribute(self):
   a=_pyhl.nodelist()
   rinfo_obj =_rave_info_type.object()
   rinfo_type=_rave_info_type.type()    
   rinfo_obj.xsize = 98
   rinfo_obj.ysize = 97
   rinfo_obj.xscale = 120.0
   rinfo_obj.yscale = 130.0
   rinfo_obj.area_extent = (33.0,32.0,31.0,30.0)
   self.addTypeNode(a, "/RaveType", rinfo_type.hid())
   self.addScalarValueNode(a, _pyhl.ATTRIBUTE_ID, "/attribute", rinfo_type.size(), rinfo_obj.tostring(), "compound", rinfo_type.hid())
   a.write(self.TESTFILE)    
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/RaveType")
   self.assertEqual("UNDEFINED", b.format())
   self.assertEqual(_pyhl.TYPE_ID, b.type())
   
   b=a.fetchNode("/attribute")
   self.assertEqual("compound", b.format())
   self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
   result = b.compound_data()
   self.assertEqual(98, result['xsize'])
   self.assertEqual(97, result['ysize'])
   self.assertEqual(120, result['xscale'])
   self.assertEqual(130, result['yscale'])
   self.assertTrue(numpy.all([33.0,32.0,31.0,30.0]==result['area_extent']))
Code example #6
File: rave.py  Project: aperezhortal/rave
    def read_dataset(self, index=1):
        """
        Reads dataset with the given index into the existing RAVE object.

        Arguments:
          int index: the index of the dataset to read

        Returns: nothing, the dataset is read into self.
        """
        if index > self.eval('/what/sets'):
            raise IndexError("Don't have that many datasets.")

        a = _pyhl.read_nodelist(self.__file__)
        counter = 1

        for k, i in a.getNodeNames().items():
            if i == _pyhl.DATASET_ID:
                if index == counter:
                    none1, tag, none2 = k.split('/')
                    try:
                        check = self.data[tag]  # if already there, do nothing
                        return
                    except KeyError:
                        a.selectNode(k)
                        a.fetch()
                        self.data[tag] = a.getNode(k).data()
                        self.info.find(
                            k[1:]).text = tag  # self.put doesn't work
                        self.set(k[1:], 'type', 'dataset')
                        return
                else:
                    counter += 1
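
The docstring above already states the contract of read_dataset. Purely as an illustrative usage sketch, with a hypothetical input file name and the rave.open entry point that appears in code example #18:

import rave

obj = rave.open("odim_volume.h5")  # hypothetical input file
obj.read_dataset(1)                # stores the first dataset group in obj.data
obj.read_dataset(2)                # raises IndexError if the index exceeds /what/sets
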
Code example #7
 def testReadWriteSameFile(self):
   a=_pyhl.nodelist()
   b=_pyhl.node(_pyhl.GROUP_ID, "/slask")
   a.addNode(b)
   a.write(self.TESTFILE)
   
   a = _pyhl.read_nodelist(self.TESTFILE)
   a.write(self.TESTFILE2)
Code example #8
 def __init__(self, filename, quantities):
     self._filename = filename
     if not _pyhl.is_file_hdf5(filename):
         raise Exception("Not a HDF5 file")
     self._nodelist = _pyhl.read_nodelist(self._filename)
     self._nodelist.selectAll()
     self._nodelist.fetch()
     self._nodenames = self._nodelist.getNodeNames().keys()
     self._converted_files = []  # Contains tuples of (nodelist, suggested name)
Code example #9
File: rave_IO.py  Project: aperezhortal/rave
def open_hdf5(filename):
    datadict = {}

    a = _pyhl.read_nodelist(filename)
    a.selectAll()
    a.fetch()
    node_names = a.getNodeNames()

    items = []
    for nodename in node_names.keys():
        b = a.getNode(nodename)
        items.append((nodename, b, b.type()))

    items.sort() # guarantees groups before attributes

    h5rad = rave_info.INFO("h5rad", version=H5RAD_VERSION)

    groupmapping = {"" : h5rad}
    for nodename, node, typ in items:
        index = nodename.rindex("/")
        parentname, tag = nodename[:index], nodename[index+1:]
        # Deal with (ignore) H5IM stubs
        #if tag in ["CLASS", "IMAGE_VERSION"]:
        if os.path.split(parentname)[1] == 'data':
            continue
        e = SubElement(groupmapping[parentname], tag)
        if typ==1:
            groupmapping[nodename] = e # save the nodename to element mapping
        elif typ==0:
            t = h5type(node.data())
            if t == "sequence":
                # convert list to string
                nodes = []
                for n in node.data():
                    s = n.strip()
                    s = remove_nulls(str(s))
                    nodes.append("'" + s + "'")
                e.text = ", ".join(nodes)
            else:
                e.text = remove_nulls(str(node.data()))
            if t != "string":
                e.attrib["type"] = t
        elif typ==2:
            datadict[nodename] = node.data()
            e.attrib["type"] = "dataset"
            e.text=nodename            
##             label = string.replace(parentname, "/", "")
##             print parentname, label
##             if label.startswith("profile"):  # relic from 717 ...
##                 label = label + "_" + tag
##             datadict[label] = node.data()
##             e.attrib["type"] = "dataset"
##             e.text=label

    return h5rad, datadict, items
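
The function above uses helpers that are not shown on this page (h5type, remove_nulls, SubElement, rave_info.INFO). As an illustration only, a minimal remove_nulls could simply drop the NUL characters that strings read from HDF5 sometimes carry (compare the raw, NUL-terminated string in code example #3); the real implementation in rave_IO.py may differ:

def remove_nulls(s):
    # Hypothetical sketch: strip embedded/trailing NUL characters from a
    # string attribute read out of an HDF5 file (see code example #3).
    return s.replace("\x00", "")
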
Code example #10
  def testWriteInt(self):
    a=_pyhl.nodelist()
    self.addScalarValueNode(a, _pyhl.ATTRIBUTE_ID, "/intvalue", -1, -123, "int", -1)
    a.write(self.TESTFILE)

    #verify
    a=_pyhl.read_nodelist(self.TESTFILE)
    b=a.fetchNode("/intvalue")
    self.assertEqual("int", b.format())
    self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
    self.assertEqual(-123, b.data())
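
Several of the write tests on this page (for example code examples #5 and #10 through #17) call helper methods such as addScalarValueNode, addArrayValueNode, addGroupNode and addReference that belong to the test class and are not shown here. A minimal sketch of two of them, using only the node/setScalarValue/addNode calls demonstrated in code example #1 (the array variant presumably wraps a corresponding array setter in the same way):

  def addGroupNode(self, nodelist, name):
    # Create a group node and append it to the node list (cf. code example #1).
    node = _pyhl.node(_pyhl.GROUP_ID, name)
    nodelist.addNode(node)

  def addScalarValueNode(self, nodelist, type_id, name, itemsize, value, fmt, type_hid):
    # Create a node of the given type, set its scalar value and append it.
    # The argument order mirrors the calls in the tests, e.g.
    # self.addScalarValueNode(a, _pyhl.ATTRIBUTE_ID, "/intvalue", -1, -123, "int", -1).
    node = _pyhl.node(type_id, name)
    node.setScalarValue(itemsize, value, fmt, type_hid)
    nodelist.addNode(node)
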
Code example #11
  def testWriteStringArray(self):
    a=_pyhl.nodelist()
    self.addArrayValueNode(a, _pyhl.ATTRIBUTE_ID, "/stringarray", -1, [4], ["abc", "def", "ghi", "jkl"], "string", -1)
    a.write(self.TESTFILE)

    #verify
    a=_pyhl.read_nodelist(self.TESTFILE)
    b=a.fetchNode("/stringarray")
    self.assertEqual("string", b.format())
    self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
    self.assertTrue(numpy.all(["abc", "def", "ghi", "jkl"] == b.data()))
Code example #12
 def testWriteLong(self):
   a=_pyhl.nodelist()
   self.addScalarValueNode(a, _pyhl.ATTRIBUTE_ID, "/longvalue", -1, 987654, "long", -1)
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/longvalue")
   self.assertEqual("long", b.format())
   self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
   self.assertEqual(987654, b.data())
Code example #13
 def testWriteDoubleArray(self):
   a=_pyhl.nodelist()
   self.addArrayValueNode(a, _pyhl.ATTRIBUTE_ID, "/doublearray", -1, [4], [1.1,2.2,3.3,4.4], "double", -1)
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/doublearray")
   self.assertEqual("double", b.format())
   self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
   self.assertTrue(numpy.all([1.1,2.2,3.3,4.4] == b.data()))
Code example #14
 def testWriteDouble(self):
   a=_pyhl.nodelist()
   self.addScalarValueNode(a, _pyhl.ATTRIBUTE_ID, "/doublevalue", -1, 1.123, "double", -1)
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/doublevalue")
   self.assertEqual("double", b.format())
   self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
   self.assertAlmostEqual(1.123, b.data(), 4)
Code example #15
 def testWriteLongArray(self):
   a=_pyhl.nodelist()
   self.addArrayValueNode(a, _pyhl.ATTRIBUTE_ID, "/longarray", -1, [4], [1,2,3,4], "long", -1)
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/longarray")
   self.assertEqual("long", b.format())
   self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
   self.assertTrue(numpy.all([1,2,3,4] == b.data()))
Code example #16
  def testWriteString(self):
    #execute
    a=_pyhl.nodelist()
    self.addScalarValueNode(a, _pyhl.ATTRIBUTE_ID, "/stringvalue", -1, "My String", "string", -1)
    a.write(self.TESTFILE)
 
    #verify
    a=_pyhl.read_nodelist(self.TESTFILE)
    b=a.fetchNode("/stringvalue")
    self.assertEqual("string", b.format())
    self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
    self.assertEqual("My String", b.data())
Code example #17
 def testWriteReference(self):
   a=_pyhl.nodelist()
   self.addArrayValueNode(a, _pyhl.DATASET_ID, "/doublearray", -1, [4], [1.1,2.2,3.3,4.4], "double", -1)
   self.addReference(a, "/reference", "/doublearray")
   a.write(self.TESTFILE)    
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/reference")
   self.assertEqual("string", b.format())
   self.assertEqual(_pyhl.REFERENCE_ID, b.type())
   self.assertEqual("/doublearray", b.data())
Code example #18
File: RaveTest.py  Project: aperezhortal/rave
    def XtestVerifyImageAttributesWritten(self):
        obj = rave.open(self.OLD_NRD_FORMAT_TESTFILE)
        self.assertEquals(None, obj.get("/image1/data/CLASS"))
        self.assertEquals(None, obj.get("/image1/data/IMAGE_VERSION"))

        obj.save(self.TESTFILE)

        # Verify
        nodelist = _pyhl.read_nodelist(self.TESTFILE)
        node = nodelist.fetchNode("/image1/data/CLASS")
        self.assertEquals("IMAGE", node.data())
        node = nodelist.fetchNode("/image1/data/IMAGE_VERSION")
        self.assertEquals("1.2", node.data())
Code example #19
def readFile():
  # No need to create the compound type definition when only reading
  aList = _pyhl.read_nodelist("compound_test.hdf")

  # Fetch the node
  aNode = aList.fetchNode("/myCompoundAttribute")

  # Translate from the string representation to object
  cdescr = aNode.compound_data()
  print "XSIZE="+`cdescr["xsize"]`
  print "YSIZE="+`cdescr["ysize"]`
  print "XSCALE="+`cdescr["xscale"]`
  print "YSCALE="+`cdescr["yscale"]`
Code example #20
    def readHac(self, fstr):
        if os.path.isfile(fstr):
            nodelist = _pyhl.read_nodelist(fstr)
            nodelist.selectNode("/accumulation_count")
            nodelist.selectNode("/hit_accum")
            nodelist.fetch()

            self.hac = _ravefield.new()
            self.hac.addAttribute("how/count",
                                  nodelist.getNode("/accumulation_count").data())
            self.hac.setData(nodelist.getNode("/hit_accum").data())
        else:
            raise IOError("No such HAC file: %s" % fstr)
Code example #21
 def testWriteGroup(self):
   a=_pyhl.nodelist()
   self.addGroupNode(a, "/group1")
   self.addGroupNode(a, "/group1/group11")
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/group1")
   self.assertEqual("UNDEFINED", b.format())
   self.assertEqual(_pyhl.GROUP_ID, b.type())
   b=a.fetchNode("/group1/group11")
   self.assertEqual("UNDEFINED", b.format())
   self.assertEqual(_pyhl.GROUP_ID, b.type())
Code example #22
 def testWriteDoubleDataset(self):
   a=_pyhl.nodelist()
   c=numpy.arange(100)
   c=numpy.array(c.astype(numpy.float64),numpy.float64)
   c=numpy.reshape(c,(10,10)).astype(numpy.float64)
   self.addArrayValueNode(a, _pyhl.DATASET_ID, "/doubledataset", -1, numpy.shape(c), c, "double", -1)
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/doubledataset")
   self.assertEqual("double", b.format())
   self.assertEqual(_pyhl.DATASET_ID, b.type())
   self.assertTrue(numpy.all(c == b.data()))
Code example #23
  def testWriteUcharDataset(self):
    a=_pyhl.nodelist()
    c=numpy.arange(100)
    c=numpy.array(c.astype(numpy.uint8),numpy.uint8)
    c=numpy.reshape(c,(10,10)).astype(numpy.uint8)    
    self.addArrayValueNode(a, _pyhl.ATTRIBUTE_ID, "/uchardataset", -1, numpy.shape(c), c, "uchar", -1)
    a.write(self.TESTFILE)

    #verify
    a=_pyhl.read_nodelist(self.TESTFILE)
    b=a.fetchNode("/uchardataset")
    self.assertEqual("uchar", b.format())
    self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
    self.assertTrue(numpy.all(c == b.data()))
Code example #24
 def testWriteFloatArray(self):
   a=_pyhl.nodelist()
   self.addArrayValueNode(a, _pyhl.ATTRIBUTE_ID, "/floatarray", -1, [4], [1.1,2.2,3.3,4.4], "float", -1)
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/floatarray")
   self.assertEqual("float", b.format())
   self.assertEqual(_pyhl.ATTRIBUTE_ID, b.type())
   c = b.data()
   self.assertEqual(4, len(c))
   self.assertAlmostEqual(1.1, c[0], 4)
   self.assertAlmostEqual(2.2, c[1], 4)
   self.assertAlmostEqual(3.3, c[2], 4)
   self.assertAlmostEqual(4.4, c[3], 4)
Code example #25
def get_lat_lon_1km_aggregated_mersi(satscene, options):
    """Read latitude and longitude for each (aggregated) pixel.
    """
    # Example: FY3A_MERSI_GBAL_L1_20100308_0915_1000M_MS.HDF
    filename = satscene.time_slot.strftime("FY3A_MERSI_GBAL_L1_%Y%m%d_%H%M_1000M_MS.HDF")
    filename = os.path.join(options["dir"], filename)

    a = _pyhl.read_nodelist(filename)
    b = a.getNodeNames()
    # Should only select/fetch the datasets needed. FIXME!
    a.selectAll()
    a.fetch()

    lat = a.getNode("/Latitude").data()
    lon = a.getNode("/Longitude").data()

    return lat, lon
Code example #26
 def testWriteLongLongArray(self):
   a=_pyhl.nodelist()
   try:
     self.addArrayValueNode(a, _pyhl.ATTRIBUTE_ID, "/llongarray", -1, [4], [1,2,3,4], "llong", -1)
     self.fail("Expected TypeError")
   except TypeError:
     pass
   
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   try:
     a.fetchNode("/llongarray")
     self.fail("Expected IOError")
   except IOError:
     pass
Code example #27
 def testWriteLongLongDataset(self):
   a=_pyhl.nodelist()
   c=numpy.arange(100)
   c=numpy.array(c.astype(numpy.int64),numpy.int64)
   c=numpy.reshape(c,(10,10)).astype(numpy.int64)
   self.addArrayValueNode(a, _pyhl.DATASET_ID, "/llongdataset", -1, numpy.shape(c), c, "llong", -1)
   a.write(self.TESTFILE)
   
   #verify
   a=_pyhl.read_nodelist(self.TESTFILE)
   b=a.fetchNode("/llongdataset")
   if (_varioustests.sizeoflong() < _varioustests.sizeoflonglong()):
     self.assertEqual("llong", b.format())
   else:
     self.assertEqual("long", b.format())    
   self.assertEqual(_pyhl.DATASET_ID, b.type())
   self.assertTrue(numpy.all(c == b.data()))
Code example #28
File: ReadSimpleFile.py  Project: baltrad/hlhdf
def readFile():
    aList = _pyhl.read_nodelist("simple_test.hdf")

    # Select individual nodes, instead of all of them
    aList.selectNode("/info/xscale")
    aList.selectNode("/info/yscale")
    aList.selectNode("/data")

    # Fetch the data for selected nodes
    aList.fetch()

    # Print the data
    aNode = aList.getNode("/info/xscale")
    print "XSCALE=" + ` aNode.data() `
    aNode = aList.getNode("/info/yscale")
    print "YSCALE=" + ` aNode.data() `
    aNode = aList.getNode("/data")
    print "DATA=" + ` aNode.data() `
Code example #29
File: rave_IO.py  Project: aperezhortal/rave
def get_metadata(filename):

    a = _pyhl.read_nodelist(filename)
    a.selectMetadata()
    a.fetch()
    node_names = a.getNodeNames()

    items = []
    for nodename in node_names.keys():
        b = a.getNode(nodename)
        items.append((nodename, b, b.type()))

    items.sort() # guarantees groups before attributes

    h5rad = rave_info.INFO("h5rad", version=H5RAD_VERSION)

    groupmapping = {"" : h5rad}
    for nodename, node, typ in items:
        index = nodename.rindex("/")
        parentname, tag = nodename[:index], nodename[index+1:]
        e = SubElement(groupmapping[parentname], tag)
        if typ==1:
            groupmapping[nodename] = e # save the nodename to element mapping
        elif typ==0:
            t = h5type(node.data())
            if t == "sequence":
                # convert list to string
                nodes = []
                for n in node.data():
                    s = n.strip()
                    s = remove_nulls(str(s))
                    nodes.append("'" + s + "'")
                e.text = ", ".join(nodes)
            else:
                e.text = remove_nulls(str(node.data()))
            if t != "string":
                e.attrib["type"] = t
        # Skip typ==2, dataset array

    # return only h5rad
    return h5rad
Code example #30
 def testFetch_from_dataset_2(self):
   nodelist = _pyhl.read_nodelist(self.TESTFILE, "/dataset1/")
   node = nodelist.fetchNode("/dataset1/attribute1")
   self.assertEqual("/dataset1/attribute1", node.name()) 
   node = nodelist.fetchNode("/dataset1/doublearray")
   self.assertEqual("/dataset1/doublearray", node.name()) 
Code example #31
def load_1km_aggregated_mersi(satscene, options):
    """Read 1km agregated mersi data from file and load it into *satscene*.
    """
    # Example: FY3A_MERSI_GBAL_L1_20100308_0915_1000M_MS.HDF
    filename = satscene.time_slot.strftime("FY3A_MERSI_GBAL_L1_%Y%m%d_%H%M_1000M_MS.HDF")
    filename = os.path.join(options["dir"], filename)

    a = _pyhl.read_nodelist(filename)
    b = a.getNodeNames()
    # Should only select/fetch the datasets needed. FIXME!
    a.selectAll()
    a.fetch()

    # MERSI Channel 1-4: EV_250_Aggr.1KM_RefSB
    # MERSI Channel 5: EV_250_Aggr.1KM_Emissive
    # MERSI Channel 6-20: EV_1KM_RefSB

    datasets = ["/EV_250_Aggr.1KM_RefSB", "/EV_250_Aggr.1KM_Emissive", "/EV_1KM_RefSB"]

    for nodename in datasets:
        band_data = a.getNode(nodename).data()
        valid_range = a.getNode("%s/valid_range" % (nodename)).data()
        band_names = a.getNode("%s/band_name" % (nodename)).data().split(",")
        # Special treatment of the situation where the bandnames are stored
        # as '6~20' (quite inconvenient):
        if "6~20" in band_names:
            band_names = ["6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20"]

        for (i, band) in enumerate(band_names):
            if band not in satscene.channels_to_load:
                continue

            satscene[band] = np.ma.masked_outside(band_data[i], valid_range[0], valid_range[1], copy=False)
            satscene[band].info = {
                "var_name": "ch" + str(band),
                "var_data": satscene[band].data,
                "var_dim_names": ("x", "y"),
                "_FillValue": 32767,
                "standard_name": "",
                "short_name": band,
                "scale_factor": 1.0,
                "add_offset": 0.0,
            }

    satscene.info = {
        "var_children": [  # {'var_name' : 'lat', 'var_callback': Functor(satscene.get_lat, low_res), 'var_dim_names': ('x','y') },
            # {'var_name' : 'lon', 'var_callback' : Functor(satscene.get_lon, low_res) , 'var_dim_names': ('x','y') },
            ## {'var_name' : 'lat_hrvis', 'var_data' : satscene.lat[high_res]},
            ## {'var_name' : 'lon_hrvis', 'var_data' : satscene.lon[high_res]},
        ],
        "Satellite": satscene.fullname,
        "Antenna": "None",
        "Receiver": "SMHI",
        "Time": satscene.time_slot.strftime("%Y-%m-%d %H:%M:%S UTC"),
        #'Area_Name' : satscene.area_id,
        "Area_Name": "swath",
        "Projection": "satproj",
        "Platform": "fengyun",
        "Number": "3a",
        "Service": "",
        #'Columns' : satscene.channels[0].shape[1],
        #'Lines' : satscene.channels[0].shape[0],
        "SampleX": 1.0,
        "SampleY": 1.0,
        "title": "MERSI Level 1",
    }