Exemplo n.º 1
0
def savelayers(grids, filename):
    """
    Save ground failure layers object as a MultiHazard HDF file, preserving
    metadata structures. All layers must have same geodictionary.

    Args:
        grids: Ground failure layers object — an ordered mapping of layer
            name to a dict with 'grid', 'description', 'type', and 'label'
            keys.
        filename (str): Path to where you want to save this file.

    Returns:
        None. Writes an .hdf5 file containing ground failure layers.

    Raises:
        ValueError: If ``grids`` is empty (there is no geodict to use).
    """
    if not grids:
        # The original relied on the loop variable `key` leaking out of the
        # for-loop, which raised a confusing NameError for empty input.
        raise ValueError('Cannot save an empty ground failure layers object.')
    layers = collections.OrderedDict()
    metadata = collections.OrderedDict()
    for key, entry in grids.items():
        layers[key] = entry['grid'].getData()
        metadata[key] = {
            'description': entry['description'],
            'type': entry['type'],
            'label': entry['label']
        }
    # All layers share the same geodict, so take it explicitly from the
    # first entry rather than from the leaked loop variable.
    geodict = next(iter(grids.values()))['grid'].getGeoDict()
    origin = {}
    header = {}
    mgrid = MultiHazardGrid(layers, geodict,
                            origin,
                            header,
                            metadata=metadata)
    mgrid.save(filename)
Exemplo n.º 2
0
def test():
    """
    Round-trip the Northridge ShakeMap grid through MultiHazardGrid HDF
    storage and report file sizes and load times for both formats.

    Raises:
        DataSetException: If saving or loading the HDF file fails (re-raised
            instead of being silently swallowed, so the test can fail).
    """
    shakefile = os.path.join(homedir, 'data', 'northridge.xml')
    t1 = datetime.datetime.now()
    sgrid = ShakeGrid.load(shakefile, adjust='res')
    t2 = datetime.datetime.now()

    # Event metadata pulled from the ShakeMap event dictionary.
    origin = {}
    origin['id'] = sgrid._eventDict['event_id']
    origin['source'] = sgrid._eventDict['event_network']
    origin['time'] = sgrid._eventDict['event_timestamp']
    origin['lat'] = sgrid._eventDict['lat']
    origin['lon'] = sgrid._eventDict['lon']
    origin['depth'] = sgrid._eventDict['depth']
    origin['magnitude'] = sgrid._eventDict['magnitude']

    # Product metadata pulled from the ShakeMap header dictionary.
    header = {}
    header['type'] = 'shakemap'
    header['version'] = sgrid._shakeDict['shakemap_version']
    header['process_time'] = sgrid._shakeDict['process_timestamp']
    header['code_version'] = sgrid._shakeDict['code_version']
    header['originator'] = sgrid._shakeDict['shakemap_originator']
    header['product_id'] = sgrid._shakeDict['shakemap_id']
    header['map_status'] = sgrid._shakeDict['map_status']
    header['event_type'] = sgrid._shakeDict['shakemap_event_type']

    layers = collections.OrderedDict()
    for (layername, layerdata) in sgrid.getData().items():
        layers[layername] = layerdata.getData()

    # Arbitrary nested metadata to verify structures survive the round trip.
    tdict = {
        'name': 'fred',
        'family': {
            'wife': 'wilma',
            'daughter': 'pebbles'
        }
    }
    mgrid = MultiHazardGrid(layers,
                            sgrid.getGeoDict(),
                            origin,
                            header,
                            metadata={'flintstones': tdict})
    tdir = tempfile.mkdtemp()
    testfile = os.path.join(tdir, 'test.hdf')
    try:
        mgrid.save(testfile)
        t3 = datetime.datetime.now()
        mgrid2 = MultiHazardGrid.load(testfile)
        t4 = datetime.datetime.now()
        # Verify the reloaded grid actually contains the saved layers; the
        # original loaded mgrid2 but never checked it.
        assert sorted(mgrid2.getLayerNames()) == sorted(layers.keys())
        xmlmb = os.path.getsize(shakefile) / float(1e6)
        hdfmb = os.path.getsize(testfile) / float(1e6)
        xmltime = (t2 - t1).seconds + (t2 - t1).microseconds / float(1e6)
        hdftime = (t4 - t3).seconds + (t4 - t3).microseconds / float(1e6)
        print('Input XML file size: %.2f MB (loading time %.3f seconds)' %
              (xmlmb, xmltime))
        print('Output HDF file size: %.2f MB (loading time %.3f seconds)' %
              (hdfmb, hdftime))
    except DataSetException:
        # Re-raise so a save/load failure actually fails the test instead of
        # being silently swallowed (`pass` hid real errors).
        raise
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
Exemplo n.º 3
0
def test():
    """Exercise MultiHazardGrid save/load against the Northridge ShakeMap."""
    shakefile = os.path.join(homedir, 'data', 'northridge.xml')
    t1 = datetime.datetime.now()
    sgrid = ShakeGrid.load(shakefile, adjust='res')
    t2 = datetime.datetime.now()

    # Event metadata from the ShakeMap event dictionary.
    event = sgrid._eventDict
    origin = {
        'id': event['event_id'],
        'source': event['event_network'],
        'time': event['event_timestamp'],
        'lat': event['lat'],
        'lon': event['lon'],
        'depth': event['depth'],
        'magnitude': event['magnitude'],
    }

    # Product metadata from the ShakeMap header dictionary.
    shake = sgrid._shakeDict
    header = {
        'type': 'shakemap',
        'version': shake['shakemap_version'],
        'process_time': shake['process_timestamp'],
        'code_version': shake['code_version'],
        'originator': shake['shakemap_originator'],
        'product_id': shake['shakemap_id'],
        'map_status': shake['map_status'],
        'event_type': shake['shakemap_event_type'],
    }

    layers = collections.OrderedDict(
        (name, grid.getData()) for name, grid in sgrid.getData().items())

    # Arbitrary nested metadata to exercise structure preservation.
    tdict = {'name': 'fred',
             'family': {'wife': 'wilma', 'daughter': 'pebbles'}}
    mgrid = MultiHazardGrid(layers, sgrid.getGeoDict(), origin, header,
                            metadata={'flintstones': tdict})
    tdir = tempfile.mkdtemp()
    testfile = os.path.join(tdir, 'test.hdf')
    try:
        mgrid.save(testfile)
        t3 = datetime.datetime.now()
        mgrid2 = MultiHazardGrid.load(testfile)
        t4 = datetime.datetime.now()
        xmlmb = os.path.getsize(shakefile) / float(1e6)
        hdfmb = os.path.getsize(testfile) / float(1e6)
        xmltime = (t2 - t1).seconds + (t2 - t1).microseconds / float(1e6)
        hdftime = (t4 - t3).seconds + (t4 - t3).microseconds / float(1e6)
        print('Input XML file size: %.2f MB (loading time %.3f seconds)' % (xmlmb, xmltime))
        print('Output HDF file size: %.2f MB (loading time %.3f seconds)' % (hdfmb, hdftime))
    except DataSetException as obj:
        pass
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
Exemplo n.º 4
0
def savelayers(grids, filename):
    """
    Save ground failure layers object as a MultiHazard HDF file, preserving metadata structures
    Must all have the same geodict
    :param grids: ground failure layers object (ordered mapping of layer name to a
        dict with 'grid', 'description', 'type', and 'label' keys)
    :param filename: Full file path to where you want to save this file
    :raises ValueError: if grids is empty (no geodict to use)
    """
    if not grids:
        # The original relied on the loop variable `key` leaking out of the
        # for-loop, which raised a confusing NameError for empty input.
        raise ValueError('Cannot save an empty ground failure layers object.')
    layers = collections.OrderedDict()
    metadata = collections.OrderedDict()
    for key, entry in grids.items():
        layers[key] = entry['grid'].getData()
        metadata[key] = {'description': entry['description'], 'type': entry['type'], 'label': entry['label']}
    origin = {}
    header = {}
    # All layers share the same geodict; take it explicitly from the first entry.
    geodict = next(iter(grids.values()))['grid'].getGeoDict()
    mgrid = MultiHazardGrid(layers, geodict, origin, header, metadata=metadata)
    mgrid.save(filename)
Exemplo n.º 5
0
def savelayers(grids, filename):
    """
    Save ground failure layers object as a MultiHazard HDF file, preserving metadata structures
    Must all have the same geodict
    :param grids: ground failure layers object (ordered mapping of layer name to a
        dict with 'grid', 'description', 'type', and 'label' keys)
    :param filename: Full file path to where you want to save this file
    :raises ValueError: if grids is empty (no geodict to use)
    """
    if not grids:
        # Guard empty input explicitly; the original leaked the loop variable
        # `key` out of the for-loop and raised NameError on empty grids.
        raise ValueError('Cannot save an empty ground failure layers object.')
    layers = collections.OrderedDict()
    metadata = collections.OrderedDict()
    for key, entry in grids.items():
        layers[key] = entry['grid'].getData()
        metadata[key] = {'description': entry['description'],
                         'type': entry['type'],
                         'label': entry['label']}
    origin = {}
    header = {}
    # All layers share the same geodict; take it from the first entry rather
    # than from the leaked loop variable.
    geodict = next(iter(grids.values()))['grid'].getGeoDict()
    mgrid = MultiHazardGrid(layers, geodict, origin, header, metadata=metadata)
    mgrid.save(filename)
Exemplo n.º 6
0
def loadlayers(filename):
    """
    Load a MultiHazard HDF file back in as a ground failure layers object in active memory (must have been saved for this purpose)
    :param filename: Full file path to the .hdf5 file to load
    :returns: OrderedDict mapping layer name to a dict with 'grid', 'description', 'type', and 'label' keys
    """
    mgrid = MultiHazardGrid.load(filename)
    grids = collections.OrderedDict()
    # Hoist the accessor calls out of the loop; the original invoked
    # getData()/getMetadata() up to four times per layer.
    data = mgrid.getData()
    metadata = mgrid.getMetadata()
    for key in mgrid.getLayerNames():
        meta = metadata[key]
        grids[key] = {'grid': data[key], 'description': meta['description'], 'type': meta['type'], 'label': meta['label']}

    return grids
Exemplo n.º 7
0
def loadlayers(filename):
    """
    Load a MultiHazard HDF file back in as a ground failure layers object in active memory (must have been saved for this purpose)
    :param filename: Full file path to the .hdf5 file to load
    :returns: OrderedDict mapping layer name to a dict with 'grid',
        'description', 'type', and 'label' keys
    """
    mgrid = MultiHazardGrid.load(filename)
    grids = collections.OrderedDict()
    # Hoist the accessor calls out of the loop; the original invoked
    # getData()/getMetadata() up to four times per layer.
    data = mgrid.getData()
    metadata = mgrid.getMetadata()
    for key in mgrid.getLayerNames():
        meta = metadata[key]
        grids[key] = {'grid': data[key],
                      'description': meta['description'],
                      'type': meta['type'],
                      'label': meta['label']}

    return grids