def test():
    """Round-trip a ShakeMap XML grid through a MultiHazardGrid HDF file.

    Loads data/northridge.xml (relative to the module-level ``homedir``),
    copies the event and shakemap metadata into ``origin``/``header`` dicts,
    writes every layer to a temporary HDF file, reloads it, and prints a
    size/load-time comparison between the XML and HDF representations.
    The temporary directory is always removed, even on failure.
    """
    shakefile = os.path.join(homedir, 'data', 'northridge.xml')
    t1 = datetime.datetime.now()
    sgrid = ShakeGrid.load(shakefile, adjust='res')
    t2 = datetime.datetime.now()

    # Event metadata in the shape MultiHazardGrid expects for 'origin'.
    origin = {
        'id': sgrid._eventDict['event_id'],
        'source': sgrid._eventDict['event_network'],
        'time': sgrid._eventDict['event_timestamp'],
        'lat': sgrid._eventDict['lat'],
        'lon': sgrid._eventDict['lon'],
        'depth': sgrid._eventDict['depth'],
        'magnitude': sgrid._eventDict['magnitude'],
    }

    # ShakeMap processing metadata in the shape expected for 'header'.
    header = {
        'type': 'shakemap',
        'version': sgrid._shakeDict['shakemap_version'],
        'process_time': sgrid._shakeDict['process_timestamp'],
        'code_version': sgrid._shakeDict['code_version'],
        'originator': sgrid._shakeDict['shakemap_originator'],
        'product_id': sgrid._shakeDict['shakemap_id'],
        'map_status': sgrid._shakeDict['map_status'],
        'event_type': sgrid._shakeDict['shakemap_event_type'],
    }

    # Extract the raw data array for each layer, preserving layer order.
    layers = collections.OrderedDict()
    for layername, layerdata in sgrid.getData().items():
        layers[layername] = layerdata.getData()

    tdict = {'name': 'fred',
             'family': {'wife': 'wilma', 'daughter': 'pebbles'}}
    mgrid = MultiHazardGrid(layers, sgrid.getGeoDict(), origin, header,
                            metadata={'flintstones': tdict})
    tdir = tempfile.mkdtemp()
    testfile = os.path.join(tdir, 'test.hdf')
    try:
        mgrid.save(testfile)
        t3 = datetime.datetime.now()
        # Reload purely to time HDF parsing; the result is not needed.
        MultiHazardGrid.load(testfile)
        t4 = datetime.datetime.now()
        xmlmb = os.path.getsize(shakefile) / float(1e6)
        hdfmb = os.path.getsize(testfile) / float(1e6)
        # total_seconds() replaces the manual seconds+microseconds arithmetic,
        # which also silently dropped the (unlikely) days component.
        xmltime = (t2 - t1).total_seconds()
        hdftime = (t4 - t3).total_seconds()
        print('Input XML file size: %.2f MB (loading time %.3f seconds)' %
              (xmlmb, xmltime))
        print('Output HDF file size: %.2f MB (loading time %.3f seconds)' %
              (hdfmb, hdftime))
    except DataSetException as err:
        # Report the failure instead of silently swallowing it (the original
        # bound the exception to an unused name and did nothing).
        print('test() failed with DataSetException: %s' % str(err))
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)
def loadlayers(filename):
    """Load a MultiHazard HDF file back in as a ground failure layers object
    in active memory (must have been saved for this purpose).

    Args:
        filename: Path to an HDF file previously saved by MultiHazardGrid.

    Returns:
        collections.OrderedDict mapping each layer name to a dict with keys
        'grid', 'description', 'type', and 'label', drawn from the grid data
        and per-layer metadata.
    """
    mgrid = MultiHazardGrid.load(filename)
    # Hoist the accessor calls out of the loop: the original re-invoked
    # getData() once and getMetadata() three times for every layer.
    data = mgrid.getData()
    meta = mgrid.getMetadata()
    grids = collections.OrderedDict()
    for key in mgrid.getLayerNames():
        layermeta = meta[key]
        grids[key] = {'grid': data[key],
                      'description': layermeta['description'],
                      'type': layermeta['type'],
                      'label': layermeta['label']}
    return grids
def test():
    """Round-trip a ShakeMap XML grid through a MultiHazardGrid HDF file.

    NOTE(review): this is a duplicate of the ``test()`` defined earlier in
    this module and shadows it at import time — one of the two should be
    removed or renamed.

    Loads data/northridge.xml, builds origin/header dicts from the event and
    shakemap metadata, saves all layers to a temporary HDF file, reloads it,
    and prints file-size and load-time comparisons.  The temp directory is
    always cleaned up.
    """
    shakefile = os.path.join(homedir, 'data', 'northridge.xml')
    t1 = datetime.datetime.now()
    sgrid = ShakeGrid.load(shakefile, adjust='res')
    t2 = datetime.datetime.now()

    # Event metadata expected by MultiHazardGrid's 'origin' argument.
    origin = {
        'id': sgrid._eventDict['event_id'],
        'source': sgrid._eventDict['event_network'],
        'time': sgrid._eventDict['event_timestamp'],
        'lat': sgrid._eventDict['lat'],
        'lon': sgrid._eventDict['lon'],
        'depth': sgrid._eventDict['depth'],
        'magnitude': sgrid._eventDict['magnitude'],
    }

    # ShakeMap processing metadata expected by the 'header' argument.
    header = {
        'type': 'shakemap',
        'version': sgrid._shakeDict['shakemap_version'],
        'process_time': sgrid._shakeDict['process_timestamp'],
        'code_version': sgrid._shakeDict['code_version'],
        'originator': sgrid._shakeDict['shakemap_originator'],
        'product_id': sgrid._shakeDict['shakemap_id'],
        'map_status': sgrid._shakeDict['map_status'],
        'event_type': sgrid._shakeDict['shakemap_event_type'],
    }

    # Pull the raw array out of each layer, keeping layer order.
    layers = collections.OrderedDict()
    for layername, layerdata in sgrid.getData().items():
        layers[layername] = layerdata.getData()

    tdict = {'name': 'fred',
             'family': {'wife': 'wilma', 'daughter': 'pebbles'}}
    mgrid = MultiHazardGrid(layers, sgrid.getGeoDict(), origin, header,
                            metadata={'flintstones': tdict})
    tdir = tempfile.mkdtemp()
    testfile = os.path.join(tdir, 'test.hdf')
    try:
        mgrid.save(testfile)
        t3 = datetime.datetime.now()
        # Reload only to time HDF parsing; result intentionally discarded.
        MultiHazardGrid.load(testfile)
        t4 = datetime.datetime.now()
        xmlmb = os.path.getsize(shakefile) / float(1e6)
        hdfmb = os.path.getsize(testfile) / float(1e6)
        # total_seconds() replaces the hand-rolled seconds+microseconds sum,
        # which also ignored the days component of the timedelta.
        xmltime = (t2 - t1).total_seconds()
        hdftime = (t4 - t3).total_seconds()
        print('Input XML file size: %.2f MB (loading time %.3f seconds)' %
              (xmlmb, xmltime))
        print('Output HDF file size: %.2f MB (loading time %.3f seconds)' %
              (hdfmb, hdftime))
    except DataSetException as err:
        # Report rather than silently swallow the failure.
        print('test() failed with DataSetException: %s' % str(err))
    finally:
        if os.path.isdir(tdir):
            shutil.rmtree(tdir)