def test_multipoint(self):
    smodel = self.check_round_trip(MULTIPOINT)

    # test toml round trip
    temp = general.gettemp(suffix='.toml')
    with open(temp, 'w') as f:
        tomldump(smodel, f)
    with open(temp, 'r') as f:
        sm = toml.load(f)['sourceModel']
    self.assertEqual(smodel.name, sm['_name'])
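The assertion above suggests that tomldump stores the model's name attribute under an underscore-prefixed key (_name). Below is a minimal sketch of the same dump-and-reload step using only tempfile and the third-party toml package, with a plain dict standing in for whatever tomldump actually writes:

import os
import tempfile
import toml

# Hedged sketch: 'data' stands in for the structure produced by tomldump.
data = {'sourceModel': {'_name': 'Some Source Model'}}
fd, path = tempfile.mkstemp(suffix='.toml')
with os.fdopen(fd, 'w') as f:
    toml.dump(data, f)                    # serialize to TOML
with open(path) as f:
    sm = toml.load(f)['sourceModel']      # parse it back into a dict
assert sm['_name'] == 'Some Source Model'
os.remove(path)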
def check_round_trip(self, fname):
    smodel = nrml.to_python(fname, conv)
    fd, name = tempfile.mkstemp(suffix='.xml')
    with os.fdopen(fd, 'wb'):
        write_source_model(name, smodel)
    with open(name + '.toml', 'w') as f:
        tomldump(smodel, f)
    if open(name).read() != open(fname).read():
        raise Exception('Different files: %s %s' % (name, fname))
    os.remove(name)
    os.remove(name + '.toml')
    return smodel
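Note that the os.fdopen(fd, 'wb') context manager above writes nothing itself: it only wraps the descriptor returned by mkstemp so it gets closed, while write_source_model reopens the file by name. A stdlib-only sketch of that idiom, with a hypothetical write_by_name standing in for write_source_model:

import os
import tempfile

def write_by_name(path, text):
    # stands in for write_source_model: a writer that opens the path itself
    with open(path, 'w') as f:
        f.write(text)

fd, name = tempfile.mkstemp(suffix='.xml')
with os.fdopen(fd, 'wb'):                 # only closes the low-level descriptor
    write_by_name(name, '<sourceModel/>')
assert open(name).read() == '<sourceModel/>'
os.remove(name)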
Example #3
def __call__(self, ltmodel, apply_unc, fname, fileno, monitor):
    fname_hits = collections.Counter()  # fname -> number of calls
    mags = set()
    src_groups = []
    [sm] = nrml.read_source_models([fname], self.converter, monitor)
    newsm = self.makesm(fname, sm, apply_unc)
    fname_hits[fname] += 1
    for sg in newsm:
        # sample a source for each group
        if os.environ.get('OQ_SAMPLE_SOURCES'):
            sg.sources = random_filtered_sources(
                sg.sources, self.srcfilter, sg.id)
        sg.info = numpy.zeros(len(sg), source_info_dt)
        for i, src in enumerate(sg):
            if hasattr(src, 'data'):  # nonparametric
                srcmags = [item[0].mag for item in src.data]
            else:
                srcmags = [item[0] for item in
                           src.get_annual_occurrence_rates()]
            mags.update(srcmags)
            toml = sourcewriter.tomldump(src)
            checksum = zlib.adler32(toml.encode('utf8'))
            sg.info[i] = (ltmodel.ordinal, 0, src.source_id,
                          src.code, src.num_ruptures, 0, 0, 0, checksum,
                          src.wkt(), toml)
        src_groups.append(sg)
    return dict(fname_hits=fname_hits, changes=newsm.changes,
                src_groups=src_groups, mags=mags,
                ordinal=ltmodel.ordinal, fileno=fileno)
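The per-source checksum above is zlib.adler32 over the UTF-8 bytes of the source's TOML serialization, so any change to the serialized source changes its checksum. A stdlib-only sketch, with a literal string standing in for the output of sourcewriter.tomldump(src):

import zlib

toml_text = '[pointSource]\n_name = "example"\n'  # stands in for tomldump(src)
checksum = zlib.adler32(toml_text.encode('utf8'))
print(checksum)  # a 32-bit integer identifying this exact serialization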
def test_toml(self):
    out = ''
    for fname in (MIXED, ALT_MFDS, MULTIPOINT):
        smodel = nrml.to_python(fname, conv)
        for sgroup in smodel:
            for src in sgroup:
                out += tomldump(src)
    # NB: uncomment the line below to regenerate the TOML file
    # with open(TOML, 'w') as f: f.write(out)
    self.assertEqual(out, open(TOML).read())
Example #5
def store_sm(self, smodel):
    """
    :param smodel: a :class:`openquake.hazardlib.nrml.SourceModel` instance
    """
    h5 = self.hdf5
    sources = h5['source_info']
    source_geom = h5['source_geom']
    gid = len(source_geom)
    for sg in smodel:
        srcs = []
        geoms = []
        for src in sg:
            if hasattr(src, 'mfd'):  # except nonparametric
                mfdi = len(self.mfds)
                self.mfds.add(sourcewriter.tomldump(src.mfd))
            else:
                mfdi = -1
            srcgeom = src.geom()
            n = len(srcgeom)
            geom = numpy.zeros(n, point3d)
            geom['lon'], geom['lat'], geom['depth'] = srcgeom.T
            if len(geom) > 1:  # more than a point source
                msg = 'source %s' % src.source_id
                try:
                    geo.utils.check_extent(geom['lon'], geom['lat'], msg)
                except ValueError as err:
                    logging.error(str(err))
            dic = {k: v for k, v in vars(src).items()
                   if k != 'id' and k != 'src_group_id'}
            src.checksum = zlib.adler32(pickle.dumps(dic))
            srcs.append((sg.id, src.source_id, src.code, gid, gid + n,
                         mfdi, src.num_ruptures, 0, 0, 0, src.checksum))
            geoms.append(geom)
            gid += n
        if geoms:
            hdf5.extend(source_geom, numpy.concatenate(geoms))
        if sources:
            hdf5.extend(sources, numpy.array(srcs, source_info_dt))
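store_sm packs each source geometry into a structured numpy array with lon, lat and depth fields before appending it to the HDF5 source_geom dataset. A hedged sketch of that packing step, defining a stand-in dtype since the real point3d is imported from elsewhere in the codebase:

import numpy

# Assumption: point3d has three float fields named lon, lat and depth.
point3d = numpy.dtype([('lon', float), ('lat', float), ('depth', float)])

# srcgeom stands in for src.geom(): an (N, 3) array of lon, lat, depth rows.
srcgeom = numpy.array([[10.0, 45.0, 0.0],
                       [10.5, 45.2, 5.0]])
geom = numpy.zeros(len(srcgeom), point3d)
geom['lon'], geom['lat'], geom['depth'] = srcgeom.T  # column-wise assignment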
Example #6
def test_gridded(self):
    # test xml -> toml
    smodel = nrml.to_python(GRIDDED, conv)
    temp = general.gettemp(suffix='.toml')
    with open(temp, 'w') as f:
        tomldump(smodel, f)