Example #1
def work():
  source=NetCdfData(NetCdfData.GRAVITY, DATASET, scale_factor=DATA_UNITS, reference_system=COORDINATES)
  db=DomainBuilder(dim=3, reference_system=COORDINATES)
  db.addSource(source)
  db.setVerticalExtents(depth=thickness, air_layer=l_air, num_cells=n_cells_v)
  db.setFractionalPadding(pad_x=PAD_X, pad_y=PAD_Y)
  db.fixDensityBelow(depth=thickness)

  inv=GravityInversion()
  inv.setSolverTolerance(1e-4)
  inv.setSolverMaxIterations(50)
  inv.setup(db)
  inv.getCostFunction().setTradeOffFactorsModels(MU)

  density = inv.run()
  print("density = %s"%density)

  g, w = db.getGravitySurveys()[0]
  saveVoxet("result.vo", density=density)
  if saveSilo("result_gravity.silo", density=density, gravity_anomaly=g, gravity_weight=w):
      print("Results saved in result_gravity.silo")
  else:
      print("Failed to save result_gravity.silo. Possibly no Silo support.")

  saveVTK("result_gravity.vtu", density=density, gravity_anomaly=g, gravity_weight=w)
  print("Results saved in result_gravity.vtu")

  saveDataCSV("result_gravity.csv", density=density, x=density.getFunctionSpace().getX())
  print("Results saved in result_gravity.csv")
  print("All done. Have a nice day!")
Example #2
def work():
    # Setup and run the inversion
    source = NetCdfData(NetCdfData.MAGNETIC, DATASET, scale_factor=DATA_UNITS, reference_system=COORDINATES)
    db = DomainBuilder(dim=3, reference_system=COORDINATES)
    db.addSource(source)
    db.setVerticalExtents(depth=thickness, air_layer=l_air, num_cells=n_cells_v)
    db.setFractionalPadding(pad_x=PAD_X, pad_y=PAD_Y)
    db.setBackgroundMagneticFluxDensity(B_b)
    db.fixSusceptibilityBelow(depth=thickness)

    inv = MagneticInversion(self_demagnetization=True)
    inv.setSolverTolerance(1e-4)
    inv.setSolverMaxIterations(50)
    inv.fixMagneticPotentialAtBottom(False)
    inv.setup(db)
    inv.getCostFunction().setTradeOffFactorsModels(MU)

    susceptibility = inv.run()
    print("susceptibility = %s" % susceptibility)

    B, w = db.getMagneticSurveys()[0]
    if saveSilo("result_magnetic.silo", susceptibility=susceptibility, magnetic_anomaly=B, magnetic_weight=w):
        print("Results saved in result_magnetic.silo")
    else:
        print("Failed to save result_magnetic.silo. Possibly no Silo support.")

    saveVTK("result_magnetic.vtu", susceptibility=susceptibility, magnetic_anomaly=B, magnetic_weight=w)
    print("Results saved in result_magnetic.vtu")

    saveDataCSV("result_magnetic.csv", susceptibility=susceptibility, x=susceptibility.getFunctionSpace().getX())
    print("Results saved in result_magnetic.csv")

    print("All done. Have a nice day!")
Example #3
    def test_numpy_data_2d(self):
        DIM = 2
        testdata = np.arange(20 * 21).reshape(20, 21)
        error = 1. * np.ones(testdata.shape)
        source = NumpyData(DataSource.GRAVITY, testdata, null_value=NUMPY_NULL)
        X0, NP, DX = source.getDataExtents()
        for i in range(DIM):
            self.assertAlmostEqual(X0[i], 0., msg="Data origin wrong")
            self.assertEqual(NP[i],
                             testdata.shape[DIM - i - 1],
                             msg="Wrong number of data points")
            self.assertAlmostEqual(DX[i],
                                   1000. / testdata.shape[DIM - i - 1],
                                   msg="Wrong cell size")

        domainbuilder = DomainBuilder(dim=3)
        domainbuilder.addSource(source)
        domainbuilder.setVerticalExtents(depth=-VMIN,
                                         air_layer=VMAX,
                                         num_cells=NE_V)
        domainbuilder.setElementPadding(PAD_X, PAD_Y)
        dom = domainbuilder.getDomain()
        g, s = domainbuilder.getGravitySurveys()[0]

        outfn = os.path.join(WORKDIR, '_npdata2d.csv')
        saveDataCSV(outfn, g=g, s=s)

        DV = (VMAX - VMIN) / NE_V

        # check data
        nx = NP[0] + 2 * PAD_X
        ny = NP[1] + 2 * PAD_Y
        nz = NE_V
        z_data = int(np.round((ALT - VMIN) / DV) - 1)

        out = np.genfromtxt(outfn,
                            delimiter=',',
                            skip_header=1,
                            dtype=np.float64)
        # recompute nz since ripley might have adjusted number of elements
        nz = len(out) // (nx * ny)
        g_out = out[:, 0].reshape(nz, ny, nx)
        s_out = out[:, 1].reshape(nz, ny, nx)
        self.assertAlmostEqual(
            np.abs(g_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] -
                   testdata).max(),
            0.,
            msg="Difference in gravity data area")

        self.assertAlmostEqual(
            np.abs(s_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] -
                   error).max(),
            0.,
            msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] = NUMPY_NULL
        self.assertAlmostEqual(np.abs(g_out - NUMPY_NULL).max(),
                               0.,
                               msg="Wrong values in padding area")
Example #4
def work():
  # Setup and run the inversion
  source=NetCdfData(NetCdfData.MAGNETIC, DATASET, scale_factor=DATA_UNITS, reference_system=COORDINATES)
  db=DomainBuilder(dim=3, reference_system=COORDINATES)
  db.addSource(source)
  db.setVerticalExtents(depth=thickness, air_layer=l_air, num_cells=n_cells_v)
  db.setFractionalPadding(pad_x=PAD_X, pad_y=PAD_Y)
  db.setBackgroundMagneticFluxDensity(B_b)
  db.fixSusceptibilityBelow(depth=thickness)

  inv=MagneticInversion(self_demagnetization=True)
  inv.setSolverTolerance(1e-4)
  inv.setSolverMaxIterations(50)
  inv.fixMagneticPotentialAtBottom(False)
  inv.setup(db)
  inv.getCostFunction().setTradeOffFactorsModels(MU)

  susceptibility = inv.run()
  print("susceptibility = %s"%susceptibility)

  B, w = db.getMagneticSurveys()[0]
  if saveSilo("result_magnetic.silo", susceptibility=susceptibility, magnetic_anomaly=B, magnetic_weight=w):
      print("Results saved in result_magnetic.silo")
  else:
      print("Failed to save result_magnetic.silo. Possibly no Silo support.")

  saveVTK("result_magnetic.vtu", susceptibility=susceptibility, magnetic_anomaly=B, magnetic_weight=w)
  print("Results saved in result_magnetic.vtu")

  saveDataCSV("result_magnetic.csv", susceptibility=susceptibility, x=susceptibility.getFunctionSpace().getX())
  print("Results saved in result_magnetic.csv")

  print("All done. Have a nice day!")
Example #5
    def test_cdf_with_padding(self):
        source = NetCdfData(DataSource.GRAVITY, NC_DATA, ALT, scale_factor=1e-6)
        domainbuilder=DomainBuilder()
        domainbuilder.addSource(source)
        domainbuilder.setVerticalExtents(depth=-VMIN, air_layer=VMAX, num_cells=NE_V)
        domainbuilder.setElementPadding(PAD_X,PAD_Y)
        dom=domainbuilder.getDomain()
        g,s=domainbuilder.getGravitySurveys()[0]

        outfn=os.path.join(WORKDIR, '_ncdata.csv')
        saveDataCSV(outfn, g=g, s=s)

        X0,NP,DX=source.getDataExtents()
        DV=(VMAX-VMIN)/NE_V

        # check metadata
        self.assertEqual(NP, NC_SIZE, msg="Wrong number of data points")
        # this test only works if gdal is available
        try:
            import osgeo.osr
            for i in range(len(NC_ORIGIN)):
                self.assertAlmostEqual(X0[i], NC_ORIGIN[i], msg="Data origin wrong")
        except ImportError:
            print("Skipping test of data origin since gdal is not installed.")

        # check data
        nx=NP[0]+2*PAD_X
        ny=NP[1]+2*PAD_Y
        nz=NE_V
        z_data=int(np.round((ALT-VMIN)/DV)-1)
    
        ref=np.genfromtxt(NC_REF, delimiter=',', dtype=np.float64)
        g_ref=ref[:,0].reshape((NP[1],NP[0]))
        s_ref=ref[:,1].reshape((NP[1],NP[0]))

        out=np.genfromtxt(outfn, delimiter=',', skip_header=1, dtype=np.float64)
        # recompute nz since ripley might have adjusted number of elements

        nz=len(out)//(nx*ny)
        g_out=out[:,0].reshape(nz,ny,nx)
        s_out=out[:,1].reshape(nz,ny,nx)

        self.assertAlmostEqual(np.abs(
            g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-g_ref).max(),
            0., msg="Difference in gravity data area")

        self.assertAlmostEqual(np.abs(
            s_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-s_ref).max(),
            0., msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]=NC_NULL
        self.assertAlmostEqual(np.abs(g_out-NC_NULL).max(), 0.,
                msg="Wrong values in padding area")
Example #6
    def test_numpy_data_2d(self):
        DIM=2
        testdata = np.arange(20*21).reshape(20,21)
        error = 1.*np.ones(testdata.shape)
        source = NumpyData(DataSource.GRAVITY, testdata, null_value=NUMPY_NULL)
        X0,NP,DX=source.getDataExtents()
        for i in range(DIM):
            self.assertAlmostEqual(X0[i], 0., msg="Data origin wrong")
            self.assertEqual(NP[i], testdata.shape[DIM-i-1], msg="Wrong number of data points")
            self.assertAlmostEqual(DX[i], 1000./testdata.shape[DIM-i-1], msg="Wrong cell size")

        domainbuilder=DomainBuilder(dim=3)
        domainbuilder.addSource(source)
        domainbuilder.setVerticalExtents(depth=-VMIN, air_layer=VMAX, num_cells=NE_V)
        domainbuilder.setElementPadding(PAD_X, PAD_Y)
        dom=domainbuilder.getDomain()
        g,s=domainbuilder.getGravitySurveys()[0]

        outfn=os.path.join(WORKDIR, '_npdata2d.csv')
        saveDataCSV(outfn, g=g, s=s)

        DV=(VMAX-VMIN)/NE_V

        # check data
        nx=NP[0]+2*PAD_X
        ny=NP[1]+2*PAD_Y
        nz=NE_V
        z_data=int(np.round((ALT-VMIN)/DV)-1)

        out=np.genfromtxt(outfn, delimiter=',', skip_header=1, dtype=np.float64)
        # recompute nz since ripley might have adjusted number of elements
        nz=len(out)//(nx*ny)
        g_out=out[:,0].reshape(nz,ny,nx)
        s_out=out[:,1].reshape(nz,ny,nx)
        self.assertAlmostEqual(np.abs(
            g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-testdata).max(),
            0., msg="Difference in gravity data area")

        self.assertAlmostEqual(np.abs(
            s_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-error).max(),
            0., msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]=NUMPY_NULL
        self.assertAlmostEqual(np.abs(g_out-NUMPY_NULL).max(), 0.,
                msg="Wrong values in padding area")
Example #7
def work():
    # Setup and run the inversion
    source = ErMapperData(DataSource.GRAVITY,
                          DATASET,
                          scale_factor=DATA_UNITS,
                          reference_system=COORDINATES)
    db = DomainBuilder(dim=3, reference_system=COORDINATES)
    db.addSource(source)
    db.setVerticalExtents(depth=thickness,
                          air_layer=l_air,
                          num_cells=n_cells_v)
    db.setFractionalPadding(pad_x=PAD_X, pad_y=PAD_Y)
    db.fixDensityBelow(depth=thickness)

    inv = GravityInversion()
    inv.setSolverTolerance(1e-4)
    inv.setSolverMaxIterations(50)
    inv.setup(db)
    inv.getCostFunction().setTradeOffFactorsModels(MU)

    density = inv.run()
    print("density = %s" % density)

    g, w = db.getGravitySurveys()[0]
    try:
        saveSilo("result0.silo",
                 density=density,
                 gravity_anomaly=g,
                 gravity_weight=w)
        print("Results saved in result0.silo")
    except Exception:
        print("Failed to save result0.silo. Possibly no Silo support.")

    saveVTK("result0.vtu",
            density=density,
            gravity_anomaly=g,
            gravity_weight=w)
    print("Results saved in result0.vtu")

    saveDataCSV("result0.csv",
                density=density,
                x=density.getFunctionSpace().getX())
    print("Results saved in result0.csv")

    print("All done. Have a nice day.!")
Example #8
def work():
  # Setup and run the inversion
  grav_source=NetCdfData(NetCdfData.GRAVITY, GRAVITY_DATASET, scale_factor=GRAV_UNITS, reference_system=COORDINATES)
  mag_source=NetCdfData(NetCdfData.MAGNETIC, MAGNETIC_DATASET, scale_factor=MAG_UNITS, reference_system=COORDINATES)
  db=DomainBuilder(dim=3, reference_system=COORDINATES)
  db.addSource(grav_source)
  db.addSource(mag_source)
  db.setVerticalExtents(depth=thickness, air_layer=l_air, num_cells=n_cells_v)
  db.setFractionalPadding(pad_x=PAD_X, pad_y=PAD_Y)
  db.setBackgroundMagneticFluxDensity(B_b)
  db.fixDensityBelow(depth=thickness)
  db.fixSusceptibilityBelow(depth=thickness)

  inv=JointGravityMagneticInversion()
  inv.setSolverTolerance(1e-4)
  inv.setSolverMaxIterations(100)
  inv.setup(db)
  inv.getCostFunction().setTradeOffFactorsModels([mu_gravity, mu_magnetic])
  inv.getCostFunction().setTradeOffFactorsRegularization(mu=[1., 1.], mu_c=1.)

  density, susceptibility = inv.run()
  print("density = %s"%density)
  print("susceptibility = %s"%susceptibility)

  g, wg = db.getGravitySurveys()[0]
  B, wB = db.getMagneticSurveys()[0]
  try:
      saveSilo("result_gravmag.silo", density=density, gravity_anomaly=g, gravity_weight=wg, susceptibility=susceptibility, magnetic_anomaly=B, magnetic_weight=wB)
      print("Results saved in result_gravmag.silo")
  except Exception:
      print("Failed to save result_gravmag.silo. Possibly no Silo support.")

  saveVTK("result_gravmag.vtu", density=density, gravity_anomaly=g, gravity_weight=wg, susceptibility=susceptibility, magnetic_anomaly=B,   magnetic_weight=wB)
  print("Results saved in result_gravmag.vtu")

  saveDataCSV("result_gravmag.csv", density=density, susceptibility=susceptibility, x=susceptibility.getFunctionSpace().getX())
  print("Results saved in result_gravmag.csv")

  print("All done. Have a nice day!")
Example #9
    sine_table = [
        0, 0.70710678118654746, 1, 0.70710678118654746, 0,
        -0.70710678118654746, -1, -0.70710678118654746, 0
    ]

    numslices = len(sine_table) - 1

    minval = 0
    maxval = 1

    step = sup(maxval - minval) / numslices  # The width of the gap between entries in the table

    result = interpolateTable(sine_table, x0, minval, step, toobig)

    #Now we save the input and output for comparison

    saveDataCSV("1d.csv", inp=x0, out=result)

    #Now 2D interpolation

    #This time the sine curve will be at full height along the x (i.e. x0) axis.
    #Its amplitude will decrease to a flat line along x1=1.1

    #Interpolate works with numpy arrays so we'll use them
    st = numpy.array(sine_table)

    table = [st, 0.5 * st, 0 * st]  #Note that this table is 2D

    #The y dimension should be the outer dimension of the table
    #Note that the order of tuples for the 2nd and 3rd param is (x,y)
    result2 = interpolateTable(table, x, (minval, 0), (0.55, step), toobig)
    saveDataCSV("2d.csv", inp0=x0, inp2=x1, out=result2)
Example #10
    #The values we take from the domain will range from 0 to 1 (inclusive)

    sine_table=[0, 0.70710678118654746, 1, 0.70710678118654746, 0, -0.70710678118654746, -1, -0.70710678118654746, 0]

    numslices=len(sine_table)-1

    minval=0
    maxval=1

    step=sup(maxval-minval)/numslices   #The width of the gap between entries in the table

    result=interpolateTable(sine_table, x0, minval, step, toobig)

    #Now we save the input and output for comparison

    saveDataCSV("1d.csv", inp=x0, out=result)

    #Now 2D interpolation

    #This time the sine curve will be at full height along the x (i.e. x0) axis.
    #Its amplitude will decrease to a flat line along x1=1.1

    #Interpolate works with numpy arrays so we'll use them
    st=numpy.array(sine_table)

    table=[st, 0.5*st, 0*st ]   #Note that this table is 2D

    #The y dimension should be the outer dimension of the table
    #Note that the order of tuples for the 2nd and 3rd param is (x,y)
    result2=interpolateTable(table, x, (minval,0), (0.55, step), toobig)
    saveDataCSV("2d.csv",inp0=x0, inp2=x1, out=result2)
Example #11
    def test_cdf_with_padding_ellipsoid(self):
        ref = WGS84ReferenceSystem()

        source = NetCdfData(DataSource.GRAVITY,
                            NC_DATA,
                            ALT,
                            reference_system=ref,
                            scale_factor=1e-6)
        domainbuilder = DomainBuilder(reference_system=ref)
        domainbuilder.addSource(source)
        domainbuilder.setVerticalExtents(depth=-VMIN,
                                         air_layer=VMAX,
                                         num_cells=NE_V)
        domainbuilder.setElementPadding(PAD_X, PAD_Y)
        dom = domainbuilder.getDomain()
        g, s = domainbuilder.getGravitySurveys()[0]

        outfn = os.path.join(WORKDIR, '_ncdata.csv')
        saveDataCSV(outfn, g=g, s=s)

        X0, NP, DX = source.getDataExtents()
        DV = (VMAX - VMIN) / NE_V

        # check metadata
        self.assertEqual(NP, NC_SIZE, msg="Wrong number of data points")

        for i in range(len(NC_ORIGIN)):
            self.assertAlmostEqual(X0[i],
                                   NC_ORIGIN_WGS84[i],
                                   msg="Data origin wrong")

        # check data
        nx = NP[0] + 2 * PAD_X
        ny = NP[1] + 2 * PAD_Y
        nz = NE_V
        z_data = int(np.round((ALT - VMIN) / DV) - 1)

        ref = np.genfromtxt(NC_REF, delimiter=',', dtype=np.float64)
        g_ref = ref[:, 0].reshape((NP[1], NP[0]))
        s_ref = ref[:, 1].reshape((NP[1], NP[0]))

        out = np.genfromtxt(outfn,
                            delimiter=',',
                            skip_header=1,
                            dtype=np.float64)

        # recompute nz since ripley might have adjusted number of elements
        nz = len(out) // (nx * ny)
        g_out = out[:, 0].reshape(nz, ny, nx)
        s_out = out[:, 1].reshape(nz, ny, nx)

        self.assertAlmostEqual(
            np.abs(g_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] -
                   g_ref).max(),
            0.,
            msg="Difference in gravity data area")

        self.assertAlmostEqual(
            np.abs(s_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] -
                   s_ref).max(),
            0.,
            msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] = NC_NULL
        self.assertAlmostEqual(np.abs(g_out - NC_NULL).max(),
                               0.,
                               msg="Wrong values in padding area")
Example #12
    def _ers_tester(self, filename):
        source = ErMapperData(DataSource.GRAVITY,
                              headerfile=filename,
                              altitude=ALT,
                              scale_factor=1e-6)
        domainbuilder = DomainBuilder()
        domainbuilder.addSource(source)
        domainbuilder.setVerticalExtents(depth=-VMIN,
                                         air_layer=VMAX,
                                         num_cells=NE_V)
        domainbuilder.setElementPadding(PAD_X, PAD_Y)
        dom = domainbuilder.getDomain()
        g, s = domainbuilder.getGravitySurveys()[0]

        outfn = os.path.join(WORKDIR, '_ersdata.csv')
        saveDataCSV(outfn, g=g, s=s)

        X0, NP, DX = source.getDataExtents()
        DV = (VMAX - VMIN) / NE_V

        # check metadata
        self.assertEqual(NP, ERS_SIZE, msg="Wrong number of data points")
        # this test only works if gdal is available
        try:
            import osgeo.osr
            for i in range(len(ERS_ORIGIN)):
                self.assertAlmostEqual(X0[i],
                                       ERS_ORIGIN[i],
                                       msg="Data origin wrong")
        except ImportError:
            print("Skipping test of data origin since gdal is not installed.")

        # check data
        nx = NP[0] + 2 * PAD_X
        ny = NP[1] + 2 * PAD_Y
        nz = NE_V
        z_data = int(np.round((ALT - VMIN) / DV) - 1)

        ref = np.genfromtxt(ERS_REF, delimiter=',', dtype=np.float64)
        g_ref = ref[:, 0].reshape((NP[1], NP[0]))
        s_ref = ref[:, 1].reshape((NP[1], NP[0]))

        out = np.genfromtxt(outfn,
                            delimiter=',',
                            skip_header=1,
                            dtype=np.float64)
        # recompute nz since ripley might have adjusted number of elements
        nz = len(out) // (nx * ny)
        g_out = out[:, 0].reshape(nz, ny, nx)
        s_out = out[:, 1].reshape(nz, ny, nx)
        self.assertAlmostEqual(
            np.abs(g_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] -
                   g_ref).max(),
            0.,
            msg="Difference in gravity data area")

        self.assertAlmostEqual(
            np.abs(s_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] -
                   s_ref).max(),
            0.,
            msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y + NP[1], PAD_X:PAD_X + NP[0]] = ERS_NULL
        self.assertAlmostEqual(np.abs(g_out - ERS_NULL).max(),
                               0.,
                               msg="Wrong values in padding area")
Example #13
def RegionalCalculation(reg_mask):
    """
    Calculates the "regional" from the entire FEILDS model excluding the
    selected region and outputs gravity at the specified altitude...
    see above for the "residual"
    """

    # read in a gravity data grid to define data computation space
    G_DATA = os.path.join(DATADIR,'Final_BouguerTC_UC15K_qrtdeg.nc')
    FS=ReducedFunction(dom)
    nValues=[NX, NY, 1]
    first = [0, 0, cell_at_altitude]
    multiplier = [1, 1, 1]
    reverse = [0, 0, 0]
    byteorder = BYTEORDER_NATIVE
    gdata = readBinaryGrid(G_DATA, FS, shape=(),
                fill=-999999, byteOrder=byteorder,
                dataType=DATATYPE_FLOAT32, first=first, numValues=nValues,
                multiplier=multiplier, reverse=reverse)
    print("Grid successfully read")

    # get the masking and units sorted out for the data-space
    g_mask = whereNonZero(gdata+999999)

    gdata=gdata*g_mask * GRAV_UNITS

    # if people choose to have air in their region we exclude it from the
    # specified gravity calculation region
    if h_top < 0.:
        reg_mask = reg_mask+mask_air

    live_model = initial_model* whereNonPositive(reg_mask)
    dead_model = initial_model* wherePositive(reg_mask)

    if UseMean:
        # calculate the mean density within the selected region
        BackgroundDensity = integrate(dead_model)/integrate(wherePositive(reg_mask))
        print("Density mean for selected region equals = %s"%BackgroundDensity)

        live_model = live_model + BackgroundDensity * wherePositive(reg_mask)

    # create mapping
    rho_mapping = DensityMapping(dom, rho0=live_model)

    # invert sign of gravity field to account for escript's coordinate system
    # (the unit conversion was already applied above, so only flip the sign)
    gdata = -gdata

    # turn the scalars into vectors (vertical direction)
    d=kronecker(DIM)[DIM-1]
    w=safeDiv(1., g_mask)
    gravity_model=GravityModel(dom, w*d, gdata*d, fixPotentialAtBottom=False, coordinates=COORDINATES)
    gravity_model.rescaleWeights(rho_scale=rho_mapping.getTypicalDerivative())
    phi,_ = gravity_model.getArguments(live_model)
    g_init = -gravity_model.getCoordinateTransformation().getGradient(phi)
    g_init = interpolate(g_init, gdata.getFunctionSpace())
    print("Computed gravity: %s"%(g_init[2]))

    fn=os.path.join(OUTPUTDIR,'regional-gravity')
    if SiloOutput:
        saveSilo(fn, density=live_model, gravity_init=g_init, g_initz=-g_init[2], gravitymask=g_mask, modelmask=reg_mask)
        print('SILO file written with the following fields: density (kg/m^3), gravity vector (m/s^2), gz (m/s^2), gravitymask, modelmask')

    # to compare calculated data against input dataset.
    # Not used by default but should work if the input dataset is correct
    #gslice = g_init[2]*wherePositive(g_mask)
    #g_dash = integrate(gslice)/integrate(wherePositive(g_mask))
    #gdataslice = gdata*wherePositive(g_mask)
    #gdata_dash = integrate(gdataslice)/integrate(wherePositive(g_mask))
    #misfit=(gdataslice-gdata_dash)-(gslice-g_dash)
    saveDataCSV(fn+".csv", mask=g_mask, gz=-g_init[2], Long=datacoords[0], Lat=datacoords[1], h=datacoords[2])
    print('CSV file written with the following fields: Longitude (degrees), Latitude (degrees), h (100km), gz (m/s^2)')
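The weighting w = safeDiv(1., g_mask) above relies on safeDiv returning zero wherever the denominator is zero, so cells without gravity data get zero weight in the misfit. A numpy analogue of that behaviour, assuming escript's safeDiv(a, b) yields a/b where b != 0 and 0 elsewhere:

import numpy as np

g_mask = np.array([1., 0., 1., 1.])   # 1 where data exists, 0 at fill values
w = np.divide(1., g_mask, out=np.zeros_like(g_mask), where=g_mask != 0)
# w == [1., 0., 1., 1.]: missing-data cells drop out of the misfit entirely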
Example #14
# mask is 1 OUTSIDE the area of interest (see the numpy sketch after the mask setup below)

mask_air = whereNonNegative(dom.getX()[2]+spacing[2]/2) #reg_mask for air layer
mask_LONG = wherePositive(Longitude_W-datacoords[0]) + whereNegative(Longitude_E-datacoords[0]) #reg_mask for longitude
mask_LAT = whereNegative(Latitude_N-datacoords[1]) + wherePositive(Latitude_S-datacoords[1])    #reg_mask for latitude
mask_h = wherePositive(datacoords[2]+(h_top/100)) + whereNegative(datacoords[2]+(h_base/100))   #reg_mask for depth

if ReverseSelection:
    reg_mask = whereNonZero(mask_LONG+mask_LAT+mask_h)
else:
    reg_mask = whereZero(mask_LONG+mask_LAT+mask_h)
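# A plain-numpy illustration of the mask algebra above: wherePositive(f)
# returns 1.0 where f > 0 and 0.0 elsewhere (likewise whereNegative,
# whereZero, whereNonZero). Window values below are assumptions:
#
#   import numpy as np
#   lon = np.array([110., 120., 130., 140.])
#   Longitude_W, Longitude_E = 115., 135.
#   mask_LONG = ((Longitude_W - lon > 0).astype(float)
#                + ((Longitude_E - lon) < 0).astype(float))
#   # mask_LONG == [1., 0., 0., 1.]: nonzero outside the [115, 135] window,
#   # matching the "mask is 1 OUTSIDE the area of interest" convention.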

# prior to any computation, write out the selected region model as CSV
# and Silo if requested
fn = os.path.join(OUTPUTDIR, "region_%s" % MODEL_PROPERTY)
saveDataCSV(fn+".csv", Long=datacoords[0], Lat=datacoords[1], h=datacoords[2],
            PROPERTY=initial_model, mask=reg_mask)
print("CSV file written with the following fields: Longitude (degrees)"
     +" Latitude (degrees), h (100km), Property (kg/m^3 or Pa)")

if SiloOutput:
    saveSilo(fn, PROPERTY=initial_model, mask=reg_mask)
    print('SILO file written with the following fields: Property (kg/m^3 or Pa), mask')


def ResidualCalculation(reg_mask):
    """
    Calculates the "residual" from the selected region for the entire FEILDS
    model region and outputs gravity at the specified altitude...
    see below for the "regional"
    """