Beispiel #1
0
    def test_with_non_bool(self):
        """A non-boolean string must be rejected with a ValidationError."""
        target = properties.Boolean()

        with self.assertRaises(properties.ValidationError) as ctx:
            target('Test', 'string')

        # The property name should be echoed in the error message.
        self.assertIn('Test', ctx.exception.message)
Beispiel #2
0
    def test_with_None_and_no_default(self):
        """None is invalid when the Boolean has no default configured."""
        target = properties.Boolean()

        with self.assertRaises(properties.ValidationError) as ctx:
            target('Test', None)

        # The property name should be echoed in the error message.
        self.assertIn('Test', ctx.exception.message)
Beispiel #3
0
 def test_with_bool_string(self):
     """String spellings of true/false are accepted case-insensitively."""
     target = properties.Boolean()
     for text in ('true', 'True', 'TRUE'):
         self.assertTrue(target('Test', text))
     for text in ('false', 'False', 'FALSE'):
         self.assertFalse(target('Test', text))
class OptionsWireframe(_BaseOptionsItem):
    """Wireframe display options for elements

    Only an on/off toggle is supported at present.
    """
    active = properties.Boolean('Wireframe on/off', default=False)
class OptionsTexture(OptionsStaticOpacity):
    """Display options for a texture applied to an element"""

    # Reference to the texture data used for rendering
    data = Pointer('Texture data for visualization', TextureProjection)

    # Toggle for showing/hiding the texture
    visible = properties.Boolean('Visibility of texture on/off', default=True)
class _BaseElementOptions(_BaseOptions):
    """Options shared by all element types"""

    # Toggle for showing/hiding the whole resource
    visible = properties.Boolean('Visibility of resource on/off', default=True)

    # Opacity settings applied when nothing more specific is set
    opacity = properties.Instance(
        'Default opacity options on the element',
        OptionsOpacity,
        default=OptionsOpacity,
    )

    # Color settings applied when nothing more specific is set
    color = properties.Instance(
        'Default color options on the element',
        OptionsColor,
        default=OptionsColor,
    )
class MappingContinuous(_BaseDataMapping):
    """Mapping of continuous data to a continuous gradient

    The most common use-case for continuous mappings is colormaps by
    specifying a N x 3 color gradient and a transfer function between
    data values and the gradient as sketched below.

    Note: Visualization clients may have limited support for complicated
    transfer functions. For best results, use
    :code:`mapping.data_controls = [0., 0, 1, 1]` or :code:`[1., 1, 0, 0]`.
    When combined with
    :code:`mapping.visibility = [False, True, True, True, False]`
    you get 5 color regions for your data:

    1) :code:`-Inf` to :code:`data_controls[0]` - Not visible
    2) :code:`data_controls[0]` to :code:`data_controls[1]` - Low gradient value
    3) :code:`data_controls[1]` to :code:`data_controls[2]` - Gradient dynamic range
    4) :code:`data_controls[2]` to :code:`data_controls[3]` - High gradient value
    5) :code:`data_controls[3]` to :code:`Inf` - Not visible

    .. code::

      #      gradient
      #          1
      #          -
      #         -|                      x - - - - - - ->
      # gradient |                     /
      # controls |                    /
      #          |                   /
      #         -|     <- - - - - - x
      #          |
      #          |
      #          -
      #          0
      #                <------------|---|--------------> data
      #                          data_controls
    """

    # Discriminator used to identify this mapping subtype on the wire
    SUB_TYPE = 'continuous'

    gradient = Pointer(
        'Array defining the gradient',
        Array,
    )
    data_controls = properties.List(
        'Data values for data/gradient inflection points; '
        'these values must be increasing and -inf/inf are implicit '
        'lower/upper values',
        prop=properties.Float(''),
        min_length=2,
        max_length=4,
    )
    gradient_controls = properties.List(
        'Normalized gradient values for data/gradient inflection points; '
        'length must equal len(data_controls)',
        prop=properties.Float('', min=0, max=1),
        min_length=2,
        max_length=4,
        default=lambda: [0., 0., 1., 1.],
    )
    visibility = properties.List(
        'True if region between control points is visible; '
        'length must equal len(data_controls) + 1',
        prop=properties.Boolean('', cast=True),
        min_length=3,
        max_length=5,
        default=lambda: [False, True, True, True, False],
    )
    interpolate = properties.Boolean(
        'If True, interpolate the gradient values; if False, only '
        'use values explicitly in the gradient',
        cast=True,
        default=False,
    )

    # Cross-property length invariants, checked on full-instance validation
    @properties.validator
    def _validate_controls(self):
        """Validate lengths of data_controls/gradient_controls/visibility"""
        if len(self.data_controls) != len(self.gradient_controls):
            raise properties.ValidationError(
                message='data and gradient controls must be equal length',
                reason='invalid',
                prop='data_controls',
                instance=self,
            )
        # visibility covers the regions between controls plus both tails
        if len(self.data_controls) != len(self.visibility) - 1:
            raise properties.ValidationError(
                message='visibility must be one longer than data controls',
                reason='invalid',
                prop='data_controls',
                instance=self,
            )

    # Per-property check, fired whenever data_controls is set
    @properties.validator('data_controls')
    def _validate_increasing(self, change):
        """Ensure data_controls are all increasing"""
        if change['value'] is properties.undefined:
            return
        # Pairwise differences; equal neighbors are allowed (non-decreasing)
        diffs = np.array(change['value'][1:]) - np.array(change['value'][:-1])
        if not np.all(diffs >= 0):
            raise properties.ValidationError(
                message='data controls must not decrease: {}'.format(
                    change['value'],
                ),
                reason='invalid',
                prop='data_controls',
                instance=self,
            )
Beispiel #8
0
    def test_with_None_and_default(self):
        """Passing None falls back to the configured default value."""
        target = properties.Boolean(default=False)
        result = target('Test', None)

        self.assertEqual(result, False)
Beispiel #9
0
    def test_with_bool_and_no_default(self):
        """A genuine bool passes through unchanged."""
        target = properties.Boolean()
        result = target('Test', True)

        self.assertEqual(result, True)
Beispiel #10
0
 class MixinB(MixinBaseClass):
     """Mixin contributing a single boolean property ``b`` (default False)."""
     b = properties.Boolean("test", default=False)
Beispiel #11
0
 class TestA(properties.HasProperties):
     """Minimal HasProperties class with one boolean property ``a``."""
     a = properties.Boolean("test", default=False)
class MappingDiscrete(_BaseDataMapping):
    """Mapping of continuous data to discrete intervals

    These mappings are used to categorize continuous numeric data.
    Define the limits of the intervals by specifying end points, and
    specify if the end points are inclusive in the lower or upper bucket.
    Then assign values to each interval.

    .. code::

      #       values
      #
      #         --                          x - - - - ->
      #
      #         --                  x - - - o
      #
      #         --     <- - - - - - o
      #
      #                <------------|--------|------------> data
      #                             end_points
    """

    # Discriminator used to identify this mapping subtype on the wire
    SUB_TYPE = 'discrete'

    # Interval values may be colors, numbers, or short strings
    values = properties.Union(
        'Values corresponding to intervals',
        props=[
            properties.List(
                '',
                properties.Color('', serializer=to_hex, deserializer=from_hex),
                max_length=256,
            ),
            properties.List('', properties.Float(''), max_length=256),
            properties.List(
                '',
                ShortString('', max_length=300),
                max_length=256,
            ),
        ],
    )
    end_points = properties.List(
        'Data end values of discrete intervals; these also correspond to '
        'the start of the next interval. First start and final end are '
        'fixed at -inf and inf, respectively.',
        prop=properties.Float(''),
        max_length=255,
    )
    end_inclusive = properties.List(
        'True if corresponding end is inclusive for lower range and false if '
        'it is inclusive for upper range; must be specified for each interval',
        prop=properties.Boolean('', cast=True),
        max_length=255,
    )
    visibility = properties.List(
        'True if interval is visible; must be specified for each interval',
        prop=properties.Boolean('', cast=True),
        max_length=256,
    )

    # Cross-property length invariants, checked on full-instance validation
    @properties.validator
    def _validate_lengths(self):
        """Validate lengths of values/end_points/end_inclusive/visibility"""
        if len(self.values) != len(self.visibility):
            raise properties.ValidationError(
                message='values and visibility must be equal length',
                reason='invalid',
                prop='visibility',
                instance=self,
            )
        # N intervals are delimited by N-1 interior end points
        if len(self.values) != len(self.end_points) + 1:
            raise properties.ValidationError(
                message='values must be one longer than end points',
                reason='invalid',
                prop='end_points',
                instance=self,
            )
        if len(self.values) != len(self.end_inclusive) + 1:
            raise properties.ValidationError(
                message='values must be one longer than end inclusive',
                reason='invalid',
                prop='end_inclusive',
                instance=self,
            )

    # Per-property check, fired whenever end_points is set
    @properties.validator('end_points')
    def _validate_increasing(self, change):
        """Ensure end_points are increasing"""
        if change['value'] is properties.undefined:
            return
        # Pairwise differences; equal neighbors are allowed (non-decreasing)
        diffs = np.array(change['value'][1:]) - np.array(change['value'][:-1])
        if not np.all(diffs >= 0):
            raise properties.ValidationError(
                message='end points must not decrease: {}'.format(
                    change['value']
                ),
                reason='invalid',
                prop='end_points',
                instance=self,
            )
class MappingCategory(_BaseDataMapping):
    """Mapping of integer index values to categories

    These mappings are used to define categories on
    :class:`spatial.resources.spatial.data.DataCategory` as
    well as color and other visual aspects. Data array values
    correspond to indices which then map to the values. If an
    array value is not present in indices, it is assumed there
    is no data at that location.

    .. code::

      #       values
      #
      #         --                          x
      #
      #         --            x
      #
      #         --       x
      #
      #                  |    |             |
      #                        indices
    """
    # Discriminator used to identify this mapping subtype on the wire
    SUB_TYPE = 'category'

    # Category values may be colors, numbers, or short strings
    values = properties.Union(
        'Values corresponding to indices',
        props=[
            properties.List(
                '',
                properties.Color('', serializer=to_hex, deserializer=from_hex),
                max_length=256,
            ),
            properties.List('', properties.Float(''), max_length=256),
            properties.List(
                '',
                ShortString('', max_length=300),
                max_length=256,
            ),
        ],
    )
    indices = properties.List(
        'Array indices for values',
        properties.Integer('', min=0),
        max_length=256,
    )
    visibility = properties.List(
        'True if category is visible',
        prop=properties.Boolean('', cast=True),
        max_length=256,
    )

    # Cross-property length invariants, checked on full-instance validation
    @properties.validator
    def _validate_lengths(self):
        """Validate lengths of values/indices/visibility"""
        if len(self.values) != len(self.indices):
            raise properties.ValidationError(
                message='values and indices must be equal length',
                reason='invalid',
                prop='indices',
                instance=self,
            )
        if len(self.values) != len(self.visibility):
            raise properties.ValidationError(
                message='values and visibility must be equal length',
                reason='invalid',
                prop='visibility',
                instance=self,
            )

    # Per-property check, fired whenever indices is set
    @properties.validator('indices')
    def _validate_indices_unique(self, change):
        """Ensure indices are unique"""
        if change['value'] is properties.undefined:
            return
        if len(change['value']) != len(set(change['value'])):
            raise properties.ValidationError(
                message='indices must be unique: {}'.format(change['value']),
                reason='invalid',
                prop='indices',
                instance=self,
            )

    def to_omf(self, index_map):
        # Build an omf.Legend with one value per entry of index_map;
        # entries whose index is missing from self.indices get a
        # type-appropriate "no data" placeholder.
        self.validate()
        new_values = []
        # Choose the placeholder based on the element type of values
        # (float -> NaN, string -> '', otherwise assumed color -> white)
        if not self.values or isinstance(self.values[0], float):
            nan_value = np.nan
        elif isinstance(self.values[0], string_types):
            nan_value = ''
        else:
            nan_value = [255, 255, 255]
        for ind in index_map:
            try:
                # list.index raises ValueError when ind is absent
                new_values.append(self.values[self.indices.index(ind)])
            except ValueError:
                new_values.append(nan_value)
        omf_legend = omf.Legend(
            name=self.name or '',
            description=self.description or '',
            values=new_values,
        )
        return omf_legend
Beispiel #14
0
class Project(UserContent):
    """Steno3D top-level project

    Bundles resources and handles upload/download, quota checks, and
    privacy bookkeeping against the steno3d web API.
    """
    _model_api_location = 'project/steno3d'

    resources = properties.List(
        doc='Project Resources',
        prop=CompositeResource,
        coerce=True,
        default=list,
    )

    public = properties.Boolean(doc='Public visibility of project',
                                default=False)

    # Privacy state recorded at first upload; None until then.
    _public_online = None

    @classmethod
    def _url_view_from_uid(cls, uid):
        """Get full url from a uid"""
        url = '{base}{mapi}/{uid}'.format(base=Comms.base_url,
                                          mapi='app',
                                          uid=uid)
        return url

    @needs_login
    def upload(self, **kwargs):
        """Upload the project and its resources; return the project uid

        Keyword arguments are forwarded to resource uploads; ``verbose``
        and ``print_url`` control console output.
        """
        verbose = kwargs.get('verbose', True)
        if getattr(self, '_upload_data', None) is None:
            # First upload: validate and check quota before any writes
            assert self.validate()
            self._check_project_quota(verbose)
            self._public_online = self.public
        elif verbose and self._public_online:
            print('This project is PUBLIC. It is viewable by everyone.')
        # Privacy cannot be changed after the initial upload
        if verbose and self._public_online != self.public:
            print('Local privacy changes cannot be applied to '
                  'projects that are already uploaded. To make '
                  'these changes, please use the dashboard on '
                  '{base_url}'.format(base_url=Comms.base_url))
        if verbose:
            print('\rStarting upload: {}'.format(self.title), end='')
        # Shared counters used by UserContent to report upload progress
        UserContent._upload_size = 1
        UserContent._upload_total_size = self._nbytes() + 1
        UserContent._upload_count = 0
        UserContent._upload_total_count = len(self.resources) + 1
        self._upload(**kwargs)
        self._trigger_ACL_fix()
        if verbose:
            print('\nComplete!')
        if verbose and kwargs.get('print_url', True):
            print(self._url)
        return self._upload_data['uid']

    def _post(self, datadict=None, files=None):
        """POST to the API; re-check quota on failure of the initial upload"""
        try:
            return super(Project, self)._post(datadict, files)
        except UploadError:
            if getattr(self, '_upload_data', None) is None:
                self._check_project_quota(False)
            raise

    def _trigger_ACL_fix(self):
        # An empty PUT prompts the server to refresh access control lists
        self._put({})

    @properties.validator
    def _validate_resources(self):
        """Check if project resource pointers are correct"""
        for res in self.resources:
            if self not in res.project:
                raise ValueError('Project/resource pointers misaligned: '
                                 'Ensure that resources point to containing '
                                 'project.')
        self._validate_project_size()
        return True

    def _nbytes(self):
        """Total size in bytes of all resources in the project"""
        return sum(r._nbytes() for r in self.resources)

    def _validate_project_size(self, res=None):
        """Check resource count and total size against the user's limits

        ``res`` defaults to the project's own resource list; quota checks
        are skipped when the user is not logged in.
        """
        if Comms.user.logged_in:
            if res is None:
                res = self.resources
            res_limit = Comms.user.project_resource_limit
            if len(res) > res_limit:
                # Report len(res) -- the list actually checked -- not
                # len(self.resources), which may differ when res is passed in
                raise ProjectResourceLimitExceeded(
                    'Total number of resources in project ({res}) '
                    'exceeds limit: {lim}'.format(res=len(res),
                                                  lim=res_limit))
            size_limit = Comms.user.project_size_limit
            sz = self._nbytes()
            if sz > size_limit:
                raise ProjectSizeLimitExceeded(
                    'Total project size ({file} bytes) exceeds limit: '
                    '{lim} bytes'.format(file=sz, lim=size_limit))
        return True

    @properties.observer('resources')
    def _fix_proj_res(self, change):
        """Keep project/resource back-pointers aligned and de-duplicate"""
        before = change['previous']
        after = change['value']
        if before in (None, properties.undefined):
            before = []
        if after in (None, properties.undefined):
            after = []
        # Newly added resources must point back at this project
        for res in after:
            if res not in before and self not in res.project:
                res.project += [self]
        # Removed resources must drop their pointer to this project
        for res in before:
            if res not in after and self in res.project:
                res.project = [p for p in res.project if p is not self]
        # Remove duplicates while preserving order
        if len(set(after)) != len(after):
            post_post = []
            for r in after:
                if r not in post_post:
                    post_post += [r]
            self.resources = post_post

    def _upload_dirty(self, **kwargs):
        """Upload resources when the resource list has unsynced changes"""
        dirty = self._dirty
        if 'resources' in dirty:
            for r in self.resources:
                r._upload(**kwargs)

    def _get_dirty_data(self, force=False, initialize=False):
        """Assemble the API payload of changed (or all, if forced) fields"""
        datadict = super(Project, self)._get_dirty_data(force)
        dirty = self._dirty_props
        if 'public' in dirty or force:
            datadict['public'] = self.public
        if ('resources' in dirty or force) and not initialize:
            datadict['resourceUids'] = ','.join(
                (r._json['longUid'] for r in self.resources))
        return datadict

    def _check_project_quota(self, verbose=True):
        """Query the API quota endpoint and raise/warn as appropriate

        Raises ProjectQuotaExceeded when the quota is already used up;
        prints a warning when fewer than 4 uploads remain. Malformed
        responses are deliberately ignored (best-effort check).
        """
        if self.public:
            privacy = 'public'
        else:
            privacy = 'private'
        if verbose:
            print('Verifying your quota for ' + privacy + ' projects...')
        resp = Comms.get('api/check/quota?test=ProjectSteno3D')
        resp = resp['json']
        mode = resp.get('mode', None)
        if not mode or mode == 'split':
            key = privacy
        else:
            key = mode
        resp = resp.get(key, None)
        if not resp or 'quota' not in resp or 'count' not in resp:
            pass
        elif resp['quota'] == 'Unlimited':
            pass
        elif resp['count'] >= resp['quota']:
            quota_message = resp.get('message', QUOTA_REACHED)
            raise ProjectQuotaExceeded(
                quota_message.format(
                    priv=privacy,
                    num=resp['quota'],
                    base_url=Comms.base_url,
                ))
        elif verbose and (resp['quota'] - resp['count'] - 1) < 4:
            print(
                QUOTA_IMPENDING.format(
                    remaining=resp['quota'] - resp['count'] - 1,
                    priv=privacy,
                    base_url=Comms.base_url,
                ))
        if verbose and self.public:
            print('This PUBLIC project will be viewable by everyone.')

    @property
    def _url(self):
        # None until the project has been uploaded
        if getattr(self, '_upload_data', None) is not None:
            return self._url_view_from_uid(self._upload_data['uid'])

    @property
    @needs_login
    def url(self):
        """url of project if uploaded"""
        if getattr(self, '_upload_data', None) is None:
            print('Project not uploaded: Please upload() '
                  'before accessing the URL.')
        return self._url

    @needs_login
    def plot(self):
        """Display the 3D representation of the content

        You must be logged in to steno3d.com for the plot to display
        """
        if getattr(self, '_upload_data', None) is None:
            print('Project not uploaded: Please upload() before plotting.')
            return
        url = '{base}{mapi}/{uid}'.format(
            base=Comms.base_url,
            mapi='embed',
            uid=self._upload_data['uid'],
        )
        return plot(url)

    @classmethod
    def _build(cls, uid, copy=True, tab_level='', verbose=True):
        """Download a project (and its resources) from the API by uid

        ``copy=False`` is only honored for the project owner; everyone
        else always receives a copy.
        """
        if verbose:
            print('Downloading project', end=': ')
        json = cls._json_from_uid(uid)
        title = '' if json['title'] is None else json['title']
        desc = '' if json['description'] is None else json['description']
        if verbose:
            print(title)
        # Project is public if any access entry is the special PUBLIC user
        pub = False
        for a in json['access']:
            if a['user'] == 'Special:PUBLIC':
                pub = True
                break
        is_owner = Comms.user.username == json['owner']['uid']
        if copy is None:
            copy = not is_owner
        elif not copy and not is_owner:
            # Non-owners can never hold the original
            copy = True
        if copy and verbose:
            print('This is a copy of the {pub} project'.format(
                pub='PUBLIC' if pub else 'private'))
        elif verbose:
            print('This is the original version of the {pub} project'.format(
                pub='PUBLIC' if pub else 'private'))
            print('>> NOTE: Any changes you upload will overwrite the '
                  'project online')
            print('>> ', end='')
            if len(json['perspectiveUids']) > 0:
                print('and existing perspectives may be invalidated. ', end='')
            print('Please upload with caution.')

        proj = Project(public=pub, title=title, description=desc, resources=[])
        for longuid in json['resourceUids']:
            # longUid format: '...Resource<Class>:<uid>...'
            res_string = longuid.split('Resource')[-1].split(':')[0]
            res_class = UserContent._REGISTRY[res_string]
            proj.resources += [
                res_class._build(
                    src=longuid.split(':')[1],
                    copy=copy,
                    tab_level=tab_level + '    ',
                    project=proj,
                    using='ProjectSteno3D:{}'.format(uid),
                )
            ]
        if not copy:
            # Originals stay linked to the online version
            proj._public_online = pub
            proj._upload_data = json
            proj._mark_clean()
        if verbose:
            print('... Complete!')
        return proj

    @classmethod
    def from_omf(cls, omf_input):
        """Build a Project from an OMF file path or omf.Project instance"""
        if isinstance(omf_input, six.string_types):
            from omf import OMFReader
            omf_input = OMFReader(omf_input).get_project()
        if omf_input.__class__.__name__ != 'Project':
            raise ValueError('input must be omf file or Project')
        return cls._build_from_omf(omf_input)

    @classmethod
    def _build_from_omf(cls, omf_project):
        """Convert each OMF element into the matching steno3d resource"""
        proj = Project(title=omf_project.name,
                       description=omf_project.description,
                       resources=[])
        resource_map = {
            'PointSetElement': 'Point',
            'LineSetElement': 'Line',
            'SurfaceElement': 'Surface',
            'VolumeElement': 'Volume'
        }
        for elem in omf_project.elements:
            res_class = UserContent._REGISTRY[resource_map[
                elem.__class__.__name__]]
            proj.resources += [
                res_class._build_from_omf(elem, omf_project, proj)
            ]
        return proj

    # to_omf only exists when the optional omf package is installed
    try:
        import omf

        def to_omf(self, filename=None):
            """Export to an omf.Project, optionally writing it to filename"""
            import omf
            project = omf.Project(
                name=self.title or '',
                description=self.description or '',
                elements=[res._to_omf() for res in self.resources],
            )
            if filename is not None:
                omf.OMFWriter(project, filename)
            return project
    except ImportError:
        pass
class Light(_BaseSceneComponent):
    """A single light source attached to a scene"""

    # Direction the light shines from, relative to the plot center
    direction = properties.Vector3(
        'Vector pointing from plot center to light',
    )
    # How strong the light is
    brightness = properties.Float('Intensity of light source')
    # Master on/off switch
    enabled = properties.Boolean('Whether light is on or off')
Beispiel #16
0
class MeshOptions(Options):
    """Display options that apply to a mesh"""

    # Optional wireframe toggle, off by default
    wireframe = properties.Boolean(
        doc='Wireframe',
        default=False,
        required=False,
    )
Beispiel #17
0
class BaseFDEMSimulation(BaseEMSimulation):
    """
    We start by looking at Maxwell's equations in the electric
    field \\\(\\\mathbf{e}\\\) and the magnetic flux
    density \\\(\\\mathbf{b}\\\)

    .. math ::

        \mathbf{C} \mathbf{e} + i \omega \mathbf{b} = \mathbf{s_m} \\\\
        {\mathbf{C}^{\\top} \mathbf{M_{\mu^{-1}}^f} \mathbf{b} -
        \mathbf{M_{\sigma}^e} \mathbf{e} = \mathbf{s_e}}

    if using the E-B formulation (:code:`Simulation3DElectricField`
    or :code:`Simulation3DMagneticFluxDensity`). Note that in this case,
    :math:`\mathbf{s_e}` is an integrated quantity.

    If we write Maxwell's equations in terms of
    \\\(\\\mathbf{h}\\\) and current density \\\(\\\mathbf{j}\\\)

    .. math ::

        \mathbf{C}^{\\top} \mathbf{M_{\\rho}^f} \mathbf{j} +
        i \omega \mathbf{M_{\mu}^e} \mathbf{h} = \mathbf{s_m} \\\\
        \mathbf{C} \mathbf{h} - \mathbf{j} = \mathbf{s_e}

    if using the H-J formulation (:code:`Simulation3DCurrentDensity` or
    :code:`Simulation3DMagneticField`). Note that here, :math:`\mathbf{s_m}` is an
    integrated quantity.

    The problem performs the elimination so that we are solving the system
    for \\\(\\\mathbf{e},\\\mathbf{b},\\\mathbf{j} \\\) or
    \\\(\\\mathbf{h}\\\)

    """

    # Fields container class used by fields()
    fieldsPair = FieldsFDEM

    mu, muMap, muDeriv = props.Invertible("Magnetic Permeability (H/m)",
                                          default=mu_0)

    mui, muiMap, muiDeriv = props.Invertible(
        "Inverse Magnetic Permeability (m/H)")

    # Declare mu and mui as reciprocals of each other
    props.Reciprocal(mu, mui)

    forward_only = properties.Boolean(
        "If True, A-inverse not stored at each frequency in forward simulation",
        default=False,
    )

    survey = properties.Instance("a survey object", Survey, required=True)

    # @profile
    def fields(self, m=None):
        """
        Solve the forward problem for the fields.

        :param numpy.ndarray m: inversion model (nP,)
        :rtype: numpy.ndarray
        :return f: forward solution
        """

        if m is not None:
            self.model = m

        # Lazily create the per-frequency list of factored system inverses
        try:
            self.Ainv
        except AttributeError:
            if self.verbose:
                print("num_frequencies =", self.survey.num_frequencies)
            self.Ainv = [None for i in range(self.survey.num_frequencies)]

        # Release any factorizations left over from a previous solve
        if self.Ainv[0] is not None:
            for i in range(self.survey.num_frequencies):
                self.Ainv[i].clean()

            if self.verbose:
                print("Cleaning Ainv")

        f = self.fieldsPair(self)

        for nf, freq in enumerate(self.survey.frequencies):
            A = self.getA(freq)
            rhs = self.getRHS(freq)
            self.Ainv[nf] = self.solver(A, **self.solver_opts)
            u = self.Ainv[nf] * rhs
            Srcs = self.survey.get_sources_by_frequency(freq)
            f[Srcs, self._solutionType] = u
            # In forward-only mode, free each factorization immediately
            # after use instead of keeping it for sensitivity computations
            if self.forward_only:
                if self.verbose:
                    print("Fields simulated for frequency {}".format(nf))
                self.Ainv[nf].clean()
        return f

    # @profile
    def Jvec(self, m, v, f=None):
        """
        Sensitivity times a vector.

        :param numpy.ndarray m: inversion model (nP,)
        :param numpy.ndarray v: vector which we take sensitivity product with
            (nP,)
        :param SimPEG.electromagnetics.frequency_domain.fields.FieldsFDEM u: fields object
        :rtype: numpy.ndarray
        :return: Jv (ndata,)
        """

        if f is None:
            f = self.fields(m)

        self.model = m

        # Jv = Data(self.survey)
        Jv = []

        for nf, freq in enumerate(self.survey.frequencies):
            for src in self.survey.get_sources_by_frequency(freq):
                u_src = f[src, self._solutionType]
                dA_dm_v = self.getADeriv(freq, u_src, v, adjoint=False)
                dRHS_dm_v = self.getRHSDeriv(freq, src, v)
                # Perturbed field: du/dm v = A^-1 (dRHS/dm v - dA/dm v)
                du_dm_v = self.Ainv[nf] * (-dA_dm_v + dRHS_dm_v)

                for rx in src.receiver_list:
                    Jv.append(
                        rx.evalDeriv(src, self.mesh, f, du_dm_v=du_dm_v, v=v))
        return np.hstack(Jv)

    # @profile
    def Jtvec(self, m, v, f=None):
        """
        Sensitivity transpose times a vector

        :param numpy.ndarray m: inversion model (nP,)
        :param numpy.ndarray v: vector which we take adjoint product with (nP,)
        :param SimPEG.electromagnetics.frequency_domain.fields.FieldsFDEM u: fields object
        :rtype: numpy.ndarray
        :return: Jv (ndata,)
        """

        if f is None:
            f = self.fields(m)

        self.model = m

        # Ensure v is a data object.
        if not isinstance(v, Data):
            v = Data(self.survey, v)

        Jtv = np.zeros(m.size)

        for nf, freq in enumerate(self.survey.frequencies):
            for src in self.survey.get_sources_by_frequency(freq):
                u_src = f[src, self._solutionType]
                # Accumulate receiver adjoint contributions, skipping
                # symbolic zeros from receivers with no dependence
                df_duT_sum = 0
                df_dmT_sum = 0
                for rx in src.receiver_list:
                    df_duT, df_dmT = rx.evalDeriv(src,
                                                  self.mesh,
                                                  f,
                                                  v=v[src, rx],
                                                  adjoint=True)
                    if not isinstance(df_duT, Zero):
                        df_duT_sum += df_duT
                    if not isinstance(df_dmT, Zero):
                        df_dmT_sum += df_dmT

                # Adjoint solve reuses the factorization stored by fields()
                ATinvdf_duT = self.Ainv[nf] * df_duT_sum

                dA_dmT = self.getADeriv(freq, u_src, ATinvdf_duT, adjoint=True)
                dRHS_dmT = self.getRHSDeriv(freq,
                                            src,
                                            ATinvdf_duT,
                                            adjoint=True)
                du_dmT = -dA_dmT + dRHS_dmT

                df_dmT_sum += du_dmT
                Jtv += np.real(df_dmT_sum)

        return mkvc(Jtv)

    # @profile
    def getSourceTerm(self, freq):
        """
        Evaluates the sources for a given frequency and puts them in matrix
        form

        :param float freq: Frequency
        :rtype: tuple
        :return: (s_m, s_e) (nE or nF, nSrc)
        """
        Srcs = self.survey.get_sources_by_frequency(freq)
        # Magnetic/electric source arrays live on faces/edges (EB) or
        # edges/faces (HJ); NOTE(review): any other _formulation value
        # would leave s_m/s_e undefined below -- assumed unreachable
        if self._formulation == "EB":
            s_m = np.zeros((self.mesh.nF, len(Srcs)), dtype=complex)
            s_e = np.zeros((self.mesh.nE, len(Srcs)), dtype=complex)
        elif self._formulation == "HJ":
            s_m = np.zeros((self.mesh.nE, len(Srcs)), dtype=complex)
            s_e = np.zeros((self.mesh.nF, len(Srcs)), dtype=complex)

        for i, src in enumerate(Srcs):
            smi, sei = src.eval(self)

            s_m[:, i] = s_m[:, i] + smi
            s_e[:, i] = s_e[:, i] + sei

        return s_m, s_e
Beispiel #18
0
class Simulation3DIntegral(BasePFSimulation):
    """Magnetic simulation in integral form.

    Forward-models magnetic data by assembling a dense linear operator
    ``G`` mapping a susceptibility (or 3-component vector magnetization)
    model to data at the receiver locations. When ``is_amplitude_data``
    is True, the three Cartesian field components per receiver are
    combined into a field amplitude, and the sensitivity products are
    chained through the amplitude derivative (``fieldDeriv``).
    """

    # Invertible physical property triple (SimPEG props pattern):
    # the susceptibility values, their mapping, and the mapping derivative.
    chi, chiMap, chiDeriv = props.Invertible(
        "Magnetic Susceptibility (SI)", default=1.0
    )

    # "susceptibility": scalar model (nC values);
    # "vector": 3-component magnetization model.
    modelType = properties.StringChoice(
        "Type of magnetization model",
        choices=["susceptibility", "vector"],
        default="susceptibility",
    )

    # When True, data are field amplitudes rather than individual components.
    is_amplitude_data = properties.Boolean(
        "Whether the supplied data is amplitude data", default=False
    )

    def __init__(self, mesh, **kwargs):
        super().__init__(mesh, **kwargs)
        # Lazily built caches: sensitivity matrix, magnetization matrix,
        # and the diagonal of G^T G (see getJtJdiag).
        self._G = None
        self._M = None
        self._gtg_diagonal = None
        self.modelMap = self.chiMap
        setKwargs(self, **kwargs)

    @property
    def M(self):
        """
        M: ndarray
            Magnetization matrix
        """
        if getattr(self, "_M", None) is None:

            if self.modelType == "vector":
                # parameters[0] scales the magnetization amplitude.
                # NOTE(review): a vector model has 3*nC components but this
                # identity is nC x nC — confirm the intended shape.
                self._M = sp.identity(self.nC) * self.survey.source_field.parameters[0]

            else:
                # Unit direction of the inducing field; parameters[1] and
                # parameters[2] are the angles fed to dip_azimuth2cartesian.
                mag = mat_utils.dip_azimuth2cartesian(
                    np.ones(self.nC) * self.survey.source_field.parameters[1],
                    np.ones(self.nC) * self.survey.source_field.parameters[2],
                )

                # Scale each Cartesian component by the field amplitude
                # (parameters[0]) and stack into a (3*nC, nC) operator.
                self._M = sp.vstack(
                    (
                        sdiag(mag[:, 0] * self.survey.source_field.parameters[0]),
                        sdiag(mag[:, 1] * self.survey.source_field.parameters[0]),
                        sdiag(mag[:, 2] * self.survey.source_field.parameters[0]),
                    )
                )

        return self._M

    @M.setter
    def M(self, M):
        """
        Create magnetization matrix from unit vector orientation
        :parameter
        M: array (3*nC,) or (nC, 3)
        """
        if self.modelType == "vector":
            # Flatten and scale by the field amplitude (parameters[0]).
            self._M = sdiag(mkvc(M) * self.survey.source_field.parameters[0])
        else:
            # Interpret M as per-cell unit vectors, one (x, y, z) row per cell.
            M = M.reshape((-1, 3))
            self._M = sp.vstack(
                (
                    sdiag(M[:, 0] * self.survey.source_field.parameters[0]),
                    sdiag(M[:, 1] * self.survey.source_field.parameters[0]),
                    sdiag(M[:, 2] * self.survey.source_field.parameters[0]),
                )
            )

    def fields(self, model):
        """Compute predicted data for ``model`` (applies ``chiMap`` first)."""
        model = self.chiMap * model

        if self.store_sensitivities == "forward_only":
            # G is never stored; linear_operator() produces the fields directly.
            self.model = model
            fields = mkvc(self.linear_operator())
        else:
            # Sensitivities are held in float32, so cast the model to match.
            fields = np.asarray(self.G @ model.astype(np.float32))

        if self.is_amplitude_data:
            # Collapse the per-receiver (bx, by, bz) triplets to amplitudes.
            fields = self.compute_amplitude(fields)

        return fields

    @property
    def G(self):
        """Sensitivity matrix, built once by ``linear_operator`` and cached."""
        if getattr(self, "_G", None) is None:

            self._G = self.linear_operator()

        return self._G

    @property
    def nD(self):
        """
        Number of data
        """
        # One datum per receiver location row.
        self._nD = self.survey.receiver_locations.shape[0]

        return self._nD

    @property
    def tmi_projection(self):
        """Projection of Cartesian components onto the inducing-field
        direction (used by the 'tmi' component); built once and cached."""
        if getattr(self, "_tmi_projection", None) is None:

            # Convert from north to cartesian
            self._tmi_projection = mat_utils.dip_azimuth2cartesian(
                self.survey.source_field.parameters[1],
                self.survey.source_field.parameters[2],
            )

        return self._tmi_projection

    def getJtJdiag(self, m, W=None):
        """
        Return the diagonal of JtJ
        """
        self.model = m

        if W is None:
            W = np.ones(self.survey.nD)
        else:
            # Incoming W is a diagonal weighting matrix; square it up front.
            W = W.diagonal() ** 2
        if getattr(self, "_gtg_diagonal", None) is None:
            diag = np.zeros(self.G.shape[1])
            if not self.is_amplitude_data:
                # diag(G^T W G) accumulated row by row.
                for i in range(len(W)):
                    diag += W[i] * (self.G[i] * self.G[i])
            else:
                # Amplitude data: G rows are interleaved (bx, by, bz) per
                # receiver; each datum couples the three component rows
                # through the amplitude derivative.
                fieldDeriv = self.fieldDeriv
                Gx = self.G[::3]
                Gy = self.G[1::3]
                Gz = self.G[2::3]
                for i in range(len(W)):
                    row = (
                        fieldDeriv[0, i] * Gx[i]
                        + fieldDeriv[1, i] * Gy[i]
                        + fieldDeriv[2, i] * Gz[i]
                    )
                    diag += W[i] * (row * row)
            self._gtg_diagonal = diag
        else:
            diag = self._gtg_diagonal
        # Chain through the model-mapping derivative.
        return mkvc((sdiag(np.sqrt(diag)) @ self.chiDeriv).power(2).sum(axis=0))

    def Jvec(self, m, v, f=None):
        """Sensitivity-times-vector product ``J @ v``."""
        self.model = m
        dmu_dm_v = self.chiDeriv @ v

        Jvec = self.G @ dmu_dm_v.astype(np.float32)

        if self.is_amplitude_data:
            # Fold interleaved (bx, by, bz) rows through the amplitude
            # derivative to get the amplitude sensitivity.
            Jvec = Jvec.reshape((-1, 3)).T
            fieldDeriv_Jvec = self.fieldDeriv * Jvec
            return fieldDeriv_Jvec[0] + fieldDeriv_Jvec[1] + fieldDeriv_Jvec[2]
        else:
            return Jvec

    def Jtvec(self, m, v, f=None):
        """Adjoint sensitivity product ``J.T @ v``."""
        self.model = m

        if self.is_amplitude_data:
            # Adjoint of the amplitude chain applied in Jvec.
            v = (self.fieldDeriv * v).T.reshape(-1)
        Jtvec = self.G.T @ v.astype(np.float32)
        return np.asarray(self.chiDeriv.T @ Jtvec)

    @property
    def fieldDeriv(self):
        """Derivative of the field amplitude w.r.t. the Cartesian
        components: the unit (normalized) field, shape (3, nD); cached."""
        if getattr(self, "chi", None) is None:
            # No model set yet: evaluate the derivative at chi = 0.
            self.model = np.zeros(self.chiMap.nP)

        if getattr(self, "_fieldDeriv", None) is None:
            fields = np.asarray(self.G.dot((self.chiMap @ self.chi).astype(np.float32)))
            b_xyz = self.normalized_fields(fields)

            self._fieldDeriv = b_xyz

        return self._fieldDeriv

    @classmethod
    def normalized_fields(cls, fields):
        """
        Return the normalized B fields
        """

        # Get field amplitude
        amp = cls.compute_amplitude(fields.astype(np.float64))

        # Fortran-order reshape matches the interleaved (bx, by, bz) layout.
        return fields.reshape((3, -1), order="F") / amp[None, :]

    @classmethod
    def compute_amplitude(cls, b_xyz):
        """
        Compute amplitude of the magnetic field
        """

        amplitude = np.linalg.norm(b_xyz.reshape((3, -1), order="F"), axis=0)

        return amplitude

    def evaluate_integral(self, receiver_location, components):
        """
        Load in the active nodes of a tensor mesh and computes the magnetic
        forward relation between a cuboid and a given observation
        location outside the Earth [obsx, obsy, obsz]

        INPUT:
        receiver_location:  [obsx, obsy, obsz] nC x 3 Array

        components: list[str]
            List of magnetic components chosen from:
            'bx', 'by', 'bz', 'bxx', 'bxy', 'bxz', 'byy', 'byz', 'bzz'

        OUTPUT:
        Tx = [Txx Txy Txz]
        Ty = [Tyx Tyy Tyz]
        Tz = [Tzx Tzy Tzz]
        """
        # TODO: This should probably be converted to C
        tol1 = 1e-10  # Tolerance 1 for numerical stability over nodes and edges
        tol2 = 1e-4  # Tolerance 2 for numerical stability over nodes and edges

        rows = {component: np.zeros(3 * self.Xn.shape[0]) for component in components}

        # number of cells in mesh
        nC = self.Xn.shape[0]

        # base cell dimensions
        min_hx, min_hy, min_hz = (
            self.mesh.hx.min(),
            self.mesh.hy.min(),
            self.mesh.hz.min(),
        )

        # comp. pos. differences for tne, bsw nodes. Adjust if location within
        # tolerance of a node or edge
        dz2 = self.Zn[:, 1] - receiver_location[2]
        dz2[np.abs(dz2) / min_hz < tol2] = tol2 * min_hz
        dz1 = self.Zn[:, 0] - receiver_location[2]
        dz1[np.abs(dz1) / min_hz < tol2] = tol2 * min_hz

        dy2 = self.Yn[:, 1] - receiver_location[1]
        dy2[np.abs(dy2) / min_hy < tol2] = tol2 * min_hy
        dy1 = self.Yn[:, 0] - receiver_location[1]
        dy1[np.abs(dy1) / min_hy < tol2] = tol2 * min_hy

        dx2 = self.Xn[:, 1] - receiver_location[0]
        dx2[np.abs(dx2) / min_hx < tol2] = tol2 * min_hx
        dx1 = self.Xn[:, 0] - receiver_location[0]
        dx1[np.abs(dx1) / min_hx < tol2] = tol2 * min_hx

        # comp. squared diff
        dx2dx2 = dx2 ** 2.0
        dx1dx1 = dx1 ** 2.0

        dy2dy2 = dy2 ** 2.0
        dy1dy1 = dy1 ** 2.0

        dz2dz2 = dz2 ** 2.0
        dz1dz1 = dz1 ** 2.0

        # 2D radius component squared of corner nodes
        R1 = dy2dy2 + dx2dx2
        R2 = dy2dy2 + dx1dx1
        R3 = dy1dy1 + dx2dx2
        R4 = dy1dy1 + dx1dx1

        # radius to each cell node (r1..r8 are the 8 prism corners)
        r1 = np.sqrt(dz2dz2 + R2)
        r2 = np.sqrt(dz2dz2 + R1)
        r3 = np.sqrt(dz1dz1 + R1)
        r4 = np.sqrt(dz1dz1 + R2)
        r5 = np.sqrt(dz2dz2 + R3)
        r6 = np.sqrt(dz2dz2 + R4)
        r7 = np.sqrt(dz1dz1 + R4)
        r8 = np.sqrt(dz1dz1 + R3)

        # compactify argument calculations
        # (arg groups of 5-6 terms per corner; argN_ variants feed arctan2)
        arg1_ = dx1 + dy2 + r1
        arg1 = dy2 + dz2 + r1
        arg2 = dx1 + dz2 + r1
        arg3 = dx1 + r1
        arg4 = dy2 + r1
        arg5 = dz2 + r1

        arg6_ = dx2 + dy2 + r2
        arg6 = dy2 + dz2 + r2
        arg7 = dx2 + dz2 + r2
        arg8 = dx2 + r2
        arg9 = dy2 + r2
        arg10 = dz2 + r2

        arg11_ = dx2 + dy2 + r3
        arg11 = dy2 + dz1 + r3
        arg12 = dx2 + dz1 + r3
        arg13 = dx2 + r3
        arg14 = dy2 + r3
        arg15 = dz1 + r3

        arg16_ = dx1 + dy2 + r4
        arg16 = dy2 + dz1 + r4
        arg17 = dx1 + dz1 + r4
        arg18 = dx1 + r4
        arg19 = dy2 + r4
        arg20 = dz1 + r4

        arg21_ = dx2 + dy1 + r5
        arg21 = dy1 + dz2 + r5
        arg22 = dx2 + dz2 + r5
        arg23 = dx2 + r5
        arg24 = dy1 + r5
        arg25 = dz2 + r5

        arg26_ = dx1 + dy1 + r6
        arg26 = dy1 + dz2 + r6
        arg27 = dx1 + dz2 + r6
        arg28 = dx1 + r6
        arg29 = dy1 + r6
        arg30 = dz2 + r6

        arg31_ = dx1 + dy1 + r7
        arg31 = dy1 + dz1 + r7
        arg32 = dx1 + dz1 + r7
        arg33 = dx1 + r7
        arg34 = dy1 + r7
        arg35 = dz1 + r7

        arg36_ = dx2 + dy1 + r8
        arg36 = dy1 + dz1 + r8
        arg37 = dx2 + dz1 + r8
        arg38 = dx2 + r8
        arg39 = dy1 + r8
        arg40 = dz1 + r8

        # 'bxx' is also computed when only 'bzz' is requested, because
        # bzz is derived from -(bxx + byy) below.
        if ("bxx" in components) or ("bzz" in components):
            rows["bxx"] = np.zeros((1, 3 * nC))

            rows["bxx"][0, 0:nC] = 2 * (
                ((dx1 ** 2 - r1 * arg1) / (r1 * arg1 ** 2 + dx1 ** 2 * r1))
                - ((dx2 ** 2 - r2 * arg6) / (r2 * arg6 ** 2 + dx2 ** 2 * r2))
                + ((dx2 ** 2 - r3 * arg11) / (r3 * arg11 ** 2 + dx2 ** 2 * r3))
                - ((dx1 ** 2 - r4 * arg16) / (r4 * arg16 ** 2 + dx1 ** 2 * r4))
                + ((dx2 ** 2 - r5 * arg21) / (r5 * arg21 ** 2 + dx2 ** 2 * r5))
                - ((dx1 ** 2 - r6 * arg26) / (r6 * arg26 ** 2 + dx1 ** 2 * r6))
                + ((dx1 ** 2 - r7 * arg31) / (r7 * arg31 ** 2 + dx1 ** 2 * r7))
                - ((dx2 ** 2 - r8 * arg36) / (r8 * arg36 ** 2 + dx2 ** 2 * r8))
            )

            rows["bxx"][0, nC : 2 * nC] = (
                dx2 / (r5 * arg25)
                - dx2 / (r2 * arg10)
                + dx2 / (r3 * arg15)
                - dx2 / (r8 * arg40)
                + dx1 / (r1 * arg5)
                - dx1 / (r6 * arg30)
                + dx1 / (r7 * arg35)
                - dx1 / (r4 * arg20)
            )

            rows["bxx"][0, 2 * nC :] = (
                dx1 / (r1 * arg4)
                - dx2 / (r2 * arg9)
                + dx2 / (r3 * arg14)
                - dx1 / (r4 * arg19)
                + dx2 / (r5 * arg24)
                - dx1 / (r6 * arg29)
                + dx1 / (r7 * arg34)
                - dx2 / (r8 * arg39)
            )

            rows["bxx"] /= 4 * np.pi
            rows["bxx"] *= self.M

        if ("byy" in components) or ("bzz" in components):

            rows["byy"] = np.zeros((1, 3 * nC))

            rows["byy"][0, 0:nC] = (
                dy2 / (r3 * arg15)
                - dy2 / (r2 * arg10)
                + dy1 / (r5 * arg25)
                - dy1 / (r8 * arg40)
                + dy2 / (r1 * arg5)
                - dy2 / (r4 * arg20)
                + dy1 / (r7 * arg35)
                - dy1 / (r6 * arg30)
            )
            rows["byy"][0, nC : 2 * nC] = 2 * (
                ((dy2 ** 2 - r1 * arg2) / (r1 * arg2 ** 2 + dy2 ** 2 * r1))
                - ((dy2 ** 2 - r2 * arg7) / (r2 * arg7 ** 2 + dy2 ** 2 * r2))
                + ((dy2 ** 2 - r3 * arg12) / (r3 * arg12 ** 2 + dy2 ** 2 * r3))
                - ((dy2 ** 2 - r4 * arg17) / (r4 * arg17 ** 2 + dy2 ** 2 * r4))
                + ((dy1 ** 2 - r5 * arg22) / (r5 * arg22 ** 2 + dy1 ** 2 * r5))
                - ((dy1 ** 2 - r6 * arg27) / (r6 * arg27 ** 2 + dy1 ** 2 * r6))
                + ((dy1 ** 2 - r7 * arg32) / (r7 * arg32 ** 2 + dy1 ** 2 * r7))
                - ((dy1 ** 2 - r8 * arg37) / (r8 * arg37 ** 2 + dy1 ** 2 * r8))
            )
            rows["byy"][0, 2 * nC :] = (
                dy2 / (r1 * arg3)
                - dy2 / (r2 * arg8)
                + dy2 / (r3 * arg13)
                - dy2 / (r4 * arg18)
                + dy1 / (r5 * arg23)
                - dy1 / (r6 * arg28)
                + dy1 / (r7 * arg33)
                - dy1 / (r8 * arg38)
            )

            rows["byy"] /= 4 * np.pi
            rows["byy"] *= self.M

        if "bzz" in components:

            # Trace of the gradient tensor is zero: bzz = -(bxx + byy).
            rows["bzz"] = -rows["bxx"] - rows["byy"]

        if "bxy" in components:
            rows["bxy"] = np.zeros((1, 3 * nC))

            rows["bxy"][0, 0:nC] = 2 * (
                ((dx1 * arg4) / (r1 * arg1 ** 2 + (dx1 ** 2) * r1))
                - ((dx2 * arg9) / (r2 * arg6 ** 2 + (dx2 ** 2) * r2))
                + ((dx2 * arg14) / (r3 * arg11 ** 2 + (dx2 ** 2) * r3))
                - ((dx1 * arg19) / (r4 * arg16 ** 2 + (dx1 ** 2) * r4))
                + ((dx2 * arg24) / (r5 * arg21 ** 2 + (dx2 ** 2) * r5))
                - ((dx1 * arg29) / (r6 * arg26 ** 2 + (dx1 ** 2) * r6))
                + ((dx1 * arg34) / (r7 * arg31 ** 2 + (dx1 ** 2) * r7))
                - ((dx2 * arg39) / (r8 * arg36 ** 2 + (dx2 ** 2) * r8))
            )
            rows["bxy"][0, nC : 2 * nC] = (
                dy2 / (r1 * arg5)
                - dy2 / (r2 * arg10)
                + dy2 / (r3 * arg15)
                - dy2 / (r4 * arg20)
                + dy1 / (r5 * arg25)
                - dy1 / (r6 * arg30)
                + dy1 / (r7 * arg35)
                - dy1 / (r8 * arg40)
            )
            rows["bxy"][0, 2 * nC :] = (
                1 / r1 - 1 / r2 + 1 / r3 - 1 / r4 + 1 / r5 - 1 / r6 + 1 / r7 - 1 / r8
            )

            rows["bxy"] /= 4 * np.pi

            rows["bxy"] *= self.M

        if "bxz" in components:
            rows["bxz"] = np.zeros((1, 3 * nC))

            rows["bxz"][0, 0:nC] = 2 * (
                ((dx1 * arg5) / (r1 * (arg1 ** 2) + (dx1 ** 2) * r1))
                - ((dx2 * arg10) / (r2 * (arg6 ** 2) + (dx2 ** 2) * r2))
                + ((dx2 * arg15) / (r3 * (arg11 ** 2) + (dx2 ** 2) * r3))
                - ((dx1 * arg20) / (r4 * (arg16 ** 2) + (dx1 ** 2) * r4))
                + ((dx2 * arg25) / (r5 * (arg21 ** 2) + (dx2 ** 2) * r5))
                - ((dx1 * arg30) / (r6 * (arg26 ** 2) + (dx1 ** 2) * r6))
                + ((dx1 * arg35) / (r7 * (arg31 ** 2) + (dx1 ** 2) * r7))
                - ((dx2 * arg40) / (r8 * (arg36 ** 2) + (dx2 ** 2) * r8))
            )
            rows["bxz"][0, nC : 2 * nC] = (
                1 / r1 - 1 / r2 + 1 / r3 - 1 / r4 + 1 / r5 - 1 / r6 + 1 / r7 - 1 / r8
            )
            rows["bxz"][0, 2 * nC :] = (
                dz2 / (r1 * arg4)
                - dz2 / (r2 * arg9)
                + dz1 / (r3 * arg14)
                - dz1 / (r4 * arg19)
                + dz2 / (r5 * arg24)
                - dz2 / (r6 * arg29)
                + dz1 / (r7 * arg34)
                - dz1 / (r8 * arg39)
            )

            rows["bxz"] /= 4 * np.pi

            rows["bxz"] *= self.M

        if "byz" in components:
            rows["byz"] = np.zeros((1, 3 * nC))

            rows["byz"][0, 0:nC] = (
                1 / r3 - 1 / r2 + 1 / r5 - 1 / r8 + 1 / r1 - 1 / r4 + 1 / r7 - 1 / r6
            )
            rows["byz"][0, nC : 2 * nC] = 2 * (
                (((dy2 * arg5) / (r1 * (arg2 ** 2) + (dy2 ** 2) * r1)))
                - (((dy2 * arg10) / (r2 * (arg7 ** 2) + (dy2 ** 2) * r2)))
                + (((dy2 * arg15) / (r3 * (arg12 ** 2) + (dy2 ** 2) * r3)))
                - (((dy2 * arg20) / (r4 * (arg17 ** 2) + (dy2 ** 2) * r4)))
                + (((dy1 * arg25) / (r5 * (arg22 ** 2) + (dy1 ** 2) * r5)))
                - (((dy1 * arg30) / (r6 * (arg27 ** 2) + (dy1 ** 2) * r6)))
                + (((dy1 * arg35) / (r7 * (arg32 ** 2) + (dy1 ** 2) * r7)))
                - (((dy1 * arg40) / (r8 * (arg37 ** 2) + (dy1 ** 2) * r8)))
            )
            rows["byz"][0, 2 * nC :] = (
                dz2 / (r1 * arg3)
                - dz2 / (r2 * arg8)
                + dz1 / (r3 * arg13)
                - dz1 / (r4 * arg18)
                + dz2 / (r5 * arg23)
                - dz2 / (r6 * arg28)
                + dz1 / (r7 * arg33)
                - dz1 / (r8 * arg38)
            )

            rows["byz"] /= 4 * np.pi

            rows["byz"] *= self.M

        # Field components are also computed when 'tmi' is requested,
        # since tmi projects (bx, by, bz) onto the inducing direction.
        if ("bx" in components) or ("tmi" in components):
            rows["bx"] = np.zeros((1, 3 * nC))

            rows["bx"][0, 0:nC] = (
                (-2 * np.arctan2(dx1, arg1 + tol1))
                - (-2 * np.arctan2(dx2, arg6 + tol1))
                + (-2 * np.arctan2(dx2, arg11 + tol1))
                - (-2 * np.arctan2(dx1, arg16 + tol1))
                + (-2 * np.arctan2(dx2, arg21 + tol1))
                - (-2 * np.arctan2(dx1, arg26 + tol1))
                + (-2 * np.arctan2(dx1, arg31 + tol1))
                - (-2 * np.arctan2(dx2, arg36 + tol1))
            )
            rows["bx"][0, nC : 2 * nC] = (
                np.log(arg5)
                - np.log(arg10)
                + np.log(arg15)
                - np.log(arg20)
                + np.log(arg25)
                - np.log(arg30)
                + np.log(arg35)
                - np.log(arg40)
            )
            rows["bx"][0, 2 * nC :] = (
                (np.log(arg4) - np.log(arg9))
                + (np.log(arg14) - np.log(arg19))
                + (np.log(arg24) - np.log(arg29))
                + (np.log(arg34) - np.log(arg39))
            )
            rows["bx"] /= -4 * np.pi

            rows["bx"] *= self.M

        if ("by" in components) or ("tmi" in components):
            rows["by"] = np.zeros((1, 3 * nC))

            rows["by"][0, 0:nC] = (
                np.log(arg5)
                - np.log(arg10)
                + np.log(arg15)
                - np.log(arg20)
                + np.log(arg25)
                - np.log(arg30)
                + np.log(arg35)
                - np.log(arg40)
            )
            rows["by"][0, nC : 2 * nC] = (
                (-2 * np.arctan2(dy2, arg2 + tol1))
                - (-2 * np.arctan2(dy2, arg7 + tol1))
                + (-2 * np.arctan2(dy2, arg12 + tol1))
                - (-2 * np.arctan2(dy2, arg17 + tol1))
                + (-2 * np.arctan2(dy1, arg22 + tol1))
                - (-2 * np.arctan2(dy1, arg27 + tol1))
                + (-2 * np.arctan2(dy1, arg32 + tol1))
                - (-2 * np.arctan2(dy1, arg37 + tol1))
            )
            rows["by"][0, 2 * nC :] = (
                (np.log(arg3) - np.log(arg8))
                + (np.log(arg13) - np.log(arg18))
                + (np.log(arg23) - np.log(arg28))
                + (np.log(arg33) - np.log(arg38))
            )

            rows["by"] /= -4 * np.pi

            rows["by"] *= self.M

        if ("bz" in components) or ("tmi" in components):
            rows["bz"] = np.zeros((1, 3 * nC))

            rows["bz"][0, 0:nC] = (
                np.log(arg4)
                - np.log(arg9)
                + np.log(arg14)
                - np.log(arg19)
                + np.log(arg24)
                - np.log(arg29)
                + np.log(arg34)
                - np.log(arg39)
            )
            rows["bz"][0, nC : 2 * nC] = (
                (np.log(arg3) - np.log(arg8))
                + (np.log(arg13) - np.log(arg18))
                + (np.log(arg23) - np.log(arg28))
                + (np.log(arg33) - np.log(arg38))
            )
            rows["bz"][0, 2 * nC :] = (
                (-2 * np.arctan2(dz2, arg1_ + tol1))
                - (-2 * np.arctan2(dz2, arg6_ + tol1))
                + (-2 * np.arctan2(dz1, arg11_ + tol1))
                - (-2 * np.arctan2(dz1, arg16_ + tol1))
                + (-2 * np.arctan2(dz2, arg21_ + tol1))
                - (-2 * np.arctan2(dz2, arg26_ + tol1))
                + (-2 * np.arctan2(dz1, arg31_ + tol1))
                - (-2 * np.arctan2(dz1, arg36_ + tol1))
            )
            rows["bz"] /= -4 * np.pi

            rows["bz"] *= self.M

        if "tmi" in components:

            rows["tmi"] = np.dot(
                self.tmi_projection, np.r_[rows["bx"], rows["by"], rows["bz"]]
            )

        # Only the explicitly requested components are returned, in order.
        return np.vstack([rows[component] for component in components])

    @property
    def deleteTheseOnModelUpdate(self):
        # Amplitude data makes the cached G^T G diagonal model-dependent,
        # so it must be invalidated whenever the model changes.
        deletes = super().deleteTheseOnModelUpdate
        if self.is_amplitude_data:
            deletes += ["_gtg_diagonal"]
        return deletes

    @property
    def coordinate_system(self):
        # Deprecated: raise with a pointer to the replacement API.
        raise AttributeError(
            "The coordinate_system property has been removed. "
            "Instead make use of `SimPEG.maps.SphericalSystem`."
        )
Beispiel #19
0
import patch
from resource_manager_common import aws_utils

# Module-level AWS clients, wrapped by the resource manager's ClientWrapper
# (presumably for logging/retries — see aws_utils).
s3 = aws_utils.ClientWrapper(boto3.client('s3'))
api_gateway = aws_utils.ClientWrapper(boto3.client('apigateway'))

# Service principal used when granting API Gateway permissions.
API_GATEWAY_SERVICE_NAME = 'apigateway.amazonaws.com'
# Name of the single deployment stage this handler manages.
STAGE_NAME = 'api'

PROPERTY_SCHEMA = {
    'ConfigurationBucket':
    properties.String(),
    'ConfigurationKey':
    properties.String(),
    'CacheClusterEnabled':
    properties.Boolean(default=False),
    'CacheClusterSize':
    properties.String(default='0.5'),
    'SwaggerSettings':
    properties.Dictionary(default={}),
    'MethodSettings':
    properties.Object(
        default={},
        schema={
            '*':
            properties.Object(
                default={},  # path, can be *
                schema={
                    '*':
                    properties.Object(
                        default={},  # method, can be *