Example #1
 def __setitem__(self, key, value):
     key = (key, ) if not isinstance(key, tuple) else key
     if all(isinstance(arg, basestring) for arg in key):
         argidxs = EinsumTensor.split_indices(key)
         #----------------------------------------#
         if isinstance(value, EinsumContraction):
             # Carry out the contraction and store the result...
             # TODO Sanity checking for correct shape
             value.contract(dest=EinsumTensor(argidxs, tensor=self))
         elif isinstance(value, EinsumTensor):
             idxs, subidxs = EinsumTensor.split_indices(argidxs,
                                                        include_sub=True)
             eself = EinsumTensor(argidxs, tensor=self)
             if idxs == value.indices and subidxs == value.sub_indices:
                 if value.coeff == 1.0:
                     super(Tensor, eself.sliced_tensor).__setitem__(
                         Ellipsis, value.sliced_tensor)
                 else:
                     super(Tensor, eself.sliced_tensor).__setitem__(
                         Ellipsis, value.coeff * value.sliced_tensor)
                 #np.ndarray.__setitem__(self, Ellipsis, value.tensor)
             elif len(idxs) == len(value.indices):
                 # Just rearrange things...
                 value.sort_to(dest=eself, multiplier=value.coeff)
             else:
                 # We're doing an internal contraction.  EinsumSum.sum_into handles this.
                 tmp = EinsumSum(value)
                 tmp.sum_into(eself)
         elif isinstance(value, EinsumSum):
             # TODO Sanity checking for correct shape
             value.sum_into(dest=EinsumTensor(argidxs, tensor=self))
         elif isinstance(value, Tensor):
             if grendel.sanity_checking_enabled:
                 # Check the shape of the tensor to be assigned to the block
                 # TODO Sanity checking for correct shape
                 pass
             #np.ndarray.__setitem__(dest_tens, tuple([Ellipsis]*self.ndim), value)
             # The user probably made a mistake
             raise ValueError("cannot assign a plain Tensor to a"
                              " string-indexed (Einstein summation) block")
     else:
         super(Tensor, self).__setitem__(key, value)
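# ----------------------------------------------------------------------------
# A minimal, standalone sketch of the key handling at the top of __setitem__
# above: a lone key is normalized to a 1-tuple, and only an all-string key is
# routed to the Einstein-summation path.  The comma-splitting helper below is
# a hypothetical stand-in; grendel's real EinsumTensor.split_indices is not
# reproduced here, and ``str`` is used in place of the Python 2 ``basestring``.
def _split_indices(key):
    # Hypothetical stand-in: "i,j" or ("i", "j") -> ("i", "j")
    idxs = []
    for part in key:
        idxs.extend(p.strip() for p in part.split(","))
    return tuple(idxs)

def _classify_setitem_key(key):
    # Mirrors the first two lines of __setitem__ above.
    key = (key,) if not isinstance(key, tuple) else key
    if all(isinstance(arg, str) for arg in key):
        return "einsum", _split_indices(key)
    return "ndarray", key

print(_classify_setitem_key("i,j"))             # ('einsum', ('i', 'j'))
print(_classify_setitem_key((0, slice(None))))  # numeric keys fall through to ndarray indexing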
 def __getitem__(self, item):
     item = (item,) if not isinstance(item, tuple) else item
     #----------------------------------------#
     # Handle einstein summation...
     if all(isinstance(i, basestring) for i in item):
         item = EinsumTensor.split_indices(item)
         #----------------------------------------#
         # Representation dependent form
         if self.representation is not None:
             if self.first_dimension_different:
                 # Act like a Tensor instance
                 return super(ComputableTensor, self.for_order(len(item)-1)).__getitem__(item)
             else:
                 # Act like a Tensor instance
                 return super(ComputableTensor, self.for_order(len(item))).__getitem__(item)
         #----------------------------------------#
         # Coordinate dependent form
         else:
             return self.for_order(len(item)).__getitem__(item)
     #========================================#
     # Representation dependent form
     elif self.representation is not None:
         indices = tuple(i.index if isinstance(i, Coordinate) else i for i in item)
         if self.first_dimension_different:
             # This is a transformation tensor.  The 'order' is one less
             #   than the number of indices.  For instance, the 'first-order'
             #   B tensor has two indices: the internal coordinate and the
             #   cartesian coordinate.
             return self.for_order(len(indices)-1).__getitem__(indices)
         else:
             # This is a different type of derivative tensor, such as a
             #   force tensor.  The order is the same as the number of indices.
             #   For instance, the 'first-order' energy derivative is a vector, with
             #   one index.
             return self.for_order(len(indices)).__getitem__(indices)
     #----------------------------------------#
     # Coordinate dependent form
     else: # self.coordinate is not None
         if sanity_checking_enabled:
             if any(isinstance(c, CartesianCoordinate) for c in item)\
                     and not all(isinstance(c, CartesianCoordinate) for c in item):
                 raise ValueError("mixing of CartesianCoordinates and integers in b tensor"
                                  " element retrieval is confusing and thus no longer allowed."
                                  " indices were ('{}')".format(
                     "', '".join(str(c) for c in item)
                 ))
         if all(isinstance(c, CartesianCoordinate) for c in item):
             item = self.coordinate.internal_indices_for_coordinates(*item)
         return self.for_order(len(item)).__getitem__(item)
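# ----------------------------------------------------------------------------
# A minimal sketch of the order bookkeeping used in __getitem__ above, stated
# only in terms of what the in-code comments describe: for a transformation
# (B-like) tensor, where first_dimension_different is True, the order is one
# less than the number of indices; for other derivative tensors it equals the
# index count.  This is an illustration, not grendel's for_order() machinery.
def _order_for_indices(n_indices, first_dimension_different):
    # Order selection mirroring the two for_order() branches above.
    return n_indices - 1 if first_dimension_different else n_indices

print(_order_for_indices(2, True))   # first-order B tensor: two indices, order 1
print(_order_for_indices(1, False))  # first-order energy derivative: one index, order 1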
Example #4
    def __getitem__(self, args):
        """
        Returns the sub-``Tensor`` corresponding to the depth specified.  If the resulting sub-``Tensor`` is just an
        item (i.e. if the ``ndim`` attribute of ``self`` equals the number of arguments given), the ``numpy.ndarray``
        behavior is used, which simply returns the entry (a special numpy subclass of ``float`` or ``int``).
        If the resulting sub-``Tensor`` has an ``ndim`` attribute of 1, a ``Vector`` object is returned.
        If the resulting sub-``Tensor`` has an ``ndim`` attribute of 2, a ``Matrix`` object is returned.

        :Examples:


        TODO:  Write example test cases
        TODO: Move this to the class documentation since it doesn't show up in sphinx

        """
        # if a single item is given, handle it as a length-1 tuple
        args = (args, ) if not isinstance(args, tuple) else args
        #----------------------------------------#
        # Handle Einstein summation
        if all(isinstance(arg, basestring) for arg in args):
            argidxs = EinsumTensor.split_indices(args)
            return EinsumTensor(argidxs, self)
        #----------------------------------------#
        else:
            ret_val = np.ndarray.__getitem__(self, args)
            if np.isscalar(ret_val):
                return ret_val
            else:
                shp = ret_val.shape
                if len(shp) == 1:
                    return ret_val.view(Vector)
                elif len(shp) == 2:
                    return ret_val.view(Matrix)
                else:
                    return ret_val
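# ----------------------------------------------------------------------------
# The docstring above asks for example test cases; the standalone sketch below
# exercises only the shape-based dispatch at the tail of __getitem__, using
# bare ndarray subclasses as hypothetical stand-ins for grendel's Vector and
# Matrix (the real constructors are not shown in this excerpt).
import numpy as np

class _Vector(np.ndarray):
    pass

class _Matrix(np.ndarray):
    pass

def _dispatch_view(ret_val):
    # Mirrors the tail of Tensor.__getitem__: scalars pass through, 1-d
    # results are viewed as Vector, 2-d results as Matrix, higher-rank
    # results are returned unchanged.
    if np.isscalar(ret_val):
        return ret_val
    shp = ret_val.shape
    if len(shp) == 1:
        return ret_val.view(_Vector)
    elif len(shp) == 2:
        return ret_val.view(_Matrix)
    return ret_val

_arr = np.arange(24.0).reshape(2, 3, 4)
print(type(_dispatch_view(_arr[0, 1, 2])).__name__)  # float64 (numpy scalar)
print(type(_dispatch_view(_arr[0, 1])).__name__)     # _Vector
print(type(_dispatch_view(_arr[0])).__name__)        # _Matrix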
 def __setitem__(self, item, value):
     item = (item,) if not isinstance(item, tuple) else item
     #----------------------------------------#
     # Handle odd cases
     if sanity_checking_enabled and item is Ellipsis:
         raise NotImplementedError
     #----------------------------------------#
     # Handle einstein summation...
     if all(isinstance(i, basestring) for i in item):
         item = EinsumTensor.split_indices(item)
         #----------------------------------------#
         # Representation dependent form
         if self.representation is not None:
             if self.first_dimension_different:
                 # Act like a Tensor instance (skip nonsense in Derivative tensor)
                 return super(ComputableTensor, self.for_order(len(item)-1)).__setitem__(item, value)
             else:
                 # Act like a Tensor instance
                 return super(ComputableTensor, self.for_order(len(item))).__setitem__(item, value)
         #----------------------------------------#
         # Coordinate dependent form
         else:
             return self.for_order(len(item)).__setitem__(item, value)
     #----------------------------------------#
     elif self.representation is not None:
         # Representation-dependent form
         # Handle the case where the user is setting things in chunks:
         # Try to figure out what they are trying to do.
         if item == (Ellipsis,) and not self.first_dimension_different:
             try:
                 shp = value.shape
             except AttributeError:
                 raise ValueError(
                     "Setting a DerivativeCollection item by giving an Ellipsis is ambiguous,"
                     " especially if the value doesn't have a 'shape' attribute.  Please set"
                     " items individually or use the 'for_order()' instance method to retrieve"
                     " the individual Tensor to set.")
             tens = self.for_order(len(shp))
             tens[...] = value
             return
         elif self.first_dimension_different and len(item) == 1:
             try:
                 shp = value.shape
             except AttributeError:
                 raise ValueError(
                     "Setting a DerivativeCollection item by giving just one key is ambiguous,"
                     " especially if the value doesn't have a 'shape' attribute.  Please set"
                     " items individually or use the 'for_order()' instance method to retrieve"
                     " the individual Tensor to set.")
             order = len(shp)
             self.for_order(order).__setitem__(item, value)
             return
         elif not np.isscalar(value):
             raise NotImplementedError(
                 "Setting non-scalar chunks of a DerivativeCollection is mostly not implemented;"
                 " the exception is the case of a transformation tensor (a DerivativeCollection"
                 " where 'first_dimension_different' is set to True) where indices for the first"
                 " dimension may be specified along with corresponding chunks.  In other cases,"
                 " it is too difficult to understand to which order Tensor the assignment is being"
                 " made (this may be allowed in the future, but not now).  If you need to do this,"
                 " use the 'for_order()' instance method and explicitly specify the order the"
                 " Tensor to which you wish to make the assignment."
             )
         #----------------------------------------#
         indices = [i.index if isinstance(i, Coordinate) else i for i in item]
         # See notes in __getitem__
         if self.first_dimension_different:
             self.for_order(len(indices)-1).__setitem__(tuple(indices), value)
             return
         else:
             self.for_order(len(indices)).__setitem__(tuple(indices), value)
             return
     else: # self.coordinate is not None
         # Coordinate-dependent form
         if item == (Ellipsis,):
             try:
                 shp = value.shape
             except AttributeError:
                 raise ValueError(
                     "Setting a DerivativeCollection item by giving an Ellipsis is ambiguous,"
                     " especially if the value doesn't have a 'shape' attribute (which is the"
                     " case for the object passed in of type `{ty}`)  Please set items"
                     " individually or use the 'for_order()' instance method to retrieve"
                     " the individual Tensor to set.".format(
                         ty=type(item).__name__
                     ))
             tens = self.for_order(len(shp))
             tens[...] = value
             return
         elif not np.isscalar(value):
             raise NotImplementedError(
                 "Setting non-scalar chunks of a DerivativeCollection is mostly not implemented;"
                 " the exception is the case of a transformation tensor (a DerivativeCollection"
                 " where 'first_dimension_different' is set to True) where indices for the first"
                 " dimension may be specified along with corresponding chunks.  In other cases,"
                 " it is too difficult to understand to which order Tensor the assignment is being"
                 " made (this may be allowed in the future, but not now).  If you need to do this,"
                 " use the 'for_order()' instance method and explicitly specify the order the"
                 " Tensor to which you wish to make the assignment."
             )
         else:
             indices = self.coordinate.internal_indices_for_coordinates(*item)
             self.for_order(len(indices)).__setitem__(indices, value)
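# ----------------------------------------------------------------------------
# A standalone sketch of the Ellipsis branch of __setitem__ above: the order
# of the target Tensor is inferred from value.shape and the assignment is
# delegated through for_order().  A dict of numpy arrays stands in for the
# real DerivativeCollection, whose constructor is not shown in this excerpt.
import numpy as np

class _FakeDerivativeCollection(object):
    # Hypothetical stand-in holding one plain array per derivative order.
    def __init__(self, shapes_by_order):
        self._tensors = dict((order, np.zeros(shape))
                             for order, shape in shapes_by_order.items())

    def for_order(self, order):
        return self._tensors[order]

    def set_from_ellipsis(self, value):
        # Mirrors the Ellipsis branch: infer the order from value.shape,
        # then assign into the corresponding per-order tensor.
        try:
            shp = value.shape
        except AttributeError:
            raise ValueError("value must have a 'shape' attribute")
        self.for_order(len(shp))[...] = value

coll = _FakeDerivativeCollection({1: (3,), 2: (3, 3)})
coll.set_from_ellipsis(np.ones((3, 3)))  # routed to the order-2 tensor
print(coll.for_order(2).sum())           # 9.0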