Example #1
    def __init__(
        self,
        column: AutoMapperColumnOrColumnLikeType,
        check: Union[AutoMapperAnyDataType, List[AutoMapperAnyDataType]],
        value: _TAutoMapperDataType,
        else_: Optional[_TAutoMapperDataType] = None,
    ):
        super().__init__()

        self.column: AutoMapperColumnOrColumnLikeType = column
        if isinstance(check, list):
            self.check: Union[AutoMapperDataTypeBase,
                              List[AutoMapperDataTypeBase]] = [
                                  a if isinstance(a, AutoMapperDataTypeBase)
                                  else AutoMapperValueParser.parse_value(
                                      value=a) for a in check
                              ]
        else:
            self.check = (check if isinstance(check, AutoMapperDataTypeBase)
                          else AutoMapperValueParser.parse_value(value=check))
        self.value: AutoMapperDataTypeBase = (
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value=value))
        if else_:
            self.else_: AutoMapperDataTypeBase = (
                cast(AutoMapperDataTypeBase, else_) if isinstance(
                    else_, AutoMapperDataTypeBase) else
                AutoMapperValueParser.parse_value(value=else_))
        else:
            self.else_ = AutoMapperDataTypeLiteral(None)
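The coerce-or-parse idiom used above (pass an AutoMapperDataTypeBase through unchanged, otherwise run the raw value through AutoMapperValueParser.parse_value) recurs in almost every example below. A minimal sketch of it as a standalone helper, assuming the same imports as the snippets and using the hypothetical name coerce_to_mapper:

    def coerce_to_mapper(value: Any) -> AutoMapperDataTypeBase:
        # hypothetical helper, not part of the library: mapper instances pass
        # through unchanged, raw values are parsed into mapper instances
        return (value if isinstance(value, AutoMapperDataTypeBase)
                else AutoMapperValueParser.parse_value(value=value))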
Example #2
    def __init__(self,
                 value: Optional[Union[List[_T], AutoMapperDataTypeBase,
                                       List[AutoMapperDataTypeBase],
                                       List[AutoMapperTextLikeBase]]],
                 remove_nulls: bool = True,
                 include_null_properties: bool = True) -> None:
        """
        Generates a list (array) in Spark

        :param value: items to make into an array
        :param remove_nulls: whether to remove nulls from the array
        :param include_null_properties: whether to include null properties so all children keep the same schema
        """
        super().__init__()
        # can be a single mapper or a list of mappers
        self.remove_nulls: bool = remove_nulls
        self.value: Union[AutoMapperDataTypeBase, List[AutoMapperDataTypeBase]]
        if not value:
            self.value = []
        elif isinstance(value, str):
            self.value = AutoMapperValueParser.parse_value(value=value)
        elif isinstance(value, AutoMapperDataTypeBase):
            self.value = value
        elif isinstance(value, List):
            self.value = [AutoMapperValueParser.parse_value(v) for v in value]
            # if there are more than two items we have to maintain the same schema in children or Spark errors
            if include_null_properties:
                self.include_null_properties(
                    include_null_properties=include_null_properties)
        else:
            raise ValueError(f"{type(value)} is not supported")
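A minimal usage sketch for the constructor above, assuming it belongs to the library's list/array mapper (the class name is not shown in the snippet, so AutoMapperList below is an assumption):

    # hypothetical class name for the constructor shown above
    ids = AutoMapperList(["a", "b", None], remove_nulls=True)
    # each raw item is parsed into a mapper type; nulls are removed from the
    # generated Spark array because remove_nulls is True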
Example #3
 def set_value_from_kwargs(self, kwargs: Dict[str, Any]) -> None:
     self.value = {
         PythonKeywordCleaner.from_python_safe(name=parameter_name):
         AutoMapperValueParser.parse_value(column_name=parameter_name,
                                           value=parameter_value)
         for parameter_name, parameter_value in kwargs.items()
     }
Example #4
 def __init__(self, value: Dict[str, Any]) -> None:
     super().__init__()
     assert isinstance(value, dict)
     self.value: Dict[str, AutoMapperDataTypeBase] = {
         key: AutoMapperValueParser.parse_value(value)
         for key, value in value.items()
     }
Example #5
 def __init__(self, value: AutoMapperDataTypeBase):
     super().__init__()
     self.value: AutoMapperDataTypeBase = (
         value
         if isinstance(value, AutoMapperDataTypeBase)
         else AutoMapperValueParser.parse_value(value)
     )
Example #6
    def __init__(self,
                 check: AutoMapperColumnOrColumnLikeType,
                 value: _TAutoMapperDataType,
                 when_null_or_empty: Optional[_TAutoMapperDataType] = None):
        super().__init__()

        self.check: AutoMapperColumnOrColumnLikeType = check
        self.value: AutoMapperDataTypeBase = value \
            if isinstance(value, AutoMapperDataTypeBase) \
            else AutoMapperValueParser.parse_value(value)
        if when_null_or_empty:
            self.when_null: AutoMapperDataTypeBase = cast(AutoMapperDataTypeBase, when_null_or_empty) \
                if isinstance(when_null_or_empty, AutoMapperDataTypeBase) \
                else AutoMapperValueParser.parse_value(when_null_or_empty)
        else:
            self.when_null = AutoMapperDataTypeLiteral(None)
Example #7
    def __init__(
        self,
        dst_column: str,
        value: AutoMapperAnyDataType,
        column_schema: Optional[StructField],
        include_null_properties: bool,
        enable_schema_pruning: bool,
        skip_if_columns_null_or_empty: Optional[List[str]] = None,
    ) -> None:
        """
        This class handles assigning to a single column

        """
        super().__init__()
        # should only have one parameter
        self.dst_column: str = dst_column
        self.column_schema: Optional[StructField] = column_schema
        self.value: AutoMapperDataTypeBase = (
            AutoMapperValueParser.parse_value(column_name=dst_column, value=value)
            if not isinstance(value, AutoMapperDataTypeBase)
            else value
        )
        self.skip_if_columns_null_or_empty: Optional[
            List[str]
        ] = skip_if_columns_null_or_empty
        if include_null_properties:
            self.value.include_null_properties(
                include_null_properties=include_null_properties
            )
        self.enable_schema_pruning: bool = enable_schema_pruning
Example #8
 def __init__(
     self,
     *args: _T,
 ):
     super().__init__()
     self.value: List[AutoMapperDataTypeBase] = [
         value if isinstance(value, AutoMapperDataTypeBase) else
         AutoMapperValueParser.parse_value(value=value) for value in args
     ]
Example #9
    def __init__(
        self,
        *columns: AutoMapperColumnOrColumnLikeType,
    ):
        super().__init__()

        self.columns: List[AutoMapperColumnOrColumnLikeType] = [
            AutoMapperValueParser.parse_value(column) for column in columns
        ]
Example #10
    def __init__(self,
                 column: AutoMapperColumnOrColumnLikeType,
                 check: Union[str, List[str]],
                 value: _TAutoMapperDataType,
                 else_: Optional[_TAutoMapperDataType] = None):
        super().__init__()

        self.column: AutoMapperColumnOrColumnLikeType = column
        self.check: Union[str, List[str]] = check
        self.value: AutoMapperDataTypeBase = value \
            if isinstance(value, AutoMapperDataTypeBase) \
            else AutoMapperValueParser.parse_value(value)
        if else_:
            self.else_: AutoMapperDataTypeBase = cast(AutoMapperDataTypeBase, else_) \
                if isinstance(else_, AutoMapperDataTypeBase) \
                else AutoMapperValueParser.parse_value(else_)
        else:
            self.else_ = AutoMapperDataTypeLiteral(None)
Example #11
    def __init__(self,
                 *args: Union[AutoMapperNativeTextType, AutoMapperWrapperType,
                              AutoMapperTextLikeBase, AutoMapperDataTypeBase]):
        super().__init__()

        self.value: List[AutoMapperDataTypeBase] = [
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value) for value in args
        ]
Example #12
    def __init__(
        self,
        column: AutoMapperColumnOrColumnLikeType,
        mapping: Dict[Optional[AutoMapperTextInputType],
                      AutoMapperAnyDataType],
        default: Optional[AutoMapperAnyDataType] = None,
    ):
        super().__init__(value="")

        self.column: AutoMapperColumnOrColumnLikeType = column
        self.mapping: Dict[AutoMapperAnyDataType, AutoMapperDataTypeBase] = {
            key: (value if isinstance(value, AutoMapperDataTypeBase) else
                  AutoMapperValueParser.parse_value(value))
            for key, value in mapping.items()
        }
        assert self.mapping
        self.default: AutoMapperDataTypeBase = (
            default if isinstance(default, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(default))
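A hedged usage sketch for the mapping constructor above; AutoMapperMapType and the A.column(...) column helper are assumed names, only the signature comes from the snippet:

    # hypothetical names; maps raw column values to replacement values
    gender = AutoMapperMapType(
        column=A.column("gender_code"),  # assumed column-like helper
        mapping={"F": "female", "M": "male"},
        default="unknown",
    )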
Example #13
    def __init__(
        self,
        check: AutoMapperColumnOrColumnLikeType,
        value: _TAutoMapperDataType,
        when_null: Optional[Union[AutoMapperTextLikeBase,
                                  _TAutoMapperDataType]] = None,
    ):
        super().__init__()

        self.check: AutoMapperColumnOrColumnLikeType = check
        self.value: AutoMapperDataTypeBase = (
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value=value))
        if when_null:
            self.when_null: AutoMapperDataTypeBase = (
                cast(AutoMapperDataTypeBase, when_null) if isinstance(
                    when_null, AutoMapperDataTypeBase) else
                AutoMapperValueParser.parse_value(value=when_null))
        else:
            self.when_null = AutoMapperDataTypeLiteral(None)
Example #14
    def __init__(self, value: AutoMapperNumberInputType) -> None:
        """
        Converts the value to a timestamp type in Spark


        :param value: value
        :param formats: (Optional) formats to use for trying to parse the value otherwise uses Spark defaults
        """
        super().__init__()

        self.value: AutoMapperDataTypeBase = (
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value=value))
Example #15
    def __init__(
        self,
        column: AutoMapperColumnOrColumnLikeType,
        if_exists: Optional[_TAutoMapperDataType],
        if_not_exists: Optional[_TAutoMapperDataType],
    ):
        super().__init__()

        self.column: AutoMapperColumnOrColumnLikeType = (
            AutoMapperValueParser.parse_value(value=column))
        self.if_exists_column: Optional[AutoMapperDataTypeBase] = None
        if if_exists:
            self.if_exists_column = (
                if_exists if isinstance(if_exists, AutoMapperDataTypeBase) else
                AutoMapperValueParser.parse_value(value=if_exists))

        self.if_not_exists: Optional[AutoMapperDataTypeBase] = None
        if if_not_exists:
            self.if_not_exists = (
                if_not_exists if isinstance(if_not_exists, AutoMapperDataTypeBase)
                else AutoMapperValueParser.parse_value(value=if_not_exists))
Example #16
    def __init__(self,
                 *args: Union[AutoMapperNativeTextType, AutoMapperWrapperType,
                              AutoMapperTextLikeBase, AutoMapperDataTypeBase]):
        super().__init__()

        self.value: List[AutoMapperDataTypeBase] = [
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value=value) for value in args
        ]

        # always include null properties in a concat operation
        self.include_null_properties(include_null_properties=True)
Example #17
    def __init__(
        self,
        column: AutoMapperColumnOrColumnLikeType,
        check: Union[AutoMapperAnyDataType, List[AutoMapperAnyDataType]],
        value: _TAutoMapperDataType,
    ):
        super().__init__()

        self.column: AutoMapperColumnOrColumnLikeType = column
        if isinstance(check, list):
            self.check: Union[AutoMapperDataTypeBase,
                              List[AutoMapperDataTypeBase]] = [
                                  a if isinstance(a, AutoMapperDataTypeBase)
                                  else AutoMapperValueParser.parse_value(a)
                                  for a in check
                              ]
        else:
            self.check = (check if isinstance(check, AutoMapperDataTypeBase)
                          else AutoMapperValueParser.parse_value(check))
        self.value: AutoMapperDataTypeBase = (
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value))
Example #18
    def __init__(self, value: AutoMapperAnyDataType, format_: str) -> None:
        """
        Converts the date or timestamp to a string in Spark


        :param value: value
        :param format_: (Optional) formats to use for trying to format the value otherwise uses Spark defaults
        """
        super().__init__()

        self.value: AutoMapperDataTypeBase = (
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value=value))
        self.format_: str = format_
Example #19
 def __init__(self, value: AutoMapperAmountInputType, precision: int, scale: int):
     """
     Specifies the value should be used as a decimal
     :param value:
     :param precision: the maximum total number of digits (on both sides of dot)
     :param scale: the number of digits on right side of dot
     """
     super().__init__()
     self.precision = precision
     self.scale = scale
     self.value: AutoMapperDataTypeBase = (
         value
         if isinstance(value, AutoMapperDataTypeBase)
         else AutoMapperValueParser.parse_value(value=value)
     )
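For context on the two parameters: precision=5 with scale=2 allows at most 5 significant digits, 2 of them after the decimal point, so 123.45 fits while 1234.56 does not. A hedged construction sketch (AutoMapperDecimalType is a hypothetical class name):

    # hypothetical class name for the constructor shown above
    amount = AutoMapperDecimalType(value=3.5, precision=5, scale=2)
    # intended to map to DECIMAL(5, 2): up to 3 digits before and 2 after the point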
Example #20
    def __init__(self, **kwargs: Any) -> None:
        """
        base class for complex types
        :param kwargs:
        """
        super().__init__()

        # this flag specifies that we should include all values in the column_spec, even NULLs
        self.include_nulls: bool = "include_nulls" in kwargs

        self.value: Dict[str, AutoMapperDataTypeBase] = {
            # some property names are python keywords, so callers append a
            # trailing underscore, which we strip off here
            (parameter_name[:-1] if parameter_name.endswith("_")
             else parameter_name):
            AutoMapperValueParser.parse_value(parameter_value)
            for parameter_name, parameter_value in kwargs.items()
        }
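A short sketch of the trailing-underscore convention the comprehension above implements; the class name and keyword arguments are made up for illustration:

    # "class" is a python keyword, so the caller passes class_; the trailing
    # underscore is stripped when building the value dictionary
    complex_value = SomeComplexType(id_="x1", class_="inpatient")
    # resulting self.value keys: "id" and "class"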
Example #21
 def __init__(
         self,
         dst_column: str,
         value: AutoMapperAnyDataType,
         column_schema: Optional[StructField],
         include_null_properties: bool,
         skip_if_columns_null_or_empty: Optional[List[str]] = None) -> None:
     super().__init__()
     # should only have one parameter
     self.dst_column: str = dst_column
     self.column_schema: Optional[StructField] = column_schema
     self.value: AutoMapperDataTypeBase = AutoMapperValueParser.parse_value(value) \
         if not isinstance(value, AutoMapperDataTypeBase) \
         else value
     self.skip_if_columns_null_or_empty: Optional[
         List[str]] = skip_if_columns_null_or_empty
     if include_null_properties:
         self.value.include_null_properties(
             include_null_properties=include_null_properties)
Example #22
    def __init__(self,
                 value: AutoMapperDateInputType,
                 formats: Optional[List[str]] = None) -> None:
        """
        Converts a value to date only
        For datetime use the datetime mapper type


        :param value: value
        :param formats: (Optional) formats to use for trying to parse the value otherwise uses:
                        y-M-d
                        yyyyMMdd
                        M/d/y
        """
        super().__init__()
        # keep string separate so we can parse it to date

        self.value: AutoMapperDataTypeBase = (
            value if isinstance(value, AutoMapperDataTypeBase) else
            AutoMapperValueParser.parse_value(value=value))
        self.formats: Optional[List[str]] = formats
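A hedged usage sketch showing the formats override; AutoMapperDateType and A.column(...) are assumed names, while the default format list comes from the docstring above:

    # parse the source column with a d.M.y pattern instead of the defaults
    # (y-M-d, yyyyMMdd, M/d/y)
    birth_date = AutoMapperDateType(
        value=A.column("birth_date_text"),  # assumed column-like helper
        formats=["d.M.y"],
    )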
Example #23
 def __init__(self, value: AutoMapperAmountInputType):
     super().__init__()
     self.value: AutoMapperDataTypeBase = value \
         if isinstance(value, AutoMapperDataTypeBase) \
         else AutoMapperValueParser.parse_value(value)
Example #24
 def __init__(self, **kwargs: AutoMapperAnyDataType) -> None:
     super().__init__()
     self.value: Dict[str, AutoMapperDataTypeBase] = {
         key: AutoMapperValueParser.parse_value(value)
         for key, value in kwargs.items()
     }