Example #1
    def option(self, key, value):
        """Adds an output option for the underlying data source.

        .. note:: Experimental.
        """
        self._jwrite = self._jwrite.option(key, to_str(value))
        return self
Example #2
    def option(self, key, value):
        """Adds an output option for the underlying data source.

        .. note:: Experimental.
        """
        self._jwrite = self._jwrite.option(key, to_str(value))
        return self
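For context, here is a minimal usage sketch of how a streaming writer's option() calls are typically chained before start(). The rate source, console sink, and the truncate/checkpointLocation option names are standard Spark options, but the checkpoint path is a placeholder and the timeout is arbitrary.

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("writer-option-demo").getOrCreate()

# The built-in "rate" source generates rows continuously, which is handy for testing.
events = spark.readStream.format("rate").option("rowsPerSecond", 5).load()

query = (events.writeStream
         .format("console")
         .option("truncate", False)                                     # each option() call returns the writer, so calls chain
         .option("checkpointLocation", "/tmp/writer-option-demo-ckpt")  # placeholder path
         .start())

query.awaitTermination(10)   # let it run briefly, then stop
query.stop()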
Example #3
    def options(self, **options):
        """Adds output options for the underlying data source.

        .. note:: Experimental.
        """
        for k in options:
            self._jwrite = self._jwrite.option(k, to_str(options[k]))
        return self
Example #4
    def options(self, **options):
        """Adds output options for the underlying data source.

        .. note:: Experimental.
        """
        for k in options:
            self._jwrite = self._jwrite.option(k, to_str(options[k]))
        return self
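Both option() and options() push every value through the to_str() helper before handing it to the JVM writer. Below is a hedged sketch of what that normalization amounts to, modelled on the helper in pyspark.sql.utils; treat it as an approximation rather than the canonical source.

def to_str_sketch(value):
    """Approximate the normalization applied to option values."""
    if isinstance(value, bool):
        return str(value).lower()   # True -> "true", as the JVM side expects
    elif value is None:
        return value                # None passes through rather than becoming "None"
    else:
        return str(value)           # ints, floats and strings become plain strings

print(to_str_sketch(True), to_str_sketch(2), to_str_sketch("UTC"))  # -> true 2 UTC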
Example #5
    def option(self, key, value):
        """Adds an input option for the underlying data source.

        .. note:: Experimental.

        >>> s = spark.readStream.option("x", 1)
        """
        self._jreader = self._jreader.option(key, to_str(value))
        return self
Example #6
    def option(self, key, value):
        """Adds an input option for the underlying data source.

        .. note:: Experimental.

        >>> s = spark.readStream.option("x", 1)
        """
        self._jreader = self._jreader.option(key, to_str(value))
        return self
Example #7
    def options(self, **options):
        """Adds input options for the underlying data source.

        .. note:: Experimental.

        >>> s = spark.readStream.options(x="1", y=2)
        """
        for k in options:
            self._jreader = self._jreader.option(k, to_str(options[k]))
        return self
Example #8
    def options(self, **options):
        """Adds input options for the underlying data source.

        .. note:: Experimental.

        >>> s = spark.readStream.options(x="1", y=2)
        """
        for k in options:
            self._jreader = self._jreader.option(k, to_str(options[k]))
        return self
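The doctest lines above use the placeholder keys x and y; they are not options any built-in source actually reads. What they illustrate is that both methods return the reader itself, so calls can be chained, as in this minimal sketch.

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("reader-options-demo").getOrCreate()

reader = spark.readStream.option("x", 1)     # returns the DataStreamReader itself
same_reader = reader.options(x="1", y=2)     # so further calls chain on the same object
assert reader is same_reader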
Example #9
    def option(self, key, value):
        """Adds an output option for the underlying data source.

        You can set the following option(s) for writing files:
            * ``timeZone``: sets the string that indicates a timezone to be used to format
                timestamps in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Evolving.
        """
        self._jwrite = self._jwrite.option(key, to_str(value))
        return self
Example #10
    def option(self, key, value):
        """Adds an output option for the underlying data source.

        You can set the following option(s) for writing files:
            * ``timeZone``: sets the string that indicates a timezone to be used to format
                timestamps in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Evolving.
        """
        self._jwrite = self._jwrite.option(key, to_str(value))
        return self
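Here is a sketch of the timeZone option in practice when streaming to a JSON file sink. The output and checkpoint directories are placeholders, and "America/Los_Angeles" is an ordinary IANA zone id; without the option, the session-local time zone (spark.sql.session.timeZone) applies.

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("timezone-write-demo").getOrCreate()

# The rate source emits a `timestamp` column, which the JSON sink will format.
events = spark.readStream.format("rate").load()

query = (events.writeStream
         .format("json")
         .option("timeZone", "America/Los_Angeles")
         .option("path", "/tmp/timezone-write-demo-out")                 # placeholder
         .option("checkpointLocation", "/tmp/timezone-write-demo-ckpt")  # placeholder
         .start())

query.awaitTermination(10)
query.stop()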
Example #11
    def option(self, key: str,
               value: "OptionalPrimitiveType") -> "DataStreamWriter":
        """Adds an output option for the underlying data source.

        .. versionadded:: 2.0.0

        Notes
        -----
        This API is evolving.
        """
        self._jwrite = self._jwrite.option(key, to_str(value))
        return self
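The OptionalPrimitiveType annotation signals which value types an option accepts. As a rough, assumption-labelled stand-in for the alias (which PySpark defines in its typing stubs), the sketch below spells out the intent: a bool, float, int, str, or None, each of which to_str() later normalizes.

from typing import Optional, Union

# Local stand-in for the alias used in the signature above; illustrative only.
PrimitiveType = Union[bool, float, int, str]
OptionalPrimitiveType = Optional[PrimitiveType]

def describe(value: OptionalPrimitiveType) -> str:
    return f"{value!r} ({type(value).__name__})"

for v in (True, 1, 2.5, "UTC", None):
    print(describe(v))   # every one of these is a legal option value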
Example #12
    def options(self, **options):
        """Adds output options for the underlying data source.

        You can set the following option(s) for writing files:
            * ``timeZone``: sets the string that indicates a timezone to be used to format
                timestamps in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Experimental.
        """
        for k in options:
            self._jwrite = self._jwrite.option(k, to_str(options[k]))
        return self
Example #13
    def options(self, **options):
        """Adds output options for the underlying data source.

        You can set the following option(s) for writing files:
            * ``timeZone``: sets the string that indicates a timezone to be used to format
                timestamps in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Experimental.
        """
        for k in options:
            self._jwrite = self._jwrite.option(k, to_str(options[k]))
        return self
Example #14
    def options(self,
                **options: "OptionalPrimitiveType") -> "DataStreamWriter":
        """Adds output options for the underlying data source.

        .. versionadded:: 2.0.0

        Notes
        -----
        This API is evolving.
        """
        for k in options:
            self._jwrite = self._jwrite.option(k, to_str(options[k]))
        return self
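Because options() accepts keyword arguments, a prepared dict of sink settings can be unpacked into a single call. In this sketch the console sink's truncate and numRows options and a checkpoint path are the assumed settings; the path is a placeholder.

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("writer-options-dict-demo").getOrCreate()
events = spark.readStream.format("rate").load()

sink_settings = {
    "checkpointLocation": "/tmp/writer-options-dict-ckpt",  # placeholder path
    "truncate": False,   # bools are normalized to "false"/"true"
    "numRows": 5,        # ints are normalized to "5"
}

query = events.writeStream.format("console").options(**sink_settings).start()
query.awaitTermination(10)
query.stop()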
Example #15
    def option(self, key, value):
        """Adds an input option for the underlying data source.

        You can set the following option(s) for reading files:
            * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
                in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Evolving.

        >>> s = spark.readStream.option("x", 1)
        """
        self._jreader = self._jreader.option(key, to_str(value))
        return self
Example #16
    def option(self, key, value):
        """Adds an input option for the underlying data source.

        You can set the following option(s) for reading files:
            * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
                in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Evolving.

        >>> s = spark.readStream.option("x", 1)
        """
        self._jreader = self._jreader.option(key, to_str(value))
        return self
Example #17
    def options(self, **options):
        """Adds input options for the underlying data source.

        You can set the following option(s) for reading files:
            * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
                in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Experimental.

        >>> s = spark.readStream.options(x="1", y=2)
        """
        for k in options:
            self._jreader = self._jreader.option(k, to_str(options[k]))
        return self
Example #18
    def options(self, **options):
        """Adds input options for the underlying data source.

        You can set the following option(s) for reading files:
            * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps
                in the JSON/CSV datasources or partition values.
                If it isn't set, it uses the default value, session local timezone.

        .. note:: Experimental.

        >>> s = spark.readStream.options(x="1", y=2)
        """
        for k in options:
            self._jreader = self._jreader.option(k, to_str(options[k]))
        return self
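Here is a sketch of the timeZone option on the reading side, parsing timestamps from a streaming CSV source. Streaming file sources need an explicit schema, the input directory is a placeholder that must exist before running, and header is the standard CSV header option.

from pyspark.sql import SparkSession
from pyspark.sql.types import StructField, StructType, StringType, TimestampType

spark = SparkSession.builder.appName("timezone-read-demo").getOrCreate()

schema = StructType([
    StructField("event", StringType()),
    StructField("ts", TimestampType()),
])

events = (spark.readStream
          .schema(schema)                              # streaming file sources require a schema
          .options(timeZone="UTC", header="true")
          .csv("/tmp/timezone-read-demo-in"))          # placeholder input directory

events.printSchema()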
Example #19
    def option(self, key: str,
               value: "OptionalPrimitiveType") -> "DataStreamReader":
        """Adds an input option for the underlying data source.

        .. versionadded:: 2.0.0

        Notes
        -----
        This API is evolving.

        Examples
        --------
        >>> s = spark.readStream.option("x", 1)
        """
        self._jreader = self._jreader.option(key, to_str(value))
        return self
Example #20
    def options(self,
                **options: "OptionalPrimitiveType") -> "DataStreamReader":
        """Adds input options for the underlying data source.

        .. versionadded:: 2.0.0

        Notes
        -----
        This API is evolving.

        Examples
        --------
        >>> s = spark.readStream.options(x="1", y=2)
        """
        for k in options:
            self._jreader = self._jreader.option(k, to_str(options[k]))
        return self
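Finally, since options() is just a loop over option(), configuring a reader with one call or with a chain of single-option calls is equivalent. The x and y keys are the docstring's placeholders, not options a real source consumes.

from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("option-equivalence-demo").getOrCreate()

reader_a = spark.readStream.options(x="1", y=2)
reader_b = spark.readStream.option("x", "1").option("y", 2)
# Both readers carry x="1" and y="2"; the int 2 is normalized to the string "2"
# by to_str() before it reaches the underlying JVM reader.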