Example #1
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 startOffset: int, endOffset: int, splitter: FileDataSplitter,
                 carryInputData: bool, newLevel: StorageLevel,
                 sourceEpsgCRSCode: str, targetEpsgCode: str):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param startOffset: int, the start offset of the spatial fields in each record
        :param endOffset: int, the end offset of the spatial fields in each record
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        :param newLevel: StorageLevel, the storage level used to persist the RDD
        :param sourceEpsgCRSCode: str, the source epsg CRS code
        :param targetEpsgCode: str, the target epsg code
        """
        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
        new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance

        self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation,
                                           startOffset, endOffset,
                                           jvm_splitter, carryInputData,
                                           new_level_jvm, sourceEpsgCRSCode,
                                           targetEpsgCode)
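
For orientation, here is a minimal usage sketch of this overload. It assumes the constructor belongs to a spatial RDD class from the GeoSpark/Apache Sedona Python bindings (PolygonRDD is used below as a stand-in); the file path, offsets, and CRS codes are illustrative values, not taken from the example above.

from pyspark import SparkContext, StorageLevel
from sedona.core.enums import FileDataSplitter
from sedona.core.SpatialRDD import PolygonRDD

sc = SparkContext.getOrCreate()

# Read a CSV whose spatial fields occupy columns 0..8, keep the remaining
# attributes on each geometry, cache in memory, and reproject the geometries
# from EPSG:4326 to EPSG:3857 while loading.
polygon_rdd = PolygonRDD(
    sc,
    "/path/to/polygons.csv",   # InputLocation (illustrative)
    0,                         # startOffset
    8,                         # endOffset
    FileDataSplitter.CSV,      # splitter
    True,                      # carryInputData
    StorageLevel.MEMORY_ONLY,  # newLevel
    "epsg:4326",               # sourceEpsgCRSCode
    "epsg:3857",               # targetEpsgCode
)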
Example #2
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 startOffset: int, endOffset: int, splitter: FileDataSplitter,
                 carryInputData: bool, partitions: int, newLevel: StorageLevel,
                 sourceEpsgCRSCode: str, targetEpsgCode: str):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param startOffset: int, the start offset of the spatial fields in each record
        :param endOffset: int, the end offset of the spatial fields in each record
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        :param partitions: int, the number of partitions
        :param newLevel: StorageLevel, the storage level used to persist the RDD
        :param sourceEpsgCRSCode: str, the source epsg CRS code
        :param targetEpsgCode: str, the target epsg code
        """
        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance

        new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance

        self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation,
                                           startOffset, endOffset,
                                           jvm_splitter, carryInputData,
                                           partitions, new_level_jvm,
                                           sourceEpsgCRSCode, targetEpsgCode)
Example #3
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 splitter: FileDataSplitter, carryInputData: bool):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        """

        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
        self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation,
                                           jvm_splitter, carryInputData)
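
A usage sketch for this simpler overload, under the same assumption that it comes from a Sedona/GeoSpark spatial RDD class (again PolygonRDD as a stand-in); the WKT file path is illustrative.

from pyspark import SparkContext
from sedona.core.enums import FileDataSplitter
from sedona.core.SpatialRDD import PolygonRDD

sc = SparkContext.getOrCreate()

# Each line of the (illustrative) input file is a WKT geometry; with
# carryInputData=True any trailing columns are kept as user data.
polygon_rdd = PolygonRDD(sc, "/path/to/polygons.wkt", FileDataSplitter.WKT, True)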
Example #4
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 splitter: FileDataSplitter, carryInputData: bool):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        """

        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter)

        self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation,
                                           jvm_splitter.jvm_instance,
                                           carryInputData)
Example #5
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 Offset: int, splitter: FileDataSplitter, carryInputData: bool,
                 partitions: int):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param Offset: int, the offset at which the spatial fields start in each record
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        :param partitions: int, the number of partitions
        """

        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
        self._srdd = self._jvm_spatial_rdd(sparkContext._jsc, InputLocation,
                                           Offset, jvm_splitter,
                                           carryInputData, partitions)
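
A sketch for this single-Offset variant, assuming a point-style RDD class from the same bindings (PointRDD here is an assumption); the path, offset, and partition count are illustrative.

from pyspark import SparkContext
from sedona.core.enums import FileDataSplitter
from sedona.core.SpatialRDD import PointRDD

sc = SparkContext.getOrCreate()

# Offset is the column index at which the coordinate pair starts; the RDD is
# built with an explicit number of partitions but is not persisted.
point_rdd = PointRDD(sc, "/path/to/checkins.csv", 0, FileDataSplitter.CSV, True, 8)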
Example #6
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 splitter: FileDataSplitter, carryInputData: bool,
                 newLevel: StorageLevel):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        :param newLevel: StorageLevel, the storage level used to persist the RDD
        """
        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
        new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance

        self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation,
                                           jvm_splitter, carryInputData,
                                           new_level_jvm)
Example #7
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 Offset: int, splitter: FileDataSplitter, carryInputData: bool,
                 newLevel: StorageLevel):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param Offset: int, the offset at which the spatial fields start in each record
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        :param newLevel: StorageLevel, the storage level used to persist the RDD
        """

        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter)
        new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance

        self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation, Offset,
                                           jvm_splitter.jvm_instance,
                                           carryInputData, new_level_jvm)
Example #8
    def __init__(self, sparkContext: SparkContext, InputLocation: str,
                 startOffset: int, endOffset: int, splitter: FileDataSplitter,
                 carryInputData: bool, partitions: int,
                 newLevel: StorageLevel):
        """

        :param sparkContext: SparkContext, the spark context
        :param InputLocation: str, the input location
        :param startOffset: int, the start offset of the spatial fields in each record
        :param endOffset: int, the end offset of the spatial fields in each record
        :param splitter: FileDataSplitter, File data splitter which should be used to split the data
        :param carryInputData: bool, whether the original input data should be carried along in the RDD
        :param partitions: int, the number of partitions
        :param newLevel: StorageLevel, the storage level used to persist the RDD
        """
        super().__init__(sparkContext)
        jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
        new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance

        self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation,
                                           startOffset, endOffset,
                                           jvm_splitter, carryInputData,
                                           partitions, new_level_jvm)
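
Finally, a sketch for this partitions-plus-storage-level overload, with the same caveat that the concrete class (LineStringRDD below) and all argument values are assumptions chosen for illustration.

from pyspark import SparkContext, StorageLevel
from sedona.core.enums import FileDataSplitter
from sedona.core.SpatialRDD import LineStringRDD

sc = SparkContext.getOrCreate()

# Same arguments as the CRS-free overload, plus an explicit partition count
# and a storage level for the freshly built RDD.
linestring_rdd = LineStringRDD(
    sc,
    "/path/to/linestrings.tsv",  # InputLocation (illustrative)
    0,                           # startOffset
    7,                           # endOffset
    FileDataSplitter.TSV,        # splitter
    True,                        # carryInputData
    16,                          # partitions
    StorageLevel.MEMORY_ONLY,    # newLevel
)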