def __init__(self, sparkContext: SparkContext, InputLocation: str, startOffset: int,
             endOffset: int, splitter: FileDataSplitter, carryInputData: bool,
             newLevel: StorageLevel, sourceEpsgCRSCode: str, targetEpsgCode: str):
    """
    Build the spatial RDD from a file, reprojecting from the source to the target CRS.

    :param sparkContext: SparkContext, the spark context
    :param InputLocation: str, the input location
    :param startOffset: offset of the first data column to read
    :param endOffset: offset of the last data column to read
    :param splitter: FileDataSplitter, file data splitter used to split the data
    :param carryInputData: whether to carry the raw input data along
    :param newLevel: storage level for the underlying JVM RDD
    :param sourceEpsgCRSCode: str, the source epsg CRS code
    :param targetEpsgCode: str, the target epsg code
    """
    super().__init__(sparkContext)
    # Translate Python-side enums into their JVM counterparts before the call.
    splitter_jvm = FileSplitterJvm(self._jvm, splitter).jvm_instance
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(
        self._jsc, InputLocation, startOffset, endOffset,
        splitter_jvm, carryInputData, storage_jvm,
        sourceEpsgCRSCode, targetEpsgCode
    )
def __init__(self, sparkContext: SparkContext, InputLocation: str, startOffset: int,
             endOffset: int, splitter: FileDataSplitter, carryInputData: bool,
             partitions: int, newLevel: StorageLevel, sourceEpsgCRSCode: str,
             targetEpsgCode: str):
    """
    Build the spatial RDD from a file with an explicit partition count,
    reprojecting from the source to the target CRS.

    :param sparkContext: the spark context
    :param InputLocation: the input location
    :param startOffset: offset of the first data column to read
    :param endOffset: offset of the last data column to read
    :param splitter: file data splitter used to split the data
    :param carryInputData: whether to carry the raw input data along
    :param partitions: number of partitions for the RDD
    :param newLevel: storage level for the underlying JVM RDD
    :param sourceEpsgCRSCode: the source epsg CRS code
    :param targetEpsgCode: the target epsg code
    """
    super().__init__(sparkContext)
    # Translate Python-side enums into their JVM counterparts before the call.
    splitter_jvm = FileSplitterJvm(self._jvm, splitter).jvm_instance
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(
        self._jsc, InputLocation, startOffset, endOffset,
        splitter_jvm, carryInputData, partitions, storage_jvm,
        sourceEpsgCRSCode, targetEpsgCode
    )
def __init__(self, rawSpatialRDD: JvmSpatialRDD, newLevel: StorageLevel):
    """
    Wrap an existing JVM-side raw spatial RDD at the given storage level.

    :param rawSpatialRDD: the JVM raw spatial RDD to wrap
    :param newLevel: storage level for the underlying JVM RDD
    """
    super().__init__(rawSpatialRDD.sc)
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(rawSpatialRDD.jsrdd, storage_jvm)
def __init__(self, rdd: RDD, newLevel: StorageLevel):
    """
    Build the spatial RDD by deserializing a plain PySpark RDD into a
    JVM polygon raw RDD.

    :param rdd: the PySpark RDD to deserialize
    :param newLevel: storage level for the underlying JVM RDD
    """
    # Consistency fix: delegate to the base initializer (as the sibling
    # Point constructor does) instead of manually assigning self._sc and
    # self._jvm, which skips any other setup the base class performs.
    super().__init__(rdd.ctx)
    spatial_rdd = self._jvm.GeoSerializerData.deserializeToPolygonRawRDD(
        rdd._jrdd)
    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(spatial_rdd, new_level_jvm)
def __init__(self, rdd: RDD, newLevel: StorageLevel):
    """
    Build the spatial RDD by deserializing a plain PySpark RDD into a
    JVM point raw RDD.

    :param rdd: the PySpark RDD to deserialize
    :param newLevel: storage level for the underlying JVM RDD
    """
    super().__init__(rdd.ctx)
    deserialized = self._jvm.GeoSerializerData.deserializeToPointRawRDD(
        rdd._jrdd)
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(deserialized, storage_jvm)
def __init__(self, rawSpatialRDD: JvmSpatialRDD, newLevel: StorageLevel,
             sourceEpsgCRSCode: str, targetEpsgCode: str):
    """
    Wrap an existing JVM-side raw spatial RDD, reprojecting from the source
    to the target CRS.

    :param rawSpatialRDD: the JVM raw spatial RDD to wrap
    :param newLevel: storage level for the underlying JVM RDD
    :param sourceEpsgCRSCode: the source epsg CRS code
    :param targetEpsgCode: the target epsg code
    """
    super().__init__(rawSpatialRDD.sc)
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(
        rawSpatialRDD.jsrdd, storage_jvm, sourceEpsgCRSCode, targetEpsgCode
    )
def __init__(self, sparkContext: SparkContext, InputLocation: str,
             splitter: FileDataSplitter, carryInputData: bool,
             newLevel: StorageLevel):
    """
    Build the spatial RDD from a file.

    :param sparkContext: the spark context
    :param InputLocation: the input location
    :param splitter: file data splitter used to split the data
    :param carryInputData: whether to carry the raw input data along
    :param newLevel: storage level for the underlying JVM RDD
    """
    super().__init__(sparkContext)
    splitter_jvm = FileSplitterJvm(self._jvm, splitter).jvm_instance
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(
        self._jsc, InputLocation, splitter_jvm, carryInputData, storage_jvm
    )
def __init__(self, sparkContext: SparkContext, InputLocation: str,
             splitter: FileDataSplitter, carryInputData: bool,
             newLevel: StorageLevel):
    """
    Build the spatial RDD from a file.

    :param sparkContext: SparkContext, the spark context
    :param InputLocation: str, the input location
    :param splitter: FileDataSplitter, file data splitter used to split the data
    :param carryInputData: whether to carry the raw input data along
    :param newLevel: storage level for the underlying JVM RDD
    """
    super().__init__(sparkContext)
    # Resolve the JVM counterparts of the Python-side enums up front.
    splitter_jvm = FileSplitterJvm(self._jvm, splitter).jvm_instance
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(
        self._jsc, InputLocation, splitter_jvm, carryInputData, storage_jvm
    )
def __init__(self, sparkContext: SparkContext, InputLocation: str, startOffset: int,
             endOffset: int, splitter: FileDataSplitter, carryInputData: bool,
             partitions: int, newLevel: StorageLevel):
    """
    Build the spatial RDD from a file with an explicit partition count.

    :param sparkContext: the spark context
    :param InputLocation: the input location
    :param startOffset: offset of the first data column to read
    :param endOffset: offset of the last data column to read
    :param splitter: file data splitter used to split the data
    :param carryInputData: whether to carry the raw input data along
    :param partitions: number of partitions for the RDD
    :param newLevel: storage level for the underlying JVM RDD
    """
    super().__init__(sparkContext)
    # Name the JVM-side conversions before passing them, for readability.
    splitter_jvm = FileSplitterJvm(self._jvm, splitter).jvm_instance
    storage_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(
        self._jsc, InputLocation, startOffset, endOffset,
        splitter_jvm, carryInputData, partitions, storage_jvm
    )