def __init__(self, sparkContext: SparkContext, InputLocation: str, startOffset: int, endOffset: int,
             splitter: FileDataSplitter, carryInputData: bool, newLevel: StorageLevel,
             sourceEpsgCRSCode: str, targetEpsgCode: str):
    """
    :param sparkContext: SparkContext instance
    :param InputLocation: str, location for loaded file
    :param startOffset: int, starting offset
    :param endOffset: int, ending offset
    :param splitter: FileDataSplitter, data file splitter
    :param carryInputData: bool, if spatial rdd should keep non geometry attributes
    :param newLevel: StorageLevel
    :param sourceEpsgCRSCode: str, epsg code which the loaded file is in, ex. epsg:4326 stands for WGS84
    :param targetEpsgCode: str, epsg code to transform the SpatialRDD to
    """
    super().__init__(sparkContext)
    jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance

    self._srdd = self._jvm_spatial_rdd(
        self._jsc,
        InputLocation,
        startOffset,
        endOffset,
        jvm_splitter,
        carryInputData,
        new_level_jvm,
        sourceEpsgCRSCode,
        targetEpsgCode
    )
def __init__(self, sparkContext: SparkContext, InputLocation: str, startOffset: int, endOffset: int,
             splitter: FileDataSplitter, carryInputData: bool, newLevel: StorageLevel,
             sourceEpsgCRSCode: str, targetEpsgCode: str):
    """
    :param sparkContext: SparkContext instance
    :param InputLocation: str, location for loaded file
    :param startOffset: int, starting offset
    :param endOffset: int, ending offset
    :param splitter: FileDataSplitter, data file splitter
    :param carryInputData: bool, if spatial rdd should keep non geometry attributes
    :param newLevel: StorageLevel
    :param sourceEpsgCRSCode: str, epsg code which the loaded file is in, ex. epsg:4326 stands for WGS84
    :param targetEpsgCode: str, epsg code to transform the SpatialRDD to
    """
    super().__init__(sparkContext)
    jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance

    self._srdd = self._jvm_spatial_rdd(
        self._jsc,
        InputLocation,
        startOffset,
        endOffset,
        jvm_splitter,
        carryInputData,
        new_level_jvm,
        sourceEpsgCRSCode,
        targetEpsgCode
    )
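# A minimal usage sketch for the constructor above. The class name ``PointRDD`` and the
# input path are hypothetical placeholders; the target code "epsg:3857" is just an
# illustrative value, while "epsg:4326" follows the example given in the docstring.
# FileDataSplitter.CSV and StorageLevel.MEMORY_ONLY are the standard splitter / pyspark
# storage level members.
#
#     from pyspark import SparkContext, StorageLevel
#
#     sc = SparkContext.getOrCreate()
#     point_rdd = PointRDD(
#         sc, "/data/points.csv", 0, 1, FileDataSplitter.CSV, True,
#         StorageLevel.MEMORY_ONLY, "epsg:4326", "epsg:3857"
#     )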
def __init__(self, rdd: RDD, newLevel: StorageLevel):
    """
    :param rdd: RDD
    :param newLevel: StorageLevel
    """
    super().__init__(rdd.ctx)

    spatial_rdd = self._jvm.GeoSerializerData.deserializeToPolygonRawRDD(rdd._jrdd)

    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    srdd = self._jvm_spatial_rdd(spatial_rdd, new_level_jvm)
    self._srdd = srdd
def __init__(self, rawSpatialRDD: JvmSpatialRDD, newLevel: StorageLevel):
    """
    :param rawSpatialRDD: JvmSpatialRDD, jvm representation of spatial rdd
    :param newLevel: StorageLevel
    """
    super().__init__(rawSpatialRDD.sc)

    jsrdd = rawSpatialRDD.jsrdd
    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(jsrdd, new_level_jvm)
def __init__(self, rdd: RDD, newLevel: StorageLevel):
    """
    :param rdd: RDD
    :param newLevel: StorageLevel
    """
    super().__init__(rdd.ctx)

    spatial_rdd = PythonRddToJavaRDDAdapter(self._jvm).deserialize_to_polygon_raw_rdd(rdd._jrdd)

    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    srdd = self._jvm_spatial_rdd(spatial_rdd, new_level_jvm)
    self._srdd = srdd
def __init__(self, rdd: RDD, newLevel: StorageLevel):
    """
    :param rdd: RDD
    :param newLevel: StorageLevel
    """
    self._sc = rdd.ctx
    self._jvm = self._sc._jvm

    spatial_rdd = self._jvm.GeoSerializerData.deserializeToLineStringRawRDD(rdd._jrdd)

    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    srdd = self._jvm_spatial_rdd(spatial_rdd, new_level_jvm)
    self._srdd = srdd
def __init__(self, rdd: RDD, newLevel: StorageLevel):
    """
    :param rdd: RDD
    :param newLevel: StorageLevel
    """
    self._sc = rdd.ctx
    self._jvm = self._sc._jvm

    spatial_rdd = PythonRddToJavaRDDAdapter(self._jvm).deserialize_to_linestring_raw_rdd(rdd._jrdd)

    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    srdd = self._jvm_spatial_rdd(spatial_rdd, new_level_jvm)
    self._srdd = srdd
def __init__(self, rawSpatialRDD: JvmSpatialRDD, newLevel: StorageLevel,
             sourceEpsgCRSCode: str, targetEpsgCode: str):
    """
    :param rawSpatialRDD: JvmSpatialRDD, jvm representation of spatial rdd
    :param newLevel: StorageLevel
    :param sourceEpsgCRSCode: str, epsg code which the loaded file is in, ex. epsg:4326 stands for WGS84
    :param targetEpsgCode: str, epsg code to transform the SpatialRDD to
    """
    super().__init__(rawSpatialRDD.sc)

    jsrdd = rawSpatialRDD.jsrdd
    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(jsrdd, new_level_jvm, sourceEpsgCRSCode, targetEpsgCode)
def __init__(self, rawSpatialRDD: JvmSpatialRDD, newLevel: StorageLevel,
             sourceEpsgCRSCode: str, targetEpsgCode: str):
    """
    :param rawSpatialRDD: JvmSpatialRDD, jvm representation of spatial rdd
    :param newLevel: StorageLevel
    :param sourceEpsgCRSCode: str, epsg code which the loaded file is in, ex. epsg:4326 stands for WGS84
    :param targetEpsgCode: str, epsg code to transform the SpatialRDD to
    """
    super().__init__(rawSpatialRDD.sc)

    jsrdd = rawSpatialRDD.jsrdd
    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(jsrdd, new_level_jvm, sourceEpsgCRSCode, targetEpsgCode)
def __init__(self, sparkContext: SparkContext, InputLocation: str, splitter: FileDataSplitter, carryInputData: bool, newLevel: StorageLevel): """ :param sparkContext: SparkContext instance :param InputLocation: str, location for loaded file :param splitter: FileDataSplitter, data file splitter :param carryInputData: bool, if spatial rdd should keep non geometry attributes :param newLevel: StorageLevel """ super().__init__(sparkContext) jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation, jvm_splitter, carryInputData, new_level_jvm)
def __init__(self, sparkContext: SparkContext, InputLocation: str, splitter: FileDataSplitter, carryInputData: bool, newLevel: StorageLevel): """ :param sparkContext: SparkContext, the spark context :param InputLocation: str, the input location :param splitter: FileDataSplitter, File data splitter which should be used to split the data :param carryInputData: :param newLevel: """ super().__init__(sparkContext) jvm_splitter = FileSplitterJvm(self._jvm, splitter) new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance self._srdd = self._jvm_spatial_rdd(self._jsc, InputLocation, jvm_splitter.jvm_instance, carryInputData, new_level_jvm)
def __init__(self, sparkContext: SparkContext, InputLocation: str, startOffset: int, endOffset: int,
             splitter: FileDataSplitter, carryInputData: bool, partitions: int, newLevel: StorageLevel):
    """
    :param sparkContext: SparkContext instance
    :param InputLocation: str, location for loaded file
    :param startOffset: int, starting offset
    :param endOffset: int, ending offset
    :param splitter: FileDataSplitter, data file splitter
    :param carryInputData: bool, if spatial rdd should keep non geometry attributes
    :param partitions: int, number of partitions
    :param newLevel: StorageLevel
    """
    super().__init__(sparkContext)
    jvm_splitter = FileSplitterJvm(self._jvm, splitter).jvm_instance
    new_level_jvm = JvmStorageLevel(self._jvm, newLevel).jvm_instance
    self._srdd = self._jvm_spatial_rdd(
        self._jsc,
        InputLocation,
        startOffset,
        endOffset,
        jvm_splitter,
        carryInputData,
        partitions,
        new_level_jvm
    )
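# Usage sketch for the partitioned loader above. The class name ``PolygonRDD``, the path,
# the offsets, and the partition count (8) are hypothetical, illustrative values; ``sc`` is
# an existing SparkContext.
#
#     polygon_rdd = PolygonRDD(
#         sc, "/data/polygons.csv", 0, 4, FileDataSplitter.CSV, True, 8,
#         StorageLevel.MEMORY_ONLY
#     )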
def persist(self, storage_level: StorageLevel):
    """
    :param storage_level: StorageLevel, pyspark storage level to persist the underlying jvm rdd with
    """
    new_jsrdd = self.jsrdd.persist(JvmStorageLevel(self.sc._jvm, storage_level).jvm_instance)
    self.jsrdd = new_jsrdd
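# Usage sketch for persist, assuming ``spatial_rdd`` is an instance of one of the
# SpatialRDD classes defined above; StorageLevel comes from pyspark.
#
#     from pyspark import StorageLevel
#
#     spatial_rdd.persist(StorageLevel.MEMORY_AND_DISK)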