def cartodb_make_valid_geom_local(src_fc):
    """Repair invalid geometries by round-tripping through SpatiaLite.

    Exports src_fc into a new SpatiaLite database via ogr2ogr, then runs
    ST_MakeValid in place over every row whose geometry is invalid.

    :param src_fc: path to a shapefile, or to a feature class inside a GDB
    :return: path to the resulting .sqlite database
    """
    if os.path.splitext(src_fc)[1] == '.shp':
        source_dir = os.path.dirname(src_fc)
    else:
        # Need to write the outfile to shp-- ogr2ogr can't handle true curves
        # stored in geodatabases; it will represent them as a point, which
        # spatialite will then choke on
        source_dir = os.path.dirname(os.path.dirname(src_fc))
        shp_name = 'source.shp'
        arcpy.FeatureClassToFeatureClass_conversion(src_fc, source_dir, shp_name)
        src_fc = os.path.join(source_dir, shp_name)

    sqlite_dir = os.path.join(source_dir, 'sqlite')
    os.mkdir(sqlite_dir)
    out_sqlite_path = os.path.join(sqlite_dir, 'out.sqlite')

    # Build the ogr2ogr command, letting the shared helper attach the source FC
    export_cmd = ['ogr2ogr', '-f', 'SQLite', out_sqlite_path]
    export_cmd = add_fc_to_ogr2ogr_cmd(src_fc, export_cmd)
    export_cmd += ["-dsco", "SPATIALITE=yes"]

    logging.debug('Creating sqlite database')
    run_subprocess(export_cmd)

    # Table name matches the shapefile's base name
    table_name = util.gen_paths_shp(src_fc)[2]
    sql = 'UPDATE {0} SET GEOMETRY = ST_MakeValid(GEOMETRY) WHERE ST_IsValid(GEOMETRY) <> 1;'.format(table_name)

    repair_cmd = ['spatialite', out_sqlite_path, sql]
    run_subprocess(repair_cmd)

    return out_sqlite_path
def cartodb_make_valid_geom_local(src_fc):
    """Repair invalid geometries by round-tripping through SpatiaLite.

    Exports src_fc into a new SpatiaLite database via ogr2ogr (forcing 2D
    geometries), then runs ST_MakeValid in place over every invalid row.

    :param src_fc: path to a shapefile, or to a feature class inside a GDB
    :return: path to the resulting .sqlite database
    """
    if os.path.splitext(src_fc)[1] == '.shp':
        source_dir = os.path.dirname(src_fc)
    else:
        # Need to write the outfile to shp-- ogr2ogr can't handle true curves
        # stored in geodatabases; it will represent them as a point, which
        # spatialite will then choke on
        source_dir = os.path.dirname(os.path.dirname(src_fc))
        shp_name = 'source.shp'
        arcpy.FeatureClassToFeatureClass_conversion(src_fc, source_dir, shp_name)
        src_fc = os.path.join(source_dir, shp_name)

    sqlite_dir = os.path.join(source_dir, 'sqlite')
    os.mkdir(sqlite_dir)
    out_sqlite_path = os.path.join(sqlite_dir, 'out.sqlite')

    # Build the ogr2ogr command; '-dim 2' flattens any Z/M values to 2D
    export_cmd = ['ogr2ogr', '-f', 'SQLite', out_sqlite_path]
    export_cmd = add_fc_to_ogr2ogr_cmd(src_fc, export_cmd)
    export_cmd += ["-dsco", "SPATIALITE=yes", '-dim', '2']

    logging.debug('Creating sqlite database')
    util.run_subprocess(export_cmd)

    # Table name matches the shapefile's base name
    table_name = util.gen_paths_shp(src_fc)[2]
    sql = 'UPDATE {0} SET GEOMETRY = ST_MakeValid(GEOMETRY) WHERE ST_IsValid(GEOMETRY) <> 1;'.format(
        table_name)

    repair_cmd = ['spatialite', out_sqlite_path, sql]
    util.run_subprocess(repair_cmd)

    return out_sqlite_path
def cartodb_make_valid_geom_local(src_fc):
    """Repair invalid geometries by round-tripping through SpatiaLite.

    Exports src_fc into a new SpatiaLite database via ogr2ogr, then runs
    ST_MakeValid in place over every row whose geometry is invalid. Unlike
    the shapefile-exporting variant, this version passes GDB feature
    classes straight to ogr2ogr.

    :param src_fc: path to a shapefile, or to a feature class inside a GDB
    :return: path to the resulting .sqlite database
    """
    # A .shp lives directly in its folder; a GDB FC is one level deeper
    if os.path.splitext(src_fc)[1] == '.shp':
        source_dir = os.path.dirname(src_fc)
    else:
        source_dir = os.path.dirname(os.path.dirname(src_fc))

    sqlite_dir = os.path.join(source_dir, 'sqlite')
    os.mkdir(sqlite_dir)
    out_sqlite_path = os.path.join(sqlite_dir, 'out.sqlite')

    export_cmd = ['ogr2ogr', '-f', 'SQLite', out_sqlite_path]
    export_cmd = add_fc_to_ogr2ogr_cmd(src_fc, export_cmd)
    export_cmd += ["-dsco", "SPATIALITE=yes"]

    logging.debug('Creating sqlite database')
    run_subprocess(export_cmd)

    # Table name matches the source's base name
    table_name = util.gen_paths_shp(src_fc)[2]
    sql = 'UPDATE {0} SET GEOMETRY = ST_MakeValid(GEOMETRY) WHERE ST_IsValid(GEOMETRY) <> 1;'.format(table_name)

    repair_cmd = ['spatialite', out_sqlite_path, sql]
    run_subprocess(repair_cmd)

    return out_sqlite_path
def zip_tif(input_tif):
    """
    Zip a single tif into <basepath>/<base_fname>.zip.

    :param input_tif: path to a tif
    :return: path to the zipped tif
    """
    basepath, fname, base_fname = util.gen_paths_shp(input_tif)
    zip_path = os.path.join(basepath, base_fname + '.zip')

    # Context manager guarantees the archive is closed even if write fails.
    # BUG FIX: without arcname, zipfile stores the tif under its full source
    # path, so extracting recreates the whole directory tree; store just the
    # file name instead.
    with zipfile.ZipFile(zip_path, 'w', allowZip64=True) as zf:
        zf.write(input_tif, arcname=os.path.basename(input_tif))

    return zip_path
def zip_shp(input_shp):
    """
    Zip a shapefile together with all of its sidecar files.

    :param input_shp: path to a shapefile
    :return: path to the zipped shapefile
    """
    basepath, fname, base_fname = util.gen_paths_shp(input_shp)
    zip_path = os.path.join(basepath, base_fname + '.zip')

    # NOTE(review): allowZip64=False — presumably because shapefile
    # components are capped at 2 GB anyway; confirm before changing
    zf = zipfile.ZipFile(zip_path, 'w', allowZip64=False)

    # Grab every sidecar (.dbf, .shx, .prj, ...) sharing the base name,
    # but never the archive we are currently writing
    for candidate in glob.glob(os.path.join(basepath, "*.*")):
        component = os.path.basename(candidate)
        if base_fname in component and component != base_fname + ".zip":
            add_to_zip(candidate, zf)

    zf.close()
    return zip_path
def zip_file(input_fc, temp_zip_dir, download_output=None, archive_output=None, sr_is_local=False):
    """
    Zip a feature class or raster and copy the archive to its destinations.

    :param input_fc: feature class/raster to zip
    :param temp_zip_dir: output zip dir
    :param download_output: path to the download output, if required
    :param archive_output: path to the archive output, if required
    :param sr_is_local: if the spatial reference is local, will create a _local.zip in download_output
    :return: None
    """
    logging.debug('Starting archive.zip_file')
    basepath, fname, base_fname = util.gen_paths_shp(input_fc)
    temp_dir = util.create_temp_dir(temp_zip_dir)

    data_type = arcpy.Describe(input_fc).dataType

    if data_type in ['FeatureClass', 'ShapeFile']:
        # Try to create a shapefile first, knowing that the data may be too
        # large and may have to use an FGDB instead
        logging.debug('trying to zip SHP-----------------')
        arcpy.FeatureClassToShapefile_conversion(input_fc, temp_dir)
        out_shp = os.path.join(temp_dir, fname)

        # If the dir with the shapefile is < 2GB, zip the shapefile
        if all_files_less_than_2gb(temp_dir):
            temp_zip = zip_shp(out_shp)
        else:
            logging.debug('Some components of SHP > 2 GB; now exporting to GDB instead')
            # Delete shapefile conversion dir and start fresh
            temp_dir = util.create_temp_dir(temp_zip_dir)
            gdb_fc = util.fc_to_temp_gdb(input_fc, temp_dir)
            gdb_dir = os.path.dirname(os.path.dirname(gdb_fc))
            temp_zip = zip_dir(gdb_dir)

    elif data_type == 'RasterDataset':
        temp_zip = zip_tif(input_fc)

    else:
        # BUG FIX: the original single-quoted literal spanned a physical line
        # break (a SyntaxError in Python); rejoined into one valid string
        logging.error('Unknown data_type: {0}. Exiting the program'.format(data_type))
        sys.exit(1)

    # Define output path for archive zip file and copy temp zip there,
    # timestamping the file name so successive archives never collide
    if archive_output:
        logging.debug('Archiving {0} in {1}'.format(base_fname, archive_output))
        ts = time.time()
        timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')
        dst = os.path.splitext(archive_output)[0] + '_{0}.zip'.format(timestamp)
        shutil.copy(temp_zip, dst)

    # Define output path for download zip file and copy temp zip there
    if download_output:
        logging.debug("Copying {0} to download folder {1}".format(base_fname, download_output))
        if sr_is_local:
            dst = os.path.splitext(download_output)[0] + "_local.zip"
        else:
            dst = download_output
        shutil.copy(temp_zip, dst)
def _copy_zip(temp_zip, dst):
    # Copy a finished zip to its destination: via the AWS CLI for s3:// URIs,
    # plain file copy otherwise.
    # BUG FIX: original tested "'s3://' in dst", which would also match a
    # local path merely containing that substring; startswith is the correct
    # check for a URI scheme.
    if dst.startswith('s3://'):
        subprocess.check_call(['aws', 's3', 'cp', temp_zip, dst])
    else:
        shutil.copy(temp_zip, dst)


def zip_file(input_fc, temp_zip_dir, download_output=None, archive_output=None, sr_is_local=False):
    """
    Zip a feature class or raster and copy the archive to its destinations
    (local paths or s3:// URIs).

    :param input_fc: feature class/raster to zip
    :param temp_zip_dir: output zip dir
    :param download_output: path to the download output, if required
    :param archive_output: path to the archive output, if required
    :param sr_is_local: if the spatial reference is local, will create a _local.zip in download_output
    :return: None
    """
    logging.debug('Starting archive.zip_file')
    basepath, fname, base_fname = util.gen_paths_shp(input_fc)
    temp_dir = util.create_temp_dir(temp_zip_dir)

    data_type = arcpy.Describe(input_fc).dataType

    if data_type in ['FeatureClass', 'ShapeFile']:
        # Try to create a shapefile first, knowing that the data may be too
        # large and may have to use an FGDB instead
        logging.debug('trying to zip SHP-----------------')
        arcpy.FeatureClassToShapefile_conversion(input_fc, temp_dir)
        out_shp = os.path.join(temp_dir, fname)

        # If the dir with the shapefile is < 2GB, zip the shapefile
        if all_files_less_than_2gb(temp_dir):
            temp_zip = zip_shp(out_shp)
        else:
            logging.debug('Some components of SHP > 2 GB; now exporting to GDB instead')
            # Delete shapefile conversion dir and start fresh
            temp_dir = util.create_temp_dir(temp_zip_dir)
            gdb_fc = util.fc_to_temp_gdb(input_fc, temp_dir)
            gdb_dir = os.path.dirname(os.path.dirname(gdb_fc))
            temp_zip = zip_dir(gdb_dir)

    elif data_type == 'RasterDataset':
        temp_zip = zip_tif(input_fc)

    else:
        # BUG FIX: the original single-quoted literal spanned a physical line
        # break (a SyntaxError in Python); rejoined into one valid string
        logging.error('Unknown data_type: {0}. Exiting the program'.format(data_type))
        sys.exit(1)

    # Define output path for archive zip file and copy temp zip there,
    # timestamping the file name so successive archives never collide
    if archive_output:
        logging.debug('Archiving {0} in {1}'.format(base_fname, archive_output))
        ts = time.time()
        timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')
        dst = os.path.splitext(archive_output)[0] + '_{0}.zip'.format(timestamp)
        _copy_zip(temp_zip, dst)

    # Define output path for download zip file and copy temp zip there
    if download_output:
        logging.debug("Copying {0} to download folder {1}".format(base_fname, download_output))
        if sr_is_local:
            dst = os.path.splitext(download_output)[0] + "_local.zip"
        else:
            dst = download_output
        _copy_zip(temp_zip, dst)