def get_vals( csv_file, csv_override_file='', get_datum_info=1, field_name='GEOGCS', field_datum='DATUM' ):
    """Harvest OGR datum names (and optionally TOWGS84 shifts) from a CSV table.

    Every EPSG code in csv_file is fed to OGR's SpatialReference to recover
    the datum name, which is stored in the module-level ogr_datum_names dict
    keyed by the integer DATUM_CODE from the CSV record.  When get_datum_info
    is 1 the seven TOWGS84 parameters (DX,DY,DZ,RX,RY,RZ,DS) are copied into
    the module-level towgs84_params dict as well.

    csv_file          -- primary CSV table (e.g. gcs.csv).
    csv_override_file -- optional CSV whose records replace same-coded records
                         from csv_file; '' disables overriding.
    get_datum_info    -- 1 to also harvest TOWGS84 parameters.
    field_name        -- WKT node name to read the CRS name from.
    field_datum       -- WKT node name to read the datum name from.
    """

    # load table(s)
    gcs_table = csv_tools.CSVTable()
    gcs_table.read_from_csv( csv_file )

    if csv_override_file != '':
        override_table = csv_tools.CSVTable()
        override_table.read_from_csv( csv_override_file )

    # loop for all elements
    for gcs_code in gcs_table.data.keys():
        gcs_code = int(gcs_code)

        # get the ogr datum name from epsg code
        srs = osr.SpatialReference()
        srs.ImportFromEPSG( gcs_code )
        gcs_name = srs.GetAttrValue( field_name )
        datum_name = srs.GetAttrValue( field_datum )

        # get datum code
        try:
            gcs_rec = gcs_table.get_record( gcs_code )

            # look for value in override table
            override_rec = None
            if csv_override_file != '':
                try:
                    override_rec = override_table.get_record( gcs_code )
                except Exception:
                    # code simply not present in the override table
                    override_rec = None

            if override_rec is not None:
                print('code '+str(gcs_code)+' was overriden from '+csv_override_file)
                #print(str(override_rec))
                gcs_rec = override_rec

            datum_code = int(gcs_rec['DATUM_CODE'])
            #print(str(gcs_rec))
        except Exception:
            # BUG FIX: was a bare "except:" plus a Python 2 print statement
            # (inconsistent with the print() calls above).
            print('Failed to get gcs record, or datum info for '+str(gcs_code))
            datum_code = None

        # BUG FIX: the failure path previously fell through, storing the
        # datum name under a None key and referencing the unbound gcs_rec
        # below (NameError).  Guard as the sibling get_vals variant does.
        if datum_code is not None:
            # store datum name
            ogr_datum_names[datum_code] = datum_name

            # store towgs84 parameters
            if get_datum_info == 1:
                if gcs_rec['DX'] != '':
                    towgs84_params[datum_code] = [ gcs_rec['DX'],gcs_rec['DY'],
                                                   gcs_rec['DZ'],gcs_rec['RX'],
                                                   gcs_rec['RY'],gcs_rec['RZ'],
                                                   gcs_rec['DS'] ]
def read_stateplane_txt(filename):
    """Parse the fixed-column stateplane.txt file into a CSVTable.

    Column layout (0-based character positions): STATE [0:16], ZONE [16:39],
    PROJ_METHOD [39:40], DATUM [48:65], USGS_CODE [65:end].  NAD27 zones get
    an ID of USGS_CODE + 10000 so they do not collide with the other zones,
    which use the USGS code directly.

    filename -- path to the stateplane.txt source file.

    Returns a csv_tools.CSVTable keyed by the integer ID.
    """
    spt = csv_tools.CSVTable()
    spt.add_field("ID")
    spt.add_field("STATE")
    spt.add_field("ZONE")
    spt.add_field("PROJ_METHOD")
    spt.add_field("DATUM")
    spt.add_field("USGS_CODE")
    spt.add_field("EPSG_PCS_CODE")

    # BUG FIX: the file handle was leaked (open(...).readlines()); use a
    # context manager so it is closed deterministically.
    with open(filename) as src:
        src_lines = src.readlines()

    for line in src_lines:
        rec = {}
        # str.strip() replaces string.strip(), which was removed in Python 3.
        rec['STATE'] = line[0:16].strip()
        rec['ZONE'] = line[16:39].strip()
        rec['PROJ_METHOD'] = line[39:40].strip()
        rec['DATUM'] = line[48:65].strip()
        rec['USGS_CODE'] = line[65:].strip()
        rec['EPSG_PCS_CODE'] = ''

        # Offset NAD27 IDs so NAD27 zones never share a key with others.
        if rec['DATUM'] == 'NAD27':
            rec['ID'] = str(int(rec['USGS_CODE']) + 10000)
        else:
            rec['ID'] = rec['USGS_CODE']

        spt.add_record(int(rec['ID']), rec)

    return spt
def get_vals(csv_file, get_datum_info=1, field_name='GEOGCS', field_datum='DATUM'):
    """Harvest OGR datum names (and optionally TOWGS84 shifts) from a CSV table.

    Every EPSG code in csv_file is fed to OGR's SpatialReference to recover
    the datum name, stored in the module-level ogr_datum_names dict keyed by
    the integer DATUM_CODE from the CSV record.  When get_datum_info is 1 the
    seven TOWGS84 parameters (DX,DY,DZ,RX,RY,RZ,DS) are copied into the
    module-level towgs84_params dict as well.

    csv_file       -- CSV table to scan (e.g. gcs.csv or vertcs.csv).
    get_datum_info -- 1 to also harvest TOWGS84 parameters.
    field_name     -- WKT node name to read the CRS name from.
    field_datum    -- WKT node name to read the datum name from.
    """
    # load table(s)
    gcs_table = csv_tools.CSVTable()
    gcs_table.read_from_csv(csv_file)

    # loop for all elements
    for gcs_code in gcs_table.data.keys():
        gcs_code = int(gcs_code)

        # get the ogr datum name from GDAL/OGR using epsg code
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(gcs_code)
        gcs_name = srs.GetAttrValue(field_name)
        datum_name = srs.GetAttrValue(field_datum)

        # get datum code
        try:
            gcs_rec = gcs_table.get_record(gcs_code)
            datum_code = int(gcs_rec['DATUM_CODE'])
            #print(str(gcs_rec))
            #print str(gcs_code)+'='+gcs_name+' / '+str(datum_code)+'='+datum_name
        except Exception:
            # BUG FIX: narrowed from a bare "except:" (which also swallowed
            # KeyboardInterrupt/SystemExit) and converted the Python 2 print
            # statement to the function form.
            datum_code = None
            print('Failed to get gcs record, or datum info for ' + str(gcs_code))

        # store datum name and towgs84 parameters
        if datum_code is not None:
            ogr_datum_names[datum_code] = datum_name

            if get_datum_info == 1:
                if gcs_rec['DX'] != '':
                    towgs84_params[datum_code] = [ gcs_rec['DX'],gcs_rec['DY'],
                                                   gcs_rec['DZ'],gcs_rec['RX'],
                                                   gcs_rec['RY'],gcs_rec['RZ'],
                                                   gcs_rec['DS'] ]
elif code == '8607': target_rec['DZ'] = value elif code == '8608': target_rec['RX'] = value elif code == '8609': target_rec['RY'] = value elif code == '8610': target_rec['RZ'] = value elif code == '8611': target_rec['DS'] = value ############################################################################## # Read and index input files. co_value = csv_tools.CSVTable() co_value.read_from_csv('coordinate_operation_parameter_value.csv', multi=1) co = csv_tools.CSVTable() co.read_from_csv('coordinate_operation.csv') crs = csv_tools.CSVTable() crs.read_from_csv('coordinate_reference_system.csv') datums = csv_tools.CSVTable() datums.read_from_csv('datum.csv') cs = csv_tools.CSVTable() cs.read_from_csv('coordinate_system.csv', multi=0) caxis = csv_tools.CSVTable()
if rec['DATUM'] == 'NAD27': rec['ID'] = str(int(rec['USGS_CODE']) + 10000) else: rec['ID'] = rec['USGS_CODE'] spt.add_record(int(rec['ID']), rec) return spt ############################################################################## # main() spt = read_stateplane_txt('/home/warmerda/gdal/data/stateplane.txt') pcs = csv_tools.CSVTable() pcs.read_from_csv('coordinate_reference_system.csv') coord_sys_by_name = {} ids_to_replace = [] pcs_keys = pcs.data.keys() #pcs_keys = [ 2204, 32036 ] for pcs_key in pcs_keys: rec = pcs.get_record(pcs_key) if rec['COORD_REF_SYS_KIND'] != 'projected': continue dep = rec['DEPRECATED']
if get_datum_info == 1: if gcs_rec['DX'] != '': towgs84_params[datum_code] = [ gcs_rec['DX'],gcs_rec['DY'],\ gcs_rec['DZ'],gcs_rec['RX'],\ gcs_rec['RY'],gcs_rec['RZ'],\ gcs_rec['DS'] ] get_vals(gcs_file, ) get_vals(vertcs_file, 0, 'VERT_CS', 'VERT_DATUM') # ============================================================================= # create tables for output files datum_table = csv_tools.CSVTable() datum_table.read_from_csv(datum_file) # add missing field names for tmp_name in new_field_names: if tmp_name not in datum_table.fields: datum_table.add_field(tmp_name) # Loop over all datums, adding new values where needed for datum_code in datum_table.data.keys(): datum_rec = datum_table.get_record(datum_code) datum_type = datum_rec['DATUM_TYPE'] epsg_datum_name = datum_rec['DATUM_NAME'] tmp_datum_name = epsg_datum_name
feat = lyr.GetNextFeature() if feat is not None: ret = feat.GetField('SRTEXT') else: ret = None ds = None try: shutil.rmtree('tmp.gdb') except: pass return ret gcs_table = csv_tools.CSVTable() gcs_table.read_from_csv('gcs.csv') datum_table = csv_tools.CSVTable() datum_table.read_from_csv('datum.csv') print('%d GCS defined.' % len(gcs_table.data.keys())) esri_gcs_names = {} esri_datum_names = {} try: os.stat(epsg_prj_path) prj_epsg_exists = True except: prj_epsg_exists = False
# # import string import csv_tools def get_crs_uom( crs_rec, cs, caxis ): coord_sys_code = int(crs_rec['COORD_SYS_CODE']) ca_recs = caxis.get_records( coord_sys_code ) return ca_recs[0]['UOM_CODE'] ############################################################################## # Read and index input files. co_value = csv_tools.CSVTable() co_value.read_from_csv( 'coordinate_operation_parameter_value.csv', multi=1 ) co = csv_tools.CSVTable() co.read_from_csv( 'coordinate_operation.csv' ) crs = csv_tools.CSVTable() crs.read_from_csv( 'coordinate_reference_system.csv' ) datums = csv_tools.CSVTable() datums.read_from_csv( 'datum.csv' ) cs = csv_tools.CSVTable() cs.read_from_csv( 'coordinate_system.csv', multi=0 ) caxis = csv_tools.CSVTable()
# $Log: add_esri_column.py,v $ # Revision 1.2 2004/03/20 07:52:44 warmerda # use local paths # # Revision 1.1 2003/06/20 21:28:24 warmerda # New # import string import sys import osr sys.path.append('/home/warmerda/osrs/geotiff/libgeotiff/csv') import csv_tools gcs_table = csv_tools.CSVTable() gcs_table.read_from_csv('gcs.csv') datum_table = csv_tools.CSVTable() datum_table.read_from_csv('datum.csv') print '%d GCS defined.', len(gcs_table.data.keys()) esri_gcs_names = {} esri_datum_names = {} for gcs_code in gcs_table.data.keys(): gcs_code = int(gcs_code) filename = '/u/data/esri/prj/epsg/%d.prj' % gcs_code try: