def georeference_with_gcp(self, gcp_file, output_coords_file, output_gcp_file, rerun=False):
    if not io.file_exists(output_coords_file) or not io.file_exists(output_gcp_file) or rerun:
        gcp = GCPFile(gcp_file)
        if gcp.exists():
            # Create coords file, we'll be using this later
            # during georeferencing
            with open(output_coords_file, 'w') as f:
                coords_header = gcp.wgs84_utm_zone()
                f.write(coords_header + "\n")
                log.ODM_DEBUG("Generated coords file from GCP: %s" % coords_header)

            # Convert GCP file to a UTM projection since the rest of the pipeline
            # does not handle other SRS well.
            rejected_entries = []
            utm_gcp = GCPFile(gcp.create_utm_copy(output_gcp_file,
                                                  filenames=[p.filename for p in self.photos],
                                                  rejected_entries=rejected_entries,
                                                  include_extras=False))

            if not utm_gcp.exists():
                raise RuntimeError("Could not project GCP file to UTM. Please double check your GCP file for mistakes.")

            for re in rejected_entries:
                log.ODM_WARNING("GCP line ignored (image not found): %s" % str(re))

            if utm_gcp.entries_count() > 0:
                log.ODM_INFO("%s GCP points will be used for georeferencing" % utm_gcp.entries_count())
            else:
                raise RuntimeError("A GCP file was provided, but no valid GCP entries could be used. "
                                   "Note that the GCP file is case sensitive (\".JPG\" is not the same as \".jpg\").")

            self.gcp = utm_gcp
        else:
            log.ODM_WARNING("GCP file does not exist: %s" % gcp_file)
            return
    else:
        log.ODM_INFO("Coordinates file already exists: %s" % output_coords_file)
        log.ODM_INFO("GCP file already exists: %s" % output_gcp_file)
        self.gcp = GCPFile(output_gcp_file)

    self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
    return self.georef

def convert_to_dem(self, _file, _file_out, pdalJSON, sample_radius, gdal_res, gdal_radius):
    # Check that the input LAS file exists
    if not io.file_exists(_file):
        log.ODM_ERROR('LAS file does not exist')
        return False

    kwargs = {
        'bin': context.pdal_path,
        'f_in': _file,
        'sample_radius': sample_radius,
        'gdal_res': gdal_res,
        'gdal_radius': gdal_radius,
        'f_out': _file_out,
        'json': pdalJSON
    }

    pipelineJSON = '{{' \
                   '  "pipeline":[' \
                   '    "input.las",' \
                   '    {{' \
                   '      "type":"filters.sample",' \
                   '      "radius":"{sample_radius}"' \
                   '    }},' \
                   '    {{' \
                   '      "type":"filters.pmf"' \
                   '    }},' \
                   '    {{' \
                   '      "type":"filters.range",' \
                   '      "limits":"Classification[2:2]"' \
                   '    }},' \
                   '    {{' \
                   '      "resolution": {gdal_res},' \
                   '      "radius": {gdal_radius},' \
                   '      "output_type":"idw",' \
                   '      "filename":"outputfile.tif"' \
                   '    }}' \
                   '  ]' \
                   '}}'.format(**kwargs)

    with open(pdalJSON, 'w') as f:
        f.write(pipelineJSON)

    system.run('{bin}/pdal pipeline {json} --readers.las.filename={f_in} '
               '--writers.gdal.filename={f_out}'.format(**kwargs))

    return io.file_exists(kwargs['f_out'])

def restore_session(url):
    hostname = urlrewrite.get_hostname(url)
    filename = urlrewrite.hostname_to_filename(hostname)
    q, wb = None, None
    if io.file_exists(filename + ".web", dir=io.LOGDIR):
        io.write_err("Restoring web from %s ..." %
                     shcolor.color(shcolor.YELLOW, filename + ".web"))
        wb = io.deserialize(filename + ".web", dir=io.LOGDIR)
        io.write_err(shcolor.color(shcolor.GREEN, "done\n"))
    if io.file_exists(filename + ".session", dir=io.LOGDIR):
        io.write_err("Restoring session from %s ..." %
                     shcolor.color(shcolor.YELLOW, filename + ".session"))
        q = io.deserialize(filename + ".session", dir=io.LOGDIR)
        q = recipe.overrule_records(q)
        io.write_err(shcolor.color(shcolor.GREEN, "done\n"))
    return q, wb

def parse_coordinate_system(self, _file):
    """Parse the coordinate system from the first line of the given file and return a Proj for it"""
    # check for coordinate file existence
    if not io.file_exists(_file):
        log.ODM_WARNING('Could not find file %s' % _file)
        return

    with open(_file) as f:
        # extract reference system and utm zone from first line.
        # We will assume the following format:
        # 'WGS84 UTM 17N' or 'WGS84 UTM 17N \n'
        line = f.readline().rstrip()
        log.ODM_DEBUG('Line: %s' % line)
        ref = line.split(' ')
        # match_wgs_utm = re.search('WGS84 UTM (\d{1,2})(N|S)', line, re.I)
        if ref[0] == 'WGS84' and ref[1] == 'UTM':  # match_wgs_utm:
            datum = ref[0]
            utm_pole = ref[2][len(ref[2]) - 1]
            utm_zone = int(ref[2][:len(ref[2]) - 1])

            return Proj(proj="utm", zone=utm_zone, datum=datum, no_defs=True)
        elif '+proj' in line:
            return Proj(line.strip('\''))
        elif 'epsg' in line.lower():
            return Proj(init=line)
        else:
            # log.ODM_ERROR returns None, so it cannot itself be raised
            log.ODM_ERROR('Could not parse coordinates. Bad CRS supplied: %s' % line)
            raise RuntimeError('Could not parse coordinates. Bad CRS supplied: %s' % line)

def convert_to_dem(self, _file, _file_out, pdalJSON, sample_radius, gdal_res, gdal_radius):
    # Check that the input LAS file exists
    if not io.file_exists(_file):
        log.ODM_ERROR('LAS file does not exist')
        return False

    kwargs = {
        'bin': context.pdal_path,
        'f_in': _file,
        'sample_radius': sample_radius,
        'gdal_res': gdal_res,
        'gdal_radius': gdal_radius,
        'f_out': _file_out,
        'json': pdalJSON
    }

    pipelineJSON = '{{' \
                   '  "pipeline":[' \
                   '    "input.las",' \
                   '    {{' \
                   '      "type":"filters.sample",' \
                   '      "radius":"{sample_radius}"' \
                   '    }},' \
                   '    {{' \
                   '      "type":"filters.pmf",' \
                   '      "extract":"true"' \
                   '    }},' \
                   '    {{' \
                   '      "resolution": {gdal_res},' \
                   '      "radius": {gdal_radius},' \
                   '      "output_type":"idw",' \
                   '      "filename":"outputfile.tif"' \
                   '    }}' \
                   '  ]' \
                   '}}'.format(**kwargs)

    with open(pdalJSON, 'w') as f:
        f.write(pipelineJSON)

    system.run('{bin}/pdal pipeline {json} --readers.las.filename={f_in} '
               '--writers.gdal.filename={f_out}'.format(**kwargs))

    return io.file_exists(kwargs['f_out'])

def extract_offsets(self, _file):
    if not io.file_exists(_file):
        log.ODM_ERROR('Could not find file %s' % _file)
        return

    with open(_file) as f:
        offsets = f.readlines()[1].split(' ')
        self.utm_east_offset = float(offsets[0])
        self.utm_north_offset = float(offsets[1])

def save_session(wb, queue=None):
    hostname = urlrewrite.get_hostname(wb.root.url)
    filename = urlrewrite.hostname_to_filename(hostname)
    io.write_err("Saving session to %s ..." %
                 shcolor.color(shcolor.YELLOW, filename + ".{web,session}"))
    io.serialize(wb, filename + ".web", dir=io.LOGDIR)
    if queue:
        io.serialize(queue, filename + ".session", dir=io.LOGDIR)
    # only web being saved, ie. spidering complete, remove old session
    elif io.file_exists(filename + ".session", dir=io.LOGDIR):
        io.delete(filename + ".session", dir=io.LOGDIR)
    io.write_err(shcolor.color(shcolor.GREEN, "done\n"))

def parse_transformation_matrix(self, _file):
    if not io.file_exists(_file):
        log.ODM_ERROR('Could not find file %s' % _file)
        return

    # Create a nested list for the transformation matrix
    with open(_file) as f:
        for line in f:
            self.transform += [[float(i) for i in line.split()]]

    self.utm_east_offset = self.transform[0][3]
    self.utm_north_offset = self.transform[1][3]

def georeference_with_gps(self, images_path, output_coords_file, rerun=False):
    try:
        if not io.file_exists(output_coords_file) or rerun:
            location.extract_utm_coords(self.photos, images_path, output_coords_file)
        else:
            log.ODM_INFO("Coordinates file already exists: %s" % output_coords_file)

        self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
    except:
        log.ODM_WARNING('Could not generate coordinates file. The orthophoto will not be georeferenced.')

    self.gcp = GCPFile(None)
    return self.georef

def parse_coordinate_system(self, _file):
    """Parse the coordinate system from the first line of the given file and return a Proj for it"""
    # check for coordinate file existence
    if not io.file_exists(_file):
        log.ODM_WARNING('Could not find file %s' % _file)
        return

    with open(_file) as f:
        # extract reference system and utm zone from first line.
        # We will assume the following format:
        # 'WGS84 UTM 17N' or 'WGS84 UTM 17N \n'
        line = f.readline().rstrip()
        log.ODM_DEBUG('Line: %s' % line)
        ref = line.split(' ')
        # match_wgs_utm = re.search('WGS84 UTM (\d{1,2})(N|S)', line, re.I)
        try:
            if ref[0] == 'WGS84' and ref[1] == 'UTM':  # match_wgs_utm:
                datum = ref[0]
                utm_pole = (ref[2][len(ref[2]) - 1]).upper()
                utm_zone = int(ref[2][:len(ref[2]) - 1])

                proj_args = {
                    'proj': "utm",
                    'zone': utm_zone,
                    'datum': datum,
                    'no_defs': True
                }
                if utm_pole == 'S':
                    proj_args['south'] = True

                return Proj(**proj_args)
            elif '+proj' in line:
                return Proj(line.strip('\''))
            elif 'epsg' in line.lower():
                return Proj(init=line)
            else:
                log.ODM_ERROR('Could not parse coordinates. Bad CRS supplied: %s' % line)
        except RuntimeError as e:
            log.ODM_ERROR('Uh oh! There seems to be a problem with your GCP file.\n\n'
                          'The line: %s\n\n'
                          'Is not valid. Projections that are valid include:\n'
                          ' - EPSG:*****\n'
                          ' - WGS84 UTM **(N|S)\n'
                          ' - Any valid proj4 string (for example, +proj=utm +zone=32 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs)\n\n'
                          'Modify your GCP file and try again.' % line)
            raise RuntimeError(e)

def parse_transformation_matrix(self, _file):
    if not io.file_exists(_file):
        log.ODM_ERROR('Could not find file %s' % _file)
        return

    # Create a nested list for the transformation matrix
    with open(_file) as f:
        for line in f:
            # Handle matrix formats that either
            # have leading or trailing brackets or just plain numbers.
            line = re.sub(r"[\[\],]", "", line).strip()
            self.transform += [[float(i) for i in line.split()]]

    self.utm_east_offset = self.transform[0][3]
    self.utm_north_offset = self.transform[1][3]

def FromCoordsFile(coords_file):
    # check for coordinate file existence
    if not io.file_exists(coords_file):
        log.ODM_WARNING('Could not find file %s' % coords_file)
        return

    srs = None
    with open(coords_file) as f:
        # extract reference system and utm zone from first line.
        # We will assume the following format:
        # 'WGS84 UTM 17N' or 'WGS84 UTM 17N \n'
        line = f.readline().rstrip()
        srs = location.parse_srs_header(line)

    return ODM_GeoRef(srs)

def create_gcps(self, _file):
    if not io.file_exists(_file):
        log.ODM_ERROR('Could not find file %s' % _file)
        return

    with open(_file) as f:
        # parse coordinates
        lines = f.readlines()[2:]
        for l in lines:
            xyz = l.split(' ')
            if len(xyz) == 3:
                x, y, z = xyz[:3]
            elif len(xyz) == 2:
                x, y = xyz[:2]
                z = 0
            else:
                # skip malformed lines
                continue
            self.gcps.append(ODM_GCPoint(float(x), float(y), float(z)))

def parse_coordinate_system(self, _file):
    """Write attributes to jobOptions from coord file"""
    # check for coordinate file existence
    if not io.file_exists(_file):
        log.ODM_ERROR('Could not find file %s' % _file)
        return

    with open(_file) as f:
        # extract reference system and utm zone from first line.
        # We will assume the following format:
        # 'WGS84 UTM 17N'
        line = f.readline().rstrip()
        log.ODM_DEBUG('Line: %s' % line)
        ref = line.split(' ')
        # match_wgs_utm = re.search('WGS84 UTM (\d{1,2})(N|S)', line, re.I)
        if ref[0] == 'WGS84' and ref[1] == 'UTM':  # match_wgs_utm:
            self.datum = ref[0]
            self.utm_pole = ref[2][len(ref[2]) - 1]
            self.utm_zone = int(ref[2][:len(ref[2]) - 1])

            # update EPSG
            self.epsg = self.calculate_EPSG(self.utm_zone, self.utm_pole)
        # If the first line looks like "EPSG:n" or "epsg:n"
        elif ref[0].split(':')[0].lower() == 'epsg':
            self.epsg = line.split(':')[1]
        else:
            log.ODM_ERROR('Could not parse coordinates. Bad CRS supplied: %s' % line)
            return

        # extract east and north offsets from second line.
        # We will assume the following format:
        # '440143 4588391'
        offsets = f.readline().split(' ')
        self.utm_east_offset = int(offsets[0])
        self.utm_north_offset = int(offsets[1])

        # parse coordinates
        lines = f.readlines()
        for l in lines:
            xyz = l.split(' ')
            if len(xyz) == 3:
                x, y, z = xyz[:3]
            elif len(xyz) == 2:
                x, y = xyz[:2]
                z = 0
            self.gcps.append(ODM_GCPoint(float(x), float(y), float(z)))