def init_tasks(self, _tasks_dict, _odm_app):
    """Instantiate an ODMTask for every entry in _tasks_dict and wire up
    each task's command and inputs.

    :param _tasks_dict: mapping of task key -> task name
    :param _odm_app: app object providing project_path, args and photos
    :return: dict mapping task key -> configured ODMTask
    """
    # dict to store tasks objects
    tasks = {}
    # loop over tasks dict
    # Bug fix: original read `for key, in _tasks_dict:` which tuple-unpacks
    # every key and fails unless each key happens to be a 1-item sequence.
    for key in _tasks_dict:
        # instantiate and append ODMTask
        task_name = _tasks_dict[key]
        tasks[key] = ODMTask(key, task_name)

        # Defaults guarantee command/inputs are always bound, even for an
        # unrecognized task name (previously they could raise NameError or
        # silently leak values from the prior loop iteration).
        command = None
        inputs = {}

        # setup tasks
        if task_name == 'resize':
            command = resize
            inputs = {
                'project_path': _odm_app.project_path,
                'args': _odm_app.args,
                'photos': _odm_app.photos
            }
        elif task_name == 'opensfm':
            command = opensfm
            inputs = {
                'project_path': _odm_app.project_path,
                'args': _odm_app.args,
                'photos': _odm_app.photos
            }
        elif task_name in ['cmvs', 'pmvs', 'odm_meshing', 'mvs_texturing',
                           'odm_georeferencing', 'odm_orthophoto',
                           'zip_results']:
            # Known task names that get no runnable command configured here.
            pass
        else:
            log.ODM_ERROR('task_name %s is not valid' % task_name)

        # setup task configuration
        task = tasks[key]
        task.command = command
        task.inputs = inputs
    return tasks
def run_tasks(self):
    """Run every task with id in [initial_task_id, final_task_id], in
    order, logging start and outcome and tracking the current task id
    on self. A task's run() result is stored in task.state; 2 is
    treated as success, anything else as an abort.
    """
    # Removed commented-out dead code; renamed loop variable `id`,
    # which shadowed the builtin of the same name.
    for task_id in range(self.initial_task_id, self.final_task_id + 1):
        # catch task with current id (task ids are stored as string keys)
        task = self.tasks[str(task_id)]
        # update task tracking
        log.ODM_INFO('Running task %s: %s' % (task.id, task.name))
        self.current_task_id = task.id
        # run task
        task.state = task.run()
        if task.state == 2:
            log.ODM_INFO('Succeeded task %s: %s - %s' % (task.id, task.name, system.now()))
        else:
            log.ODM_ERROR('Aborted task %s: %s' % (task.id, task.name))
def detect_multi_camera(self):
    """
    Looks at the reconstruction photos and determines if this
    is a single or multi-camera setup.

    :return: for a multi-camera setup (2..8 distinct band names), a list
        of {'name': band_name, 'photos': [photos]} dicts sorted by band
        index; otherwise None.
    :raises RuntimeError: if bands have mismatched image counts.
    """
    band_photos = {}   # band name -> photos carrying that band
    band_indexes = {}  # band name -> band_index of the first photo seen
    for p in self.photos:
        # Idiom fix: `x not in d` instead of `not x in d`.
        if p.band_name not in band_photos:
            band_photos[p.band_name] = []
        if p.band_name not in band_indexes:
            band_indexes[p.band_name] = p.band_index
        band_photos[p.band_name].append(p)

    bands_count = len(band_photos)
    if 2 <= bands_count <= 8:
        # Validate that all bands have the same number of images,
        # otherwise this is not a multi-camera setup.
        # NOTE: the reference count intentionally comes from the band of
        # the last photo iterated above (p is bound because this branch
        # implies self.photos is non-empty).
        img_per_band = len(band_photos[p.band_name])
        for band in band_photos:
            if len(band_photos[band]) != img_per_band:
                log.ODM_ERROR("Multi-camera setup detected, but band \"%s\" (identified from \"%s\") has only %s images (instead of %s), perhaps images are missing or are corrupted. Please include all necessary files to process all bands and try again." % (band, band_photos[band][0].filename, len(band_photos[band]), img_per_band))
                raise RuntimeError("Invalid multi-camera images")

        mc = [{'name': band_name, 'photos': band_photos[band_name]}
              for band_name in band_indexes]

        # Sort by band index
        mc.sort(key=lambda x: band_indexes[x['name']])

        return mc

    return None
def convert_to_las(self, _file, _file_out, json_file):
    """Write a PDAL pipeline definition to json_file and run pdal to
    convert a PLY point cloud into a LAS file, translating by the UTM
    offsets and tagging the output with the project EPSG code.

    :param _file: input PLY file path
    :param _file_out: output LAS file path
    :param json_file: path where the pipeline JSON is written
    """
    # Bail out early: without an EPSG code the output CRS is unknown.
    if not self.epsg:
        log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
        return

    params = {
        'bin': context.pdal_path,
        'f_in': _file,
        'f_out': _file_out,
        'east': self.utm_east_offset,
        'north': self.utm_north_offset,
        'epsg': self.epsg,
        'json': json_file
    }

    # Pipeline template: apply a translation by the UTM offsets, then
    # write a LAS tagged with the EPSG code. The placeholder reader and
    # writer filenames are overridden on the pdal command line below.
    template = (
        '{{'
        ' "pipeline":['
        ' "untransformed.ply",'
        ' {{'
        ' "type":"filters.transformation",'
        ' "matrix":"1 0 0 {east} 0 1 0 {north} 0 0 1 0 0 0 0 1"'
        ' }},'
        ' {{'
        ' "a_srs":"EPSG:{epsg}",'
        ' "offset_x":"{east}",'
        ' "offset_y":"{north}",'
        ' "offset_z":"0",'
        ' "filename":"transformed.las"'
        ' }}'
        ' ]'
        '}}'
    )
    with open(json_file, 'w') as pipeline_file:
        pipeline_file.write(template.format(**params))

    # call pdal
    system.run(
        '{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in} '
        '--writers.las.filename={f_out}'.format(**params))
def convert_to_las(self, _file, _file_out, json_file):
    """Write a PDAL pipeline definition (las.json) and run pdal to
    convert a PLY point cloud into a laszip-compressed LAS file tagged
    with the project CRS.

    :param _file: input PLY file path
    :param _file_out: output LAS/LAZ file path
    :param json_file: path where the pipeline JSON is written
    """
    # Bail out early: without a CRS string the output is untagged.
    if not self.projection.srs:
        log.ODM_ERROR('Empty CRS: Could not convert to LAS')
        return

    params = {
        'bin': context.pdal_path,
        'f_in': _file,
        'f_out': _file_out,
        'east': self.utm_east_offset,
        'north': self.utm_north_offset,
        'srs': self.projection.srs,
        'json': json_file
    }

    # Pipeline template: a single writers.las stage carrying the CRS,
    # UTM offsets and laszip compression. The placeholder reader
    # filename is overridden on the pdal command line below.
    template = (
        '{{'
        ' "pipeline":['
        ' "untransformed.ply",'
        ' {{'
        ' "type":"writers.las",'
        ' "a_srs":"{srs}",'
        ' "offset_x":"{east}",'
        ' "offset_y":"{north}",'
        ' "offset_z":"0",'
        ' "compression":"laszip",'
        ' "filename":"{f_out}"'
        ' }}'
        ' ]'
        '}}'
    )
    with open(json_file, 'w') as pipeline_file:
        pipeline_file.write(template.format(**params))

    # call pdal
    system.run(
        '{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in}'.
        format(**params))
def utm_to_latlon(self, _file, _photo, idx):
    """Convert GCP idx from project UTM coordinates to WGS84 lat/lon via
    gdaltransform and write the result into the photo's EXIF GPS tags.

    :param _file: file path placed in the command kwargs (not otherwise
        used here — kept for interface compatibility)
    :param _photo: photo whose EXIF metadata is read, updated and written
    :param idx: index into self.gcps
    """
    gcp = self.gcps[idx]
    kwargs = {
        'epsg': self.epsg,
        'file': _file,
        'x': gcp.x + self.utm_east_offset,
        'y': gcp.y + self.utm_north_offset,
        'z': gcp.z
    }
    latlon = system.run_and_return(
        'echo {x} {y} {z} '.format(**kwargs),
        'gdaltransform -s_srs \"EPSG:{epsg}\" '
        '-t_srs \"EPSG:4326\"'.format(**kwargs)).split()

    # gdaltransform output examples:
    #   83d18'16.285"W   41d2'11.789"N   0.998
    if len(latlon) == 3:
        lon_str, lat_str, alt_str = latlon
    elif len(latlon) == 2:
        # No altitude reported; treat as sea level. (Originally alt_str
        # was set to '' and then latlon[2] raised IndexError below.)
        lon_str, lat_str = latlon
        alt_str = '0'
    else:
        log.ODM_ERROR('Something went wrong %s' % latlon)
        # Bug fix: originally execution fell through and crashed on an
        # out-of-range index; abort instead.
        return

    # Bug fix: the unpacked values are now actually used below instead of
    # re-indexing latlon.
    lat_frac = self.coord_to_fractions(lat_str, ['N', 'S'])
    lon_frac = self.coord_to_fractions(lon_str, ['E', 'W'])

    # read image metadata
    metadata = pyexiv2.ImageMetadata(_photo.path_file)
    metadata.read()

    # set values
    # GPS latitude
    key = 'Exif.GPSInfo.GPSLatitude'
    value = lat_frac[0].split(' ')
    log.ODM_DEBUG('lat_frac: %s %s %s' % (value[0], value[1], value[2]))
    metadata[key] = pyexiv2.ExifTag(
        key, [Fraction(value[0]), Fraction(value[1]), Fraction(value[2])])
    key = 'Exif.GPSInfo.GPSLatitudeRef'
    metadata[key] = pyexiv2.ExifTag(key, lat_frac[1])

    # GPS longitude
    key = 'Exif.GPSInfo.GPSLongitude'
    value = lon_frac[0].split(' ')
    metadata[key] = pyexiv2.ExifTag(
        key, [Fraction(value[0]), Fraction(value[1]), Fraction(value[2])])
    key = 'Exif.GPSInfo.GPSLongitudeRef'
    metadata[key] = pyexiv2.ExifTag(key, lon_frac[1])

    # GPS altitude: magnitude scaled by 100 and stored as Fraction(n, 1);
    # the sign goes into GPSAltitudeRef ('0' above, '1' below sea level).
    altitude_value = float(alt_str)
    altitude = abs(int(altitude_value * 100))
    key = 'Exif.GPSInfo.GPSAltitude'
    metadata[key] = pyexiv2.ExifTag(key, Fraction(altitude, 1))
    # Bug fix: the original compared the raw string latlon[2] with the
    # int 0, which is not a numeric comparison (always True on Python 2,
    # TypeError on Python 3) — the '1' (below sea level) reference could
    # never be written.
    altref = '0' if altitude_value >= 0 else '1'
    key = 'Exif.GPSInfo.GPSAltitudeRef'
    metadata[key] = pyexiv2.ExifTag(key, altref)

    # write values
    metadata.write()