def metadata2eic(bin_lid, parallax, merge=False):
    """Convert bin image metadata to CSV"""
    # .img columns:
    # imagename,lat,lon,head,pitch,roll,alt1,alt2,depth,s,t,o2,cdom,chlorophyll,backscatter,therm
    if merge:
        (PITCH_COL, ROLL_COL) = (4, 5)
        fields = ['imagename', 'alt', 'pitch', 'roll']  # output column order
        imgdata = resolver['img'].resolve(pid=bin_lid).value
        logging.info('reading CSV data from .img file %s' % imgdata)
        # index pitch and roll by image name
        (pitch, roll) = ({}, {})
        for row in read_csv(LocalFileSource(imgdata)):
            imagename = remove_extension(row[0])
            try:
                pitch[imagename] = row[PITCH_COL]
                roll[imagename] = row[ROLL_COL]
            except KeyError:
                pass
        logging.info('merging with parallax-based altitude from %s' % parallax)
    else:
        logging.info('reading parallax-based altitude from %s' % parallax)
    for l in open(parallax):
        (imagename, _, _, alt) = re.split(r',', l.rstrip())
        try:
            if merge:
                yield [imagename, alt, pitch[imagename], roll[imagename]]
            else:
                yield [imagename, alt, '0.0', '0.0']
        except KeyError:
            # no pitch/roll record for this image; skip it
            pass
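# A minimal sketch of driving the generator above: write the EIC rows out as
# CSV. The out_path parameter is hypothetical, not part of this module's API.
def write_eic_csv(bin_lid, parallax, out_path, merge=False):
    with open(out_path, 'w') as out:
        for row in metadata2eic(bin_lid, parallax, merge=merge):
            print >>out, ','.join(row)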
def alt(bin_lid):
    tmp = mkdirs(scratch(bin_lid))
    csv_filename = os.path.join(tmp, bin_lid + '_alt.csv')
    logging.info('listing images for %s' % bin_lid)
    imagenames = [remove_extension(i) for i in list_images(bin_lid)]
    logging.info('looking for existing altitude data...')
    already_done = []
    if os.path.exists(csv_filename):
        for row in read_csv(LocalFileSource(csv_filename)):
            already_done += [remove_extension(row[0])]
        logging.info('found %d existing altitude records' % len(already_done))
    if len(already_done) == 0:
        # no usable records; truncate (or create) the CSV file
        logging.info('emptying CSV file ...')
        with open(csv_filename, 'w') as csv_out:
            pass
    pids = []
    for n in range(NUM_PROCS):
        pid = os.fork()
        if pid == 0:
            # child process: align this child's share of the images
            for imagename in imagenames[n::NUM_PROCS]:
                if imagename not in already_done:
                    tif = img_as_float(read_image(imagename + '.tif'))
                    logging.info('[%d] START aligning %s' % (n, imagename))
                    x, y, m = stereo2altitude(tif)
                    line = '%s,%d,%d,%.2f' % (imagename, x, y, m)
                    logging.info('[%d] DONE aligned %s' % (n, line))
                    with open(csv_filename, 'a') as csv_out:
                        print >>csv_out, line
                        csv_out.flush()
            os._exit(0)
        else:
            logging.info('spawned process %d' % pid)
            pids += [pid]
    for pid in pids:
        logging.info('waiting for process %d' % pid)
        os.waitpid(pid, 0)
        logging.info('joined alignment process %d' % pid)
    # now sort the file by image name
    logging.info('sorting CSV data...')
    rows = list(read_csv(LocalFileSource(csv_filename)))
    rows = sorted(rows, key=lambda r: r[0])
    # rewrite the same file in sorted order
    with open(csv_filename, 'w') as co:
        for row in rows:
            print >>co, ','.join([row[i] for i in range(4)])
    logging.info('wrote CSV data to %s' % csv_filename)
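# The children above divide the work by round-robin slicing: child n takes
# imagenames[n::NUM_PROCS], so every image goes to exactly one child and the
# only shared state is the append-only CSV file. An illustration, assuming
# NUM_PROCS == 3:
#
#   ['a','b','c','d','e'][0::3]  ->  ['a', 'd']
#   ['a','b','c','d','e'][1::3]  ->  ['b', 'e']
#   ['a','b','c','d','e'][2::3]  ->  ['c']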
def get_features_schema(req, features_path=None):
    if features_path is None:
        # will raise NotFound if no features file is found;
        # caller must catch it to provide a default value in that case
        features_path = get_product_file(req.parsed, 'features')
    features_source = LocalFileSource(features_path)
    # the schema is just the header row, so read only the first row
    for row in read_csv(features_source, None, 0, 1):
        return row
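# A sketch of the fallback pattern the comment above calls for. It assumes
# NotFound is importable in this module; the default is supplied by the caller.
def features_schema_or_default(req, default=None):
    try:
        return get_features_schema(req)
    except NotFound:
        return default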
def read_adc(source, target_no=1, limit=-1, schema_version=SCHEMA_VERSION_1):
    """Convert ADC data in its native format to dictionaries representing
    each target. Read starting at the specified target number (default 1)"""
    target_number = target_no - 1
    for row in read_csv(source, ADC_SCHEMA[schema_version], target_no - 1, limit):
        target_number += 1
        # skip 0x0 targets
        if row[WIDTH] * row[HEIGHT] > 0:
            # add target number
            row[TARGET_NUMBER] = target_number
            yield row
def read_adc(adc_path, target_no=1, limit=-1, schema=None):
    """Convert ADC data in its native format to dictionaries representing
    each target. Read starting at the specified target number (default 1)"""
    target_number = target_no - 1
    adc_source = LocalFileSource(adc_path)
    for row in read_csv(adc_source, schema, target_no - 1, limit):
        target_number += 1
        # skip 0x0 targets
        if row[WIDTH] * row[HEIGHT] > 0:
            # add target number
            row[TARGET_NUMBER] = target_number
            yield row
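# A minimal usage sketch for read_adc: count the non-empty targets in an
# ADC file. The path and schema arguments are hypothetical placeholders.
def count_targets(adc_path, schema):
    n = 0
    for target in read_adc(adc_path, schema=schema):
        n += 1
    return n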
def read_features(features_path):
    features_source = LocalFileSource(features_path)
    # skip the header row (offset 1) and read all remaining rows
    for row in read_csv(features_source, None, 1, NO_LIMIT):
        yield row
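from itertools import islice

# Usage sketch: peek at the first few feature rows without reading the whole
# file; the count of 5 is just an illustrative default.
def peek_features(features_path, n=5):
    return list(islice(read_features(features_path), n))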