def configure(self, config):
    """Attach configuration and set up deposits, resolver, and check timer."""
    self.config = config
    # build the MATLAB search path from the configured base directory
    matlab_base = self.config.matlab_path = []
    base = self.config.matlab_base
    self.config.matlab_path = [os.path.join(base, subdir) for subdir in MATLAB_DIRS]
    # both deposits share the same backing store but hold different product types
    self.deposit = Deposit(self.config.features_deposit, product_type='features')
    self.multiblob_deposit = Deposit(self.config.features_deposit, product_type='multiblob')
    self.resolver = parse_stream(self.config.resolver)
    # timestamp of the most recent check, used elsewhere for polling
    self.last_check = time.time()
def queue_bin(bin_lid, config=None):
    """Enqueue an altitude-computation Celery task for every image in a bin.

    bin_lid -- LID of the bin whose images should be processed
    config  -- optional pre-loaded configuration; if None, configuration is
               read from 'alt_celery.conf'

    FIX: the original accepted a config argument but unconditionally
    overwrote it with get_config('alt_celery.conf'), so the caller's value
    was silently ignored; it is now honored when provided.
    """
    if config is None:
        config = get_config('alt_celery.conf')
    R = parse_stream(config.resolver)
    psql_connect = config.psql_connect
    # each hit carries .lid (imagename) and .p (path to the raw stereo TIFF)
    for hit in R['list_images'].resolve_all(pid=bin_lid):
        (imagename, cfa_LR_path) = hit.lid, hit.p
        do_alt.s(imagename, cfa_LR_path, psql_connect).apply_async(queue='alt') # FIXME hardcoded queue name
def list_adcs(time_series, resolver, after_year=2012):
    """Yield ADC hits for a time series whose year is strictly after after_year."""
    resolved = parse_stream(resolver)
    for hit in resolved['list_adcs'].resolve_all(time_series=time_series): # FIXME hardcoded
        stamp = time.strptime(hit.date, hit.date_format)
        if stamp.tm_year <= after_year:
            # outside the requested range; note it and move on
            logging.info('%s SKIP, out of date range' % hit.pid)
        else:
            yield hit
def configure(config=None):
    """Configure the Flask app from an optional Configuration object.

    Sets up the cache, debug flag, resolver, port, and metadata. Any
    attribute missing from config (including config=None) falls back to
    a default via the AttributeError handlers.
    """
    app.config[CACHE] = SimpleCache()
    app.config[CACHE_TTL] = 120
    # debug flag: accept several truthy spellings from the config file
    try:
        if config.debug in ['True', 'true', 'T', 't', 'Yes', 'yes', 'debug']:
            app.debug = True
    except AttributeError:
        pass
    try:
        app.config[RESOLVER] = parse_stream(config.resolver)
    except AttributeError:
        # no resolver configured; fall back to the bundled one
        app.config[RESOLVER] = parse_stream('oii/habcam/image_resolver.xml')
    # FIX: replaced a bare 'except:' with the specific exceptions that
    # attribute access / int() can raise, so unrelated errors (e.g.
    # KeyboardInterrupt) are no longer silently swallowed
    try:
        app.config[PORT] = int(config.port)
    except (AttributeError, TypeError, ValueError):
        app.config[PORT] = 5061
    app.config[METADATA] = Metadata(config)
def get_tenmin(bin_lid):
    """Find ten minute directory of TIFFs.

    Resolves one of the bin's image files and strips the trailing filename
    component, leaving the containing (ten-minute) directory path.

    Raises ValueError if the bin contains no images.
    """
    logging.info('searching for ten minute directory for %s' % bin_lid)
    # FIX: the original grabbed the first image with a for/break loop, which
    # left 'imagename' undefined (NameError) for an empty bin; fail with a
    # clear error instead
    try:
        imagename = next(iter(list_images(bin_lid)))
    except StopIteration:
        raise ValueError('no images found for %s' % bin_lid)
    resolvers = resolver.parse_stream(RESOLVER)
    hit = resolvers['image'].resolve(pid=as_tiff(imagename))
    # keep everything up to and including the final '/'
    return re.sub(r'/[^/]+$','/',hit.value)
def enqueue_blobs(time_series,queue): """config needs psql_connect, resolver""" config = get_config(CONFIG_FILE, time_series) feed = IfcbFeed(config.psql_connect) r = parse_stream(config.resolver) blob_resolver = r['mvco_blob'] pid_resolver = r['pid'] for lid in feed.latest_bins(n=10000): if blob_resolver.resolve(pid=lid,time_series=time_series) is None: pid = pid_resolver.resolve(pid=lid,time_series=time_series).bin_pid print 'No blobs found for %s, enqueuing' % pid extract_blobs.apply_async(args=[time_series, pid],queue=queue)
def list_new_filesets(time_series, psql_connect, resolver, after_year=2012):
    """Yield resolved raw filesets for ADCs not already in the time series."""
    feed = IfcbFeed(psql_connect)
    resolvers = parse_stream(resolver)
    for adc in list_adcs(time_series, resolver, after_year):
        if feed.exists(adc.pid):
            # already ingested; nothing to do
            logging.info('%s EXISTS in time series %s' % (adc.pid, time_series))
            continue
        logging.info('%s NEW, not already in time series %s' % (adc.pid, time_series))
        fileset = resolvers['fileset'].resolve(pid=adc.pid, product='raw', time_series=time_series, day_dir=adc.day_dir)
        if fileset is None:
            logging.warn('%s UNRESOLVABLE cannot find raw files' % adc.pid)
        else:
            yield fileset
def enqueue_features(time_series,queue): """config needs psql_connect, resolver""" config = get_config(CONFIG_FILE, time_series) feed = IfcbFeed(config.psql_connect) r = parse_stream(config.resolver) blob_resolver = r['mvco_blob'] feature_resolver = r['features'] pid_resolver = r['pid'] for lid in feed.latest_bins(n=5000): if blob_resolver.resolve(pid=lid,time_series=time_series) is not None: pid = pid_resolver.resolve(pid=lid,time_series=time_series).bin_pid if feature_resolver.resolve(pid=lid,time_series=time_series) is None: print 'found blobs but no features for %s' % pid extract_features.apply_async(args=[time_series, pid],queue=queue)
def binpid2zip(bin_pid, outfile, resolver_file='oii/ifcb/mvco.xml', resolver=None):
    """Generate a zip file given a pid (using a resolver) to find the files.

    bin_pid       -- bin PID; also used as the base URL for the .hdr/.adc/.roi files
    outfile       -- destination for the zip output (passed through to bin_zip)
    resolver_file -- resolver XML to parse when no resolver is supplied
    resolver      -- optional pre-parsed resolver (parsed from resolver_file if None)
    """
    if resolver is None:
        resolver = parse_stream(resolver_file)
    hit = resolver['pid'].resolve(pid=bin_pid)
    # Stage the three raw files into temporaries, then zip them. The nested
    # with-blocks matter: NamedTemporaryFile deletes the file on close, so all
    # three temp files must stay open until bin_zip has read them.
    with tempfile.NamedTemporaryFile() as hdr:
        hdr_path = hdr.name
        drain(UrlSource(bin_pid+'.hdr'), LocalFileSink(hdr_path))
        with tempfile.NamedTemporaryFile() as adc:
            adc_path = adc.name
            drain(UrlSource(bin_pid+'.adc'), LocalFileSink(adc_path))
            with tempfile.NamedTemporaryFile() as roi:
                roi_path = roi.name
                drain(UrlSource(bin_pid+'.roi'), LocalFileSink(roi_path))
                return bin_zip(hit, hdr_path, adc_path, roi_path, outfile)
def split(bin_lid):
    """Split TIFFs into L and R.

    Forks NUM_PROCS worker processes; each worker splits an interleaved
    subset of the bin's side-by-side stereo TIFFs into left/right halves.
    Returns ((h, w), outdirs) where (h, w) is the raw image shape and
    outdirs are the two output directories.
    """
    resolvers = resolver.parse_stream(RESOLVER)
    suffixes = ['_cfa_' + camera for camera in 'LR']
    # one scratch output directory per camera side
    outdirs = [scratch(bin_lid,bin_lid + suffix) for suffix in suffixes]
    for od in outdirs:
        mkdirs(od)
    imagenames = list(list_images(bin_lid))
    (h,w)=(None,None)
    tiff = None
    # read an image to determine h,w
    for imagename in imagenames:
        for outdir,suffix in zip(outdirs,suffixes):
            LRout = os.path.join(outdir,remove_extension(imagename) + suffix + '.tif')
            if h is None:
                if tiff is None:
                    tiff = as_tiff(imagename)
                cfain = resolvers['image'].resolve(pid=as_tiff(imagename)).value
                (h,w) = imread(cfain,plugin='freeimage').shape
    # now fork
    pids = []
    for n in range(NUM_PROCS):
        pid = os.fork()
        if pid == 0:
            # child: process every NUM_PROCS-th image starting at offset n
            for imagename in imagenames[n::NUM_PROCS]:
                tiff = None
                # offset 0 = left half, offset 1 = right half
                for outdir,suffix,offset in zip(outdirs,suffixes,[0,1]):
                    LRout = os.path.join(outdir,remove_extension(imagename) + suffix + '.tif')
                    if not os.path.exists(LRout):
                        if tiff is None:
                            # lazily load the full stereo frame once per image
                            tiff = as_tiff(imagename)
                            cfain = resolvers['image'].resolve(pid=as_tiff(imagename)).value
                            logging.info('loading %s' % cfain)
                            cfa = imread(cfain,plugin='freeimage')
                            (h,w) = cfa.shape
                        if not os.path.exists(LRout):
                            logging.info('splitting %s -> %s' % (cfain, LRout))
                            # NOTE(review): Python 2 integer division; under
                            # Python 3 this would need // — confirm runtime
                            half = w / 2
                            off = offset * half
                            imsave(LRout,cfa[:,off:off+half],plugin='freeimage')
            # child must exit without running parent cleanup
            os._exit(0)
        else:
            pids += [pid]
    # parent: reap all workers before returning
    for pid in pids:
        os.waitpid(pid,0)
        logging.info('joined splitting process %d' % pid)
    return (h,w),outdirs
def alt_bin(bin_lid, resolver, psql_connect):
    """Compute and store altitude for each unprocessed image in a bin."""
    R = parse_stream(resolver)
    # 'list images' should return:
    # - lid: imagename (e.g., 201303.20130621.1723.134329.55432.tif')
    # - p: full path to RAW 16-bit side-by-side stereo TIFF
    skip = 0
    for hit in R['list_images'].resolve_all(pid=bin_lid):
        (imagename, cfa_LR_path) = hit.lid, hit.p
        if alt_exists(imagename, psql_connect):
            # already computed: count consecutive skips, logged in one batch
            skip += 1
            continue
        if skip > 0:
            logging.info('SKIPPED %d image(s)' % skip)
            skip = 0
        logging.info('START %s' % imagename)
        x, y, alt = compute_alt(cfa_LR_path)
        # (0, 0) signals a failed altitude fix; don't record those
        if x != 0 or y != 0:
            write_alt(imagename, alt, x, y, psql_connect)
    # NOTE(review): this with-block is the tail of a method whose 'def' line
    # falls outside this chunk; indentation reconstructed from context.
    # Looks up lat/lon/description for a bin and returns them as a dict with
    # None-valued keys removed; any failure yields an empty dict.
    with xa(self.psql_connect) as (c,db):
        try:
            db.execute('select lat,lon,description from bin_props where lid=%s',(bin_lid,))
            (lat,lon,description) = db.fetchone()
            d = dict(lat=lat,lon=lon,description=description)
            # drop keys whose value is NULL in the database
            for k in d.keys():
                if d[k] is None:
                    del d[k]
            return d
        except:
            # best-effort lookup: any DB error returns an empty dict
            return {}

import sys
from oii.config import get_config, Configuration

# Script entry point: load config (from argv[1] or defaults), then dump
# ROIs of one hardcoded class from the autoclass results.
if __name__=='__main__':
    if len(sys.argv) > 1:
        config = get_config(sys.argv[1])
    else:
        config = Configuration()
    try:
        resolvers = resolver.parse_stream(config.resolver)
    except:
        # resolver is optional for this script
        resolvers = None
    #fixity = IfcbFixity(config.psql_connect, resolvers)
    #fixity.check_all()
    autoclass = IfcbAutoclass(config.psql_connect)
    for roc in autoclass.rois_of_class('tintinnid'):
        print roc
def get_resolver(path):
    """assumes there is a resolver called 'image'"""
    return parse_stream(path)['image']
from oii.habcam.lightfield.altitude import stereo2altitude
from oii.resolver import parse_stream
from oii.iopipes import StagedInputFile, UrlSource, LocalFileSource, drain, LocalFileSink
from oii.csvio import read_csv
from oii.procutil import Process
from oii.image.demosaic import demosaic
from oii.utils import remove_extension, change_extension
from lightfield_config import *

# timestamped log lines to stdout at DEBUG level
lgfmt = '%(asctime)-15s %(message)s'
logging.basicConfig(format=lgfmt,stream=sys.stdout,level=logging.DEBUG)

# module-level resolver, parsed once at import time from the configured
# RESOLVER path (supplied by lightfield_config)
resolver = parse_stream(RESOLVER)

def mkdirs(d):
    """Create directories.

    Creates d (and intermediate directories) if missing; returns d either way.
    """
    if not os.path.exists(d):
        os.makedirs(d)
        logging.info('created directory %s' % d)
    else:
        logging.info('directory %s exists' % d)
    return d

def scratch(bin_lid,suffix=''):
    """Compute path to scratch space via the 'scratch' resolver rule."""
    return resolver['scratch'].resolve(pid=bin_lid,suffix=suffix).value
    #return os.path.join(SCRATCH,bin_lid,suffix)
def configure(self, config):
    """Attach configuration and set up the blob deposit, resolver, and timer."""
    self.config = config
    # assemble the MATLAB search path under the configured base directory
    matlab_path = []
    for subdir in MATLAB_DIRS:
        matlab_path.append(os.path.join(self.config.matlab_base, subdir))
    self.config.matlab_path = matlab_path
    self.deposit = Deposit(self.config.blob_deposit)
    self.resolver = parse_stream(self.config.resolver)
    # record when configuration happened, for later polling checks
    self.last_check = time.time()
def resolver():
    """Parse and return the resolver configured for the Flask app."""
    path = app.config[RESOLVER]
    return parse_stream(path)
def list_adcs(time_series, resolver, year_pattern='....'):
    """Yield every ADC hit for a time series matching year_pattern."""
    resolved = parse_stream(resolver)
    for hit in resolved['list_adcs'].resolve_all(time_series=time_series,year_pattern=year_pattern): # FIXME hardcoded
        # parse (and thereby validate) the date stamp; the parsed value is
        # otherwise unused, but a malformed date raises here as before
        date = time.strptime(hit.date, hit.date_format)
        yield hit
# return the image data return image_response(roi_image,pil_format,mimetype) app.secret_key = os.urandom(24) if __name__=='__main__': """First argument is a config file which must at least have psql_connect in it to support feed arguments. Filesystem config is in the resolver.""" if len(sys.argv) > 1: configure(get_config(sys.argv[1])) else: configure() else: configure(get_config(os.environ['IFCB_CONFIG_FILE'])) # FIXME don't use globals # FIXME do this in config rs = parse_stream(app.config[RESOLVER]) binpid2path = rs['binpid2path'] pid_resolver = rs['pid'] blob_resolver = rs['mvco_blob'] fea_resolver = rs['features'] class_scores_resolver = rs['class_scores'] ts_resolver = rs['time_series'] all_series = rs['all_series'] if __name__=='__main__': # print blob_resolver.resolve(pid='http://demi.whoi.edu:5062/mvco/IFCB5_2012_243_142205_00179_blob.png') # print blob_resolver.resolve(pid='http://demi.whoi.edu:5062/Healy1101/IFCB8_2011_210_011714_00005_blob.png') app.run(host='0.0.0.0',port=app.config[PORT])
from oii.ifcb.classification import class_scores_mat2class_label_score, load_class_scores
from oii.resolver import parse_stream
from oii.ifcb.db import IfcbFeed
from oii.times import text2utcdatetime, ISO_8601_FORMAT

# time series name comes from the first CLI argument; default 'mvco'
try:
    time_series = sys.argv[1]
except:
    time_series = 'mvco'
config = get_config('./db.conf',time_series)
outdir = config.outdir
psql_connect = '%s dbname=%s' % (config.psql_connect, config.dbname)
R = parse_stream(config.resolver)
NAMESPACE='http://demi.whoi.edu/mvco/'
feed = IfcbFeed(psql_connect)
# fixed window covering the whole time series
start=strptime('2005-01-01T00:00:00Z',ISO_8601_FORMAT);
end=strptime('2014-01-01T00:00:00Z',ISO_8601_FORMAT);
with xa(psql_connect) as (c, db):
    bin_lids = list(feed.between(start,end))
    # fan out across N forked worker processes
    N=8
    pids = []
    for n in range(N):
        pid = os.fork()
        # NOTE(review): chunk is truncated here — the fork loop body
        # continues beyond this view