def main(): parser = OptionParser(usage=description) parser.add_option("--out", help="Output data directory", default="flydra2ros") parser.add_option("--db", help="Location of input Flydra db.") (options, args) = parser.parse_args() # @UnusedVariable if not options.db: raise Exception("Please define FlydraDB directory using `--db`.") verbose = True def printv(s): if verbose: print(s) db = FlydraDB(options.db, create=False) configuration = "use_for_report" for sample in db.list_samples(): if not db.has_table(sample, table=SACCADES_TABLE, version=configuration): continue group = guess_group(db, sample) magno = {} table = db.get_table(sample, SACCADES_TABLE, configuration) species = db.get_attr(sample, "species", "Dmelanogaster") magno["species"] = species magno["sample"] = sample # _name if db.has_table(sample, EXP_DATA_TABLE): exp_data = db.get_table(sample, EXP_DATA_TABLE) print(exp_data.dtype) timestamp = exp_data[:]["timestamp"] else: timestamp = None magno["use_for_report"] = convert_saccades_to_ros(table, timestamp) db.release_table(table) if timestamp is not None: db.release_table(exp_data) output_dir = os.path.join(options.out, group) filename = os.path.join(output_dir, "magno_%s.mat" % sample) printv("writing to %s" % filename) if not os.path.exists(output_dir): os.makedirs(output_dir) scipy.io.savemat(filename, {"magno": magno}, oned_as="row") # put species and sample print("closing") db.close()
def main(): parser = OptionParser() parser.add_option("--db", default='flydra_db', help="Data directory") parser.add_option("--image", default="luminance", help="Rendered image to use -- " " corresponding to image 'saccades_view_{start,stop}_X'") parser.add_option("--interactive", help="Start an interactive compmake session." " Otherwise run in batch mode. ", default=False, action="store_true") (options, args) = parser.parse_args() #@UnusedVariable if options.db is None: logger.error('Please specify a directory using --db.') sys.exit(-1) view_start = 'saccades_view_start_%s' % options.image view_stop = 'saccades_view_stop_%s' % options.image view_rstop = 'saccades_view_rstop_%s' % options.image db = FlydraDB(options.db, False) # all samples with enough data all_available = lambda x: db.has_saccades(x) and \ db.has_table(x, view_start) and \ db.has_table(x, view_stop) and \ db.has_table(x, view_rstop) samples = filter(all_available, db.list_samples()) set_namespace('saccade_view_show_%s' % options.image) for sample in samples: comp_prefix(sample) comp(create_and_write_report, options.db, sample, options.image) if options.interactive: # start interactive session compmake_console() else: # batch mode # try to do everything batch_command('make all') # start the console if we are not done # (that is, make all failed for some reason) todo = list(parse_job_list('todo')) if todo: logger.info('Still %d jobs to do.' % len(todo)) sys.exit(-2)
def main(): parser = OptionParser() parser.add_option("--db", default='flydra_db', help="FlydraDB directory") parser.add_option("--nocache", help="Ignores already computed results.", default=False, action="store_true") parser.add_option("--sigma", help="Kernel spread (degrees)", type="float", default=6) parser.add_option("--source", default='luminance', help="Source table") parser.add_option("--target", default='contrast', help="Destination table") (options, args) = parser.parse_args() kernel = get_contrast_kernel(sigma_deg=options.sigma, eyes_interact=False) kernel = kernel.astype('float32').copy('C') db = FlydraDB(options.db, False) if args: do_samples = args else: do_samples = db.list_samples() do_samples = filter(lambda x: db.has_table(x, options.source), do_samples) if not do_samples: raise Exception('No samples with table "%s" found. ' % options.source) for i, sample_id in enumerate(do_samples): logger.info('Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id)) if not db.has_sample(sample_id): raise Exception('Sample %s not found in db.' % sample_id) if not db.has_table(sample_id, options.source): raise Exception('Sample %s does not have table %s; skipping.' \ % (sample_id, options.source)) if db.has_table(sample_id, options.target) and not options.nocache: logger.info('Already computed "%s" for %s; skipping' % \ (options.target, sample_id)) continue luminance = db.get_table(sample_id, options.source) contrast = compute_contrast_for_table(luminance, kernel) db.set_table(sample_id, options.target, contrast) db.release_table(luminance) db.close()
def main():
    set_namespace('env_stats')

    parser = OptionParser()

    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    (options, args) = parser.parse_args()  # @UnusedVariable

    db = FlydraDB(options.db, False)

    outdir = os.path.join(options.db, 'out/environment_stats')

    images = ["luminance", "contrast", "luminance_w", "contrast_w",
              "hluminance_w", "hcontrast_w"]

    for image in images:
        samples = [x for x in db.list_samples()
                   if db.get_attr(x, 'stimulus', None) != 'nopost'
                   and db.has_table(x, image)]

        if not samples:
            print "No samples for %s" % image
            continue

        comp_prefix(image)

        data = comp(compute_environment_autocorrelation, options.db,
                    samples, image)

        comp(create_report, data, image, outdir)

    db.close()

    compmake_console()
def main(): parser = OptionParser(usage=description) parser.add_option("--db", default='flydra_db', help="FlydraDB directory") parser.add_option("--interactive", help="Start compmake interactive session." " Otherwise run in batch mode", default=False, action="store_true") (options, args) = parser.parse_args() #@UnusedVariable db = FlydraDB(options.db, False) set_namespace('video_contrast') samples = db.list_samples() if not samples: print 'No samples found' for id in samples: if db.has_rows(id) and db.has_table(id, 'contrast') and \ db.has_table(id, 'luminance'): config = {'sample': id, 'db': options.db} comp(pg, 'flydra_display_contrast', config, job_id="flydra_display_contrast:%s" % id) if options.interactive: # start interactive session compmake_console() else: # batch mode # try to do everything batch_command('make all') # start the console if we are not done # (that is, make all failed for some reason) todo = list(parse_job_list('todo')) if todo: print('Still %d jobs to do.' % len(todo)) sys.exit(-2)
class FlydraImage(Generator):
    ''' This block outputs the retinal images from a FlydraDB
        for a particular sample. '''

    Block.alias('flydra_db_image')

    Block.config('db', 'FlydraDB database directory')
    Block.config('sample', 'Sample ID -- such as "DATA20080611_191809".')
    Block.config('image', 'Which retinal image to display.')

    Block.output('obj_id')
    Block.output('frame')
    Block.output('image')

    def init(self):
        self.db = FlydraDB(self.config.db)

        if not self.db.has_sample(self.config.sample):
            raise ValueError('Sample "%s" not found.' % self.config.sample)

        if not self.db.has_table(self.config.sample, self.config.image):
            raise ValueError('Table "%s" not found for sample %s.'
                             % (self.config.image, self.config.sample))

        self.data = self.db.get_table(self.config.sample, self.config.image)

        self.next_index = 0
        if len(self.data) == 0:
            self.info('Empty rows for sample %s.' % self.config.sample)
            self.next_index = None

    def update(self):
        row = self.data[self.next_index]
        t = row['time']

        for field in ['obj_id', 'frame']:
            self.set_output(field, value=row[field], timestamp=t)

        self.set_output('image', row['value'], timestamp=t)

        self.next_index += 1
        if self.next_index == len(self.data):
            self.next_index = None

    def next_data_status(self):
        # TODO: put new interface
        if self.next_index is None:  # EOF
            return (False, None)
        else:
            return (True, self.data[self.next_index]['time'])
def compute_mean_generic(db, samples, image, operator):
    '''
        db: FlydraDB directory
        samples: list of IDs
    '''
    db = FlydraDB(db, False)

    results = {'samples': {}}

    ex = Expectation()

    for i, id in enumerate(samples):
        progress('Computing mean %s' % image, (i, len(samples)),
                 "Sample %s" % id)

        if not (db.has_sample(id) and db.has_table(id, image)):
            raise ValueError('No table "%s" for id %s' % (image, id))

        data = db.get_table(id, image)

        values = data[:]['value']

        this = operator(values)

        # print "id: %s len: %d %d" % (id, len(data), len(values))

        ex.update(this, len(data))
        results['samples'][id] = this

        db.release_table(data)

    results['all'] = ex.get_value()

    db.close()

    return results
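
# A minimal usage sketch for compute_mean_generic, not part of the original
# code: the directory name, sample ID, and the helper below are illustrative
# assumptions. `operator` is any callable reducing the per-frame 'value'
# arrays to one statistic; here we average the retinal image over frames.
def example_compute_mean_luminance():  # hypothetical helper, for illustration
    import numpy

    def mean_over_frames(values):
        # values: one retinal image per frame; average across frames
        return numpy.mean(values, axis=0)

    results = compute_mean_generic(db='flydra_db',
                                   samples=['DATA20080611_191809'],
                                   image='luminance',
                                   operator=mean_over_frames)
    # results['all'] aggregates over all samples (weighted by length);
    # results['samples'] maps each sample ID to its own statistic.
    return results['all'], results['samples']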
def main(): parser = OptionParser() parser.add_option("--db", default='flydra_db', help="FlydraDB directory") parser.add_option("--nocache", help="Ignores already computed results.", default=False, action="store_true") parser.add_option("--compute_mu", help="Computes mu and optic flow.", default=False, action="store_true") parser.add_option("--white", help="Computes luminance_w, with the arena" " painted white.", default=False, action="store_true") parser.add_option("--host", help="Use a remote rfsee. Otherwise, use local process.", default=None) (options, args) = parser.parse_args() #@UnusedVariable db = FlydraDB(options.db, False) # look for samples with the rows table do_samples = db.list_samples() do_samples = filter(lambda x: db.has_rows(x) and db.get_attr(x, 'stimulus') == 'nopost', do_samples) if not do_samples: raise Exception('Cannot find samples to hallucinate about.') print "Summary, including nopost." for s in sorted(get_db_stimulus_stats(db, include_nopost=True), key=(lambda x:-x.total_length)): print "stimulus: {s.stimulus:>10} samples: {s.total_number:>5} "\ " total length: {len:>5} minutes".format(s=s, len=s.total_length / (60 * 60)) stimulus_to_use = list(get_stimulus_to_use(db, len(do_samples))) for i, sample in enumerate(do_samples): stimulus = stimulus_to_use[i][0] print sample, stimulus if options.white: target = 'hluminance_w' else: target = 'hluminance' for i, sample_id in enumerate(do_samples): stimulus = stimulus_to_use[i][0] stimulus_xml = stimulus_to_use[i][1] print 'Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id) if not db.has_sample(sample_id): raise Exception('Sample %s not found in db.' % sample_id) if not db.has_rows(sample_id): raise Exception('Sample %s does not have rows table.' % sample_id) if options.compute_mu: if db.has_table(sample_id, 'nearness') and not options.nocache: logger.info('Already computed nearness for %s; skipping' % sample_id) continue else: if db.has_table(sample_id, target) and not options.nocache: logger.info('Already computed luminance for %s; skipping' % sample_id) continue rows = db.get_rows(sample_id) results = render(rows, stimulus_xml, host=options.host, compute_mu=options.compute_mu, white=options.white) db.set_table(sample_id, target, results['luminance']) if options.compute_mu: db.set_table(sample_id, 'hnearness', results['nearness']) db.set_table(sample_id, 'hretinal_velocities', results['retinal_velocities']) db.release_table(rows) db.close()
def main(): parser = OptionParser(usage=description) parser.add_option( "--db", default='flydra_db_directory', help="FlydraDB directory") parser.add_option("--model", help="ProcGraph model name.") parser.add_option( "--needs", help="Comma-separated list of tables required", default="rows,luminance") parser.add_option( "--interactive", help="Start compmake interactive session." " Otherwise run in batch mode", default=False, action="store_true") (options, args) = parser.parse_args() if options.model is None: print "Please specify the model." sys.exit(-3) print("Using FlydraDB directory %r." % options.db) db = FlydraDB(options.db, False) # TODO: make the storage inside options.db? set_namespace('run_pg_model_%s' % options.model) tables = options.needs.split(',') if args: samples = args for sample in samples: if not db.has_sample(sample): raise Exception('Unknown sample %r' % sample) else: samples = db.list_samples() if not samples: print 'No samples found' num_ok = 0 for id in samples: enough = all(map(lambda t: db.has_table(id, t), tables)) if not enough: continue num_ok += 1 config = {'sample': id, 'db': options.db} comp(pg, options.model, config, job_id=id) logger.info( "Found %d/%d samples with tables %s." % (num_ok, len(samples), tables)) if options.interactive: # start interactive session compmake_console() else: # batch mode # try to do everything batch_command('make all') # start the console if we are not done # (that is, make all failed for some reason) todo = list(parse_job_list('todo')) if todo: logger.info('Still %d jobs to do.' % len(todo)) sys.exit(-2)
class SamplesDB:

    def __init__(self, data, verbose=False):
        ''' data: base directory '''

        if not os.path.exists(data) or not os.path.isdir(data):
            raise Exception('Could not open directory %s' % data)

        self.data = data

        # self.use_cache = True
        self.use_cache = False
        self.use_flydra_db = True

        if self.use_cache:
            self.open_shelve()

        if self.use_flydra_db:
            self.open_flydra_db()

        self.groups = {}
        #self.group2samples = {}

        # maps id to .mat file
        self.sample2expmat = {}
        # maps id to .pickle file
        self.sample2exppickle = {}

        # list of all configurations
        self.configurations = set()

        # maps sample -> group
        self.sample2group = {}

        #print "Loading data in %s" % data

        for group in os.listdir(data):
            group_dir = os.path.join(data, group)
            if not os.path.isdir(group_dir):
                continue

            # print "Reading group %s" % group
            group_record = Group()

            for file in [file for file in os.listdir(group_dir)
                         if file.startswith('data_') and file.endswith('.mat')]:
                id = file[5:-4]
                group_record.samples.add(id)
                self.sample2expmat[id] = os.path.join(group_dir, file)
                self.sample2group[id] = group

            for file in [file for file in os.listdir(group_dir)
                         if file.startswith('data_') and file.endswith('.pickle')]:
                id = file[5:-7]
                group_record.samples.add(id)
                self.sample2exppickle[id] = os.path.join(group_dir, file)
                self.sample2group[id] = group

            group_record.has_experimental_data = len(group_record.samples) > 0

            processed_dir = os.path.join(group_dir, 'processed')

            if not os.path.exists(processed_dir):
                if verbose:
                    print "No processed data found for %s." % group
            else:
                for conf in os.listdir(processed_dir):
                    saccades = os.path.join(processed_dir, conf, 'saccades.mat')
                    if os.path.exists(saccades):
                        group_record.configurations[conf] = saccades
                        # add to general list
                        self.configurations.add(conf)
                    # else:
                    #     conf_dir = os.path.join(processed_dir, conf)
                    #     for file in [file for file in os.listdir(conf_dir)
                    #                  if file.startswith('processed_data_')
                    #                  and file.endswith('.mat')]:
                    #         id = file[5:-7]

            # if we don't have exp data, get list of samples from
            # processed data
            if group_record.configurations and \
               not group_record.has_experimental_data:
                saccades = saccades_read_mat(saccades)
                group_record.samples = set(numpy.unique(saccades['sample']))
                for sample in group_record.samples:
                    self.sample2group[sample] = group

            if len(group_record.samples) > 0:
                self.groups[group] = group_record
                print "has it", group, group_record.has_experimental_data

    def open_shelve(self):
        shelve_fname = os.path.join(self.data, 'shelve')
        self.shelve = shelve.open(shelve_fname,
                                  protocol=pickle.HIGHEST_PROTOCOL)

    def open_flydra_db(self):
        self.flydra_db = FlydraDB(os.path.join(self.data, 'sac_flydra_db'))

    def list_groups(self):
        """ Returns a list of the groups. """
        return natsorted(list(self.groups.keys()))

    def list_all_samples(self):
        """ Returns a list of all samples for all groups. """
        return natsorted(list(self.sample2group.keys()))

    def list_samples(self, group):
        """ Lists the samples in the given group. """
        return natsorted(list(self.groups[group].samples))

    def list_all_configurations(self):
        """ Lists all the configurations present in the data. """
        return natsorted(self.configurations)

    def list_configurations(self, group):
        """ Lists the configurations for the given group. """
        return natsorted(list(self.groups[group].configurations.keys()))

    def get_group_for_sample(self, sample):
        """ Returns the group associated to the sample. """
        return self.sample2group[sample]

    def get_saccades_for_group(self, group, configuration):
        """ Returns the saccades for the given group and configuration.
            If configuration is not passed, we use the default. """
        if self.use_flydra_db:
            table = 'groupsaccades_%s' % configuration
            if self.flydra_db.has_sample(group) and \
               self.flydra_db.has_table(group, table):
                t = self.flydra_db.get_table(group, table)
                #value = t.copy()
                value = t
                #self.flydra_db.release_table(t)
                return value

        if self.use_cache:
            key = str(('get_saccades_for_group', group, configuration))
            if key in self.shelve:
                return self.shelve[key]

        filename = self.groups[group].configurations[configuration]
        saccades = saccades_read_mat(filename)

        if self.use_flydra_db:
            if not self.flydra_db.has_sample(group):
                self.flydra_db.add_sample(group)
            self.flydra_db.set_table(group, table, saccades)

        if self.use_cache:
            self.shelve[key] = saccades

        return saccades

    def group_has_experimental_data(self, group):
        """ Returns true if this group has the raw orientation data.
            (mamarama has only saccades data.) """
        return self.groups[group].has_experimental_data

    def has_experimental_data(self, sample):
        """ Returns true if this sample has the raw orientation data. """
        return sample in self.sample2expmat or sample in self.sample2exppickle

    def get_saccades_for_sample(self, sample, configuration):
        """ Returns the saccades for the given sample and configuration.
            If configuration is not passed, we use the default. """
        if self.use_flydra_db:
            table = 'saccades_%s' % configuration
            if self.flydra_db.has_sample(sample) and \
               self.flydra_db.has_table(sample, table):
                t = self.flydra_db.get_table(sample, table)
                #value = t.copy()
                value = t
                #self.flydra_db.release_table(t)
                return value

        if self.use_cache:
            key = str(('get_saccades_for_sample', sample, configuration))
            if key in self.shelve:
                return self.shelve[key]

        group = self.get_group_for_sample(sample)
        group_saccades = self.get_saccades_for_group(group, configuration)

        print group_saccades[0].dtype
        # with open('tmp.pickle','w') as f:
        #     pickle.dump(f, group_saccades)

        mine = group_saccades[:]['sample'] == sample

        saccades = group_saccades[mine]

        if len(saccades) == 0:
            raise Exception('No saccades found for %s' % sample)

        if self.use_flydra_db:
            if not self.flydra_db.has_sample(sample):
                self.flydra_db.add_sample(sample)
            self.flydra_db.set_table(sample, table, saccades)

        if self.use_cache:
            self.shelve[key] = saccades

        return saccades

    def get_experimental_data(self, sample):
        # if self.use_flydra_db:
        #     table = 'tethered_data'
        #     if self.flydra_db.has_sample(sample) and \
        #        self.flydra_db.has_table(sample, table):
        #         t = self.flydra_db.get_table(sample, table)
        #         #value = t.copy()
        #         value = t
        #         #self.flydra_db.release_table(t)
        #         return value

        if self.use_cache:
            if sample in self.shelve:
                return self.shelve[sample]

        if sample in self.sample2expmat:
            data = scipy.io.loadmat(self.sample2expmat[sample], squeeze_me=True)
            data = data['data']

            # convert from array to hash
            assert isinstance(data, numpy.ndarray)
            data = dict(map(lambda field: (field, data[field]),
                            data.dtype.fields))

            # convert from array to string
            for k in list(data.keys()):
                if data[k].dtype.char == 'U':
                    data[k] = str(data[k])

            # make sure everything is a 1d array
            def as1d(x):
                if x.dtype == 'object':
                    x = x.tolist()
                return x.reshape(len(x))

            data['exp_orientation'] = as1d(data['exp_orientation'])
            data['exp_timestamps'] = as1d(data['exp_timestamps'])

        elif sample in self.sample2exppickle:
            with open(self.sample2exppickle[sample], 'rb') as f:
                data = cPickle.load(f)

        else:
            raise Exception('no data for sample %s found' % sample)

        # if self.use_flydra_db:
        #     if not self.flydra_db.has_sample(sample):
        #         self.flydra_db.add_sample(sample)
        #     self.flydra_db.set_table(sample, table, data)

        if self.use_cache:
            self.shelve[sample] = data

        return data

    def __getstate__(self):
        # do not pickle the shelve
        all = dict(self.__dict__)
        all['shelve'] = None
        all['flydra_db'] = None
        return all
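
# A minimal sketch of querying SamplesDB, not part of the original code: it
# assumes a base directory (here the hypothetical 'saccade_data') laid out as
# <group>/data_<sample>.{mat,pickle} plus <group>/processed/<conf>/saccades.mat,
# which is the layout the constructor above scans for.
def example_list_saccades():  # hypothetical helper, for illustration
    db = SamplesDB('saccade_data')
    for group in db.list_groups():
        for configuration in db.list_configurations(group):
            saccades = db.get_saccades_for_group(group, configuration)
            print group, configuration, len(saccades)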
def main(): parser = OptionParser(usage=description) parser.add_option("--db", default='flydra_db', help="FlydraDB directory") parser.add_option("--nocache", help="Ignores already computed results.", default=False, action="store_true") parser.add_option("--white", help="Computes luminance_w, with the arena" " painted white.", default=False, action="store_true") parser.add_option("--host", help="Use a remote rfsee. Otherwise," "use local process.", default=None) (options, args) = parser.parse_args() if options.db is None: logger.error('Please specify a directory using --db.') sys.exit(-1) db = FlydraDB(options.db) if args: do_samples = args else: # look for samples with the rows table all_samples = db.list_samples() do_samples = filter(lambda x: db.has_saccades(x) and db.has_attr(x, 'stimulus_xml'), all_samples) logger.info('Found %d/%d samples with saccades and stimulus info.' % (len(do_samples), len(all_samples))) image = 'luminance_w' if options.white else 'luminance' target_start = 'saccades_view_start_%s' % image target_stop = 'saccades_view_stop_%s' % image target_rstop = 'saccades_view_rstop_%s' % image target_sstop = 'saccades_view_sstop_%s' % image target_random = 'saccades_view_random_%s' % image for i, sample_id in enumerate(do_samples): logger.info('Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id)) if not db.has_sample(sample_id): raise Exception('Sample %s not found in db.' % sample_id) if not db.has_saccades(sample_id): raise Exception('Sample %s does not have saccades table.' % sample_id) if not db.has_attr(sample_id, 'stimulus_xml'): raise Exception('Sample %s does not have the stimulus' ' information ("stimulus_xml")' % sample_id) # todo: check stale dependencies if db.has_table(sample_id, target_start) and \ db.has_table(sample_id, target_stop) and \ db.has_table(sample_id, target_rstop) and \ db.has_table(sample_id, target_sstop) and \ db.has_table(sample_id, target_random) and \ not options.nocache: logger.info('Targets already computed for %s; skipping' % sample_id) continue # Get the stimulus description stimulus_xml = db.get_attr(sample_id, 'stimulus_xml') saccades = db.get_saccades(sample_id) view_start, view_stop, view_rstop, view_random, view_sstop = \ render_saccades_view( saccades=saccades, stimulus_xml=stimulus_xml, host=options.host, white=options.white) db.set_table(sample_id, target_start, view_start) db.set_table(sample_id, target_stop, view_stop) db.set_table(sample_id, target_rstop, view_rstop) db.set_table(sample_id, target_random, view_random) db.set_table(sample_id, target_sstop, view_sstop) db.release_table(saccades) db.close()
def main(): parser = OptionParser() parser.add_option("--db", default='flydra_db', help="FlydraDB directory") parser.add_option("--nocache", help="Ignores already computed results.", default=False, action="store_true") parser.add_option("--compute_mu", help="Computes mu and optic flow.", default=False, action="store_true") parser.add_option("--white", help="Computes luminance_w, with the arena" " painted white.", default=False, action="store_true") parser.add_option("--host", help="Use a remote rfsee. Otherwise, use local process.", default=None) (options, args) = parser.parse_args() db = FlydraDB(options.db, False) if args: do_samples = args else: # look for samples with the rows table do_samples = db.list_samples() do_samples = filter(lambda x: db.has_rows(x), do_samples) if options.white: target = 'luminance_w' else: target = 'luminance' for i, sample_id in enumerate(do_samples): print 'Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id) if not db.has_sample(sample_id): raise Exception('Sample %r not found in db.' % sample_id) if not db.has_rows(sample_id): raise Exception('Sample %r does not have rows table.' % sample_id) if not db.has_attr(sample_id, 'stimulus_xml'): raise Exception('Sample %r does not have the "stimulus_xml" attribute.' %sample_id) if options.compute_mu: if db.has_table(sample_id, 'nearness') and not options.nocache: logger.info('Already computed nearness for %r; skipping' % sample_id) continue else: if db.has_table(sample_id, target) and not options.nocache: logger.info('Already computed luminance for %r; skipping' % sample_id) continue rows = db.get_rows(sample_id) stimulus_xml = db.get_attr(sample_id, 'stimulus_xml') results = render(rows, stimulus_xml, host=options.host, compute_mu=options.compute_mu, white=options.white) db.set_table(sample_id, target, results['luminance']) if options.compute_mu: db.set_table(sample_id, 'nearness', results['nearness']) db.set_table(sample_id, 'retinal_velocities', results['retinal_velocities']) db.release_table(rows) db.close()
def main(): parser = OptionParser() parser.add_option("--db", default='flydra_db', help="Data directory") parser.add_option("--image", help="Which image to plot.") parser.add_option("--filter", help="Which procgraph filter to use to plot.", default="flydra_simple_video_filter") parser.add_option("--interactive", help="Start compmake interactive session." " Otherwise run in batch mode", default=False, action="store_true") (options, args) = parser.parse_args() if options.image is None: print "Usage: %s [--db DB] --image <image> [ids]" % sys.argv[0] sys.exit(-1) db = FlydraDB(options.db) set_namespace('video_image_%s' % options.image) if args: samples = args else: # look for samples with the rows table samples = db.list_samples() samples = filter(lambda x: db.has_table(x, options.image), samples) if not samples: raise Exception('No samples found at all with available image "%s".' % \ options.image) for id in samples: if not db.has_table(id, options.image): raise Exception('Sample %s does not have table "%s".' % (id, options.image)) config = {'sample': id, 'db': options.db, 'image': options.image, 'filter': options.filter} comp(pg, 'flydra_simple_video', config, job_id="%s" % id) if options.interactive: # start interactive session compmake_console() else: # batch mode # try to do everything batch_command('make all') # start the console if we are not done # (that is, make all failed for some reason) todo = list(parse_job_list('todo')) if todo: print('Still %d jobs to do.' % len(todo)) sys.exit(-2)
def main_filter(args):
    parser = LenientOptionParser()

    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    parser.add_option("--min_frames_per_track", default=400,
                      help="Minimum number of frames per track [= %default]")

    parser.add_option("--confirm_problems",
                      help="Stop interactively on problems with log files"
                           " (e.g.: cannot find valid obj_ids) [default: %default]",
                      default=False, action="store_true")

    parser.add_option("--dynamic_model_name",
                      help="Smoothing dynamical model [default: %default]",
                      default="mamarama, units: mm")

    parser.add_option("--debug_output", help="Creates debug figures.",
                      default=False, action="store_true")

    parser.add_option("--nocache", help="Ignores already computed results.",
                      default=False, action="store_true")

    parser.add_option("--smoothing", help="Uses Kalman-smoothed data.",
                      default=False, action="store_true")

    (options, args) = parser.parse_args(args)

    table_name = 'rows'  # TODO: use constant
    table_version = "smooth" if options.smoothing else "kf"

    if not args:
        raise UserError('No files or directories specified.')

    if not os.path.exists(options.db):
        os.makedirs(options.db)

    db = FlydraDB(options.db)

    good_files = get_good_files(where=args, pattern="*.kh5",
                                confirm_problems=options.confirm_problems)

    if len(good_files) == 0:
        logger.error("No good files to process")
        sys.exit(1)

    n = len(good_files)
    for i in range(n):
        (filename, obj_ids, stim_fname) = good_files[i]

        logger.info('Sample %s/%s: %s' % (i + 1, n, filename))

        # only maintain basename
        stim = os.path.splitext(os.path.basename(stim_fname))[0]
        sample_id = os.path.splitext(os.path.basename(filename))[0]

        logger.info("File %d/%d %s %s %s "
                    % (i, n, str(filename), str(obj_ids), stim_fname))

        if (db.has_sample(sample_id)
            and db.has_table(sample_id, table_name, table_version)
            and not options.nocache):
            logger.info('Sample %r already computed; skipping.'
                        ' (use --nocache to ignore)' % sample_id)
            continue

        all_data = []

        for obj_id, rows in get_good_smoothed_tracks(  # @UnusedVariable
                filename=filename,
                obj_ids=obj_ids,
                min_frames_per_track=options.min_frames_per_track,
                dynamic_model_name=options.dynamic_model_name,
                use_smoothing=options.smoothing):

            filtered = filter_rows(rows, options)
            all_data.append(filtered)

        if not all_data:
            logger.info('Not enough data found for %r; skipping.' % sample_id)
            continue

        if not db.has_sample(sample_id):
            db.add_sample(sample_id)

        db.set_attr(sample_id, 'stim_fname', stim_fname)
        db.set_attr(sample_id, 'stimulus', stim)

        stim_xml = open(stim_fname).read()
        db.set_attr(sample_id, 'stimulus_xml', stim_xml)

        geometry = get_posts_info(stim_xml)
        print(geometry)
        db.set_attr(sample_id, 'posts', geometry['posts'])
        if 'arena' in geometry:
            db.set_attr(sample_id, 'arena', geometry['arena'])

        db.add_sample_to_group(sample_id, stim)
        if stim != 'nopost':
            db.add_sample_to_group(sample_id, 'posts')

        rows = numpy.concatenate(all_data)

        db.set_table(sample=sample_id,
                     table=table_name,
                     data=rows,
                     version=table_version)

        db.set_attr(sample_id, 'filter_time',
                    datetime.now().strftime("%Y%m%d_%H%M%S"))
        db.set_attr(sample_id, 'filter_host', platform.node())
        db.set_attr(sample_id, 'filter_user', get_user())
        db.set_attr(sample_id, 'filter_python_version',
                    platform.python_version())
        db.set_attr(sample_id, 'filter_numpy_version', numpy.version.version)

    db.close()
def main(): parser = OptionParser(usage=description) parser.add_option("--db", default='flydra_db', help="Data directory") parser.add_option("--interactive", default=False, action="store_true", help="Start a compmake interactive session." " Otherwise run in batch mode") parser.add_option("--empty_group_ok", default=False, action="store_true", help="do not give up if one group does not have samples ") (options, args) = parser.parse_args() #@UnusedVariable if options.db is None: logger.error('Please specify a directory using --db.') sys.exit(-1) outdir = os.path.join(options.db, 'out/saccade_view_joint_analysis') db = FlydraDB(options.db, False) set_namespace('saccade_view_joint_analysis') # for each image we do a different report data = {} for image in images: # For each image we have different tables tables = ["saccades_view_%s_%s" % (view.id, image.id) for view in views] all_available = [x for x in db.list_samples() if db.has_saccades(x) and all([db.has_table(x, table) for table in tables])] # We further divide these in post and nopost groups_samples = { 'posts': filter(lambda s: db.get_attr(s, 'stimulus') != 'nopost', all_available), 'noposts': filter(lambda s: db.get_attr(s, 'stimulus') == 'nopost', all_available) } # now, for each group for group in groups: is_hallucination = image.id.startswith('h') white_arena = image.id.endswith('_w') if (not is_hallucination) and white_arena and (group.id == 'noposts'): # if there are not posts, it's useless continue samples = groups_samples[group.id] if not samples: print "Warning: no samples for %s/%s" % (image.id, group.id) continue # global statistics key = (group.id, image.id) job_id = "%s-%s" % key data[key] = comp(compute_stats, options.db, samples, image.id, job_id=job_id) for saccades_set, direction in prod(saccades_sets, dirs): view2result = {} for i, view in enumerate(views): table = tables[i] key = Exp(image=image.id, group=group.id, view=view.id, dir=direction.id, saccades_set=saccades_set.id) job_id = "%s-%s-%s-%s-%s" % key result = comp(compute_saccade_stats, options.db, samples, table, [direction.args, saccades_set.args], job_id=job_id) data[key] = result view2result[view.id] = result page_id = make_page_id(image=image.id, group=group.id, dir=direction.id, saccades_set=saccades_set.id) comp(render_page, view2result, outdir, page_id, job_id=page_id) for saccades_set in saccades_sets: table = "saccades_view_start_%s" % (image.id) exp_id = '%s_%s_%s' % (image.id, group.id, saccades_set.id) results = comp(bet_on_flies, options.db, samples, table, saccades_set, job_id='lasvegas-' + exp_id + '-bet') page_id = exp_id comp(las_vegas_report, os.path.join(outdir, 'lasvegas'), page_id, results, job_id='lasvegas-' + exp_id + '-report') db.close() comp(add_comparisons, data, outdir) filename = os.path.join(outdir, 'gui.html') comp(create_gui_new, filename, menus) if options.interactive: # start interactive session compmake_console() else: # batch mode # try to do everything batch_command('make all') # exit with error if we are not done # (that is, make all failed for some reason) todo = list(parse_job_list('todo')) if todo: logger.info('Still %d jobs to do.' % len(todo)) sys.exit(-2)