def main():
    set_namespace('env_stats')
    
    parser = OptionParser()

    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    (options, args) = parser.parse_args() #@UnusedVariable


    db = FlydraDB(options.db, False)
    outdir = os.path.join(options.db, 'out/environment_stats')

    images = ["luminance", "contrast", "luminance_w", "contrast_w",
              "hluminance_w", "hcontrast_w"]
              
    for image in images:
        samples = [x for x in db.list_samples() 
                      if db.get_attr(x, 'stimulus', None) != 'nopost' and
                      db.has_table(x, image)]
        
        if not samples:
            print "No samples for %s" % samples
            continue
            
        comp_prefix(image)        
        data = comp(compute_environment_autocorrelation, options.db, samples, image)
        
        comp(create_report, data, image, outdir)
    

    db.close()
    
    compmake_console()
def main():
    parser = OptionParser(usage=description)
    parser.add_option("--out", help="Output data directory", default="flydra2ros")
    parser.add_option("--db", help="Location of input Flydra db.")

    (options, args) = parser.parse_args()  # @UnusedVariable

    if not options.db:
        raise Exception("Please define FlydraDB directory using `--db`.")

    verbose = True

    def printv(s):
        if verbose:
            print(s)

    db = FlydraDB(options.db, create=False)

    configuration = "use_for_report"

    for sample in db.list_samples():
        if not db.has_table(sample, table=SACCADES_TABLE, version=configuration):
            continue

        group = guess_group(db, sample)
        magno = {}

        table = db.get_table(sample, SACCADES_TABLE, configuration)

        species = db.get_attr(sample, "species", "Dmelanogaster")

        magno["species"] = species
        magno["sample"] = sample  # _name

        if db.has_table(sample, EXP_DATA_TABLE):
            exp_data = db.get_table(sample, EXP_DATA_TABLE)
            print(exp_data.dtype)
            timestamp = exp_data[:]["timestamp"]
        else:
            timestamp = None

        magno["use_for_report"] = convert_saccades_to_ros(table, timestamp)

        db.release_table(table)
        if timestamp is not None:
            db.release_table(exp_data)

        output_dir = os.path.join(options.out, group)
        filename = os.path.join(output_dir, "magno_%s.mat" % sample)

        printv("writing to %s" % filename)

        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        scipy.io.savemat(filename, {"magno": magno}, oned_as="row")

        # put species and sample
    print("closing")
    db.close()
def main():
    parser = OptionParser()

    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    parser.add_option("--nocache", help="Ignores already computed results.",
                      default=False, action="store_true")    
    
    parser.add_option("--sigma", help="Kernel spread (degrees)",
                      type="float", default=6)
   
    parser.add_option("--source", default='luminance', help="Source table")
    parser.add_option("--target", default='contrast', help="Destination table")
    
    (options, args) = parser.parse_args()

    kernel = get_contrast_kernel(sigma_deg=options.sigma, eyes_interact=False)
    kernel = kernel.astype('float32').copy('C')
    
    db = FlydraDB(options.db, False)
    
    if args:
        do_samples = args
    else:
        do_samples = [x for x in db.list_samples()
                      if db.has_table(x, options.source)]
        
    if not do_samples:
        raise Exception('No samples with table "%s" found. ' % options.source)
    
    for i, sample_id in enumerate(do_samples):
        
        logger.info('Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id))
        
        if not db.has_sample(sample_id):
            raise Exception('Sample %s not found in db.' % sample_id)
        
        if not db.has_table(sample_id, options.source):
            raise Exception('Sample %s does not have table %s.'
                            % (sample_id, options.source))
        
        if db.has_table(sample_id, options.target) and not options.nocache:
            logger.info('Already computed "%s" for %s; skipping' % \
                (options.target, sample_id))
            continue

        luminance = db.get_table(sample_id, options.source)
    
        contrast = compute_contrast_for_table(luminance, kernel)
        
        db.set_table(sample_id, options.target, contrast)
        
        db.release_table(luminance)
        
    db.close()
def main():
    parser = OptionParser()
    
    parser.add_option("--db", help="FlydraDB directory")
    
    (options, args) = parser.parse_args() #@UnusedVariable
        
    if not options.db:
        raise Exception('Please define the FlydraDB directory using `--db`.')
    
    db = FlydraDB(options.db)  
    
    choose = {                 
        'andrea_detector': {
         'Dpseudoobscura': 'threshold16',
         'Dananassae': 'threshold10',
         'Dhydei': 'threshold7',
         'Dmelanogaster': 'threshold1',
         'Darizonae': 'threshold9',
         'Dmojavensis': 'threshold7',
         'peter': 'peters_conf',
         'mamaramaposts': 'use_for_report',
         'mamaramanoposts': 'use_for_report'
        },
        'ros_detector': {
         'Dpseudoobscura': 'filt_kalman-amp_th_10-th_4',
         'Dananassae': 'filt_kalman-amp_th_10-th_4',
         'Dhydei': 'filt_kalman-amp_th_10-th_4',
         'Dmelanogaster': 'filt_kalman-amp_th_10-th_4',
         'Darizonae': 'filt_kalman-amp_th_10-th_4',
         'Dmojavensis': 'filt_kalman-amp_th_10-th_4'
        }
    }
    
    for official, choices in choose.items():
        for group, version in choices.items():
            print("Group %r: %r -> %r" % (group, version, official))
            samples = db.list_samples_for_group(group)
            for sample in samples:
                print(" sample %r" % sample)
                table = db.get_table(sample, SACCADES_TABLE, version=version) 
                
                copy = numpy.array(table, dtype=table.dtype)
                db.set_table(sample=sample, table=SACCADES_TABLE,
                             version=official, data=copy)
                db.release_table(table)


    db.close()
def main():
    parser = OptionParser(usage=description)
    parser.add_option("--saccade_data", help="Main data directory",
                      default='saccade_data')
    parser.add_option("--db", help='Location of output Flydra db.')
        
    (options, args) = parser.parse_args() #@UnusedVariable
    
    if not options.db:
        raise Exception('Please define FlydraDB directory using `--db`.')
    
    verbose = True
    
    flydra_db = FlydraDB(options.db, create=True)
    
    matlab_dir = options.saccade_data
    for group in os.listdir(matlab_dir):
        group_dir = os.path.join(matlab_dir, group)
        if not os.path.isdir(group_dir):                
            continue
        
        if verbose:
            print("Opening {0}".format(group))
        
        for file in [f for f in os.listdir(group_dir)
                     if f.startswith('magno_') and f.endswith('.mat')]:
            
            sample = file[file.index('_') + 1:file.index('.')]
            
            if verbose:
                print("  - Considering sample {0}".format(sample.__repr__()))
        
            if not flydra_db.has_sample(sample):
                flydra_db.add_sample(sample)
            flydra_db.add_sample_to_group(sample, group)
#           flydra_db.add_sample_to_group(sample, 'ros')
            
            filename = os.path.join(group_dir, file)
            
            exp_data, attributes = read_raw_data(filename)
            
            consider_importing_processed(flydra_db, sample, exp_data, attributes)
            
            flydra_db.set_attr(sample, 'species', attributes['species'])
            flydra_db.set_attr(sample, 'background', attributes['background'])            
            flydra_db.set_table(sample, EXP_DATA_TABLE, exp_data)
            
    flydra_db.close()
def compute_environment_autocorrelation(db, samples, image, maxlag=50):
    nsensors = 1398
    results = numpy.ndarray(shape=(nsensors, 2 * maxlag + 1))
    
    db = FlydraDB(db, create=False)
    
    block_size = 50
    num_blocks = int(numpy.ceil(nsensors * 1.0 / block_size))
    for b in range(num_blocks):
        start = block_size * b
        stop = min(start + block_size, nsensors)
        
        progress('Computing autocorrelation', (b, num_blocks))
         
        data = [[] for i in range(nsensors)]
        
        for k, sample in enumerate(samples):
            progress('getting data', (k, len(samples)), sample)
            table = db.get_table(sample, image)
        
            chunk = (table[:]['value'][:, start:stop]).copy()
            for j, i in enumerate(range(start, stop)): 
                data[i].append(chunk[:, j])
            
            db.release_table(table)
            
        for j, i in enumerate(range(start, stop)):
            progress('Computing correlation', (j, stop - start))
            x = numpy.concatenate(data[i])
            corr, lags = xcorr(x, maxlag=maxlag)
            assert(len(lags) == 2 * maxlag + 1)
            results[i, :] = corr
        
    db.close()
    
    data = {
        'results': results,
        'lags': lags
    }
    
    return data
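
# A minimal, hedged sketch of the `xcorr` helper assumed above: it is expected
# to return (corr, lags), the normalized autocorrelation of x at lags
# -maxlag..maxlag, so that len(lags) == 2 * maxlag + 1. The real helper may
# normalize differently; `xcorr_sketch` is only illustrative.
import numpy

def xcorr_sketch(x, maxlag):
    x = numpy.asarray(x, dtype=float)
    x = x - x.mean()                             # remove the mean first
    full = numpy.correlate(x, x, mode='full')    # covers lags -(n-1)..(n-1)
    mid = len(x) - 1                             # index of lag 0
    corr = full[mid - maxlag:mid + maxlag + 1]   # keep -maxlag..maxlag
    corr = corr / full[mid]                      # normalize so lag 0 == 1
    lags = numpy.arange(-maxlag, maxlag + 1)
    return corr, lags
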
def get_all_data_for_signal(db, samples, interval_function,
                            signal, signal_component):
    
    db = FlydraDB(db, False)
    
    all_signals = []
    for id in samples:
        
        if not db.has_rows(id):
            logger.warning('Could not find rows table for %s; skipping.' % 
                           (id))
            continue
        
        rows_table = db.get_rows(id)
        
        try:
            interval = interval_function(db, id, rows_table) 
        except Exception as e:
            logger.warning('Cannot compute interval for sample %s: %s '\
                           % (id, e))
            db.release_table(rows_table)
            continue
        
        rows = rows_table[interval]
        
        s = extract_signal(rows, signal, signal_component)
        
        all_signals.append(s)
        
        db.release_table(rows_table)
    
    db.close()
    
    return numpy.concatenate(all_signals)
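
# A hedged example of what `interval_function` above could look like: it gets
# (db, sample_id, rows_table) and must return something usable to index the
# rows table, such as a slice. Both helpers below are hypothetical
# illustrations, not part of the original code.
import numpy

def interval_all(db, sample_id, rows_table):
    ''' Keep every row of the sample. '''
    return slice(0, len(rows_table))

def interval_first_minute(db, sample_id, rows_table):
    ''' Keep only the first 60 seconds, assuming the rows table has a sorted
        'timestamp' field. '''
    timestamp = rows_table[:]['timestamp']
    n = int(numpy.searchsorted(timestamp, timestamp[0] + 60.0))
    return slice(0, n)
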
def compute_mean_generic(db, samples, image, operator):
    '''
    db: FlydraDB directory
    samples: list of sample IDs
    image: name of the per-sample table whose 'value' field is averaged
    operator: function applied to the 'value' array of each sample
    '''
    db = FlydraDB(db, False)
    
    results = { 'samples': {} }
    
    ex = Expectation()
    
    for i, id in enumerate(samples):
        progress('Computing mean %s' % image,
                 (i, len(samples)), "Sample %s" % id)
    
        if not (db.has_sample(id) and db.has_table(id, image)):
            raise ValueError('No table "%s" for id %s' % (image, id))
        
        data = db.get_table(id, image)
        
        values = data[:]['value']
        
        this = operator(values)
        
        # print "id: %s   len: %d  %d" % (id, len(data), len(values))
        ex.update(this, len(data))
    
        results['samples'][id] = this
            
        db.release_table(data)

    results['all'] = ex.get_value()
        
    db.close()
    
    return results 
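
# A hedged sketch of the `Expectation` accumulator used above, assumed to be a
# weighted running mean: update(value, weight) accumulates, get_value() returns
# the weighted average. Only illustrative; the real class may differ.
import numpy

class ExpectationSketch(object):
    def __init__(self):
        self.accum = None          # weighted sum of values seen so far
        self.total_weight = 0.0

    def update(self, value, weight=1.0):
        value = numpy.asarray(value, dtype=float)
        if self.accum is None:
            self.accum = numpy.zeros_like(value)
        self.accum += value * weight
        self.total_weight += weight

    def get_value(self):
        ''' Weighted mean of everything passed to update(). '''
        return self.accum / self.total_weight
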
def main():
    
    parser = OptionParser()
    
    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    parser.add_option("--nocache", help="Ignores already computed results.",
                      default=False, action="store_true")

    parser.add_option("--compute_mu", help="Computes mu and optic flow.",
                      default=False, action="store_true")
    
    parser.add_option("--white", help="Computes luminance_w, with the arena"
                      " painted white.", default=False, action="store_true")
    
    parser.add_option("--host", help="Use a remote rfsee. Otherwise, use local process.",
                       default=None)
    
    (options, args) = parser.parse_args() #@UnusedVariable

    db = FlydraDB(options.db, False)

    # look for samples with the rows table
    do_samples = [x for x in db.list_samples()
                  if db.has_rows(x) and
                  db.get_attr(x, 'stimulus') == 'nopost']
    if not do_samples:
        raise Exception('Cannot find samples to hallucinate about.')
        
    print "Summary, including nopost."
    for s in sorted(get_db_stimulus_stats(db, include_nopost=True),
                       key=(lambda x:-x.total_length)):
        print "stimulus: {s.stimulus:>10}  samples: {s.total_number:>5}  "\
              " total length: {len:>5} minutes".format(s=s, len=s.total_length / (60 * 60))
        
    
      
    stimulus_to_use = list(get_stimulus_to_use(db, len(do_samples)))
    
    for i, sample in enumerate(do_samples):
        stimulus = stimulus_to_use[i][0]
        print("%s %s" % (sample, stimulus))
    
    if options.white:
        target = 'hluminance_w'
    else:
        target = 'hluminance'
    
    for i, sample_id in enumerate(do_samples):
        stimulus = stimulus_to_use[i][0]
        stimulus_xml = stimulus_to_use[i][1]
            
        print('Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id))
        
        if not db.has_sample(sample_id):
            raise Exception('Sample %s not found in db.' % sample_id)
        
        if not db.has_rows(sample_id):
            raise Exception('Sample %s does not have rows table.' % sample_id)
         
        if options.compute_mu:
            if db.has_table(sample_id, 'nearness') and not options.nocache:
                logger.info('Already computed nearness for %s; skipping' % sample_id)
                continue
        else:
            if db.has_table(sample_id, target) and not options.nocache:
                logger.info('Already computed luminance for %s; skipping' % sample_id)
                continue
        
        rows = db.get_rows(sample_id)
         
        results = render(rows, stimulus_xml, host=options.host,
                         compute_mu=options.compute_mu, white=options.white)
   
        db.set_table(sample_id, target, results['luminance'])
        
        if options.compute_mu:
            db.set_table(sample_id, 'hnearness', results['nearness'])
            db.set_table(sample_id, 'hretinal_velocities',
                         results['retinal_velocities'])
        
        db.release_table(rows)    
    
    db.close()
def main():
    
    parser = OptionParser(usage=description)

    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    parser.add_option("--nocache", help="Ignores already computed results.",
                      default=False, action="store_true")
    
    parser.add_option("--white", help="Computes luminance_w, with the arena"
                      " painted white.", default=False, action="store_true")
    
    parser.add_option("--host", help="Use a remote rfsee. Otherwise," 
                      "use local process.", default=None)
    
    (options, args) = parser.parse_args()
    

    if options.db is None:
        logger.error('Please specify a directory using --db.')
        sys.exit(-1)
        
        
    db = FlydraDB(options.db)
    
    if args:
        do_samples = args
        
    else:
        # look for samples with the rows table
        all_samples = db.list_samples()
        do_samples = [x for x in all_samples
                      if db.has_saccades(x) and
                      db.has_attr(x, 'stimulus_xml')]
        logger.info('Found %d/%d samples with saccades and stimulus info.' % 
                    (len(do_samples), len(all_samples)))
    
    image = 'luminance_w' if options.white else 'luminance'
        
    target_start = 'saccades_view_start_%s' % image
    target_stop = 'saccades_view_stop_%s' % image
    target_rstop = 'saccades_view_rstop_%s' % image
    target_sstop = 'saccades_view_sstop_%s' % image
    target_random = 'saccades_view_random_%s' % image
    
    for i, sample_id in enumerate(do_samples):
        
        logger.info('Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id))
        
        if not db.has_sample(sample_id):
            raise Exception('Sample %s not found in db.' % sample_id)
        
        if not db.has_saccades(sample_id):
            raise Exception('Sample %s does not have saccades table.' % sample_id)
        
        if not db.has_attr(sample_id, 'stimulus_xml'):
            raise Exception('Sample %s does not have the stimulus'
                            ' information ("stimulus_xml")' % sample_id)
       
        # todo: check stale dependencies
        if db.has_table(sample_id, target_start) and \
            db.has_table(sample_id, target_stop) and \
            db.has_table(sample_id, target_rstop) and \
            db.has_table(sample_id, target_sstop) and \
            db.has_table(sample_id, target_random) and \
            not options.nocache:
            logger.info('Targets already computed for %s; skipping' % sample_id)
            continue
        
        # Get the stimulus description
        stimulus_xml = db.get_attr(sample_id, 'stimulus_xml')
        saccades = db.get_saccades(sample_id)
        
        view_start, view_stop, view_rstop, view_random, view_sstop = \
            render_saccades_view(
                saccades=saccades,
                stimulus_xml=stimulus_xml,
                host=options.host,
                white=options.white)
   
        db.set_table(sample_id, target_start, view_start)
        db.set_table(sample_id, target_stop, view_stop)
        db.set_table(sample_id, target_rstop, view_rstop)
        db.set_table(sample_id, target_random, view_random)
        db.set_table(sample_id, target_sstop, view_sstop)
        
        db.release_table(saccades)
        
    db.close()
def main():
    parser = OptionParser(usage=description)
    parser.add_option("--saccade_data", help="Main data directory",
                      default='saccade_data')
    parser.add_option("--db", help="FlydraDB directory")
     
    parser.add_option("--verbose", help='Verbose output',
                      default=False, action="store_true")
        
    (options, args) = parser.parse_args() #@UnusedVariable
    
    if not options.db:
        raise Exception('Please define FlydraDB directory using `--db`.')
    
    def printv(s):
        if options.verbose:
            print(s)
        
    flydra_db = FlydraDB(options.db, create=True)
    
    matlab_dir = options.saccade_data
    for group in os.listdir(matlab_dir):
        group_dir = os.path.join(matlab_dir, group)
        if not os.path.isdir(group_dir):                
            continue
        
        printv("Opening {0}".format(group))
        
        processed_dir = os.path.join(group_dir, 'processed')
        
        if not os.path.exists(processed_dir):
            printv("No processed data found for group %r." % group)
            continue
        
        for conf in os.listdir(processed_dir):
            # first look for saccades.mat
            saccades_file = os.path.join(processed_dir, conf, 'saccades.mat')
            if os.path.exists(saccades_file):
                printv('Loading from file %r.' % saccades_file)
                saccades = saccades_read_mat(saccades_file)
                samples = numpy.unique(saccades['sample'])
                for sample in samples:
                    if not flydra_db.has_sample(sample):
                        flydra_db.add_sample(sample)
                    flydra_db.add_sample_to_group(sample, group)
                    sample_saccades = saccades[saccades[:]['sample'] == sample]
                    flydra_db.set_table(sample=sample, table=SACCADES_TABLE,
                                        version=conf, data=sample_saccades)
    flydra_db.close()
def main():
    
    parser = OptionParser()
    
    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    parser.add_option("--nocache", help="Ignores already computed results.",
                      default=False, action="store_true")

    parser.add_option("--compute_mu", help="Computes mu and optic flow.",
                      default=False, action="store_true")
    
    parser.add_option("--white", help="Computes luminance_w, with the arena"
                      " painted white.", default=False, action="store_true")
    
    parser.add_option("--host", help="Use a remote rfsee. Otherwise, use local process.",
                       default=None)
    
    (options, args) = parser.parse_args()

    db = FlydraDB(options.db, False)
    
    if args:
        do_samples = args
    else:
        # look for samples with the rows table
        do_samples = [x for x in db.list_samples() if db.has_rows(x)]
    
    if options.white:
        target = 'luminance_w'
    else:
        target = 'luminance'
    
    for i, sample_id in enumerate(do_samples):
        
        print('Sample %s/%s: %s' % (i + 1, len(do_samples), sample_id))
        
        if not db.has_sample(sample_id):
            raise Exception('Sample %r not found in db.' % sample_id)
        
        if not db.has_rows(sample_id):
            raise Exception('Sample %r does not have rows table.' % sample_id)
       
        if not db.has_attr(sample_id, 'stimulus_xml'):
            raise Exception('Sample %r does not have the "stimulus_xml" attribute.'
                            % sample_id)
       
        if options.compute_mu:
            if db.has_table(sample_id, 'nearness') and not options.nocache:
                logger.info('Already computed nearness for %r; skipping' % sample_id)
                continue
        else:
            if db.has_table(sample_id, target) and not options.nocache:
                logger.info('Already computed luminance for %r; skipping' % sample_id)
                continue
        
        rows = db.get_rows(sample_id)
        
        stimulus_xml = db.get_attr(sample_id, 'stimulus_xml')
        
        results = render(rows, stimulus_xml, host=options.host,
                         compute_mu=options.compute_mu, white=options.white)
   
        db.set_table(sample_id, target, results['luminance'])
        
        if options.compute_mu:
            db.set_table(sample_id, 'nearness', results['nearness'])
            db.set_table(sample_id, 'retinal_velocities',
                         results['retinal_velocities'])
        
        db.release_table(rows)    
   
    db.close()
def main_filter(args):
    parser = LenientOptionParser()
    
    parser.add_option("--db", default='flydra_db', help="FlydraDB directory")

    parser.add_option("--min_frames_per_track", default=400,
        help="Minimum number of frames per track [= %default]")

    parser.add_option("--confirm_problems",
                      help="Stop interactively on problems with log files'\
                      '(e.g.: cannot find valid obj_ids) [default: %default]",
                      default=False, action="store_true")

    parser.add_option("--dynamic_model_name",
                      help="Smoothing dynamical model [default: %default]",
                      default="mamarama, units: mm")
    
    parser.add_option("--debug_output", help="Creates debug figures.",
                      default=False, action="store_true")

    parser.add_option("--nocache", help="Ignores already computed results.",
                      default=False, action="store_true")

    parser.add_option("--smoothing", help="Uses Kalman-smoothed data.",
                      default=False, action="store_true")

    
    (options, args) = parser.parse_args(args)
    
    table_name = 'rows' # TODO: use constant
    table_version = "smooth" if options.smoothing else "kf"
    
    
    if not args:
        raise UserError('No files or directories specified.')
         
    if not os.path.exists(options.db):
        os.makedirs(options.db)

    db = FlydraDB(options.db)

    good_files = get_good_files(where=args, pattern="*.kh5",
                                confirm_problems=options.confirm_problems)

    if len(good_files) == 0:
        logger.error("No good files to process")
        sys.exit(1)

    n = len(good_files)
    for i in range(n):
        (filename, obj_ids, stim_fname) = good_files[i]
        
        logger.info('Sample %s/%s: %s' % (i + 1, n, filename))
        
        
        # only maintain basename
        stim = os.path.splitext(os.path.basename(stim_fname))[0]
        sample_id = os.path.splitext(os.path.basename(filename))[0]
        
            
        logger.info("File %d/%d %s %s %s " % 
                    (i, n, str(filename), str(obj_ids), stim_fname))
       
        if (db.has_sample(sample_id) 
            and db.has_table(sample_id, table_name, table_version)
            and not options.nocache):
            logger.info('Sample %r already computed; skipping.'
                        ' (use --nocache to ignore)' % sample_id)
            continue
  
        all_data = [] 

        for obj_id, rows in get_good_smoothed_tracks(#@UnusedVariable
                filename=filename,
                obj_ids=obj_ids,
                min_frames_per_track=options.min_frames_per_track,
                dynamic_model_name=options.dynamic_model_name,
                use_smoothing=options.smoothing):

            filtered = filter_rows(rows, options)
            all_data.append(filtered)
             
        if not all_data:
            logger.info('Not enough data found for %r; skipping.' % sample_id)
            continue

  
        if not db.has_sample(sample_id):
            db.add_sample(sample_id)
        db.set_attr(sample_id, 'stim_fname', stim_fname)
        db.set_attr(sample_id, 'stimulus', stim)
        stim_xml = open(stim_fname).read()
        db.set_attr(sample_id, 'stimulus_xml', stim_xml)
        
        geometry = get_posts_info(stim_xml)
        print(geometry)
        db.set_attr(sample_id, 'posts', geometry['posts'])
        if 'arena' in geometry:
            db.set_attr(sample_id, 'arena', geometry['arena'])


        db.add_sample_to_group(sample_id, stim)
        if stim != 'nopost':
            db.add_sample_to_group(sample_id, 'posts')
            

        rows = numpy.concatenate(all_data)
        db.set_table(sample=sample_id,
                     table=table_name,
                     data=rows,
                     version=table_version)
    
        
        db.set_attr(sample_id, 'filter_time', datetime.now().strftime("%Y%m%d_%H%M%S"))
        db.set_attr(sample_id, 'filter_host', platform.node())
        db.set_attr(sample_id, 'filter_user', get_user())
        db.set_attr(sample_id, 'filter_python_version', platform.python_version())
        db.set_attr(sample_id, 'filter_numpy_version', numpy.version.version)
        
         
    db.close()
def main():
    parser = OptionParser(usage=description)

    parser.add_option("--db", default='flydra_db', help="Data directory")

    parser.add_option("--interactive", default=False, action="store_true",
                      help="Start a compmake interactive session."
                      " Otherwise run in batch mode") 

    parser.add_option("--empty_group_ok",
                      default=False, action="store_true",
                      help="do not give up if one group does not have samples ")


    (options, args) = parser.parse_args() #@UnusedVariable
    
    if options.db is None:
        logger.error('Please specify a directory using --db.')
        sys.exit(-1)

    outdir = os.path.join(options.db, 'out/saccade_view_joint_analysis')

    db = FlydraDB(options.db, False)
        
    set_namespace('saccade_view_joint_analysis')
    
    # for each image we do a different report
    data = {}
    for image in images:
        
        # For each image we have different tables
        tables = ["saccades_view_%s_%s" % (view.id, image.id) for view in views]
 
        all_available = [x for x in db.list_samples() if db.has_saccades(x) and 
                          all([db.has_table(x, table) for table in tables])] 
        
        # We further divide these in post and nopost
        groups_samples = {
            'posts':
                [s for s in all_available
                 if db.get_attr(s, 'stimulus') != 'nopost'],
            'noposts':
                [s for s in all_available
                 if db.get_attr(s, 'stimulus') == 'nopost']
        }
        
        # now, for each group
        for group in groups:
             
            is_hallucination = image.id.startswith('h')
            white_arena = image.id.endswith('_w') 
        
            if (not is_hallucination) and white_arena and (group.id == 'noposts'):
                # if there are no posts, it's useless 
                continue
        
            samples = groups_samples[group.id] 
            if not samples:
                print "Warning: no samples for %s/%s" % (image.id, group.id)
                continue 
      
            # global statistics
            key = (group.id, image.id)
            job_id = "%s-%s" % key
            data[key] = comp(compute_stats, options.db,
                             samples, image.id, job_id=job_id)

            for saccades_set, direction in prod(saccades_sets, dirs):             
                view2result = {}
                for i, view in enumerate(views):                
                    table = tables[i]
                    key = Exp(image=image.id, group=group.id,
                              view=view.id, dir=direction.id,
                              saccades_set=saccades_set.id)
                    job_id = "%s-%s-%s-%s-%s" % key
                    
                    result = comp(compute_saccade_stats, options.db,
                             samples, table,
                             [direction.args, saccades_set.args],
                             job_id=job_id)
                    
                    data[key] = result
                    view2result[view.id] = result
          
                page_id = make_page_id(image=image.id, group=group.id,
                           dir=direction.id, saccades_set=saccades_set.id)
                
                comp(render_page, view2result, outdir, page_id, job_id=page_id)
            
            for saccades_set in saccades_sets:
                table = "saccades_view_start_%s" % (image.id)
                exp_id = '%s_%s_%s' % (image.id, group.id, saccades_set.id)
                
                results = comp(bet_on_flies, options.db, samples, table, saccades_set,
                               job_id='lasvegas-' + exp_id + '-bet')
                page_id = exp_id
                comp(las_vegas_report, os.path.join(outdir, 'lasvegas'), page_id, results,
                              job_id='lasvegas-' + exp_id + '-report')
            
            
    db.close()
    

    comp(add_comparisons, data, outdir)
    

    filename = os.path.join(outdir, 'gui.html')   
    comp(create_gui_new, filename, menus)
    
    
    if options.interactive:
        # start interactive session
        compmake_console()
    else:
        # batch mode
        # try to do everything
        batch_command('make all')
        # exit with error if we are not done
        # (that is, make all failed for some reason)
        todo = list(parse_job_list('todo')) 
        if todo:
            logger.info('Still %d jobs to do.' % len(todo))
            sys.exit(-2)