def add_frames(self, E, edge_feature_row_breaks,
               edge_orientations, abst_threshold):
    """Fold the frames of edge map E into the running average self.E.

    A thresholded/spread copy of E is averaged column-wise and merged
    into self.E as a weighted running mean over all frames seen so far.
    E itself is not modified.
    """
    processed = E.copy()
    esp.threshold_edgemap(processed, .30, edge_feature_row_breaks,
                          abst_threshold=abst_threshold)
    esp.spread_edgemap(processed, edge_feature_row_breaks, edge_orientations,
                       spread_length=3)
    if self.processed_frames:
        # weighted running mean: old mean weighted by frames seen so far,
        # plus the new frame sum, renormalized by the new total count
        total = self.E * self.num_frames + np.sum(processed, axis=1)
        self.E = total / (self.num_frames + processed.shape[1])
    else:
        # first batch: the mean over the new frames is the average
        self.E = np.mean(processed, axis=1)
        self.processed_frames = True
    self.num_frames += processed.shape[1]
def get_detections(neg_E, mean_template, bg_len, mean_bgd,
                   edge_feature_row_breaks, edge_orientations,
                   spread_length=3, abst_threshold=None):
    """Score every alignment of mean_template against neg_E.

    Each window of neg_E the width of the template is thresholded,
    spread, and scored against the template with an adaptive background
    (mean of the bg_len preceding frames, clamped to [.01, .4]); before
    enough history exists, mean_bgd is used instead.

    Returns a list of (score, start_frame) tuples, one per window.
    """
    # avoid a mutable module-level default; None means the flat .0001 vector
    if abst_threshold is None:
        abst_threshold = .0001 * np.ones(8)
    template_length = mean_template.shape[1]
    # FIX: +1 so the last full-width window is scored, consistent with the
    # `E.shape[1]-template_length+1` formula used elsewhere in this file
    # (the original dropped the final window, and produced zero detections
    # when neg_E was exactly template-width).
    num_detections = neg_E.shape[1] - template_length + 1
    detections = []
    for d in xrange(num_detections):
        E_segment = neg_E[:, d:d+template_length].copy()
        esp.threshold_edgemap(E_segment, .30, edge_feature_row_breaks,
                              abst_threshold=abst_threshold)
        esp.spread_edgemap(E_segment, edge_feature_row_breaks,
                           edge_orientations, spread_length=spread_length)
        if d >= bg_len:
            # FIX: >= (was >) so the first position with a complete
            # bg_len-frame history also uses the adaptive background
            bg = np.clip(np.mean(neg_E[:, d-bg_len:d], axis=1), .01, .4)
        else:
            bg = mean_bgd.copy()
        P, C = tt.score_template_background_section(mean_template,
                                                    bg, E_segment)
        detections.append((P + C, d))
    return detections
def get_training_template(train_data_iter):
    """Collect positive-example patterns from the iterator and estimate a template.

    Iterates the training data, threshold/spreads each positive pattern,
    and feeds them all to et.simple_estimate_template.

    NOTE(review): the original body had the example-processing `if` dedented
    out of the data loop, leaving `break` outside any loop (a SyntaxError);
    restructured to match the equivalent working loop later in this file.
    Relies on module-level edge_feature_row_breaks, edge_orientations and
    abst_threshold — confirm they are in scope at call time.

    Returns (registered_examples, template).
    """
    patterns = []
    train_data_iter.reset_exp()
    for datum_id in xrange(train_data_iter.num_data):
        # progress heartbeat every 10 utterances
        if datum_id % 10 == 0:
            print(datum_id)
        if train_data_iter.next(wait_for_positive_example=True,
                                compute_patterns=True,
                                max_template_length=40):
            # the context length is 11
            for p in train_data_iter.patterns:
                pattern = p.copy()
                esp.threshold_edgemap(pattern, .30, edge_feature_row_breaks,
                                      report_level=False,
                                      abst_threshold=abst_threshold)
                esp.spread_edgemap(pattern, edge_feature_row_breaks,
                                   edge_orientations, spread_length=5)
                patterns.append(pattern)
        else:
            break
    _, _, registered_examples, template = et.simple_estimate_template(patterns)
    return registered_examples, template
# NOTE: think through the consequences of comparing templates
# thresholded at the different quantile levels below.

    
# Best template match per (quantile level, pattern).
# NOTE(review): original read `quantile_level` here but `quantile_levels`
# below — assumed the plural is the real name.
mean_template_match = np.empty((quantile_levels.shape[0],
                                len(all_patterns)))

for i in xrange(len(all_patterns)):
    # get the background signal processing done
    # NOTE(review): the original `if` had no body (IndentationError); the
    # background-processing branch was evidently never written.
    if all_bgds[i].shape[1] > 0:
        pass  # TODO: background processing for all_bgds[i] never filled in
    # process the edges for each of the different thresholds
    for j in xrange(quantile_levels.shape[0]):
        # NOTE(review): the original call ended `abst_threshold=)` with no
        # value (SyntaxError); presumably the module-level abst_threshold
        # was intended — confirm.
        a, tau_alpha_vals[i] = esp.threshold_edgemap(all_patterns[i], .30,
                                                     edge_feature_row_breaks,
                                                     abst_threshold=abst_threshold)
    esp.spread_edgemap(a, edge_feature_row_breaks, edge_orientations)



# Record the reported edge-threshold levels for every pattern.
# FIX(review): the original thresholded `p`, a leftover loop variable from
# an earlier snippet; the loop index makes clear all_patterns[i] was meant.
for i in xrange(len(all_patterns)):
    a, tau_alpha_vals[i] = esp.threshold_edgemap(all_patterns[i], .30,
                                                 edge_feature_row_breaks,
                                                 report_level=True)

# Threshold and spread each pattern in place; where a background window
# exists, process it the same way and collapse it to its per-row mean.
for i in xrange(len(patterns)):
    esp.threshold_edgemap(patterns[i], .30, edge_feature_row_breaks)
    esp.spread_edgemap(patterns[i], edge_feature_row_breaks, edge_orientations)
    if bgds[i].shape[1] > 0:
        esp.threshold_edgemap(bgds[i], .30, edge_feature_row_breaks)
        esp.spread_edgemap(bgds[i], edge_feature_row_breaks, edge_orientations)
        # compute background mean across frames
        bgds[i] = np.mean(bgds[i], axis=1)
# Materialize the deformed template for every deformation in def_range
# so the detection loop below can index templates directly.
all_def_templates = np.empty((def_range.shape[0],
                              tpm.def_template.shape[0],
                              tpm.def_template.shape[1]))
for d, deform in enumerate(def_range):
    tpm.get_def_template(deform)
    all_def_templates[d] = tpm.def_template.copy()
    

# Best detection score and its window index per (context example, deformation).
optimal_detection_scores = -np.inf * np.ones((len(tuning_patterns_context),def_range.shape[0]))
optimal_detection_idx = np.zeros((len(tuning_patterns_context),def_range.shape[0]))
for c_id in xrange(len(tuning_patterns_context)):
    print c_id
    cur_context = tuning_patterns_context[c_id]
    # number of window start positions
    # NOTE(review): no +1 here, so the final full window is skipped — confirm intended
    num_detections = cur_context.shape[1] - tpm.length_range[1]
    win_length = tpm.length_range[1]
    for d in xrange(num_detections):
        E_window = cur_context[:,d:d+win_length].copy()
        esp.threshold_edgemap(E_window,.30,edge_feature_row_breaks,report_level=False,abst_threshold=abst_threshold)
        esp.spread_edgemap(E_window,edge_feature_row_breaks,edge_orientations,spread_length=3)
        # base detection
        for deformation in def_range:
            # NOTE(review): assumes def_range is centered so that
            # 10+deformation maps onto valid 0-based rows of
            # all_def_templates — TODO confirm def_range spans -10..10
            def_template = all_def_templates[10+deformation]
            P,C = tt.score_template_background_section(def_template,tpm.bg,E_window)
            score = P+C
            # NOTE(review): negative `deformation` values index these
            # columns from the end; this only lines up with the
            # 10+deformation offset above by coincidence of shape — verify
            if score > optimal_detection_scores[c_id,deformation]:
                optimal_detection_scores[c_id,deformation] = score
                optimal_detection_idx[c_id,deformation] = d
            
                                                   

#E
# Scratch bindings: manually set the arguments that the detection code
# below expects, mirroring its parameter names. The commented-out names
# are presumably already in scope under the same name — confirm.
template=mean_template
bgd_length=26
mean_background=mean_bgd
#edge_feature_row_breaks
#edge_orientations
abst_threshold=abst_threshold
spread_length=3



# Background window: the bg_len frames immediately preceding the first
# pattern occurrence; threshold/spread a copy, then collapse it to a
# per-row mean clamped into [.1, .4].
pbgd = E[:, pattern_times[0][0]-bg_len:pattern_times[0][0]].copy()
pbgd2 = pbgd.copy()
esp.threshold_edgemap(pbgd2, .30, edge_feature_row_breaks,
                      abst_threshold=abst_threshold)
esp.spread_edgemap(pbgd2, edge_feature_row_breaks, edge_orientations,
                   spread_length=spread_length)
pbgd2 = np.mean(pbgd2, axis=1)
pbgd2 = np.clip(pbgd2, .1, .4)

template_height,template_length = template.shape
# number of window start positions (last full window included)
num_detections = E.shape[1]-template_length+1
# NOTE(review): `self` is undefined at module scope — this call was
# evidently pasted out of a method body and will NameError if run as-is.
E_background, estimated_background_idx = self._get_E_background(E,num_detections,bgd_length, mean_background,
                                                                edge_feature_row_breaks,
                                                                edge_orientations,
                                                                abst_threshold=abst_threshold,
                                                                spread_length=spread_length)


# NOTE(review): this loop overwrites bg_seg on every iteration and never
# uses it; only the final slice survives — looks like abandoned scratch.
for frame_id in xrange(bg_len,E_background.shape[1]):
    bg_seg = E[:,frame_id-bg_len:frame_id].copy()
# persist the tuning-data iterator to the already-open `output` file
cPickle.dump(tune_data_iter,output)
output.close()

# Harvest positive aar examples for template training: threshold and
# spread a copy of each positive pattern and accumulate them.
aar_patterns = []
train_data_iter.reset_exp()
for datum_id in xrange(train_data_iter.num_data):
    # progress heartbeat every 10 utterances
    if datum_id % 10 == 0:
        print(datum_id)
    got_example = train_data_iter.next(wait_for_positive_example=True,
                                       compute_patterns=True,
                                       max_template_length=40)
    if not got_example:
        break
    # the context length is 11
    for raw_pattern in train_data_iter.patterns:
        processed = raw_pattern.copy()
        esp.threshold_edgemap(processed, .30, edge_feature_row_breaks,
                              report_level=False,
                              abst_threshold=abst_threshold)
        esp.spread_edgemap(processed, edge_feature_row_breaks,
                           edge_orientations, spread_length=2)
        aar_patterns.append(processed)

# Estimate the aar template from the gathered patterns and persist the
# results; also pull in the precomputed mean background for evaluation.
_, _, registered_examples, template = et.simple_estimate_template(aar_patterns)

np.save('aar_template053112', template)
np.save('registered_examples_aar053112', registered_examples)
mean_background = np.load(root_path+'Experiments/050812/mean_background_liy051012.npy')

# run the coarse-classifier ROC tuning against the tuning iterator
data_iter = tune_data_iter
import template_speech_rec.classification as cl
def get_roc_coarse(data_iter, classifier,
                   allowed_overlap = .2,
            edge_feature_row_breaks= np.array([   0.,   
                                               45.,   
                                               90.,  
                                               138.,  
                                               186.,  
                                               231.,  
                                               276.,  
                                               321.,  
                                               366.]),
            edge_orientations=np.array([[ 1.,  0.],
                                        [-1.,  0.],
                                        [ 0.,  1.],
                                        [ 0., -1.],
                                        [ 1.,  1.],
                                        [-1., -1.],
                                        [ 1., -1.],
                                        [-1.,  1.]]),
            abst_threshold=np.array([.025,.025,.015,.015,
                                      .02,.02,.02,.02]),
            spread_radius=3):
    """
    Find the appropriate threshold for the coarse classifier, this
    should be run on tuning data, and then we can get a level for the
    tradeoff between false positives and false negatives the first
    pair is the roc curve for the count test and the second is for the
    coarse likelihood test

    The way this works is that we find the scores for each window and
    then we rank them and remove overlapping windows that are lower
    rank if the overlap is below a certain percentage

    We are assuming that the classifier has been implemented and initialized properly
    """
    num_frames = 0
    all_positive_counts = []
    all_positive_likes = []
    all_negative_counts = []
    all_negative_likes = []
    data_iter.reset_exp()
    for datum_id in xrange(data_iter.num_data):
        if datum_id % 10 == 0:
            print "working on example", datum_id
        if data_iter.next(compute_pattern_times=True,
                            max_template_length=classifier.window[1]):
            pattern_times = data_iter.pattern_times
            num_detections = data_iter.E.shape[1] - classifier.window[1]
            num_frames += data_iter.E.shape[1]
            coarse_count_scores = -np.inf * np.ones(num_detections)
            coarse_like_scores = -np.inf * np.ones(num_detections)
            for d in xrange(num_detections):
                E_segment = data_iter.E[:,d:d+classifier.window[1]].copy()                
                esp.threshold_edgemap(E_segment,.30,edge_feature_row_breaks,report_level=False,abst_threshold=abst_threshold)
                esp.spread_edgemap(E_segment,edge_feature_row_breaks,edge_orientations,spread_length=3)
                coarse_count_scores[d] = classifier.coarse_score_count(E_segment)
                coarse_like_scores[d] = classifier.coarse_score_like_no_bg(E_segment)
            # now we get the indices sorted
            count_indices = remove_overlapping_examples(np.argsort(coarse_count_scores),
                                                        classifier.coarse_length,
                                                        int(allowed_overlap*classifier.coarse_length))
            like_indices = remove_overlapping_examples(np.argsort(coarse_like_scores),
                                                       classifier.coarse_length,
                                                       int(allowed_overlap*classifier.coarse_length))
            positive_counts, negative_counts =  get_pos_neg_scores(count_indices,pattern_times,
                                                                     coarse_count_scores)
            positive_likes, negative_likes = get_pos_neg_scores(like_indices,pattern_times,
                                                                coarse_like_scores)
            all_positive_counts.extend(positive_counts)
            all_negative_counts.extend(negative_counts)
            all_positive_likes.extend(positive_likes)
            all_negative_likes.extend(negative_likes)
    count_roc = get_roc(np.sort(all_positive_counts)[::-1],
                        np.sort(all_negative_counts)[::-1],
                        num_frames)
    like_roc = get_roc(np.sort(all_positive_likes)[::-1], 
                       np.sort(all_negative_likes)[::-1],
                       num_frames)
    return count_roc, like_roc
 # NOTE(review): this whole span is indented by one space — it was pasted
 # from inside a loop body and raises IndentationError at module level.
 patterns = exp.get_patterns(E,phns,phn_times,s)
 patterns_context = exp.get_patterns(E,phns,phn_times,s,context=True,template_length=33)
 bgds = exp.get_pattern_bgds(E,phns,phn_times,s,bg_len)
 fronts_backs = exp.get_pattern_fronts_backs(E,phns,phn_times,s,bg_len)
 # fold this utterance's frames into the running background average
 E_avg.add_frames(E,edge_feature_row_breaks,
            edge_orientations,abst_threshold)
 # threshold pattern edges
 for i in xrange(len(patterns)):
     all_raw_patterns_context.append(patterns_context[i].copy())
     all_raw_bgds.append(bgds[i].copy())
     _, edge_thresholds = esp.threshold_edgemap(patterns[i],.30,edge_feature_row_breaks,report_level=True,abst_threshold=abst_threshold)
     # we record both the thresholds
     # and the length to see if there is a relationship
     pattern_edge_thresholds.append(edge_thresholds)
     pattern_lengths.append(patterns[i].shape[1])
     esp.spread_edgemap(patterns[i],edge_feature_row_breaks,edge_orientations,spread_length=5)
     if bgds[i].shape[1] > 0:
         _,edge_thresholds = esp.threshold_edgemap(bgds[i],.30,edge_feature_row_breaks,report_level=True,abst_threshold=abst_threshold)
         bgd_edge_thresholds.append(edge_thresholds)
         esp.spread_edgemap(bgds[i],edge_feature_row_breaks,edge_orientations,spread_length=5)
         # compute background mean
         bgds[i] = np.mean(bgds[i],axis=1)
         # impose floor and ceiling constraints on values
         bgds[i] = np.maximum(np.minimum(bgds[i],.4),.05)
     else:
         # no background frames available: fall back to random values,
         # clamped into the same [.05, .4] band, and remember which
         # pattern had an empty background
         bgds[i] = np.random.rand(patterns[i].shape[0]).reshape(patterns[i].shape[0],1)
         bgds[i] = np.mean(bgds[i],axis=1)
         bgds[i] = np.maximum(np.minimum(bgds[i],.4),.05)
         empty_bgds.append(pattern_num)
 pattern_num += len(patterns)
 all_patterns.extend(patterns)
    

# Snapshot the running average edge map and clamp it into [.05, .4]
# for use as the detection background model.
mean_background = np.clip(E_avg.E.copy(), .05, .4)



# Threshold and spread every gathered pattern in place, using the
# iterator's own edge-processing parameters.
breaks = train_data_iter.edge_feature_row_breaks
orientations = train_data_iter.edge_orientations
for pattern in xrange(len(all_patterns)):
    esp.threshold_edgemap(all_patterns[pattern], .30, breaks,
                          report_level=False,
                          abst_threshold=train_data_iter.abst_threshold)
    esp.spread_edgemap(all_patterns[pattern], breaks, orientations,
                       spread_length=5)


# Estimate the mean template (fixed length 33) from all patterns and
# persist both the template and its shape.
template_height, template_length, registered_templates, mean_template = \
    et.simple_estimate_template(all_patterns, template_length=33)

template_shape = np.array([template_height, template_length])
np.save('mean_template050612', mean_template)
np.save('template_shape050612', template_shape)


#########################################
#
 # NOTE(review): indented one space — pasted from inside a detection loop;
 # raises IndentationError at module level. Kept verbatim.
 num_detections = data_iter.E.shape[1] - classifier.window[1]
 num_frames += data_iter.E.shape[1]
 scores_adapt_bg = -np.inf * np.ones(num_detections)
 scores_mel = -np.inf * np.ones(num_detections)
 bg = mean_background.copy()
 # mel-scale edge map for the whole utterance; the meaning of the trailing
 # 8000 and 7 arguments is not visible here — presumably frequency ceiling
 # and a filter/order parameter, TODO confirm against esp
 E_mel,e_breaks_mel,\
 e_orientations_mel= esp.get_edgemap_no_threshold(train_data_iter.s,
                 train_data_iter.sample_rate,
             train_data_iter.num_window_samples,
             train_data_iter.num_window_step_samples,
             train_data_iter.fft_length,8000,7,
                                      use_mel = True)
 for d in xrange(num_detections):
     # score each window both with an adaptive background (linear edges)
     # and without background (mel edges)
     E_segment = data_iter.E[:,d:d+classifier.window[1]].copy()
     esp.threshold_edgemap(E_segment,.30,edge_feature_row_breaks,report_level=False,abst_threshold=abst_threshold)
     esp.spread_edgemap(E_segment,edge_feature_row_breaks,edge_orientations,spread_length=3)
     E_segment_mel = E_mel[:,d:d+classifier.window[1]].copy()
     esp.threshold_edgemap(E_segment_mel,.30,e_breaks_mel,report_level=False,abst_threshold=abst_threshold)
     esp.spread_edgemap(E_segment_mel,e_breaks_mel,e_orientations_mel,spread_length=3)
     # adaptive background: per-row mean of the current window, clamped to [.1, .4]
     bg = np.minimum(.4,
                      np.maximum(np.mean(E_segment,axis=1),
                                 .1))
     scores_adapt_bg[d] = classifier.score(E_segment,bg)
     scores_mel[d] = classifier.score_no_bg(E_segment_mel)
 # now we get the indices sorted
 # descending argsort: best-scoring windows win overlap resolution
 indices_adapt_bg = cl.remove_overlapping_examples(np.argsort(scores_adapt_bg)[::-1],
                                                   classifier.window[1],
                                                   int(allowed_overlap*classifier.window[1]))
 indices_mel = cl.remove_overlapping_examples(np.argsort(scores_mel)[::-1],
                                                 classifier.window[1],
                                                 int(allowed_overlap*classifier.window[1]))


# Raw (unthresholded) edge map plus its row breaks and orientations for
# the current utterance waveform.
E, edge_feature_row_breaks, edge_orientations = \
    texp.get_edgemap_no_threshold(train_data_iter.s)

# Absolute edge thresholds, one per edge orientation (8 total):
# two at .025, two at .015, four at .02.
abst_threshold = np.array(2 * [0.025] + 2 * [0.015] + 4 * [0.02])


# Drop the big edge map and waveform before the next pass to keep
# peak memory down.
del E
del s

# Threshold and spread every pattern after the first.
# NOTE(review): starts at index 1 — all_patterns[0] is presumably already
# processed; confirm, otherwise the first pattern is silently skipped.
for p in all_patterns[1:]:
    esp.threshold_edgemap(p, .30, edge_feature_row_breaks,
                          report_level=False,
                          abst_threshold=abst_threshold)
    esp.spread_edgemap(p, edge_feature_row_breaks,
                       edge_orientations, spread_length=5)

# Estimate the piy template from all patterns and persist the template,
# its shape, and the registered training examples.
template_height, template_length, registered_templates, mean_template = \
    et.simple_estimate_template(all_patterns)

template_shape = np.array([template_height, template_length])
np.save('mean_template_piy050912', mean_template)
np.save('template_shape_piy050912', template_shape)
np.save('registered_templates_piy050912', registered_templates)


# reload the saved piy template and its shape for the evaluation below
mean_template = np.load('mean_template_piy050912.npy')
template_shape = np.load('template_shape_piy050912.npy')

# time to get a baseline for how well the pattern applies to examples