def analysis_confKK(hybdatadict, SDparams, prb, detectioncrit, defaultKKparams, paramtochange, listparamvalues, detcrit=None):
    '''Analyse the results of one parameter family of KlustaKwik jobs:
       compare each .clu output against the detection-criterion groundtruth
       and return the list of confusion matrices.'''
    outlistKK = rkk.one_param_varyKK(hybdatadict, SDparams, prb, detectioncrit, defaultKKparams, paramtochange, listparamvalues)
    # outlistKK = [listbasefiles, outputdicts]
    
    argTD = [hybdatadict, SDparams, prb, detectioncrit]
    if ju.is_cached(ds.test_detection_algorithm, *argTD):
        print 'Yes, you have run detection_statistics.test_detection_algorithm() \n'
        detcrit_groundtruth = ds.test_detection_algorithm(hybdatadict, SDparams, prb, detectioncrit)
    else:
        print 'You need to run detection_statistics.test_detection_algorithm() \n in order to obtain a groundtruth'
        # Without the cached groundtruth there is nothing to compare against
        return None
        
    detcritclu = detcrit_groundtruth['detected_groundtruth']

    NumSpikes = detcritclu.shape[0]

    # One row of cluster labels per KlustaKwik output file
    cluKK = np.zeros((len(outlistKK[0]), NumSpikes))
    confusion = []
    for k, basefilename in enumerate(outlistKK[0]):
        clufile = hybdatadict['output_path'] + basefilename + '.clu.1'
        if os.path.isfile(clufile):
            # First line of a .clu file is a header (the number of clusters), hence skiprows=1
            cluKK[k, :] = np.loadtxt(clufile, dtype=np.int32, skiprows=1)
        else:
            print '%s does not exist ' % clufile
            # Keep the list aligned with outlistKK[0], but skip the comparison
            confusion.append(None)
            continue

        conf = get_confusion_matrix(cluKK[k, :], detcritclu)
        print conf
        confusion.append(conf)
        
    return confusion    
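
# Usage sketch (assumption, not part of the original pipeline): the dictionaries
# are whatever was passed to rkk.one_param_varyKK when the jobs were launched,
# and 'PenaltyK' with its value list is purely illustrative.
#
#     confusion_matrices = analysis_confKK(hybdatadict, SDparams, prb,
#                                          detectioncrit, defaultKKparams,
#                                          'PenaltyK', [0.0, 0.5, 1.0])
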
def get_execution_times(hybdatadict, SDparams, prb, detectioncrit, defaultKKparams, paramtochange, listparamvalues, extralabel=None):
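    '''Grep the "That took ..." timing line from each run's .klg.1 KlustaKwik
       log file and return the collected grep output, one entry per basefile.'''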
    outlistKK = rkk.one_param_varyKK(hybdatadict, SDparams, prb, detectioncrit, defaultKKparams, paramtochange, listparamvalues)
    outputdir = hybdatadict['output_path']
    times = []
    for k, basefilename in enumerate(outlistKK[0]):
        # Pull the "That took ..." timing line out of this run's KlustaKwik log
        p = subprocess.Popen(['grep', 'That took', '%s/%s.klg.1' % (outputdir, basefilename)],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        print out
        times.append(out)
    return times
def get_execution_times_simple(hybdatadict, SDparams, prb, detectioncrit, defaultKKparams, paramtochange, listparamvalues, outtimesfile, extralabel=None):
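    '''For each run, append its basefilename and the MinClusters, MaxClusters,
       UseDistributional, PenaltyK, timing ("That took") and iteration-count
       lines from its .klg.1 KlustaKwik log to outtimesfile in the output
       directory.'''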
    outlistKK = rkk.one_param_varyKK(hybdatadict, SDparams, prb, detectioncrit, defaultKKparams, paramtochange, listparamvalues)
    outputdir = hybdatadict['output_path']
    
    
    for k, basefilename in enumerate(outlistKK[0]):
        klgfile = '%s/%s.klg.1' % (outputdir, basefilename)
        timesfile = '%s/%s' % (outputdir, outtimesfile)
        os.system('echo "%s" >> %s' % (basefilename, timesfile))
        # Append the parameter settings and timing lines from the KlustaKwik log
        for pattern in ['MinClusters', 'MaxClusters', 'UseDistributional',
                        'PenaltyK', 'That took', 'iterations', 'Iterations']:
            os.system('grep "%s" %s >> %s' % (pattern, klgfile, timesfile))
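
# Usage sketch (hypothetical file name and parameter values): summarise one
# KlustaKwik parameter sweep into a single text file inside
# hybdatadict['output_path'].
#
#     get_execution_times_simple(hybdatadict, SDparams, prb, detectioncrit,
#                                defaultKKparams, 'PenaltyK', [0.0, 0.5, 1.0],
#                                'KK_times_summary.txt')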