def run(self, name, datafiles, goldnet_file):
        import numpy

        os.chdir(os.environ["gene_path"])

        datastore = ReadData(datafiles[0], "steadystate")
        for file in datafiles[1:]:
            datastore.combine(ReadData(file, "steadystate"))
        datastore.normalize()

        settings = {}
        settings = ReadConfig(settings)
        # TODO: CHANGE ME
        settings["global"]["working_dir"] = os.getcwd() + '/'

        # Setup job manager
        print "Starting new job manager"
        jobman = JobManager(settings)

        # Make GENIE3 jobs
        genie3 = GENIE3()
        genie3.setup(datastore, settings, name)

        print "Queuing job..."
        jobman.queueJob(genie3)

        print jobman.queue
        print "Running queue..."
        jobman.runQueue()
        jobman.waitToClear()

        print "Queue finished"
        job = jobman.finished[0]
        print job.alg.gene_list
        print job.alg.read_output(settings)
        jobnet = job.alg.network
        print "PREDICTED NETWORK:"
        print job.alg.network.network
        print jobnet.original_network

        if goldnet_file != None:
            goldnet = Network()
            goldnet.read_goldstd(goldnet_file)
            print "GOLD NETWORK:"
            print goldnet.network
            print jobnet.analyzeMotifs(goldnet).ToString()
            print jobnet.calculateAccuracy(goldnet)

        return jobnet.original_network
    def run(self, ts_file, name=None, delta_t=30):

        os.chdir(os.environ["gene_path"])

        print "Reading in knockout data"
        timeseries_storage = ReadData(ts_file, "timeseries")

        settings = {}
        settings = ReadConfig(settings)
        # TODO: CHANGE ME
        settings["global"]["working_dir"] = os.getcwd() + "/"

        # Setup job manager
        print "Starting new job manager"
        jobman = JobManager(settings)

        # Make Banjo jobs
        banjojob = Banjo()
        if delta_t != None:
            settings["global"]["time_series_delta_t"] = int(delta_t)
        else:
            settings["global"]["time_series_delta_t"] = 30
        if name != None:
            banjojob.setup(timeseries_storage, settings, name)
        else:
            banjojob.setup(timeseries_storage, settings)

        print "Queuing job..."
        jobman.queueJob(banjojob)

        print jobman.queue
        print "Running queue..."
        jobman.runQueue()
        jobman.waitToClear()

        print "Queue finished"
        job = jobman.finished[0]
        print job.alg.gene_list
        print job.alg.read_output(settings)
        jobnet = job.alg.network
        print "PREDICTED NETWORK:"
        # print job.alg.network.network
        # print jobnet.original_network

        return jobnet.original_network
    def run(self, kofile, tsfile, wtfile, datafiles, name, goldnet_file, normalize=False):
        os.chdir(os.environ["gene_path"])
        knockout_storage = ReadData(kofile, "knockout")
        print "Reading in knockout data"
        wildtype_storage = ReadData(wtfile, "steadystate")

        if datafiles == []:
          other_storage = None
        else:
          other_storage = ReadData(datafiles[0], "steadystate")
          for file in datafiles[1:]:
              other_storage.combine(ReadData(file, "steadystate"))

        timeseries_storage = None
        if tsfile != None:
            timeseries_storage = ReadData(tsfile, "timeseries")
            #for ts in timeseries_storage:
                #ts.normalize()

        #if normalize:
            #knockout_storage.normalize()
            #wildtype_storage.normalize()
            #other_storage.normalize()


        settings = {}
        settings = ReadConfig(settings)
        # TODO: CHANGE ME
        settings["global"]["working_dir"] = os.getcwd() + '/'

        # Setup job manager
        print "Starting new job manager"
        jobman = JobManager(settings)

        # Make inferelator jobs
        inferelatorjob = inferelator()
        inferelatorjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, other_storage, name)

        print "Queuing job..."
        jobman.queueJob(inferelatorjob)

        print jobman.queue
        print "Running queue..."
        jobman.runQueue()
        jobman.waitToClear()

        print "Queue finished"
        job = jobman.finished[0]
        #print job.alg.gene_list
        #print job.alg.read_output(settings)
        jobnet = job.alg.network
        #print "PREDICTED NETWORK:"
        #print job.alg.network.network
        print jobnet.original_network

        if goldnet_file != None:
            goldnet = Network()
            goldnet.read_goldstd(goldnet_file)
            #print "GOLD NETWORK:"
            #print goldnet.network
            #print jobnet.analyzeMotifs(goldnet).ToString()
            print jobnet.calculateAccuracy(goldnet)
            import AnalyzeResults
            tprs, fprs, rocs = AnalyzeResults.GenerateMultiROC(jobman.finished, goldnet )
            ps, rs, precs = AnalyzeResults.GenerateMultiPR(jobman.finished, goldnet)
            print "Area Under ROC"
            print rocs

            print "Area Under PR"
            print precs

        return jobnet.original_network

#for i, exp in enumerate(pert_baseline.experiments):
    ## For each experiment, replace the value with the diff between base
    ## and dex data
    #pert = pert_data.experiments[i]
    #for gene1 in pert_baseline.gene_list:
        #baseval = exp.ratios[gene1]
        #pertval = pert.ratios[gene1]
        #pert.ratios[gene1] = pertval-baseval

        #print gene1, baseval, pertval, pert.ratios[gene1]

# Queue GENIE3 on the combined knockout + multifactorial data.
genie3job = GENIE3()
genie3job.setup(ko_pert_data["combined"], settings, "Genie3_KO_Mult")
jobman.queueJob(genie3job)
both_genie3 = genie3job

# Queue GENIE3 on the multifactorial data alone.
genie3job = GENIE3()
genie3job.setup(pert_data["multifactorial_data"], settings, "Genie3_Mult_Only")
jobman.queueJob(genie3job)
pert_genie3 = genie3job

# Queue GENIE3 on the knockout data alone.
genie3job = GENIE3()
genie3job.setup(pert_data["knockout_data"], settings, "Genie3_KO_Only")
jobman.queueJob(genie3job)
ko_genie3 = genie3job

# Queue MCZ on knockout + steady-state data, with multifactorial data passed
# in place of the usual knockdown argument.
mczjob = MCZ()
mczjob.setup(ko_pert_data["knockout_data"], ko_pert_data["ss_data"], settings, None, ko_pert_data["multifactorial_data"], "MCZ-KO_Mult")
jobman.queueJob(mczjob)
# NOTE(review): the indented line below is an orphaned fragment — presumably
# the tail of an output_dir assignment lost in a paste. As written it is a
# syntax error at module level; confirm against the original script.
    settings["global"]["experiment_name"] + "-" + t + "/"
os.mkdir(settings["global"]["output_dir"])

# Read in the gold standard network

# Read in the gold standard network
# NOTE(review): this Network() is immediately discarded — goldnet is rebound
# by the tuple unpack from get_example_data_files below.
goldnet = Network()
#goldnet.read_goldstd(settings["global"]["large_network_goldnet_file"])
ko_file, kd_file, ts_file, wt_file, mf_file, goldnet = get_example_data_files(sys.argv[1], settings)


# Read data into program
# Where the format is "FILENAME" "DATATYPE"
knockout_storage = ReadData(ko_file[0], "knockout")
knockdown_storage = ReadData(kd_file[0], "knockdown")
timeseries_storage = ReadData(ts_file[0], "timeseries")
wildtype_storage = ReadData(wt_file[0], "wildtype")



# Setup job manager
jobman = JobManager(settings)

# Queue a Cmonkey clustering job on the knockout data and run the queue.
clusterjob = Cmonkey()
clusterjob.setup(knockout_storage, settings)

jobman.queueJob(clusterjob)
jobman.runQueue()
jobman.waitToClear()

# NOTE(review): the two indented lines below are an orphaned fragment —
# presumably the tail of an `if sys.argv[1] == "dream4100":` branch lost in
# a paste. As written they are a syntax error at module level.
    ts_file = settings["global"]["dream4100_network_timeseries_file"].split()
    wt_file = settings["global"]["dream4100_network_wildtype_file"].split()

# Read data into program
# Where the format is "FILENAME" "DATATYPE"
knockout_storage = ReadData(ko_file[0], "knockout")
knockdown_storage = ReadData(kd_file[0], "knockdown")
timeseries_storage = ReadData(ts_file[0], "timeseries")
wildtype_storage = ReadData(wt_file[0], "wildtype")



# Setup job manager
jobman = JobManager(settings)

# Make NIR jobs
# NOTE(review): the computed rest_list is immediately overwritten by the
# hard-coded list on the next line, so min_restk/max_restk are dead code.
min_restk = max(len(knockout_storage.gene_list) / 5, 3)
max_restk = len(knockout_storage.gene_list) / 2 + 1
rest_list = list(set([3,5,20,21] + [i for i in range(min_restk, max_restk)]))
rest_list = [3,5,10,15,12,20,21]
# Queue one NIR job per restriction value.
for i in rest_list:
    nirjob = NIR()
    nirjob.setup(knockout_storage, settings, "NIR_K="+str(i), 5, i)
    jobman.queueJob(nirjob)

print jobman.queue
jobman.runQueue()
jobman.waitToClear()

SaveResults(jobman.finished, goldnet, settings, "Overall", 4)
# Read data into program
# Where the format is "FILENAME" "DATATYPE"
mf_storage = ReadData(mf_file[0], "multifactorial")
knockout_storage = ReadData(ko_file[0], "knockout")
knockdown_storage = ReadData(kd_file[0], "knockdown")
wildtype_storage = ReadData(wt_file[0], "wildtype")
timeseries_storage = ReadData(ts_file[0], "timeseries")
gene_list = knockout_storage.gene_list

# Setup job manager
jobman = JobManager(settings)

# MCZ
mczjob = MCZ()
mczjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "MCZ")
jobman.queueJob(mczjob)

# CLR with the "plos" background model and 6 bins.
clrjob = CLR()
clrjob.setup(knockout_storage, settings, "CLR", "plos", 6)
jobman.queueJob(clrjob)

# GENIE3 on all steady-state-like data merged into mf_storage.
mf_storage.combine(knockout_storage)
mf_storage.combine(wildtype_storage)
mf_storage.combine(knockdown_storage)
genie3job = GENIE3()
genie3job.setup(mf_storage, settings, "GENIE3")
jobman.queueJob(genie3job)

## TLCLR
# Load the DREAM in-silico size-10 Ecoli1 gold standard from disk.
goldnet.read_goldstd("algorithms/genenetweaver/InSilicoSize10-Ecoli1_goldstandard.tsv")


# Read data into program
# Where the format is "FILENAME" "DATATYPE"
# NOTE(review): unlike the other fragments, these pass the file variables
# directly rather than indexing [0] — presumably plain paths here; confirm.
knockout_storage = ReadData(ko_file, "knockout")
knockdown_storage = ReadData(kd_file, "knockdown")
timeseries_storage = ReadData(ts_file, "timeseries")
wildtype_storage = ReadData(wt_file, "wildtype")


# Setup job manager
jobman = JobManager(settings)

# Make MCZ job
mczjob = MCZ()
mczjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "MCZ")
jobman.queueJob(mczjob)

print jobman.queue
jobman.runQueue()
jobman.waitToClear()


# Plot overall ROC and PR curves for the finished jobs and save results.
tprs, fprs, rocs = GenerateMultiROC(
    jobman.finished, goldnet, False, settings["global"]["output_dir"] + "/OverallROC.pdf"
)
ps, rs, precs = GenerateMultiPR(jobman.finished, goldnet, False, settings["global"]["output_dir"] + "/OverallPR.pdf")

SaveResults(jobman.finished, goldnet, settings)
# Gather networks
ko_file, kd_file, ts_file, wt_file, mf_file, goldnet = get_example_data_files(sys.argv[1], settings)

# Read data into program
# Where the format is "FILENAME" "DATATYPE"
mf_storage = ReadData(mf_file[0], "multifactorial")
knockout_storage = ReadData(ko_file[0], "knockout")
knockdown_storage = ReadData(kd_file[0], "knockdown")
wildtype_storage = ReadData(wt_file[0], "wildtype")
timeseries_storage = ReadData(ts_file[0], "timeseries")
gene_list = knockout_storage.gene_list
# Queue an MCZ job labelled "SimAnnealing" and run it to completion.
votejob = MCZ()
votejob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "SimAnnealing")
jobman = JobManager(settings)
jobman.queueJob(votejob)
# Rebind to the queued instance before running (same object as queued above).
votejob = jobman.queue[0]
jobman.runQueue()
jobman.waitToClear("VotingJob")

# Send to voting algorithm
# Select which network names to process from the command-line argument:
# "dream410"/"dream4100" expand to all five replicates, anything else is
# treated as a single network name.
dream410 = ["dream410","dream410_2","dream410_3","dream410_4","dream410_5"]
#dream410 = ["dream410","dream410_2"]
dream4100 = ["dream4100","dream4100_2","dream4100_3","dream4100_4","dream4100_5"]
if sys.argv[1] == "dream410":
  networks = dream410
elif sys.argv[1] == "dream4100":
  networks = dream4100
else:
  networks = [sys.argv[1]]
results = []
# Knockout/knockdown inputs are deliberately disabled for this run.
#knockout_storage = ReadData(ko_file[0], "knockout")
knockout_storage = None
#knockdown_storage = ReadData(kd_file[0], "knockdown")
knockdown_storage = None
timeseries_storage = ReadData(ts_file[0], "timeseries")
wildtype_storage = ReadData(wt_file[0], "wildtype")



# Setup job manager
jobman = JobManager(settings)

# Make TLCLR jobs: once with all available data, once without knockdowns.
tlclrjob = TLCLR()
tlclrjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "TLCLR_All_Data")
jobman.queueJob(tlclrjob)

tlclrjob = TLCLR()
tlclrjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, None, "TLCLR_No_KD")
jobman.queueJob(tlclrjob)

#tlclrjob = TLCLR()
#tlclrjob.setup(None, wildtype_storage, settings, timeseries_storage, knockdown_storage, "TLCLR_No_KO")
#jobman.queueJob(tlclrjob)

#tlclrjob = TLCLR()
#tlclrjob.setup(None, wildtype_storage, settings, timeseries_storage, None, "TLCLR_No_KO_or_KD")
#jobman.queueJob(tlclrjob)


print jobman.queue
# Read data into program
# Where the format is "FILENAME" "DATATYPE"
knockout_storage = ReadData(ko_file[0], "knockout")
knockdown_storage = ReadData(kd_file[0], "knockdown")
timeseries_storage = ReadData(ts_file[0], "timeseries")
wildtype_storage = ReadData(wt_file[0], "wildtype")
mf_storage = ReadData(mf_file[0], "multifactorial")

# Setup job manager
jobman = JobManager(settings)

# Make MCZ job (original comment said "BANJO" but these are MCZ/CLR/GENIE3).
mczjob = MCZ()
mczjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "MCZ_Alone")
jobman.queueJob(mczjob)

# CLR with the "plos" model and 6 bins.
# NOTE(review): `t` is not defined in this fragment — presumably a
# distribution name or timestamp from an enclosing loop; confirm.
clrjob = CLR()
clrjob.setup(knockout_storage, settings, "clr_" + t + "_Bins-" + str(6), "plos", 6)
jobman.queueJob(clrjob)

#cojob = ConvexOptimization()
#cojob.setup(knockout_storage, settings, "ConvOpt_T-Plos",None, None, 0.04)
#jobman.queueJob(cojob)

# GENIE3 on multifactorial + knockout + wildtype + knockdown data combined.
mf_storage.combine(knockout_storage)
mf_storage.combine(wildtype_storage)
mf_storage.combine(knockdown_storage)
genie3job = GENIE3()
genie3job.setup(mf_storage, settings, "MF_KO_WT_KD")
jobman.queueJob(genie3job)
os.mkdir(settings["global"]["output_dir"])


# Get config file for Banjo
settings = ReadConfig(settings, "./config/default_values/banjo.cfg")
#settings = ReadConfig(settings, settings["banjo"]["config"])

# Setup job manager
jobman = JobManager(settings)

# Make Banjo jobs: one per discretization policy (the original repeated this
# block four times verbatim; queueing order q4, q3, q2, q5 is preserved).
# max_time was only set once in the original, before the first job.
settings["banjo"]["max_time"] = "1"
for policy in ["q4", "q3", "q2", "q5"]:
    settings["banjo"]["discretization_policy"] = policy
    bjob = Banjo()
    bjob.setup(timeseries_storage, settings, "banjo_" + settings["banjo"]["discretization_policy"] )
    jobman.queueJob(bjob)
# Make BANJO jobs
#mczjob = MCZ()
#mczjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "mcz-test-run-1")
#jobman.queueJob(mczjob)

#print jobman.queue
#jobman.runQueue()
#jobman.waitToClear()

# Accumulators for accuracy and precision results (populated later).
accs = []
precs = []

# Queue a baseline ConvexOptimization job on the knockout data.
cojob = ConvexOptimization()
cojob.setup(knockout_storage, settings, "ConvOpt_Baseline")
jobman.queueJob(cojob)

#accs.append("MCZ:")
#for job in jobman.finished:
    ##threshnet = job.alg.network.copy()
    ##threshnet.network = threshnet.apply_threshold(0)
    ##accs.append((job.alg.name, threshnet.calculateAccuracy(goldnet)))

    ##pre, rec, area = GeneratePR(job.alg.network, goldnet, True, False, job.alg.name)
    ##precs.append((job.alg.name, area))
    ##for i in range(8, 10):
    ##for i in [15,20,25,30,35,5,3,1,2,50]:
    #num_edge_list = [x for x in range(21)]
    ##num_edge_list += [ 25, 30, 45, 50, 55, 60, 65, 70 ]
    #num_edge_list = [70, 80, 50, 10]
    #for i in num_edge_list:
# For the dream4100 network, load its gold standard and data file lists.
if sys.argv[1] == "dream4100":
    goldnet.read_goldstd(settings["global"]["dream4100_network_goldnet_file"])
    #Get a list of the knockout files
    ko_file = settings["global"]["dream4100_network_knockout_file"].split()
    kd_file = settings["global"]["dream4100_network_knockdown_file"].split()
    ts_file = settings["global"]["dream4100_network_timeseries_file"].split()
    wt_file = settings["global"]["dream4100_network_wildtype_file"].split()

# Read data into program
# Where the format is "FILENAME" "DATATYPE"
knockout_storage = ReadData(ko_file[0], "knockout")
knockdown_storage = ReadData(kd_file[0], "knockdown")
timeseries_storage = ReadData(ts_file[0], "timeseries")
wildtype_storage = ReadData(wt_file[0], "wildtype")

# Setup job manager
jobman = JobManager(settings)

# Make clr jobs: sweep every background model over bin counts 5..14.
for t in ['normal', 'rayleigh', 'beta', 'plos', 'kde']:
  for n in range(5,15):
    clrjob = CLR()
    clrjob.setup(knockout_storage, settings, "clr_" + t + "_Bins-" + str(n), t, n)
    jobman.queueJob(clrjob)

print jobman.queue
jobman.runQueue()
jobman.waitToClear()
accs, precs, rocs = SaveResults(jobman.finished, goldnet, settings, "Overall", 4)

# Build a 5-point hyperparameter grid over eta_z, lambda_w and tau.
grid = Generate_Grid("dfg4grn", None, settings, ["eta_z", "lambda_w", "tau"], 5).test_list

jobman = JobManager(settings)

# Queue a DFG4GRN job with a hand-picked hyperparameter combination.
dfg = DFG4GRN()
settings["dfg4grn"]["eta_z"] = 0.1
settings["dfg4grn"]["lambda_w"] = 0.01
settings["dfg4grn"]["tau"] = 3.5
dfg.setup(
    timeseries_storage,
    TFList(timeseries_storage[0].gene_list),
    settings,
    "EtaZ-{0}_LamdaW-{1}_Tau-{2}".format(0.1, 0.01, 3.5),
    20,
)
jobman.queueJob(dfg)

# A second hand-picked combination.
dfg = DFG4GRN()
settings["dfg4grn"]["eta_z"] = 0.01
settings["dfg4grn"]["lambda_w"] = 0.001
settings["dfg4grn"]["tau"] = 3
dfg.setup(
    timeseries_storage,
    TFList(timeseries_storage[0].gene_list),
    settings,
    "EtaZ-{0}_LamdaW-{1}_Tau-{2}".format(0.01, 0.001, 3),
    20,
)
jobman.queueJob(dfg)
# NOTE(review): this loop only assigns eta_z and queues nothing — it looks
# truncated (lambda_w/tau assignment and job setup presumably lost in the
# paste); confirm against the original script.
for i, p in enumerate(grid):
    settings["dfg4grn"]["eta_z"] = p[0]
# Read data into program
# Where the format is "FILENAME" "DATATYPE"
knockout_storage = ReadData(ko_file[0], "knockout")
knockdown_storage = ReadData(kd_file[0], "knockdown")
timeseries_storage = ReadData(ts_file[0], "timeseries")
wildtype_storage = ReadData(wt_file[0], "wildtype")



# Setup job manager
jobman = JobManager(settings)

# Queue a single InferelatorPipeline job using all four data sources.
infjob = InferelatorPipeline()
infjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "InferelatorPipeline")
jobman.queueJob(infjob)

print jobman.queue
jobman.runQueue()
jobman.waitToClear()

# Accumulators for accuracy and precision results (populated later).
accs = []
precs = []

#dfg = DFG4GRN()
#dfg.setup(timeseries_storage,  TFList(timeseries_storage[0].gene_list), settings, "DFG4GRN_Baseline", 20)
#jobman.queueJob(dfg)


#import pickle
# Time-series replicates for the KNO3 experiment.
ts_storage = [kno3_1, kno3_2, kno3_3, kno3_4]
#for s in ts_storage:
    #s.normalize()

# Setup job manager
jobman = JobManager(settings)

# Queue CLR and GENIE3 on each nitrate data set (CNLO, NO3, CNLO+NO3).
#mczjob = MCZ()
#mczjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "MCZ")
#jobman.queueJob(mczjob)

clr_cnlojob = CLR()
clr_cnlojob.setup(cnlo_storage, settings, "clr_cnlo")
jobman.queueJob(clr_cnlojob)

genie3_cnlojob = GENIE3()
genie3_cnlojob.setup(cnlo_storage, settings, "genie3_cnlo")
jobman.queueJob(genie3_cnlojob)

clr_no3job = CLR()
clr_no3job.setup(no3_storage, settings, "clr_no3")
jobman.queueJob(clr_no3job)

genie3_no3job = GENIE3()
genie3_no3job.setup(no3_storage, settings, "genie3_no3")
jobman.queueJob(genie3_no3job)

# NOTE(review): unlike the jobs above, clr_cnlo_no3job is set up but never
# queued in the visible code — possibly lost in the paste; confirm.
clr_cnlo_no3job = CLR()
clr_cnlo_no3job.setup(cnlo_no3_storage, settings, "clr_cnlo_no3")
permtp = 1


# Queue two Inferelator2 (BBSR) jobs with identical settings: one on the
# time-series data alone, one with knockdown data added.  The original
# repeated the whole settings block verbatim for each job; it is factored
# into a single loop here (job names and queueing order are unchanged).
meth = "BBSR"
for kd_data, label in [(None, "TSAlone"), (knockdowns[exp_name], "KD-TS")]:
    inf = Inferelator2(settings)
    settings["inferelator2"]["num_cores"] = 12
    settings["inferelator2"]["num_bootstraps"] = 50
    #settings["inferelator2"]["num_bootstraps"] = 1
    settings["inferelator2"]["permtp"] = permtp
    settings["inferelator2"]["permfp"] = permfp
    settings["inferelator2"]["nCv"] = 8
    settings["inferelator2"]["perctp"] = perctp
    settings["inferelator2"]["percfp"] = percfp
    settings["inferelator2"]["method"] = meth
    inf.setup(None, wildtypes[exp_name], settings, ts_storage, kd_data,
              "Inferelator2-{0}-{1}-{2}".format(label, meth, exp_name), None)
    jobman.queueJob(inf)
def get_network_results(name, settings, cache):
  print "STARTING", name

  if name in cache.keys():
    print "CACHE HIT"
    return cache[name]

  ko_file, kd_file, ts_file, wt_file, mf_file, goldnet = get_example_data_files(name, settings)

  # Create date string to append to output_dir
  t = datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
  settings["global"]["output_dir"] = settings["global"]["output_dir_save"] + "/" + \
      settings["global"]["experiment_name"] + "-" + t + "-" + name + "/"
  os.mkdir(settings["global"]["output_dir"])

  # Get a list of the multifactorial files

  # Read data into program
  # Where the format is "FILENAME" "DATATYPE"
  mf_storage = ReadData(mf_file[0], "multifactorial")
  knockout_storage = ReadData(ko_file[0], "knockout")
  knockdown_storage = ReadData(kd_file[0], "knockdown")
  wildtype_storage = ReadData(wt_file[0], "wildtype")
  timeseries_storage = ReadData(ts_file[0], "timeseries")
  gene_list = knockout_storage.gene_list

  # Setup job manager
  jobman = JobManager(settings)

  # MCZ
  mczjob = MCZ()
  mczjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "MCZ")
  jobman.queueJob(mczjob)

  # CLR
  clrjob = CLR()
  clrjob.setup(knockout_storage, settings, "CLR", "plos", 6)
  jobman.queueJob(clrjob)

  # GENIE3
  mf_storage.combine(knockout_storage)
  mf_storage.combine(wildtype_storage)
  mf_storage.combine(knockdown_storage)
  genie3job = GENIE3()
  genie3job.setup(mf_storage, settings, "GENIE3")
  jobman.queueJob(genie3job)

  ## TLCLR
  tlclrjob = TLCLR()
  tlclrjob.setup(knockout_storage, wildtype_storage, settings, timeseries_storage, knockdown_storage, "TLCLR")
  jobman.queueJob(tlclrjob)

  #if sys.argv[1] != "dream4100":
      #cojob = ConvexOptimization()
      #cojob.setup(knockout_storage, settings, "ConvOpt_T-"+ str(0.01),None, None, 0.01)
      #jobman.queueJob(cojob)

  ### DFG4GRN
  dfg = DFG4GRN()
  settings["dfg4grn"]["eta_z"] = 0.01
  settings["dfg4grn"]["lambda_w"] = 0.001
  settings["dfg4grn"]["tau"] = 3
  dfg.setup(timeseries_storage, TFList(timeseries_storage[0].gene_list), settings, "DFG", 20)
  jobman.queueJob(dfg)

  ### Inferelator

  ### NIR
  nirjob = NIR()
  nirjob.setup(knockout_storage, settings, "NIR", 5, 5)
  jobman.queueJob(nirjob)

  #### TDARACNE
  settings = ReadConfig(settings, "./config/default_values/tdaracne.cfg")
  bjob = tdaracne()
  settings["tdaracne"]["num_bins"] = 4
  bjob.setup(timeseries_storage, settings, "TDARACNE")
  jobman.queueJob(bjob)


  print jobman.queue
  jobman.runQueue()
  jobman.waitToClear(name)
  SaveResults(jobman.finished, goldnet, settings, name)

  cache[name] = jobman.finished[:]

  return cache[name]
os.mkdir(settings["global"]["output_dir"])

knockdown_filenames = settings["global"]["small_network_knockdown_file"].split()

knockdown_storage = ReadData(knockdown_filenames[0], "knockdown")

from nirest import *

# Layer NIRest defaults, then the user-specified NIRest config.
settings = ReadConfig(settings, "./config/default_values/nirest.cfg")
settings = ReadConfig(settings, settings["nirest"]["config"])

jobman = JobManager(settings)

# Queue a single NIRest job on the knockdown data and run it.
nirrun = NIRest()
nirrun.setup(knockdown_storage, settings, "nirest-test")
jobman.queueJob(nirrun)

print jobman.queue
jobman.runQueue()
jobman.waitToClear()

accs = []

# Report accuracy and motif analysis against the gold standard for each job.
for job in jobman.finished:
    print job.alg.gene_list
    print job.alg.read_output(settings)
    print job.alg.network.calculateAccuracy(goldnet)
    report = job.alg.network.analyzeMotifs(goldnet)
    print report.ToString()

genie3nets = {}
# Queue 20 GENIE3 repetitions per experiment on combined time-series,
# steady-state-converted-time-series, and multifactorial data.
for i in range(20):
    for name in data.keys():
        ts_storage = data[name]
        # Spread 1008 time units evenly across the series' intervals.
        settings["global"]["time_series_delta_t"] = (1008.0 / (len(ts_storage[0].experiments)-1))
        combined = ReadData(exp_data_directory + '/' + name + '/' + timeseries_filename, "timeseries")[0]

        for ts in timeseries_as_steady_state[name][1:11]:
            combined.combine(ts)
        #combined.combine(knockouts[name])
        combined.combine(multifactorials[name])

        genie3job = GENIE3()
        genie3job.setup(combined, settings, "Genie3_TimeSeries_{0}_{1}".format(name, i))
        jobman.queueJob(genie3job)
        # NOTE(review): genie3nets[name] is overwritten on every repetition,
        # so only the last (i == 19) job per experiment is retained.
        genie3nets[name] = genie3job
        genie3job.goldnet = goldnets[name]


jobman.runQueue()
jobman.waitToClear()


# Normalize every predicted network after the queue drains.
for job in jobman.finished:
    job.alg.network.normalize()

#tprs, fprs, rocs = GenerateMultiROC(jobman.finished, goldnet, False, settings["global"]["output_dir"] + "/OverallROC.pdf")
#ps, rs, precs = GenerateMultiPR(jobman.finished, goldnet, False, settings["global"]["output_dir"] + "/OverallPR.pdf")

#for job in jobman.finished:
print len(ts_storage[1].experiments)
# for s in ts_storage:
# s.normalize()

# Setup job manager
jobman = JobManager(settings)

# Train on 15
dfg15 = DFG4GRN()
settings["dfg4grn"]["eta_z"] = 0.1
settings["dfg4grn"]["lambda_w"] = 0.1
settings["dfg4grn"]["tau"] = 3
settings["dfg4grn"]["delta_t"] = "3 3 3 3 3"
dfg15.setup(ts_storage, TFList(tfs), settings, "DFG-15_LambdaW-{0}".format(0.1, d), 20, None, None, None, False)
jobman.queueJob(dfg15)

settings["global"]["time_series_delta_t"] = "3 3 3 3 3 5"
# Train on 20
dfg20 = DFG4GRN()
settings["dfg4grn"]["eta_z"] = 0.1
settings["dfg4grn"]["lambda_w"] = 0.1
settings["dfg4grn"]["tau"] = 3
settings["dfg4grn"]["delta_t"] = "3 3 3 3 3 5"
dfg20.setup(ts_storage_20, TFList(tfs), settings, "DFG-20_LambdaW-{0}".format(0.1, d), 20, None, None, None, False)
jobman.queueJob(dfg20)


jobman.runQueue()
jobman.waitToClear()