Example #1
import math
import os
import pickle

import networkx as nx
import numpy as np

# modelHolder and writeModel are project-specific helpers assumed to be in scope
def findPathwayList():
    pathways = []
    codes = []
    # looks for pathways that have gpickles generated originally
    for file in os.listdir("gpickles"):
        if file.endswith(".gpickle"):
            if os.path.isfile('pickles/' + file[:-8] + '_1_local1.pickle'):
                codes.append(file[:-8])
            else:
                print(file[:-8] + ' has no output')
    print(codes)
    # for each of these pathways, we find the output of the rule determination and scoring procedures and put them together.
    for code in codes:
        pathVals = []
        rules = []
        for i in range(1, 6):
            [bruteOut1, dev, storeModel, storeModel3, equivalents,
             dev2] = pickle.Unpickler(
                 open('pickles/' + code + '_' + str(i) + '_local1.pickle',
                      "rb")).load()
            model = modelHolder(storeModel3)
            pathVals.append(
                pickle.Unpickler(
                    open('pickles/' + code + '_' + str(i) + '_scores1.pickle',
                         "rb")).load())
            rules.append(writeModel(bruteOut1, model))
        graph = nx.read_gpickle("gpickles/" + code + ".gpickle")
        ImportanceVals = {}
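        # storeModel[1] is the node list saved with each trial's model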
        for node in range(len(storeModel[1])):
            # ImportanceVals[storeModel[1][node]]=math.log(np.mean([pathVals[i][node] for i in range(5)]),2)
            ImportanceVals[storeModel[1][node]] = float(
                np.mean(
                    [math.log(1. + pathVals[i][node], 2) for i in range(5)]))
        # add nodes removed during network simplification back in
        removedNodes = pickle.Unpickler(
            open('pickles/' + code + '_addLaterNodes.pickle', "rb")).load()
        doubleRemoveNodes = []
        for node in removedNodes:
            if node[1] in ImportanceVals:
                ImportanceVals[node[0]] = ImportanceVals[node[1]]
            else:
                doubleRemoveNodes.append(node)
        count = 0
        while len(doubleRemoveNodes) > 0 and count < 1000:
            count = count + 1
            tripleRemoveNodes = []
            for node in doubleRemoveNodes:
                if node[1] in ImportanceVals:
                    ImportanceVals[node[0]] = ImportanceVals[node[1]]
                else:
                    tripleRemoveNodes.append(node)
            doubleRemoveNodes = tripleRemoveNodes
        pathways.append([code, ImportanceVals, rules, graph])
        # the loop above exits with count == 1000 at most, so the original
        # test 'count > 1000' could never fire; check the leftovers directly
        if doubleRemoveNodes:
            print('failed to add removed nodes back in: ' +
                  str(doubleRemoveNodes))
    return pathways
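
A minimal usage sketch (not part of the original example): calling findPathwayList and printing the top-scoring nodes per pathway, assuming the 'gpickles/' and 'pickles/' directories from the code above are populated.

if __name__ == '__main__':
    for code, importanceVals, rules, graph in findPathwayList():
        # each entry is [code, ImportanceVals, rules, graph] as built above
        top = sorted(importanceVals.items(), key=lambda kv: kv[1],
                     reverse=True)[:5]
        print(code + ' top nodes: ' + str(top))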
Example #2

# requires the same imports as Example #1 (os, pickle, networkx, numpy)
def findPathwayList():
    pathways = []
    codes = []
    # looks for pathways that have gpickles generated originally
    for file in os.listdir("gpickles"):
        if file.endswith(".gpickle"):
            if os.path.isfile('pickles/' + file[:-8] + '_1_local1.pickle'):
                codes.append(file[:-8])
            else:
                print(file[:-8] + ' has no output')
    print(codes)
    # for each of these pathways, we find the output of the rule determination and scoring procedures and put them together.
    for code in codes:
        pathVals = []
        rules = []
        for i in range(1, 6):
            [bruteOut1, dev, storeModel, storeModel3, equivalents,
             dev2] = pickle.Unpickler(
                 open('pickles/' + code + '_' + str(i) + '_local1.pickle',
                      "rb")).load()
            model = modelHolder(storeModel3)
            pathVals.append(
                pickle.Unpickler(
                    open('pickles/' + code + '_' + str(i) + '_scores1.pickle',
                         "rb")).load())
            rules.append(writeModel(bruteOut1, model))
        print(pathVals)
        graph = nx.read_gpickle("gpickles/" + code + ".gpickle")
        ImportanceVals = {}  # average importance vals over trials
        for node in range(len(storeModel[1])):
            ImportanceVals[storeModel[1][node]] = float(
                np.mean([pathVals[i][node] for i in range(5)]))
        # unlike Example #1, this variant does not re-add nodes removed
        # during network simplification
        print(ImportanceVals)
        pathways.append([code, ImportanceVals, rules, graph])
    return pathways
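
Examples #1 and #2 differ in how the five per-trial scores are averaged: Example #1 takes the mean of log2(1 + score), Example #2 the plain mean. A minimal comparison with hypothetical scores:

import math
import numpy as np

vals = [3.0, 5.0, 2.0, 4.0, 6.0]  # hypothetical per-trial scores for one node
plain = float(np.mean(vals))                                   # Example #2
logged = float(np.mean([math.log(1. + v, 2) for v in vals]))   # Example #1
print(plain, logged)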
Example #3

	# fragment of a run function: graph, sampleList, name, boolC, and
	# start_time are assumed to be defined earlier in the enclosing code
	# set up parameters of run, model
	params=paramClass()
	model=modelClass(graph,sampleList, False)
	model.updateCpointers()

	storeModel=[(model.size), list(model.nodeList), list(model.individualParse),
		list(model.andNodeList), list(model.andNodeInvertList), list(model.andLenList),
		list(model.nodeList), dict(model.nodeDict), list(model.initValueList)]
	
	# put lack of KOs, initial values into correct format
	knockoutLists, knockinLists= setupEmptyKOKI(len(sampleList))
	newInitValueList=genInitValueList(sampleList,model)
	model.initValueList=newInitValueList

	# find rules by doing GA then local search
	model1, dev, bruteOut =GAsearchModel(model, sampleList, params, knockoutLists, knockinLists, name, boolC) # run GA
	bruteOut1, equivalents, dev2 = localSearch(model1, bruteOut, sampleList, params, knockoutLists, knockinLists, boolC) # run local search
	
	# output results
	storeModel3=[(model.size), list(model.nodeList), list(model.individualParse),
		list(model.andNodeList), list(model.andNodeInvertList), list(model.andLenList),
		list(model.nodeList), dict(model.nodeDict), list(model.initValueList)]
	outputList=[bruteOut1,dev,storeModel, storeModel3, equivalents, dev2]
	pickle.dump( outputList, open( name+"_local1.pickle", "wb" ) ) # output rules

	# calculate importance scores and output
	scores1=calcImportance(bruteOut1,params,model1, sampleList,knockoutLists, knockinLists, boolC)
	pickle.dump( scores1, open( name+"_scores1.pickle", "wb" ) )

	# write rules
	with open(name+"_rules.txt", "w") as text_file:
		text_file.write(writeModel(bruteOut1, model1))
	print("--- %s seconds ---" % (time.time() - start_time))
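
The "_local1.pickle" written above is exactly what findPathwayList in Examples #1 and #2 reads back. A minimal sketch of the round-trip, using a hypothetical run name 'hsa04010_1':

import pickle

with open('hsa04010_1_local1.pickle', 'rb') as f:  # hypothetical file name
    bruteOut1, dev, storeModel, storeModel3, equivalents, dev2 = pickle.load(f)
print(storeModel3[1][:5])  # first few node names from the stored model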
Example #4
    # reconstructed from the GAsearchModel call in Example #3; the original
    # snippet is truncated here, and model, sampleList, params, knockoutLists,
    # knockinLists, graph, and start_time are assumed to be defined earlier
    model1, dev, bruteOut = GAsearchModel(model, sampleList, params,
                                          knockoutLists, knockinLists,
                                          'RSV_miic_out', boolC)  # run GA
    bruteOut1, equivalents, dev2 = localSearch(model1, bruteOut, sampleList,
                                               params, knockoutLists,
                                               knockinLists,
                                               boolC)  # run local search
    storeModel3 = [(model.size),
                   list(model.nodeList),
                   list(model.individualParse),
                   list(model.andNodeList),
                   list(model.andNodeInvertList),
                   list(model.andLenList),
                   list(model.nodeList),
                   dict(model.nodeDict),
                   list(model.initValueList)]
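    # note: nodeList appears twice above, matching the storeModel layout in Example #3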
    outputList = [bruteOut1, dev, storeModel3, equivalents, dev2]
    pickle.dump(outputList, open('RSV_miic_out' + "_local1.pickle",
                                 "wb"))  # output rules
    # calculate importance scores and output
    scores1 = calcImportance(bruteOut1, params, model, sampleList,
                             knockoutLists, knockinLists, boolC)
    pickle.dump(scores1, open('RSV_miic_out' + "_scores1.pickle", "wb"))
    ImportanceVals = {}
    for node in range(len(storeModel3[1])):
        ImportanceVals[storeModel3[1][node]] = float(scores1[node])

    # write out rules
    modeler = writeModel(bruteOut1, model1)
    rules = modeler.replace('not', '~').split('\n')
    outputGraphMiic(graph, modeler, ImportanceVals)

    print("--- %s seconds ---" % (time.time() - start_time))
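
outputGraphMiic is project-specific; a hedged sketch of the same general idea using standard networkx calls — attaching the importance scores to the graph and writing it out (the attribute name and output path are hypothetical):

import networkx as nx

nx.set_node_attributes(graph, ImportanceVals, 'importance')  # hypothetical attribute name
nx.write_graphml(graph, 'RSV_miic_out.graphml')  # hypothetical output path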