def hillclimberMain(data, iterationNumber):
    '''Calls different functions to perform hillclimber.

    Randomly colors the network, then repeatedly runs the hillclimber with
    one extra color allowed per round until check.Checklist reports no
    coloring errors. Returns the final color list (one color per node) and
    draws the colored network via graph.makeGraph.
    '''
    colorNumber = 2

    # prepare list: one color slot per node
    colorList = [0] * len(data)

    # color graph randomly
    # BUG FIX: the original looped over range(0, len(colorList) - 1), which
    # skipped the last node (it stayed at color 0); iterate every index.
    # NOTE(review): randint's upper bound is inclusive, so colorNumber + 1
    # distinct colors (0..colorNumber) are used — confirm that is intended.
    for i in range(len(colorList)):
        colorList[i] = random.randint(0, colorNumber)

    output = check.Checklist(colorList, data)

    # keep iterating as long as there are still errors
    while len(output) != 0:
        # try to eliminate errors with iteration
        colorList = hillclimber(iterationNumber, colorNumber, colorList, data)
        # check if there are still errors
        output = check.Checklist(colorList, data)
        # if errors remain, allow one more color next round
        colorNumber = colorNumber + 1

    graph.makeGraph(colorList, data)
    return colorList
def detalleAnimal(id):
    """Render the history-graph detail page for animal *id*.

    Requires a logged-in session (a 'nombre' key in the Flask session);
    otherwise falls back to the login template. Regenerates the per-animal
    graph image and passes its static path to the template.
    """
    if 'nombre' in session:
        # BUG FIX: removed leftover debug print of the id
        # (print(id + ' IDDDDDDDDDDDDDDDDD')).
        makeGraph(id)
        data = getAnimalDetail(id)
        source = "../static/grafica{0}.png".format(id)
        return render_template("animalHistoryGraph.html", details=data, source=source)
    else:
        return render_template('login.html')
def systematicApproach(l):
    """Run the search / author-graph pipeline once per entry of narrow_list.

    For each option: search it, summarize the returned IDs, prune
    supplementary authors, then build and analyse the co-author graph.

    NOTE(review): the parameter *l* is never used — the function iterates the
    module-level narrow_list instead; confirm which was intended.
    """
    for counter, option in enumerate(narrow_list):
        # BUG FIX: search(option) was called twice back-to-back and the first
        # result discarded; the redundant call is removed.
        SAresults = search(option)
        SA_id_list = SAresults['IdList']
        summary_details(SA_id_list)
        remove_supp_authors(m_lastAuthor, m_authorList)
        graph.makeGraph(authorDict)
        graph.nodeDegree(graph.g)
        graph.graphRP(graph.PgenL, graph.RgenL)
        graph.graphAnalysis(graph.g, counter)
        #graph.writeToCSV(graph.clusterCo, graph.eig_cen, graph.deg_cen, counter)
        #graph.inclusiveGraphs(graph.communities, graph.densities)
        graph.printGraphingLists(graph.communities, graph.totalNodes,
                                 graph.degreeCentAvg, graph.degreeCentMedian,
                                 graph.betweennessCent, graph.clusterCoAvg)
def main(algorithm, network, isMapType): '''Calls different functions to color a network''' # ----------------------------- Select data type ----------------------------- # load in data if network is 'random': totalConnections, tuplesList = randomconnections.randomConnections(20, 0, 190) data = socialload.loadData(tuplesList) elif isMapType is True: loadin.loadData(network) else: socialload.loadData(network) # ----------------------------- Select algorithm ----------------------------- # color network if algorithm is 'shell': colorList = lowestcolor.shell(data, None) # color islands too for i, a in enumerate(colorList): if a == None: colorList = lowestcolor.shell(data, i) elif algorithm is 'annealing': colorList = annealing.annealingMain(data, 10000) elif algorithm is 'hillclimber': colorList = hillclimber.hillclimberMain(data, 10000) elif algorithm is 'degree': colorList = lowestcolor.degree(data) elif algorithm is 'hybrid': colorList = hybrid.algorithm(data) elif algorithm is 'clockwise': colorList = lowestcolor.clockwise(data, None) # color islands too for i, a in enumerate(colorList): if a == None: colorList = lowestcolor.clockwise(data, i) # check if no errors output = check.Checklist(colorList, data) print output # ----------------------------- Obtain testdata ----------------------------- # Test degree maximum = len(data[lowestcolor.getLongest(data)][1]) # find biggest clique figurelist = figuresearch.buildFigures(data) biggest = figuresearch.findBiggestClique(figurelist) colors = check.checkColors(colorList) # make graph of colored network graph.makeGraph(colorList, data)
from Bio import Entrez

import graph
import utilities

# Default Entrez credentials used when the module is run directly.
Entrez.email = '*****@*****.**'
Entrez.api_key = 'f513b4e2e1a0b578c9d3dd731e36f19f7f08'


def __init__(self, email):
    """Set the Entrez credentials to use for subsequent requests.

    NOTE(review): this 'def __init__' sits at module level with no enclosing
    class, so it is never invoked as a constructor — confirm whether it
    belongs inside a class.
    BUG FIX: the 'email' parameter was ignored (a hard-coded address was
    re-assigned); the passed-in address is now used.
    """
    Entrez.email = email
    Entrez.api_key = 'f513b4e2e1a0b578c9d3dd731e36f19f7f08'


if __name__ == '__main__':
    # Search, fetch and summarize papers, then build and analyse the
    # co-author graph from the collected author dictionary.
    results = utilities.search('etiology')
    id_list = results['IdList']
    papers = utilities.fetch_details(id_list)
    utilities.summary_details(id_list)
    utilities.systematicApproach(utilities.narrow_list)
    utilities.remove_supp_authors(utilities.m_lastAuthor, utilities.m_authorList)
    graph.makeGraph(utilities.authorDict)
    graph.nodeDegree(graph.g)
    graph.graphAnalysis(graph.g)
def makeQueries(filename):
    """creates a graph(list of cities) from the flightdata file
    returns a list of cities (each containing a list of flights)

    Each bracketed "[...]" group in the file is one query; its
    parenthesised comma-separated fields become the Query fields
    (the 8th field is converted to int).
    """
    queries = []
    # BUG FIX: the original opened the file without ever closing it;
    # 'with' guarantees the handle is released.
    with open(filename, 'r') as f:
        raw = f.read().replace("\n", "").replace("]", "").replace(" ", "").strip()
    for q in raw.split("["):
        if q != "":
            q = q.replace("(", "").replace(")", "")
            parts = q.split(',')
            currQ = flightClasses.Query(parts[0], parts[1], parts[2], parts[3],
                                        parts[4], parts[5], parts[6], int(parts[7]))
            queries.append(currQ)
    return queries


# testing file for the project: build the graph and run every query from argv
g = graph.makeGraph(sys.argv[1])
q = makeQueries(sys.argv[2])
for k in q:
    x = algorithm.getFlightSolutions(k, g)
    ret = str(flightClasses.printSolutions(k, x)) + "\n"
    print(ret)
# Pages definition file. Page response handler classes should be defined here. import tornado.web import tornado.template import graph import flightClasses import algorithm g = graph.makeGraph("testFiles/testdata2") class LandingHandler(tornado.web.RequestHandler): """Class which allows users to make requests for flights Additionally, request responses will be displayed as an element of the page this class genereates. """ def get(self): loader = tornado.template.Loader("templates/") self.write( loader.load("landing.html").generate(cities=sorted(g.getCityNames()), airlines=["None"] + g.getAirlines()) ) class QueryHandler(tornado.web.RequestHandler): """Class which handles a request for flights""" def get(self): loader = tornado.template.Loader("templates/") if self.get_argument("origCity") == self.get_argument("desCity"): self.write(
temp[random.randint(0, size - 1)] = random.randint(0, max_col) new_len = len(check.Checklist(temp, data)) delta_score = new_len - max_error a = delta_score / T print a, delta_score, T chance = math.exp(-(a)) # print chance T = T * math.pow(g, i) evaluate = random.uniform(0, 1) if chance >= evaluate: max_error = new_len CCL = temp i += 1 return CCL def algorithm(data, CCL): CCL = [0] * len(CCL) for i in range(1, 100): CCL = anealing(CCL, data, i, len(check.Checklist(CCL, data))) if len(check.Checklist(CCL, data)) == 0: break # chaneColoring(CCL, check.Checklist(CCL, data), i, data) return CCL if __name__ == "__main__": data = socialload.loadData('network1.txt') CCL = [None] * len(data) CCL = algorithm(data, CCL) graph.makeGraph(countryColorList, data) print("--- %s seconds ---" % (time.time() - start_time))
#Pages definition file. Page response handler classes should be defined here. import tornado.web import tornado.template import graph import flightClasses import algorithm g = graph.makeGraph("testFiles/testdata2") class LandingHandler(tornado.web.RequestHandler): """Class which allows users to make requests for flights Additionally, request responses will be displayed as an element of the page this class genereates. """ def get(self): loader = tornado.template.Loader("templates/") self.write(loader.load("landing.html").generate( cities=sorted(g.getCityNames()), airlines=["None"] + g.getAirlines() )) class QueryHandler(tornado.web.RequestHandler): """Class which handles a request for flights""" def get(self): loader = tornado.template.Loader("templates/") if (self.get_argument("origCity") == self.get_argument("desCity")): self.write(loader.load("errorResponse.html").generate(msg="The origin and destination city you entered are the same")) else:
def historiaAnimal():
    """Render the animal-history page for a logged-in user.

    Users without a 'nombre' key in the session are shown the login page;
    otherwise the history graph is regenerated before rendering.
    """
    # Guard clause: unauthenticated visitors go straight to login.
    if 'nombre' not in session:
        return render_template('login.html')
    makeGraph()
    return render_template("animalHistory.html")
def historiaAnimal():
    """Regenerate the history graph, then render the animal-history page."""
    makeGraph()
    page = render_template("animalHistory.html")
    return page
import sys


def makeQueries(filename):
    """creates a graph(list of cities) from the flightdata file
    returns a list of cities (each containing a list of flights)

    Each bracketed "[...]" group in the file is one query; its
    parenthesised comma-separated fields become the Query fields
    (the 8th field is converted to int).
    """
    queries = []
    # BUG FIX: the original opened the file without ever closing it;
    # 'with' guarantees the handle is released.
    with open(filename, "r") as f:
        raw = f.read().replace("\n", "").replace("]", "").replace(" ", "").strip()
    for q in raw.split("["):
        if q != "":
            q = q.replace("(", "").replace(")", "")
            parts = q.split(",")
            currQ = flightClasses.Query(
                parts[0], parts[1], parts[2], parts[3],
                parts[4], parts[5], parts[6], int(parts[7])
            )
            queries.append(currQ)
    return queries


# testing file for the project: build the graph and run every query from argv
g = graph.makeGraph(sys.argv[1])
q = makeQueries(sys.argv[2])
for k in q:
    x = algorithm.getFlightSolutions(k, g)
    ret = str(flightClasses.printSolutions(k, x)) + "\n"
    print(ret)