def h_show():
    """Flask view: fetch all rows of the four content tables, run them
    through the dbreader adapter, and render the site template chosen
    from the templates folder.

    Returns the rendered template response.
    """
    # GetData: pull every row of each table.
    data_freutag = freutag.query.all()
    data_spruch = spruchdestages.query.all()
    data_event = event.query.all()
    data_besuch = besuch.query.all()
    a = dbreader.dummy(data_freutag)
    b = dbreader.dummy(data_spruch)
    c = dbreader.dummy(data_event)
    d = dbreader.dummy(data_besuch)
    # (removed leftover debug scaffolding: dbgetlastone_freutag() was only
    # fetched to feed a "DEBUG III" print statement)
    # GetSites: pick the template name from the folder contents.
    FolderContent = Sites.GetFolderContent()
    show = Sites.SiteName(FolderContent)
    return render_template(show, freutag=a, spruch=b, event=c, besuch=d,
                           len_freutag=len(a))
def update_site(site_name, metric, form, original_metric_file, metric_file, metric_reasons_file, metric_log_file, metric_groups, metric_states, metric_adfs_groups): """Function called after form submit in the main page. 1. Removing site from sites' list 2. Removing old reason 3. Adding updated site to sites' list 4. Adding new reason to reasons' list 5. Writintg new event in log """ metric_name = Sites(metric_file, original_metric_file) user = User(adfs_login, metric_name.sites, metric_groups, adfs_groups, metric_adfs_groups, metric) reasons = Reason(metric_reasons_file) if site_name in user.managed_sites: for site in metric_name.sites: if site[1] == site_name and site[2] == form.getvalue( "old-status") and form.getvalue( "new-status") in metric_states: metric_name.sites.remove(site) #color = find_color(metric, form.getvalue('new-status')) #updated_site = [ #datetime.now().strftime("%Y-%m-%d %H:%M:%S"), #site_name, #form.getvalue("new-status"), #color, #form.getvalue("url") #] for site in reasons.sites: if site[0] == site_name: reasons.sites.remove(site) if form.getvalue('new-status') != "no_override": color = find_color(metric, form.getvalue('new-status')) updated_site = [ datetime.now().strftime("%Y-%m-%d %H:%M:%S"), site_name, form.getvalue("new-status"), color, form.getvalue("url") ] metric_name.sites.append(updated_site) reasons.sites.append([ site_name, re.sub('[\s+]', ' ', form.getvalue("reason")) ]) reasons.write_changes(reasons.sites) metric_name.write_changes(metric_name.sites) log = Log(metric_log_file) new_log = [ datetime.now().strftime("%Y-%m-%d %H:%M:%S"), adfs_login, site_name, form.getvalue('new-status'), form.getvalue('reason') ] log.write_changes(new_log) url = "https://cmssst.web.cern.ch/cmssst/man_override/cgi/manualOverride.py/%s" % metric print "Status: 302 Moved" print "Location: %s" % url print
def index(metric, original_metric_file, metric_file, metric_reasons_file, metric_groups, metric_adfs_groups): """Function generates main page. For example - manualOverride.py/lifestatus """ template = env.get_template('%s.html' % metric) metric_name = Sites(metric_file, original_metric_file) user = User(adfs_login, metric_name.sites, metric_groups, adfs_groups, metric_adfs_groups, metric) reasons = Reason(metric_reasons_file) metric_name.sites = sorted(metric_name.sites, key = itemgetter(1)) for site in metric_name.sites: for item in reasons.sites: if site[1] == item[0]: site.append(item[1]) print template.render(sites = metric_name.sites, adfs_login = adfs_login, user_info = user.user_info, metricName = metric)
def __init__(self):
    """Load configuration, keyword, and site definitions from the XML files.

    Exits the process if any of the expected XML trees fails to load.
    """
    self.__conf = Configure()
    self.__associations = Associations()
    self.__sites = Sites()
    # Bug fix: the original assigned a throwaway local ``resultList = []``;
    # the instance attribute is what the rest of the class reads/writes.
    self.resultList = []
    xReader = XMLReader()
    xParser = XMLParser()
    confTree = xReader.getTree('xml/conf.xml')
    if confTree is None:
        exit()
    searchParams = xParser.getSearchParams(confTree)
    searchSites = xParser.getSearchSites(confTree)
    pagesToSearch = xParser.getPagesToSearch(confTree)
    self.masterInspectionPath = xParser.getMIXML(confTree)
    self.__conf.setParams(searchSites, searchParams, pagesToSearch)
    keywordTree = xReader.getTree('xml/keywords.xml')
    fKeywordTree = xReader.getTree('xml/f_keywords.xml')
    if keywordTree is None or fKeywordTree is None:
        exit()
    keywords = xParser.getKeywords(keywordTree)
    fKeywords = xParser.getKeywords(fKeywordTree)
    avoids = xParser.getAvoids(keywordTree)
    fAvoids = xParser.getAvoids(fKeywordTree)
    self.__associations.setParams(keywords, avoids, fKeywords, fAvoids)
    sitesTree = xReader.getTree('xml/sites.xml')
    if sitesTree is None:
        exit()
    goodSites, badSites = xParser.getSites(sitesTree)
    self.__sites.setParams(goodSites, badSites)
def update_site(site_name, metric, form, original_metric_file, metric_file, metric_reasons_file, metric_log_file, metric_groups, metric_states, metric_adfs_groups): """Function called after form submit in the main page. 1. Removing site from sites' list 2. Removing old reason 3. Adding updated site to sites' list 4. Adding new reason to reasons' list 5. Writintg new event in log """ metric_name = Sites(metric_file, original_metric_file) user = User(adfs_login, metric_name.sites, metric_groups, adfs_groups, metric_adfs_groups, metric) reasons = Reason(metric_reasons_file) if site_name in user.managed_sites: for site in metric_name.sites: if site[1] == site_name and site[2] == form.getvalue("old-status") and form.getvalue("new-status") in metric_states: metric_name.sites.remove(site) #color = find_color(metric, form.getvalue('new-status')) #updated_site = [ #datetime.now().strftime("%Y-%m-%d %H:%M:%S"), #site_name, #form.getvalue("new-status"), #color, #form.getvalue("url") #] for site in reasons.sites: if site[0] == site_name: reasons.sites.remove(site) if form.getvalue('new-status') != "no_override": color = find_color(metric, form.getvalue('new-status')) updated_site = [ datetime.now().strftime("%Y-%m-%d %H:%M:%S"), site_name, form.getvalue("new-status"), color, form.getvalue("url") ] metric_name.sites.append(updated_site) reasons.sites.append([site_name, re.sub('[\s+]', ' ', form.getvalue("reason"))]) reasons.write_changes(reasons.sites) metric_name.write_changes(metric_name.sites) log = Log(metric_log_file) new_log = [datetime.now().strftime("%Y-%m-%d %H:%M:%S"), adfs_login, site_name, form.getvalue('new-status'), form.getvalue('reason')] log.write_changes(new_log) url = "https://cmssst.web.cern.ch/cmssst/man_override/cgi/manualOverride.py/%s" %metric print "Status: 302 Moved" print "Location: %s" % url print
def get_sites_data():
    """Return all sites from the database, serialized as a JSON string."""
    return JSONEncoder().encode(Sites.get_sites_from_db())
def run_job():
    """Endless worker loop: fetch remote site data and store it, forever."""
    # NOTE(review): no sleep/backoff between iterations — presumably
    # Sites.get_data_from_site_and_store() blocks on network I/O; confirm,
    # otherwise this loop spins hot.
    while True:
        Sites.get_data_from_site_and_store()
# Set up a square figure with all tick marks and labels hidden
# (lattice snapshots need no axes).
plt.close("all")
fig, ax = plt.subplots(figsize=[5, 5], tight_layout={"pad": 0.1})
ax.tick_params(
    axis='both',
    which='both',
    bottom=False,
    left=False,
    labelbottom=False,
    labelleft=False,
    length=0,
)
# L x L square lattice; bonds are activated in random order.
L = 1000
N = L * L
sites = Sites(L, L)
bonds = makeSquareLattice(L, L)
bonds = shuffleList(bonds)
# Take nSteps snapshots, one every `step` bond activations.
nSteps = 10
step = round(len(bonds) / nSteps)
# assert nSteps*step == len(bonds), "nSteps should be a divisor of len(bonds)"
if nSteps * step != len(bonds):
    print("warning: nSteps not a divisor of len(bonds)")
nn = round(len(bonds) / step)
print("step: " + str(step))
print("nn: " + str(nn))
# Grid of snapshot axes, two rows.
# NOTE(review): this call is truncated in the source I can see — the
# remaining arguments (and the rest of the script) continue elsewhere.
fig2, axes = plt.subplots(2, ceil(nSteps / 2), figsize=[10, 3.5],
from plotter import Plot
from sites import Sites
import arrow


def timestr():
    """Return the current local time formatted 'YY-MM-DD HH:mm:ss'."""
    result = arrow.now().format('YY-MM-DD HH:mm:ss')
    return result


# For every configured site, build its plot and save the graph data,
# logging progress (and any per-site failure) to stdout.
sites = Sites().sites
print('%s - Starting' % timestr())
for site in sites:
    try:
        # end='' so the per-site status line is completed further below.
        print('%s - %s' % (timestr(), site), end='')
        plot = Plot(site=site)
        if plot.data is not None:
            plot.save_graph_data()
    except Exception as e:
        # Best-effort batch: one bad site must not stop the rest.
        print('%s - Exception for site: %s, %s' % (timestr(), site, str(e)))
    # NOTE(review): reconstructed from collapsed source — this appears to
    # run after the try/except for every site, i.e. even on failure;
    # confirm against the original file's indentation.
    print(' - completed')
print('%s - Completed' % timestr())
from sites import Sites

# Data files that are normalized in place: each one is read and immediately
# rewritten through the Sites serializer (source file == destination file).
lifeStatus_file = '/afs/cern.ch/user/c/cmssst/www/man_override/lifestatus/manualLifeStatus.txt'
prodStatus_file = '/afs/cern.ch/user/c/cmssst/www/man_override/prodstatus/manualProdStatus.txt'
crabStatus_file = '/afs/cern.ch/user/c/cmssst/www/man_override/crabstatus/manualCrabStatus.txt'
io_bound_file = '/afs/cern.ch/user/c/cmssst/www/ioBound/io.txt'
real_cores_file = '/afs/cern.ch/user/c/cmssst/www/realCores/real.txt'
prod_cores_file = '/afs/cern.ch/user/c/cmssst/www/others/prod.txt'
cpu_bound_file = '/afs/cern.ch/user/c/cmssst/www/cpuBound/cpu.txt'

# The original repeated the same read-then-write pair seven times; a loop
# performs the identical round trip on each file in the same order.
for _data_file in (lifeStatus_file, prodStatus_file, crabStatus_file,
                   io_bound_file, real_cores_file, prod_cores_file,
                   cpu_bound_file):
    _sites = Sites(_data_file, _data_file)
    _sites.write_changes(_sites.sites)
def main(args):
    """Command-line dispatcher for the ACO/TSP tool.

    args -- sys.argv-style list; args[1] selects the sub-command
    (generateSites, solveModel, solveMultipleModels, plotRoute).
    Each branch parses its positional arguments and falls back to
    helpMessage() on any error.

    NOTE(review): indentation reconstructed from a collapsed source line —
    verify nesting (especially inside solveMultipleModels) against the
    original file.
    """
    if len(args) == 1:
        helpMessage()
    # generateSites: It generates two files: normal data file and GLPK data
    # file with a randomly set of Sites according the range specified by user.
    elif args[1] == 'generateSites':
        try:
            # An optional literal 'GLPK' flag in args[2] shifts every later
            # positional argument by one (via `enable`).
            GLPK = True if args[2] == 'GLPK' else False
            enable = 1 if GLPK == True else 0
            filename = str(args[2 + enable])
            totalSites = int(args[3 + enable])
            longitudeRange = (float(args[4 + enable]), float(args[5 + enable]))
            latitudeRange = (float(args[6 + enable]), float(args[7 + enable]))
            sites = Sites(filename, GLPK, totalSites, longitudeRange, latitudeRange)
            print("Info: {}.dat file created succesfully".format(filename))
        except:
            # NOTE(review): bare except also hides real bugs (typos,
            # KeyboardInterrupt); kept as-is — any failure shows the help.
            helpMessage()
    # solveModel: It solves the TSP problem using Ant Colony Optimization
    # according the ACO parameters gived by user.
    elif args[1] == 'solveModel':
        try:
            filename = str(args[2])
            iterations = int(args[3])
            totalAnts = int(args[4])
            alpha = float(args[5])
            beta = float(args[6])
            rho = float(args[7])
            Q = int(args[8])
            scheme = int(args[9])
            sites = Sites(filename)
            locations = sites.getLocations()
            GLPK = checkGLPK(filename)
            graph = Graph(locations, GLPK)
            aco = ACO(iterations, totalAnts, alpha, beta, rho, Q, scheme)
            startTime = time.time()
            path, cost = aco.solveModel(graph)
            runTime = (time.time() - startTime)
            route = sites.generateRoute(path)
            print('cost: {}, runTime: {}, route: {}'.format(cost, runTime, route))
            plot(locations, path)
        except:
            helpMessage()
    # solveMultipleModels: It solves the TSP problem using multiple colonies
    # varying its parameters to return the best colony parameters.
    elif args[1] == 'solveMultipleModels':
        try:
            filename = str(args[2])
            iterationsPerColony = int(args[3])
            totalAntsAlterations = int(args[4])
            totalAntsRange = (int(args[5]), int(args[6]))
            alphaAlterations = int(args[7])
            alphaRange = (float(args[8]), float(args[9]))
            betaAlterations = int(args[10])
            betaRange = (float(args[11]), float(args[12]))
            rhoAlterations = int(args[13])
            rhoRange = (float(args[14]), float(args[15]))
            QAlterations = int(args[16])
            QRange = (int(args[17]), int(args[18]))
            sites = Sites(filename)
            locations = sites.getLocations()
            GLPK = checkGLPK(filename)
            graph = Graph(locations, GLPK)
            # Each "*Alterations" count is replaced by a list of that many
            # random samples drawn from the corresponding range.
            totalAntsAlterations = randomInteger(totalAntsAlterations, totalAntsRange[0], totalAntsRange[1])
            alphaAlterations = randomFloat(alphaAlterations, alphaRange[0], alphaRange[1])
            betaAlterations = randomFloat(betaAlterations, betaRange[0], betaRange[1])
            rhoAlterations = randomFloat(rhoAlterations, rhoRange[0], rhoRange[1])
            QAlterations = randomInteger(QAlterations, QRange[0], QRange[1])
            schemes = [0, 1, 2]
            # Top-10 leaderboard, best entry at index 0.
            # NOTE(review): [[0]*9]*logSize aliases one inner list, but the
            # entries below are only ever rebound (never mutated in place),
            # so the aliasing is harmless here.
            logSize = 10
            bestCosts = [0] * logSize
            bestColonies = [0] * logSize
            bestParameters = [[0] * 9] * logSize
            print("iterationsPerColony: {}".format(iterationsPerColony))
            colony = 0
            # Full cartesian sweep over all sampled parameter values.
            for totalAntsAlteration in totalAntsAlterations:
                for alphaAlteration in alphaAlterations:
                    for betaAlteration in betaAlterations:
                        for rhoAlteration in rhoAlterations:
                            for QAlteration in QAlterations:
                                for scheme in schemes:
                                    aco = ACO(iterationsPerColony, totalAntsAlteration, alphaAlteration, betaAlteration, rhoAlteration, QAlteration, scheme)
                                    startTime = time.time()
                                    path, cost = aco.solveModel(graph)
                                    runTime = (time.time() - startTime)
                                    # New best (or very first) result: shift
                                    # the leaderboard down, insert at front,
                                    # and persist it.
                                    if cost <= bestCosts[0] or bestCosts[0] == 0:
                                        for i in range(logSize - 1, 0, -1):
                                            bestColonies[i] = bestColonies[i - 1]
                                            bestCosts[i] = bestCosts[i - 1]
                                            bestParameters[i] = bestParameters[i - 1]
                                        bestColonies[0] = colony
                                        bestCosts[0] = cost
                                        bestParameters[0] = [cost, colony, totalAntsAlteration, alphaAlteration, betaAlteration, rhoAlteration, QAlteration, scheme, runTime]
                                        exportResults(filename, iterationsPerColony, bestParameters)
                                    print("\nbestCosts: {}".format(bestCosts))
                                    print("bestColonies: {}".format(bestColonies))
                                    print("cost: {}, colony: {}, totalAnts: {}, alpha: {}, beta: {}, rho: {}, Q: {}, scheme: {}, runTime: {}".format(round(cost, 2), colony, totalAntsAlteration, round(alphaAlteration, 2), round(betaAlteration, 2), round(rhoAlteration, 2), QAlteration, scheme, round(runTime, 3)))
                                    exportLog(filename, iterationsPerColony, cost, colony, totalAntsAlteration, alphaAlteration, betaAlteration, rhoAlteration, QAlteration, scheme, runTime)
                                    colony += 1
        except:
            helpMessage()
    # plotRoute: It show the Sites on the map and generate its path or route.
    elif args[1] == 'plotRoute':
        try:
            filename = str(args[2])
            route = [int(indexSite) for indexSite in args[3:]]
            sites = Sites(filename)
            locations = sites.getLocations()
            path = sites.generatePath(route)
            plot(locations, path)
        except:
            helpMessage()
    else:
        helpMessage()
class BLParent():
    """Coordinates the crawl pipeline: loads XML configuration, runs the
    connection/search sub-processes, and merges the per-run result XMLs
    into a master inspection file."""

    # Class-level defaults; the real values are set in __init__ / methods.
    __conf = None
    __associations = None
    __sites = None
    resultList = None
    masterInspectionPath = None

    def __init__(self):
        """Load configuration, keyword, and site definitions from the XML
        files; exits the process if any expected tree fails to load."""
        self.__conf = Configure()
        self.__associations = Associations()
        self.__sites = Sites()
        # Bug fix: the original assigned a throwaway local
        # ``resultList = []``; the instance attribute is what
        # startSubProcesses()/callers actually use.
        self.resultList = []
        xReader = XMLReader()
        xParser = XMLParser()
        confTree = xReader.getTree('xml/conf.xml')
        if confTree is None:
            exit()
        searchParams = xParser.getSearchParams(confTree)
        searchSites = xParser.getSearchSites(confTree)
        pagesToSearch = xParser.getPagesToSearch(confTree)
        self.masterInspectionPath = xParser.getMIXML(confTree)
        self.__conf.setParams(searchSites, searchParams, pagesToSearch)
        keywordTree = xReader.getTree('xml/keywords.xml')
        fKeywordTree = xReader.getTree('xml/f_keywords.xml')
        if keywordTree is None or fKeywordTree is None:
            exit()
        keywords = xParser.getKeywords(keywordTree)
        fKeywords = xParser.getKeywords(fKeywordTree)
        avoids = xParser.getAvoids(keywordTree)
        fAvoids = xParser.getAvoids(fKeywordTree)
        self.__associations.setParams(keywords, avoids, fKeywords, fAvoids)
        sitesTree = xReader.getTree('xml/sites.xml')
        if sitesTree is None:
            exit()
        goodSites, badSites = xParser.getSites(sitesTree)
        self.__sites.setParams(goodSites, badSites)

    def startSubProcesses(self):
        """Run the crawl thread and store the parsed results on the
        instance (self.resultList)."""
        CM = ConnectionManager()
        lt = ListTool()
        # (dropped a redundant ``sitesList = []`` that was immediately
        # rebound by the next line)
        sitesList = lt.addOnlyUniqueFromList(self.__sites.goodSites,
                                             self.__sites.badSites)
        CM.initializeConnection(
            self.__associations.keywordsList,
            self.__associations.avoidsList,
            sitesList,
            self.__conf.siteToSearchList,
            self.__conf.pagesToSearch,
            self.__conf.searchParamsList)
        CM.startThread()
        CM.join()
        CM.parseResults()
        self.resultList = CM.getResults()

    def createMasterInspectionXML(self, delChildXMLs=False):
        """Merge the per-run result XMLs under results/ into the master
        inspection file.

        delChildXMLs -- when True, delete the per-run files after merging.
        Exits the process if no result files could be read.
        """
        lt = ListTool()
        # Renamed local from ``os`` to avoid shadowing the os module.
        os_tool = OSTool()
        sort = Sorter()
        insp = Inspector()
        xmls = os_tool.getFilesInDir('results/')
        # Exclude the master file itself from the inputs.
        xmls = lt.popByWord(xmls, self.masterInspectionPath)
        XMLInspections = insp.getInspections(xmls)
        if len(XMLInspections) == 0:
            print('No files read.')
            exit()
        XMLInspections = sort.sortInspectionList(XMLInspections)
        xWriter = XMLWriter()
        xWriter.writeMIXML(XMLInspections, self.masterInspectionPath)
        if delChildXMLs:
            for xml in xmls:
                os_tool.deleteFile(xml)

    def startServerProg(self):
        """Open the tracker UI in Chrome via the local web server."""
        os_tool = OSTool()
        os_tool.startProgram('google-chrome', 'localhost:80/tracker/')
from latticemaker import makeSquareLattice, shuffleList
import numpy as np

# Bond-percolation sweep on L x L square lattices: activate bonds one at a
# time in random order and record cluster observables, averaged over `its`
# independent realizations.
for L in [100, 200]:
    N = L*L
    nBonds = 2*N  # square lattice: two bonds per site
    its = 100  # number of iterations
    P = np.zeros([nBonds, its])   # giant-component size per bond count
    P2 = np.zeros([nBonds, its])  # P_inf**2
    size = np.zeros([nBonds, its])
    s = np.zeros([nBonds, its])   # average squared cluster size
    p = np.zeros(nBonds)          # fraction of bonds activated
    for j in range(its):
        sites = Sites(L, L)
        bonds = makeSquareLattice(L, L)
        bonds = shuffleList(bonds)
        for i in range(nBonds):
            sites.activate([bonds[i]])
            P[i, j] = sites.giantComponent
            P2[i, j] = pow(sites.giantComponent/N, 2)
            s[i, j] = sites.averageSquaredSize
            size[i, j] = sites.sizeOfLargestCluster
            p[i] = i/nBonds
            # p[i] = (N - np.sum(sites.sites == -1))/N
    # Average over realizations.
    # NOTE(review): reconstructed from collapsed source — these means sit
    # inside the `for L` loop here (results per lattice size), presumably
    # followed by per-L plotting beyond this excerpt; confirm placement.
    P = np.mean(P, axis=1)
    P2 = np.mean(P2, axis=1)
    s = np.mean(s, axis=1)