def scheduleSingleSet(self, jobSet, limits):
    """Schedule every job in jobSet inside the global window `limits`.

    Jobs are processed in ascending canStart order.  For each job, the
    candidate windows are iteratively narrowed against every job in the
    set; if a feasible window survives, the job is scheduled at the
    window's start for its full duration.
    """
    jobSet.sort(key=lambda x: x.canStart)
    for job in jobSet:
        # start from the job's own window clipped to the global limits
        newLimits = [(max(limits[0], job.canStart), min(limits[1], job.canEnd))]
        limitForJob = None
        for subjob in jobSet:
            updatedLimits = []
            for limit in newLimits:
                # probe job representing the current candidate window
                mockJob = Job()
                mockJob.canStart = limit[0]
                mockJob.canEnd = limit[1]
                overlap = checkOverlap(mockJob, subjob)
                updatedLimit = getNewLimits(subjob, overlap, limit)
                # getNewLimits may split one window into several sub-windows
                if isinstance(updatedLimit[0], list):
                    updatedLimits.extend(updatedLimit)
                else:
                    updatedLimits.append(updatedLimit)
            newLimits = updatedLimits
            # fix: evaluate fitLimits once per pass and reuse the result;
            # the original called it a second time after the loop.
            # (assumes fitLimits only inspects the windows — TODO confirm)
            limitForJob = self.fitLimits(job, newLimits)
            if not limitForJob:
                break
        if limitForJob:
            self.scheduleSingleJob(job, limitForJob[0], limitForJob[0] + job.duration)
def LocalOptimisationStrategy(self, sol, bestFit):
    """Pairwise-swap local search over the schedule `sol`.

    Tries every swap of jobs i < j (by job id); when CanSwap allows it,
    re-runs OptSSGS from the earlier of the two positions and keeps the
    swap's result if it improves fitness.  The swap in bestSol itself is
    always undone so each trial starts from the same ordering.

    Returns (bestSol, bestFitness).
    Fixes: removed unused local `bestFound`; `== None` -> `is None`.
    """
    limit = len(sol) - 3
    rng = range(1, limit)
    bestFitness = bestFit
    newFitness = 0
    actSol = sol
    bestSol = deepcopy(sol)
    for i in rng:
        for j in range(i + 1, limit):
            # locate the two jobs by id (Job equality presumably keys on id
            # — TODO confirm against Job.__eq__)
            indexI = bestSol.index(Job(i, 0, [], [], [], 0))
            indexJ = bestSol.index(Job(j, 0, [], [], [], 0))
            if self.CanSwap(bestSol, indexI, indexJ):
                bestSol[indexI], bestSol[indexJ] = bestSol[indexJ], bestSol[indexI]
            else:
                continue
            super(SerialScheduleLocalOpt, self).ResetJobs()
            newFitness, actSol = self.OptSSGS(bestSol, min(indexI, indexJ))
            # undo the trial swap before evaluating the outcome
            bestSol[indexJ], bestSol[indexI] = bestSol[indexI], bestSol[indexJ]
            if newFitness is None:
                continue
            if newFitness < bestFitness:
                bestSol = deepcopy(actSol)
                bestFitness = newFitness
    return (bestSol, bestFitness)
def execute_maint_script(self, script_name): ''' execute a user-defined function script_name -- name of the function to run ''' Package.execute_maint_script(self) script_path = "%s/%s.py" % (self.maint_dir, script_name) start_dir = get_slash_cwd() os.chdir(self.maint_dir) if not os.path.isfile(script_path): msg = "%s does not exist" % script_path raise BadPackage, (self.name, msg) sys.path.append(self.maint_dir ) import_string = 'import %s' % script_name cmd = 'status = %s.execute(self.config, Logger)' % script_name job = Job(import_string, cmd, self.config) status = job.execute() sys.path.remove(self.maint_dir) os.chdir(start_dir) if status == None: status = OK if type(status) != type(1): msg = "Invalid status type (%s: '%s')" Logger.warning(msg % (type(status), status)) status = FAIL return status
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=0,
             orderDate=0, extraPropertyDict=None, order=None,
             currentStation=None, isCritical=False, **kw):
    """Order-component job tied to a parent order.

    Fix: `route` defaulted to a shared mutable list; it now defaults to
    None and a fresh list is created per call (backward compatible).
    """
    if route is None:
        route = []
    Job.__init__(self, id, name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate,
                 extraPropertyDict=extraPropertyDict, isCritical=isCritical,
                 currentStation=currentStation)
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=None,
             orderDate=None, extraPropertyDict=None, order=None,
             requestingComponent=None, isCritical=False, **kw):
    """Order-component job that may act as an auxiliary component.

    Fix: `route` defaulted to a shared mutable list; it now defaults to
    None and a fresh list is created per call (backward compatible).
    """
    if route is None:
        route = []
    Job.__init__(self, id, name, route, priority, dueDate, orderDate,
                 extraPropertyDict, isCritical)
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    # if the componentType of the component is Auxiliary then there need a requesting Component be defined
    # the requestingComponent is the component that needs the auxiliary component during its processing
    # the auxiliary component should then be added to the requestingComponent's auxiliaryList
    self.requestingComponent = requestingComponent  # the id of the requesting component
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
def GET(self, name=None, version=None):
    """Render the job tab for a package, optionally pinned to one version."""
    if version:
        title = "%s %s" % (name, version)
        package = "%s/%s" % (name, version)
        pkg_ids = Select(Package.q.id,
                         AND(Package.q.name == name,
                             Package.q.version == version))
    else:
        title = package = name
        pkg_ids = Select(Package.q.id, Package.q.name == name)
    query = Job.select(IN(Job.q.package, pkg_ids))
    result, page, nb_pages = _paginate_query(query.orderBy(DESC(Job.q.package)))
    jobs = list(result)
    return render.base(page=render.tab(jobs=jobs, page=page, nb_pages=nb_pages),
                       hostname=socket.gethostname(),
                       title=title,
                       package=package,
                       archs=RebuilddConfig().arch,
                       dists=RebuilddConfig().get('build', 'dists').split(' '))
def main(self):
    # Dispatch loop: pull granted run requests off the redis "Granted_Queue"
    # and launch each one as a local or remote job.
    print "JobStarter starting up"
    while True:
        # blpop blocks until a request is available; [1] is the payload
        # (a pickled parameter dict)
        rawData = self.r_server.blpop("Granted_Queue")[1]
        self.runParamDict = pickle.loads(rawData)
        # write the paramFile back to disk (why?)
        #YAML.write(self.runParamDict, self.runParamDict['Filename'])
        try:
            # setup correct job class and start up job
            if self.runParamDict['ClassType'] == 'Local':
                print "Starting local job thread for '%s'" % self.runParamDict['Filename']
                j = Job(configDict=self.runParamDict)
            else:
                print "Starting remote job for '%s'" % self.runParamDict['Filename']
                j = RemoteJob(configDict=self.runParamDict)
            j.start()
            print "Job Started."
            # schedule the first poll PollPeriod minutes from now; the raw
            # pickled payload is the sorted-set member, pollTime the score
            pollTime = time.time() + self.runParamDict['PollPeriod'] * 60
            self.r_server.zadd("Running_Queue", rawData, pollTime)
        except:
            # NOTE(review): bare except swallows every startup failure and
            # pushes the raw request straight to the result queue
            print "The following job failed:'%s'" % self.runParamDict['Filename']
            print time.asctime(), traceback.print_exc()
            print
            self.r_server.rpush("Result_Queue", rawData)
        # since this blocks, clear after queuing up
        self.runParamDict = None
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=None,
             orderDate=None, extraPropertyDict=None, componentType='Basic',
             order=None, requestingComponent=None, readyForAssembly=0,
             isCritical=False):
    """Order component (Basic/Secondary/Auxiliary) tied to a parent order.

    Fix: `route` defaulted to a shared mutable list; it now defaults to
    None and a fresh list is created per call (backward compatible).
    """
    if route is None:
        route = []
    Job.__init__(self, id, name, route, priority, dueDate, orderDate,
                 extraPropertyDict, isCritical)
    self.auxiliaryList = []  # Holds the auxiliary components that the component needs for a certain processing
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    # TODO: isCritical argument is deprecated
    # self.isCritical=isCritical    # this should be self.order.isCritical. Added now for testing
    self.componentType = componentType  # the type of the component which can be Basic/Secondary/Auxiliary
    # if the componentType of the component is Auxiliary then there need a requesting Component be defined
    # the requestingComponent is the component that needs the auxiliary component during its processing
    # the auxiliary component should then be added to the requestingComponent's auxiliaryList
    self.requestingComponent = requestingComponent  # the id of the requesting component
    self.readyForAssembly = readyForAssembly  # flag informing weather the component was received
                                              # by the MouldAssembleBuffer
def list_jobs(self, limit=30, username=None, application=None, state=None,
              created=None, started=None, ended=None):
    """POST a job-list query to the server and return the results as Jobs."""
    url = "{0}/api/jobs/list/".format(self._serverUrl)
    data = {'username': username, 'application': application, 'state': state}
    # date-range filters are sent as ';'-joined strings, only when provided
    for key, value in (('created', created), ('started', started), ('ended', ended)):
        if value:
            data[key] = ';'.join(value)
    response = self.post(url, data=data)
    jobs = []
    for values in response.json():
        job = Job(self)
        job.load(values)
        jobs.append(job)
    return jobs
def main(self):
    # Poll loop: every minute, take all entries of the redis sorted set
    # "Running_Queue" whose poll time has come, check whether each job has
    # finished, and either push it to "Result_Queue" or reschedule a poll.
    print "ResultFetcher starting up"
    while True:
        t = time.time()
        # all jobs whose next-poll score is due (score <= now)
        data = self.r_server.zrangebyscore("Running_Queue", 0, t)
        for d in data:
            self.runParamDict = pickle.loads(d)
            if self.runParamDict['ClassType'] == 'Local':
                j = Job(configDict=self.runParamDict)
                jobStr = 'local job'
            else:
                j = RemoteJob(configDict=self.runParamDict)
                jobStr = 'remote job'
            print "Polling %s for '%s'" % (jobStr, self.runParamDict['Filename'])
            retCode = j.isFinished()
            if retCode > 0:
                if retCode == 1:  # case: no except
                    print "Job complete for '%s'" % self.runParamDict['Filename']
                    try:
                        j.getReturnData()
                    except AttributeError:
                        # pass if j without getReturnData() e.g. local
                        pass
                else:
                    # any retCode > 1 is treated as a job-side exception
                    print "Exception for '%s'!!!" % self.runParamDict['Filename']
                # finished either way: hand off to the result queue
                self.r_server.rpush("Result_Queue", pickle.dumps(self.runParamDict))
            else:
                # not finished: schedule the next poll PollPeriod minutes out
                pollTime = time.time() + self.runParamDict['PollPeriod'] * 60
                self.r_server.zadd("Running_Queue", pickle.dumps(self.runParamDict), pollTime)
            self.runParamDict = None
        # drop everything we just processed from the running set
        self.r_server.zremrangebyscore("Running_Queue", 0, t)
        time.sleep(60)  # hard wait (since no blocking method on ordered sets)
def get_jobs(self, name, version=None, dist=None, arch=None):
    """Dump a job status"""
    if version:
        pkgs = Package.selectBy(name=name, version=version)
    else:
        pkgs = Package.selectBy(name=name)
    if not pkgs.count():
        return []
    # build the optional selection criteria once instead of one branch
    # per dist/arch combination
    criteria = {}
    if dist:
        criteria['dist'] = dist
    if arch:
        criteria['arch'] = arch
    retjobs = []
    for pkg in pkgs:
        retjobs.extend(Job.selectBy(package=pkg, **criteria))
    return retjobs
def add_job(self, name, version, priority, dist, mailto=None, arch=None):
    """Add a job

    Creates (or reuses) the Package row for name/version and adds a WAIT
    job for the given dist/arch.  Returns False if the dist/arch is not
    configured or an identical WAIT job already exists, True otherwise.
    """
    # default to the first configured architecture
    if not arch:
        arch = self.cfg.arch[0]
    if not Dists().get_dist(dist, arch):
        RebuilddLog.error("Couldn't find dist/arch in the config file for %s_%s on %s/%s, don't adding it" \
                          % (name, version, dist, arch))
        return False
    pkgs = Package.selectBy(name=name, version=version)
    if pkgs.count():
        # If several packages exists, just take the first
        pkg = pkgs[0]
    else:
        # Maybe we found no packages, so create a brand new one!
        pkg = Package(name=name, version=version, priority=priority)
    # duplicate check includes mailto: the same build for a different
    # recipient is still considered a new job
    jobs_count = Job.selectBy(package=pkg, dist=dist, arch=arch,
                              mailto=mailto, status=JobStatus.WAIT).count()
    if jobs_count:
        RebuilddLog.error("Job already existing for %s_%s on %s/%s, don't adding it" \
                          % (pkg.name, pkg.version, dist, arch))
        return False
    job = Job(package=pkg, dist=dist, arch=arch)
    job.status = JobStatus.WAIT
    job.arch = arch
    job.mailto = mailto
    # NOTE(review): `log` is never read; presumably the Log() constructor
    # creates the database row as a side effect — confirm before removing
    log = Log(job=job)
    RebuilddLog.info("Added job for %s_%s on %s/%s for %s" \
                     % (name, version, dist, arch, mailto))
    return True
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=0,
             orderDate=0, extraPropertyDict=None, order=None,
             remainingProcessingTime=None, remainingSetupTime=None,
             currentStation=None, isCritical=False, **kw):
    """Order-component job carrying remaining processing/setup state.

    Fix: `route`, `remainingProcessingTime` and `remainingSetupTime`
    defaulted to shared mutable objects; they now default to None and
    fresh containers are created per call (backward compatible).
    """
    if route is None:
        route = []
    if remainingProcessingTime is None:
        remainingProcessingTime = {}
    if remainingSetupTime is None:
        remainingSetupTime = {}
    Job.__init__(self, id, name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate,
                 extraPropertyDict=extraPropertyDict, isCritical=isCritical,
                 remainingProcessingTime=remainingProcessingTime,
                 remainingSetupTime=remainingSetupTime,
                 currentStation=currentStation)
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
def GET(self, name=None, version=None):
    """Render the job tab for a package (all versions, or a single one)."""
    jobs = []
    if version:
        title = "%s %s" % (name, version)
        package = "%s/%s" % (name, version)
        condition = AND(Package.q.name == name, Package.q.version == version)
    else:
        title = package = name
        condition = Package.q.name == name
    query = Job.select(IN(Job.q.package, Select(Package.q.id, condition)))
    result, page, nb_pages = _paginate_query(query.orderBy(DESC(Job.q.package)))
    jobs.extend(result)
    return render.base(page=render.tab(jobs=jobs, page=page, nb_pages=nb_pages),
                       hostname=socket.gethostname(),
                       title=title,
                       package=package,
                       archs=RebuilddConfig().arch,
                       dists=RebuilddConfig().get('build', 'dists').split(' '))
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=0,
             orderDate=0, extraPropertyDict=None, order=None,
             currentStation=None, isCritical=False, **kw):
    """Order-component job routed by OperatorRouter (see `hot` flag).

    Fix: `route` defaulted to a shared mutable list; it now defaults to
    None and a fresh list is created per call (backward compatible).
    """
    if route is None:
        route = []
    Job.__init__(self, id, name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate,
                 extraPropertyDict=extraPropertyDict, isCritical=isCritical,
                 currentStation=currentStation)
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    # variable to be used by OperatorRouter
    self.hot = False
    # TODO: isCritical argument is deprecated
    # self.isCritical=isCritical    # this should be self.order.isCritical. Added now for testing
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=0,
             orderDate=0, extraPropertyDict=None, order=None,
             remainingProcessingTime=None, remainingSetupTime=None,
             currentStation=None, requestingComponent=None,
             isCritical=False, **kw):
    """OrderDesign component: registers itself in the global design/WIP
    lists and guarantees its route ends in an OrderDecomposition station.

    Fix: `route`, `remainingProcessingTime` and `remainingSetupTime`
    defaulted to shared mutable objects; they now default to None and
    fresh containers are created per call (backward compatible).
    """
    if route is None:
        route = []
    if remainingProcessingTime is None:
        remainingProcessingTime = {}
    if remainingSetupTime is None:
        remainingSetupTime = {}
    Job.__init__(self, id=id, name=name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate,
                 extraPropertyDict=extraPropertyDict, isCritical=isCritical,
                 remainingProcessingTime=remainingProcessingTime,
                 remainingSetupTime=remainingSetupTime,
                 currentStation=currentStation)
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    #=======================================================================
    # if the componentType of the component is Auxiliary then there need a requesting Component be defined
    # the requestingComponent is the component that needs the auxiliary component during its processing
    # the auxiliary component should then be added to the requestingComponent's auxiliaryList
    self.requestingComponent = requestingComponent  # the id of the requesting component
    #=======================================================================
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
    route = [x for x in self.route]  # copy self.route
    # Below it is to assign an order decomposition if it was not assigned in JSON
    # have to talk about it with NEX
    odAssigned = False
    for element in route:
        elementIds = element.get('stationIdsList', [])
        for obj in G.ObjList:
            for elementId in elementIds:
                if obj.id == elementId and obj.type == 'OrderDecomposition':
                    odAssigned = True
    if not odAssigned:
        # append a zero-time step at the first OrderDecomposition found
        odId = None
        for obj in G.ObjList:
            if obj.type == 'OrderDecomposition':
                odId = obj.id
                break
        if odId:
            route.append({'stationIdsList': [odId],
                          'processingTime':
                              {'distributionType': 'Fixed',
                               'mean': '0'}})
    self.route = route
    # add the OrderDesign to the DesignList and the OrderComponentList
    G.OrderComponentList.append(self)
    G.DesignList.append(self)
    G.WipList.append(self)
def add_job(self, cpu_steps, mem_per_step, net_sched, usb_sched):
    # Allocate a new pid under the global pid mutex, create the job,
    # register it with the cpu/mem bookkeeping, then start it.
    with PyOS.pid_mutex:
        PyOS.pid += 1
        job = Job(PyOS.pid, cpu_steps, mem_per_step, net_sched, usb_sched, self)
        self.cpu.add_job()
        self.mem.add_job()
        self.jobs.append(job)
        # started while still holding the mutex, so pid allocation and
        # job startup are atomic with respect to other add_job calls
        job.start()
def addJob(job_list):
    """Prompt for a job's name and length, then insert the new Job into
    job_list at the slot found by binary_search."""
    signature = {
        'name': input("Enter Job Name: "),
        'job_length': input("Enter Job Length: "),
    }
    new_job = Job(signature)
    slot = binary_search(job_list, new_job, 0, len(job_list) - 1)
    new_job.setPriority(slot)
    job_list.insert(slot, new_job)
def get_new_jobs(self):
    """Feed jobs list with waiting jobs and lock them

    Pulls WAIT jobs for every configured dist/arch, optionally redirects
    each to the most recent version of its package, locks the chosen
    jobs (WAIT_LOCKED + hostname) and appends them to self.jobs.
    Returns the number of jobs added.
    """
    max_new = self.cfg.getint('build', 'max_jobs')
    count_current = len(self.jobs)
    with self.jobs_locker:
        if count_current >= max_new:
            return 0
        jobs = []
        for dist in Dists().dists:
            jobs.extend(Job.selectBy(status=JobStatus.WAIT,
                                     dist=dist.name,
                                     arch=dist.arch)[:max_new])
        count_new = 0
        for job in jobs:
            # Look for higher versions ?
            if self.cfg.getboolean('build', 'build_more_recent'):
                packages = Package.selectBy(name=job.package.name)
                # sort all versions of this package, newest first
                candidate_packages = []
                candidate_packages.extend(packages)
                candidate_packages.sort(cmp=Package.version_compare)
                candidate_packages.reverse()
                newjob = None
                # so there are packages with higher version number
                # try to see if there's a job for us
                for cpackage in candidate_packages:
                    candidate_jobs = []
                    candidate_jobs.extend(Job.selectBy(package=cpackage,
                                                       dist=job.dist,
                                                       arch=job.arch))
                    for cjob in candidate_jobs:
                        if newjob and newjob != cjob and cjob.status == JobStatus.WAIT:
                            # a newer job was already chosen: give up on this one
                            cjob.status = JobStatus.GIVEUP
                        elif cjob.status == JobStatus.WAIT:
                            newjob = cjob
                job = newjob
            # We have to check because it might have changed
            # between our first select and the build_more_recent stuffs
            if not job or job.status != JobStatus.WAIT:
                continue
            # Check dependencies
            if not job.is_allowed_to_build():
                continue
            # lock the job for this host
            job.status = JobStatus.WAIT_LOCKED
            job.host = socket.gethostname()
            self.jobs.append(job)
            count_new += 1
            count_current += 1
            if count_current >= max_new:
                break
        return count_new
def __init__(self, name, age, location, jobTitle):
    # Initialise both bases explicitly: Person with a fixed Italian
    # mother-tongue Language, Job with a fixed "VDU operator" role in
    # the Milan Area; only name/age/location/jobTitle vary per instance.
    Person.__init__(
        self, name, age, location,
        Language("human-human", "italian", "mother tongue",
                 ["communicative", "informative", "educational"]))
    Job.__init__(self, "VDU operator", jobTitle, "Milan Area",
                 ["money", "family well-being", "personal fulfillment"])
    # hard-coded flag; presumably a sample/demo value — confirm
    self.hasChildren = True
def read_from_file(path):
    """
    Read an instance from file.

    Keyword arguments:
    path: full path name of a instance file.

    Returns a 6-tuple:
    j1: number of inbound jobs
    j2: number of outbound jobs
    m1: number of machines in first stage
    m2: number of machines in second stage
    jobs_1: list of first stage jobs
    jobs_2: list of second stage jobs
    """
    with open(path) as f:
        lines = [line.rstrip('\n') for line in f]
    j1 = int(lines[0])  # number of inbound jobs
    j2 = int(lines[1])  # number of outbound jobs
    m1 = int(lines[2])  # number of first stage machines
    m2 = int(lines[3])  # number of second stage machines
    # processing times for each stage
    p1 = [int(p) for p in lines[5:j1 + 5]]
    p2 = [int(p) for p in lines[j1 + 6:j2 + j1 + 6]]
    # Precedent/Sucessor matrix: row i, column j == '1' means inbound job j
    # precedes outbound job i; both mappings are filled in one pass
    precedents_matrix = [l.split() for l in lines[j2 + j1 + 7:]]
    predecessors = [[] for _ in range(j2)]
    successors = [[] for _ in range(j1)]
    for i in range(j2):
        for j in range(j1):
            if precedents_matrix[i][j] == '1':
                predecessors[i].append(j)
                successors[j].append(i)
    jobs_1 = [Job(i, p, successors=successors[i]) for i, p in enumerate(p1)]
    jobs_2 = [Job(i, p, predecessors=predecessors[i]) for i, p in enumerate(p2)]
    return j1, j2, m1, m2, jobs_1, jobs_2
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=0,
             orderDate=0, extraPropertyDict=None, componentType='Basic',
             order=None, requestingComponent=None, readyForAssembly=0,
             isCritical=False, remainingProcessingTime=None,
             remainingSetupTime=None, currentStation=None, **kw):
    """Order component that registers itself in the global component/WIP lists.

    Fix: `route`, `remainingProcessingTime` and `remainingSetupTime`
    defaulted to shared mutable objects; they now default to None and
    fresh containers are created per call (backward compatible).
    """
    if route is None:
        route = []
    if remainingProcessingTime is None:
        remainingProcessingTime = {}
    if remainingSetupTime is None:
        remainingSetupTime = {}
    Job.__init__(self, id, name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate,
                 extraPropertyDict=extraPropertyDict, isCritical=isCritical,
                 remainingProcessingTime=remainingProcessingTime,
                 remainingSetupTime=remainingSetupTime,
                 currentStation=currentStation)
    #=======================================================================
    self.auxiliaryList = []  # Holds the auxiliary components that the component needs for a certain processing
    #=======================================================================
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    self.componentType = componentType  # the type of the component which can be Basic/Secondary/Auxiliary
    #=======================================================================
    # if the componentType of the component is Auxiliary then there need a requesting Component be defined
    # the requestingComponent is the component that needs the auxiliary component during its processing
    # the auxiliary component should then be added to the requestingComponent's auxiliaryList
    self.requestingComponent = requestingComponent  # the id of the requesting component
    #=======================================================================
    self.readyForAssembly = readyForAssembly  # flag informing weather the component was received
                                              # by the MouldAssembleBuffer
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
    G.OrderComponentList.append(self)
    G.WipList.append(self)
def jobList():
    """Render the job list page, filtered by the 'category_var' query arg."""
    if request.method == "GET":
        category = request.args.get('category_var', None)
        if category == 'all':
            jobs = Job.getAllJobs()
        else:
            jobs = Job.getListJobWithCategory(category=category)
        return render_template('job_list.html', jobs=jobs)
def generate_job_request(self):
    """Create a fresh Job wrapped in a JobRequest and return the request."""
    new_job = Job()  # create the job
    new_request = JobRequest(job=new_job)
    # the job keeps the request id so a postponed request can be identified later
    new_job.set_request_id(new_request.get_id())
    print("request: start new job")
    self.job_count -= 1
    return new_request
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=0,
             orderDate=0, extraPropertyDict=None, order=None,
             requestingComponent=None, isCritical=False, **kw):
    """OrderDesign component: registers itself in the global design/WIP
    lists and guarantees its route ends in an OrderDecomposition station.

    Fix: `route` defaulted to a shared mutable list; it now defaults to
    None and a fresh list is created per call (backward compatible).
    """
    if route is None:
        route = []
    Job.__init__(self, id=id, name=name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate,
                 extraPropertyDict=extraPropertyDict, isCritical=isCritical)
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP)
    # have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    #=======================================================================
    # if the componentType of the component is Auxiliary then there need a requesting Component be defined
    # the requestingComponent is the component that needs the auxiliary component during its processing
    # the auxiliary component should then be added to the requestingComponent's auxiliaryList
    self.requestingComponent = requestingComponent  # the id of the requesting component
    #=======================================================================
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
    route = [x for x in self.route]  # copy self.route
    # Below it is to assign an order decomposition if it was not assigned in JSON
    # have to talk about it with NEX
    odAssigned = False
    for element in route:
        elementIds = element.get('stationIdsList', [])
        for obj in G.ObjList:
            for elementId in elementIds:
                if obj.id == elementId and obj.type == 'OrderDecomposition':
                    odAssigned = True
    if not odAssigned:
        # append a zero-time step at the first OrderDecomposition found
        odId = None
        for obj in G.ObjList:
            if obj.type == 'OrderDecomposition':
                odId = obj.id
                break
        if odId:
            route.append({'stationIdsList': [odId],
                          'processingTime':
                              {'distributionType': 'Fixed',
                               'mean': '0'}})
    self.route = route
    # add the OrderDesign to the DesignList and the OrderComponentList
    G.OrderComponentList.append(self)
    G.DesignList.append(self)
    G.WipList.append(self)
def isOverlapping(job, overlapping):
    """Return the index of the first group in `overlapping` whose combined
    [canStart, canEnd] window overlaps `job`, or -1 if none does."""
    for index, group in enumerate(overlapping):
        # probe job spanning the whole group's window
        probe = Job()
        probe.canStart = min(j.canStart for j in group)
        probe.canEnd = max(j.canEnd for j in group)
        if -1 <= checkOverlap(probe, job) <= 1:
            return index
    return -1
def __init__(self, orderID, MAid, SPid, PPOSid, qty, minQty, origWeek, future):
    # An MA "item" job: initialise the Job base with the MA id and record
    # the order/planning attributes as-is.
    Job.__init__(self, id=MAid)
    self.type = 'item'
    self.orderID = orderID
    self.MAid = MAid
    self.SPid = SPid
    self.PPOSid = PPOSid
    self.qty = qty
    self.minQty = minQty
    self.originalWeek = origWeek
    self.future = future  # if 1 suggests that the MA belongs to the future demand (0 for the PPOS to be disaggregated)
    # the planned week starts at the original week and may be moved later
    self.weekPlan = self.originalWeek
def requeue_job(self, job_id):
    """Requeue a failed job.

    Returns False when no job matches job_id; otherwise resets a failed
    job back to WAIT (clearing its host) and returns True.
    Fix: run the selectBy query once instead of twice.
    """
    matches = Job.selectBy(id=job_id)
    if matches.count() == 0:
        RebuilddLog.error("There is no job related to %s that is in the job list" % job_id)
        return False
    job = matches[0]
    if job.status in FailedStatus:
        job.status = JobStatus.WAIT
        job.host = ""
    return True
def __init__(self, orderID, MAid, SPid, PPOSid, qty, minQty, origWeek, future):
    # An MA "item" job: initialise the Job base with the MA id and record
    # the order/planning attributes as-is.
    Job.__init__(self, id=MAid)
    self.type = 'item'
    self.orderID = orderID
    self.MAid = MAid
    self.SPid = SPid
    self.PPOSid = PPOSid
    self.qty = qty
    self.minQty = minQty
    self.originalWeek = origWeek
    self.future = future  # if 1 suggests that the MA belongs to the future demand (0 for the PPOS to be disaggregated)
    # the planned week starts at the original week and may be moved later
    self.weekPlan = self.originalWeek
def GET_buildstats(self, distarch=None):
    """Serve a build-status graph, either global or for one dist/arch."""
    graph = self.graph_init()
    if distarch == "/":
        jobs = Job.selectBy()
        graph.title = "Build status"
    else:
        # distarch looks like "/<dist>/<arch>": split at the last slash
        slash = distarch.rindex("/")
        dist, arch = distarch[1:slash], distarch[slash + 1:]
        jobs = Job.selectBy(arch=arch, dist=dist)
        graph.title = "Build status for %s" % distarch[1:]
    graph.setData(self.compute_stats(jobs))
    outfile = tempfile.TemporaryFile()
    graph.draw(outfile)
    outfile.seek(0)
    return outfile.read()
def requeue_job(self, job_id):
    """Requeue a failed job"""
    if Job.selectBy(id=job_id).count() == 0:
        RebuilddLog.error(
            "There is no job related to %s that is in the job list" % job_id)
        return False
    job = Job.selectBy(id=job_id)[0]
    if job.status not in FailedStatus:
        # nothing to reset, but the job exists
        return True
    job.status = JobStatus.WAIT
    job.host = ""
    return True
def limitJobSet(self, jobs):
    """Return (start, end): the joint earliest start / latest end of `jobs`,
    tightened against every already-scheduled job that overlaps the
    original combined window."""
    windowStart = min(j.canStart for j in jobs)
    windowEnd = max(j.canEnd for j in jobs)
    # probe spans the initial combined window; it is intentionally not
    # updated while the bounds are tightened
    probe = Job()
    probe.canStart = windowStart
    probe.canEnd = windowEnd
    for scheduled in self.scheduled:
        overlap = checkOverlap(probe, scheduled)
        if overlap == -1:
            windowStart = scheduled.finished
        elif overlap == 1:
            windowEnd = scheduled.begin
    return windowStart, windowEnd
def loadFromDisk(file_name='jobs.json'):
    """convert a json of jobs to a job array

    Loads the job signatures from `file_name`, rebuilds Job objects with
    their saved priorities and routines, sorts them and refreshes their
    priorities.  Returns the sorted job list.
    Fix: removed the dead pre-assignment of signature_container.
    """
    with open(file_name) as dump_file:
        signature_container = json.load(dump_file)
    job_list = []
    for signature in signature_container:
        job = Job(signature)
        job.setPriority(signature['priority'])
        determineRoutine(job)
        job_list.append(job)
    quickSortJobs(job_list, 0, len(job_list) - 1)
    updatePriority(job_list)
    return job_list
def validParams():
    """Build the parameter spec for a PBS-launched job: required settings,
    optional scheduler knobs, and string-substitution sub-parameters."""
    params = Job.validParams()
    params.addRequiredParam('chunks', "The number of PBS chunks.")
    # Only one of either of the next two parameters can be specified
    params.addParam('mpi_procs', "The number of MPI processes per chunk.")
    params.addParam('total_mpi_procs', "The total number of MPI processes to use divided evenly among chunks.")
    params.addParam('place', 'scatter:excl', "The PBS job placement scheme to use.")
    params.addParam('walltime', '4:00:00', "The requested walltime for this job.")
    params.addParam('no_copy', "A list of files specifically not to copy")
    params.addParam('copy_files', "A list of files specifically to copy")
    # string-substitution parameters: the second argument is the template
    # text substituted into the PBS script
    params.addStringSubParam('combine_streams', '#PBS -j oe', "Combine stdout and stderror into one file (needed for NO EXPECTED ERR)")
    params.addStringSubParam('threads', '--n-threads=THREADS', "The number of threads to run per MPI process.")
    params.addStringSubParam('queue', '#PBS -q QUEUE', "Which queue to submit this job to.")
    params.addStringSubParam('module', 'module load MODULE', 'moose-dev-gcc', "The module to load.")
    params.addStringSubParam('cli_args', 'CLI_ARGS', "Any extra command line arguments to tack on.")
    params.addStringSubParam('notifications', '#PBS -m NOTIFICATIONS', "The PBS notifications to enable: 'b' for begin, 'e' for end, 'a' for abort.")
    params.addStringSubParam('notify_address', '#PBS -M NOTIFY_ADDRESS', "The email address to use for PBS notifications")
    # Soft linked output during run
    params.addParam('soft_link_output', False, "Create links to your STDOUT and STDERR files in your working directory during the run.")
    params.addRequiredParam('moose_application', "The full path to the application to run.")
    params.addRequiredParam('input_file', "The input file name.")
    return params
def start(self):
    """Start the master: publish the job queues over the network and
    dispatch jobs in batches of 10."""
    # register the dispatch and finished job queues on the network
    BaseManager.register('get_dispatched_job_queue', callable=self.get_dispatched_job_queue)
    BaseManager.register('get_finished_job_queue', callable=self.get_finished_job_queue)
    # listen on the port and start the service
    manager = BaseManager(address=('0.0.0.0', 8888), authkey=b'jobs')
    manager.start()
    # obtain the queues through the methods registered above
    dispatched_jobs = manager.get_dispatched_job_queue()
    finished_jobs = manager.get_finished_job_queue()
    # dispatch 10 jobs at a time; once all 10 have run, dispatch 10 more
    job_id = 0
    while True:
        for i in range(0, 10):
            job_id += 1
            job = Job(job_id)
            print('Dispatch job: %s' % job.job_id)
            dispatched_jobs.put(job)
        while not dispatched_jobs.empty():
            # NOTE(review): 60 is passed as Queue.get's `block` argument,
            # not a timeout — confirm intent
            job = finished_jobs.get(60)
            print('Finished Job: %s' % job.job_id)
    # NOTE(review): unreachable after the `while True` loop above
    manager.shutdown()
def GET_package(self, package=None):
    """Serve a build-status graph: every job, or a single package's jobs."""
    graph = self.graph_init()
    if package == "/":
        jobs = Job.selectBy()
        graph.title = "Build status"
    else:
        # package looks like "/<name>/<version>": split at the last slash
        sep = package.rindex("/")
        name, version = package[1:sep], package[sep + 1:]
        pkg = Package.selectBy(version=version, name=name)[0]
        jobs = Job.selectBy(package=pkg)
        graph.title = "Build status for %s" % package[1:]
    graph.setData(self.compute_stats(jobs))
    buf = tempfile.TemporaryFile()
    graph.draw(buf)
    buf.seek(0)
    return buf.read()
def GET_buildstats(self, distarch=None):
    """Serve a build-status graph for all jobs or one dist/arch.

    Fix: close the temporary file deterministically with a context
    manager instead of relying on garbage collection.
    """
    graph = self.graph_init()
    if distarch == "/":
        graph.title = "Build status"
        jobs = Job.selectBy()
    else:
        # distarch looks like "/<dist>/<arch>": split at the last slash
        dindex = distarch.rindex("/")
        graph.title = "Build status for %s" % distarch[1:]
        jobs = Job.selectBy(arch=distarch[dindex + 1:],
                            dist=distarch[1:dindex])
    graph.setData(self.compute_stats(jobs))
    with tempfile.TemporaryFile() as tmp:
        graph.draw(tmp)
        tmp.seek(0)
        return tmp.read()
def behavior(self):
    # Slotted Bernoulli source: each unit-length slot, create a job with
    # probability arrival_probability and try to hand it to the output
    # buffer; count losses when the buffer is full.
    while (True):
        #In this slot, a job gets created
        # with probability = arrival_probability
        if (random.random() < self.arrival_probability):
            #create a job and timestamp it
            job = Job(self.env.now)
            self.num_jobs_created += 1
            #wait for a delta amount of time
            yield (self.env.timeout(0.1))
            #check if there's place at the output buffer
            if (self.outp.can_put()):
                #output the job
                self.outp.put(job)
                #print("T=", self.env.now+0.0, self.name,"output job",job,"to",self.outp)
            else:
                self.num_jobs_lost += 1
                # running estimate of P(loss); updated after each loss
                self.blocking_probability = float(self.num_jobs_lost) / float(
                    self.num_jobs_created)
            #wait till the end of the slot
            # (0.1 + 0.9 = one full slot)
            yield (self.env.timeout(0.9))
        else:
            #wait till the next slot
            yield (self.env.timeout(1))
def add():
    """Admin view: show the add-job form on GET, create the Job on POST."""
    if request.method == "GET":
        return render_template('backend/add.html',
                               job_category=JobCategory.getAllJobType())
    form = request.form
    # the three element_4_* fields are month / day / year of the deadline
    deadline = form['element_4_2'] + "/" + form['element_4_1'] + "/" + form['element_4_3']
    new_job = Job(title=form['title'],
                  cat=form['category'],
                  desc=form['description'],
                  reqire=form['reqirement'],
                  dateLine=deadline,
                  hrName=form['name'],
                  hrPhone=form['phone'],
                  hrEmail=form['email'],
                  hrWeb=form['website'],
                  hrAddress=form['address'])
    db.session.add(new_job)
    db.session.commit()
    return redirect("/admin_panel/add")
def Main():
    """Load jobs from JOBS_INPUT_FILE, build an initial single-machine
    schedule, then run local-search moves until 10 rounds in a row fail
    to improve the makespan.
    """
    scheduler = LocalSearchScheduler(4)
    # scheduler.printStatus()
    with open(JOBS_INPUT_FILE) as jobs_file:
        reader = csv.reader(jobs_file, delimiter=',')
        for line in reader:
            # column order: type id, then fields [2] and [1]
            # (assumed intentional swap -- TODO confirm against the CSV schema)
            job = Job(JobType.getJobTypeFromInt(int(line[0])),
                      int(line[2]), int(line[1]))
            scheduler.addJobToDict(job)
    scheduler.scheduleAllOnOneMachine()
    scheduler.printStatus()
    # one priming move before the counted improvement rounds
    current_makespan = scheduler.makespan
    scheduler.moveJobs()
    tries = 0
    while tries < 10:
        current_makespan = scheduler.makespan
        scheduler.moveJobs()
        if not scheduler.isLegal():
            # fixed typo: message previously read "Ilegal schedule"
            print("Illegal schedule")
        if current_makespan <= scheduler.makespan:
            # no improvement this round
            tries += 1
            print("next try")
    scheduler.printStatus()
def main():
    ''' main example to construct / run queue of jobs '''
    queue = JobQueue("test")
    # date range for the bulk-populated jobs
    start_date = date(2018, 1, 1)
    end_date = date(2018, 1, 11)
    # this would be a useful example of what to run
    cmdline = "loadDate -db TAQ -date {yyyymmdd}"
    # this is a less useful example, but serves for example purposes on linux at least
    cmdline = "echo Hello, today is {yyyymmdd}"
    num_threads = 4  # overrides
    logging.basicConfig(filename='main.log', level=logging.INFO,
                        format='%(asctime)s %(message)s',
                        datefmt='%Y-%m-%d %I:%M:%S %p')
    # one job per date in the range; add_job/add can also be called directly
    queue.populate(start_date, end_date, cmdline)
    # here's how to add one manually
    queue.add(Job('99999999', 'echo Manually added job'))
    runner = QueueRunner(queue, num_threads)
    print("beginning run of queue")
    # run queue until complete, then log per-job stats
    runner.run()
    runner.print_all_job_details()
    print("completed run of queue")
def fix_jobs(self, print_result=True): """If rebuildd crashed, reset jobs to a valid state""" jobs = [] jobs.extend(Job.selectBy(host=socket.gethostname(), status=JobStatus.WAIT_LOCKED)) jobs.extend(Job.selectBy(host=socket.gethostname(), status=JobStatus.BUILDING)) for job in jobs: if print_result: print "I: Fixing job %s (was %s)" % (job.id, JobStatus.whatis(job.status)) job.host = None job.status = JobStatus.WAIT job.build_start = None job.build_end = None return True
def ReturnJobByID( job_id):
    """Fetch one job (joined with its customer name and job-type name).

    job_id -- numeric job identifier (anything non-numeric is rejected)
    Returns a Job instance, or -1 when the id is invalid or no row matches.
    """
    pool = connectionPool.getInstance()
    connection_object = pool.connection_pool.get_connection()
    # SECURITY: job_id is interpolated into the SQL text below and
    # cmd_query offers no parameter binding, so coerce to int first to
    # rule out SQL injection via a crafted id.
    try:
        job_id = int(job_id)
    except (TypeError, ValueError):
        connection_object.close()
        return -1
    command = (
        "SELECT jobs.job_id, jobs.job_type_id, jobs.address, jobs.details, "
        "jobs.cus_id, users.full_name, job_types.job_name "
        "FROM jobs "
        "INNER JOIN job_types ON jobs.job_type_id = job_types.job_type_id "
        "INNER JOIN users ON jobs.cus_id = users.user_id "
        "where job_id = '{}';".format(job_id)
    )
    #print(command)
    connection_object.cmd_query(command)
    job_Return = connection_object.get_rows()
    # get_rows() returns (row_list, eof); there should be at most one row.
    try:
        Job_Return = job_Return[0][0]
    except IndexError:  # narrowed from a bare except: only "no row" is expected
        connection_object.close()
        return -1
    Job_output = Job(int(Job_Return[0]), int(Job_Return[1]),
                     Job_Return[2].decode("utf-8"),
                     Job_Return[3].decode("utf-8"),
                     int(Job_Return[4]),
                     Job_Return[5].decode("utf-8"),
                     Job_Return[6].decode("utf-8"))
    connection_object.close()
    return Job_output
def test_if_provide_a_job_with_invalid_words_and_valid_words_assert_fail(
        self):
    """An offer mixing valid and invalid keywords must not validate."""
    offer = {
        "content": "Full Stack Vue Developer and Ruby",
        "uid": "test",
    }
    self.assertFalse(Job(offer).isValid())
def ReturnJobList():
    """Return all jobs (joined with customer and job-type names), ordered
    by job_id, as a list of Job instances.

    Returns -1 when the result set cannot be read at all.
    """
    pool = connectionPool.getInstance()
    connection_object = pool.connection_pool.get_connection()
    command = (
        "SELECT jobs.job_id, jobs.job_type_id, jobs.address, jobs.details, "
        "jobs.cus_id, users.full_name, job_types.job_name \n"
        "FROM jobs \n"
        "INNER JOIN job_types ON jobs.job_type_id = job_types.job_type_id \n"
        "INNER JOIN users ON jobs.cus_id = users.user_id\n"
        "order by job_id;"
    )
    print(command)
    connection_object.cmd_query(command)
    job_Return = connection_object.get_rows()
    # get_rows() returns (row_list, eof); take the row list.
    try:
        Job_Return = job_Return[0]
    except (IndexError, TypeError):  # narrowed from a bare except
        connection_object.close()
        return -1
    connection_object.close()
    # one Job per row; byte columns are decoded to text
    return [Job(int(row[0]), int(row[1]),
                row[2].decode("utf-8"), row[3].decode("utf-8"),
                int(row[4]),
                row[5].decode("utf-8"), row[6].decode("utf-8"))
            for row in Job_Return]
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=None,
             orderDate=None, isCritical=False, componentsList=None,
             manager=None, basicsEnded=0, componentsReadyForAssembly=0,
             extraPropertyDict=None, **kw):
    """Initialise an order entity on top of Job.

    componentsList -- specs the order will be broken into
    manager        -- entity responsible for handling the order
    basicsEnded / componentsReadyForAssembly -- progress flags
    """
    # BUG FIX: route=[] / componentsList=[] were mutable default
    # arguments shared across every instance; create fresh ones instead.
    if route is None:
        route = []
    if componentsList is None:
        componentsList = []
    Job.__init__(self, id=id, name=name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate, isCritical=isCritical,
                 extraPropertyDict=extraPropertyDict)
    self.componentsList = componentsList  # list of components that the order will be broken into
    self.components = []  # list of all the child components of the order
    self.assemblyComponents = []  # list of the required components to build the mould
    self.assemblyRequested = False  # flag used to check whether a mould is created out of other orderComponents
    #=======================================================================
    self.basicComponentsList = []  # list that holds the Basic Components of the order
    self.secondaryComponentsList = []  # list that holds the Secondary Components of the order
    self.auxiliaryComponentsList = []  # list of the auxiliary components of the order
    self.basicsEnded = basicsEnded  # flag that informs that the basic components of the order are finished
    self.manager = manager  # the manager responsible to handle the order
    #=======================================================================
    # flag that informs whether the components needed for the assembly are present in the Assembly Buffer
    self.componentsReadyForAssembly = componentsReadyForAssembly
    # self.decomposed=False
    # used by printRoute
    self.alias = 'O' + str(len(G.OrderList))

def createRoute(self, route):
    # identity hook -- subclasses may override to post-process a route
    return route
def add_request(self, request):
    """
    Adds parsing request to pool's jobs queue.

    @type request: Request
    @param request: parsing request object
    """
    job = Job(request)
    self.jobs_queue.put(job)
def GET(self, jobid=None):
    # Look up the job; IndexError propagates when the id is unknown.
    job = Job.selectBy(id=jobid)[0]
    try:
        # Prefer the build log file on disk.
        with open(job.logfile, "r") as build_logfile:
            build_log = build_logfile.read()
    except IOError, error:
        # Fall back to the log text stored on the job record.
        # NOTE(review): build_log is assigned but never returned here --
        # the handler looks truncated in this view; confirm downstream use.
        build_log = job.log.text
def add_deps(self, job_id, dependency_ids):
    """Attach the jobs in dependency_ids as dependencies of job_id.

    Returns False (after logging) when any id is not in the job list,
    True once the dependencies have been registered.
    """
    if Job.selectBy(id=job_id).count() == 0:
        RebuilddLog.error("There is no job related to %s that is in the job list" % job_id)
        return False
    job = Job.selectBy(id=job_id)[0]
    deps = []
    for dep_id in dependency_ids:
        # every dependency must already exist before anything is attached
        if Job.selectBy(id=dep_id).count() == 0:
            RebuilddLog.error("There is no job related to %s that is in the job list" % dep_id)
            return False
        deps.append(Job.selectBy(id=dep_id)[0])
    job.add_deps(deps)
    return True
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=0,
             orderDate=0, extraPropertyDict=None, componentType='Basic',
             order=None, requestingComponent=None, readyForAssembly=0,
             isCritical=False, remainingProcessingTime=None,
             remainingSetupTime=None, currentStation=None, **kw):
    """Initialise an order component on top of Job.

    componentType        -- 'Basic', 'Secondary' or 'Auxiliary'
    order                -- parent order of the component
    requestingComponent  -- id of the component that needs this auxiliary
    readyForAssembly     -- flag: received by the MouldAssembleBuffer
    """
    # BUG FIX: route=[] / remainingProcessingTime={} / remainingSetupTime={}
    # were mutable default arguments shared across instances; use fresh ones.
    if route is None:
        route = []
    if remainingProcessingTime is None:
        remainingProcessingTime = {}
    if remainingSetupTime is None:
        remainingSetupTime = {}
    Job.__init__(self, id, name, route=route, priority=priority,
                 dueDate=dueDate, orderDate=orderDate,
                 extraPropertyDict=extraPropertyDict, isCritical=isCritical,
                 remainingProcessingTime=remainingProcessingTime,
                 remainingSetupTime=remainingSetupTime,
                 currentStation=currentStation)
    #=======================================================================
    self.auxiliaryList = []  # Holds the auxiliary components that the component needs for a certain processing
    #=======================================================================
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP) have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    self.componentType = componentType  # the type of the component which can be Basic/Secondary/Auxiliary
    #=======================================================================
    # if the componentType of the component is Auxiliary then there need a requesting Component be defined
    # the requestingComponent is the component that needs the auxiliary component during its processing
    # the auxiliary component should then be added to the requestingComponent's auxiliaryList
    self.requestingComponent = requestingComponent  # the id of the requesting component
    #=======================================================================
    self.readyForAssembly = readyForAssembly  # flag informing whether the component was received
    # by the MouldAssembleBuffer
    # used by printRoute
    if self.order:
        self.alias = self.order.alias + 'C' + str(len(G.OrderComponentList))
    # NOTE(review): registration assumed unconditional (alias only depends
    # on the order) -- confirm against the original indentation.
    G.OrderComponentList.append(self)
    G.WipList.append(self)
def GET(self, dist, arch=None):
    """Render the ten most recent jobs for one dist/arch pair."""
    recent = Job.select(
        sqlobject.AND(Job.q.arch == arch, Job.q.dist == dist),
        orderBy=sqlobject.DESC(Job.q.creation_date))[:10]
    jobs = list(recent)
    return render.base(page=render.tab(jobs=jobs), \
                       arch=arch, \
                       dist=dist, \
                       title="%s/%s" % (dist, arch), \
                       hostname=socket.gethostname(), \
                       archs=RebuilddConfig().arch, \
                       dists=RebuilddConfig().get('build', 'dists').split(' '))
def GET(self):
    """Render one paginated page listing every build, newest first."""
    query = Job.select(orderBy=DESC(Job.q.creation_date))
    result, page, nb_pages = _paginate_query(query)
    jobs = list(result)
    return render.base(\
        page=render.tab(jobs=jobs, page=page, nb_pages=nb_pages), \
        title="all builds", \
        hostname=socket.gethostname(), \
        archs=RebuilddConfig().arch, \
        dists=RebuilddConfig().get('build', 'dists').split(' '))
def __init__(self, id=None, name=None, route=None, priority=0, dueDate=None,
             orderDate=None, extraPropertyDict=None, order=None,
             isCritical=False):
    """Initialise an order component on top of Job.

    order -- parent order of the component; its manager (if any) is adopted.
    """
    # BUG FIX: route=[] was a mutable default argument shared by every
    # instance created without an explicit route; create a fresh list.
    if route is None:
        route = []
    Job.__init__(self, id, name, route, priority, dueDate, orderDate,
                 extraPropertyDict, isCritical)
    self.order = order  # parent order of the order component
    # TODO: in case the order is not given as argument (when the component is given as WIP) have to give a manager as argument
    # or create the initiate the parent order not as WIP
    if self.order:
        # if the order is not None, and the order.manager is given
        if self.order.manager:
            self.manager = self.order.manager
    # variable to be used by OperatorRouter
    self.hot = False
    # TODO: isCritical argument is deprecated
    # self.isCritical=isCritical # this should be self.order.isCritical. Added now for testing
def submit_job_posting():
    """Read the posting form, persist a new Job, and re-render home."""
    form = request.forms
    email = form.get('email')
    print("email:" + email)
    compName = form.get('compName')
    print("compName:" + compName)
    jobTitle = form.get('jobTitle')
    print("jobTitle:" + jobTitle)
    skillReq = form.get('skillReq')
    openings = form.get('openings')
    trainMat = form.get('trainingMaterial')
    budget = form.get('budget')
    waitTime = form.get('waitTime')
    #DB handling
    newJob = Job(compName, jobTitle, waitTime, openings, budget,
                 trainMat, skillReq)
    if newJob.save():
        #yay
        print("success")
    else:
        print("error!")
    return template('home.tpl', benefit=2)
def GET(self, dist, arch=None):
    """Render one paginated page of builds for a dist/arch pair."""
    query = Job.select(AND(Job.q.arch == arch, Job.q.dist == dist),
                       orderBy=DESC(Job.q.creation_date))
    result, page, nb_pages = _paginate_query(query)
    jobs = list(result)
    return render.base(page=render.tab(jobs=jobs, page=page,
                                       nb_pages=nb_pages), \
                       arch=arch, \
                       dist=dist, \
                       title="%s/%s" % (dist, arch), \
                       hostname=socket.gethostname(), \
                       archs=RebuilddConfig().arch, \
                       dists=RebuilddConfig().get('build', 'dists').split(' '))
def GET(self, name=None, version=None):
    """Render every build of one package (optionally one version of it)."""
    if version:
        # specific version requested
        pkg = Package.selectBy(name=name, version=version)[0]
        title = "%s %s" % (name, version)
        package = "%s/%s" % (name, version)
    else:
        # any version: first matching package record
        pkg = Package.selectBy(name=name)[0]
        title = package = name
    jobs = list(Job.selectBy(package=pkg))
    return render.base(page=render.tab(jobs=jobs), \
                       hostname=socket.gethostname(), \
                       title=title, \
                       package=package, \
                       archs=RebuilddConfig().arch, \
                       dists=RebuilddConfig().get('build', 'dists').split(' '))
def validParams():
    """Assemble the PBS-specific parameter spec on top of Job's params.

    Registration order is preserved exactly as before; only the way the
    calls are grouped differs.
    """
    params = Job.validParams()
    params.addRequiredParam("chunks", "The number of PBS chunks.")

    # Only one of either of the next two paramteres can be specified
    for name, doc in (
            ("mpi_procs", "The number of MPI processes per chunk."),
            ("total_mpi_procs",
             "The total number of MPI processes to use divided evenly among chunks.")):
        params.addParam(name, doc)

    # Scheduler knobs with defaults.
    for name, default, doc in (
            ("place", "scatter:excl", "The PBS job placement scheme to use."),
            ("walltime", "4:00:00", "The requested walltime for this job.")):
        params.addParam(name, default, doc)

    params.addParam("no_copy", "A list of files specifically not to copy")
    params.addParam("copy_files", "A list of files specifically to copy")

    # Template string substitutions for the generated PBS script.
    for name, sub, doc in (
            ("combine_streams", "#PBS -j oe",
             "Combine stdout and stderror into one file (needed for NO EXPECTED ERR)"),
            ("threads", "--n-threads=THREADS",
             "The number of threads to run per MPI process."),
            ("queue", "#PBS -q QUEUE", "Which queue to submit this job to."),
            ("cli_args", "CLI_ARGS", "Any extra command line arguments to tack on."),
            ("notifications", "#PBS -m NOTIFICATIONS",
             "The PBS notifications to enable: 'b' for begin, 'e' for end, 'a' for abort."),
            ("notify_address", "#PBS -M NOTIFY_ADDRESS",
             "The email address to use for PBS notifications")):
        params.addStringSubParam(name, sub, doc)

    # Soft linked output during run
    params.addParam(
        "soft_link_output", False,
        "Create links to your STDOUT and STDERR files in your working directory during the run.")

    params.addRequiredParam("moose_application",
                            "The full path to the application to run.")
    params.addRequiredParam("input_file", "The input file name.")
    return params
def addJob(self, jobObject):
    """Wrap jobObject in a Job, register its read/write (and request)
    lists on the pool, and index it by name in self.Jobs."""
    job = Job(jobObject)
    # extend() replaces the four element-by-element append loops;
    # per-list element order is unchanged
    self.WR_list.extend(job.getWriteList())
    self.RD_list.extend(job.getReadList())
    self.RQ_list.extend(job.getReadRequestList())
    self.WQ_list.extend(job.getWriteRequestList())
    self.Jobs[job.getName()] = job