def LocalOptimisationStrategy(self, sol, bestFit):
        limit = len(sol) - 3
        rng = range(1, limit)

        bestFound = []
        bestFitness = bestFit
        newFitness = 0

        actSol = sol
        bestSol = deepcopy(sol)
        for i in rng:
            for j in range(i + 1, limit):
                indexI = bestSol.index(Job(i, 0, [], [], [], 0))
                indexJ = bestSol.index(Job(j, 0, [], [], [], 0))
                if self.CanSwap(bestSol, indexI, indexJ):
                    bestSol[indexI], bestSol[indexJ] = bestSol[
                        indexJ], bestSol[indexI]
                else:
                    continue

                super(SerialScheduleLocalOpt, self).ResetJobs()
                newFitness, actSol = self.OptSSGS(bestSol, min(indexI, indexJ))
                # undo the trial swap; the improved schedule (if any) is adopted from actSol below
                bestSol[indexJ], bestSol[indexI] = bestSol[indexI], bestSol[
                    indexJ]

                if newFitness is None:
                    continue
                if newFitness < bestFitness:
                    bestSol = deepcopy(actSol)
                    bestFitness = newFitness
        return (bestSol, bestFitness)
Example No. 2
    def read_from_file(path):
        """
            Read an instance from file.

            Keyword arguments:
                path: full path name of an instance file.

            Returns a 6-tuple:
                j1: number of inbound jobs
                j2: number of outbound jobs
                m1: number of machines in first stage
                m2: number of machines in second stage
                jobs_1: list of first stage jobs
                jobs_2: list of second stage jobs
        """
        with open(path) as f:
            lines = [line.rstrip('\n') for line in f]

        j1 = int(lines[0])  # number of inbound jobs
        j2 = int(lines[1])  # number of outbound jobs
        m1 = int(lines[2])  # number of first stage machines
        m2 = int(lines[3])  # number of second stage machines

        p1 = []  # processing time j1
        for p in lines[5:j1 + 5]:
            p1.append(int(p))

        p2 = []  # processing time j2
        for p in lines[j1 + 6:j2 + j1 + 6]:
            p2.append(int(p))

        # Predecessor/successor matrix
        precedents_matrix = [l.split() for l in lines[j2 + j1 + 7:]]

        predecessors = [[] for _ in range(j2)]
        for i in range(j2):
            for j in range(j1):
                if precedents_matrix[i][j] == '1':
                    predecessors[i].append(j)

        successors = [[] for _ in range(j1)]
        for j in range(j1):
            for i in range(j2):
                if precedents_matrix[i][j] == '1':
                    successors[j].append(i)

        jobs_1 = []
        for i in range(len(p1)):
            jobs_1.append(Job(i, p1[i], successors=successors[i]))

        jobs_2 = []
        for i in range(len(p2)):
            jobs_2.append(Job(i, p2[i], predecessors=predecessors[i]))

        # print("M1: {} M2: {} P1: {} P2: {} Predecessors: {} Successors: {}".format(j1, j2, m1, m2, predecessors, successors))
        return j1, j2, m1, m2, jobs_1, jobs_2
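The slicing above implies a fixed file layout: four header lines (j1, j2, m1, m2), a blank line, the j1 first-stage processing times, a blank line, the j2 second-stage processing times, a blank line, and finally a j2-by-j1 0/1 precedence matrix. A minimal sketch of a consistent instance and how it would be read (file name and values are illustrative, and it assumes read_from_file and Job are importable as above):

# Hypothetical toy instance: 2 inbound jobs, 2 outbound jobs, 1 machine per stage.
sample = "2\n2\n1\n1\n\n3\n4\n\n5\n6\n\n1 0\n0 1\n"
with open("toy_instance.txt", "w") as f:
    f.write(sample)

# p1 == [3, 4], p2 == [5, 6]; outbound job 0 depends on inbound job 0, job 1 on job 1.
j1, j2, m1, m2, jobs_1, jobs_2 = read_from_file("toy_instance.txt")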
Example No. 3
    def start(self):
        # Register the dispatched-job queue and the finished-job queue on the network
        BaseManager.register('get_dispatched_job_queue',
                             callable=self.get_dispatched_job_queue)
        BaseManager.register('get_finished_job_queue',
                             callable=self.get_finished_job_queue)

        # Bind the port and start the service
        manager = BaseManager(address=('0.0.0.0', 8888), authkey=b'jobs')
        manager.start()

        # Fetch the queues via the methods registered above
        dispatched_jobs = manager.get_dispatched_job_queue()
        finished_jobs = manager.get_finished_job_queue()

        # Dispatch 10 jobs at a time; after all 10 finish, dispatch the next 10
        job_id = 0
        while True:
            for i in range(0, 10):
                job_id += 1
                job = Job(job_id)
                print('Dispatch job: %s' % job.job_id)
                dispatched_jobs.put(job)

            while not dispatched_jobs.empty():
                job = finished_jobs.get(timeout=60)  # pass the timeout explicitly; get()'s first positional arg is `block`
                print('Finished Job: %s' % job.job_id)

        manager.shutdown()
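The worker side is not shown; a minimal sketch of a matching worker, assuming the Job class used by the master is importable and the same address and authkey are used:

from multiprocessing.managers import BaseManager

def worker():
    # Register the same queue names so the proxies resolve after connect().
    BaseManager.register('get_dispatched_job_queue')
    BaseManager.register('get_finished_job_queue')

    manager = BaseManager(address=('127.0.0.1', 8888), authkey=b'jobs')
    manager.connect()

    dispatched_jobs = manager.get_dispatched_job_queue()
    finished_jobs = manager.get_finished_job_queue()

    # Pull jobs from the master, "run" them, and report them back.
    while True:
        job = dispatched_jobs.get()
        print('Run job: %s' % job.job_id)
        finished_jobs.put(job)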
Example No. 4
 def scheduleSingleSet(self, jobSet, limits):
     jobSet.sort(key=lambda x: x.canStart)
     for job in jobSet:
         newLimits = [(max(limits[0], job.canStart),
                       min(limits[1], job.canEnd))]
         for subjob in jobSet:
             updatedLimits = []
             for limit in newLimits:
                 mockJob = Job()
                 mockJob.canStart = limit[0]
                 mockJob.canEnd = limit[1]
                 overlap = checkOverlap(mockJob, subjob)
                 updatedLimit = getNewLimits(subjob, overlap, limit)
                 if isinstance(updatedLimit[0], list):
                     for sublist in updatedLimit:
                         updatedLimits.append(sublist)
                 else:
                     updatedLimits.append(updatedLimit)
             newLimits = updatedLimits
             if not self.fitLimits(job, newLimits):
                 break
         limitForJob = self.fitLimits(job, newLimits)
         if limitForJob:
             self.scheduleSingleJob(job, limitForJob[0],
                                    limitForJob[0] + job.duration)
Example No. 5
    def behavior(self):

        while (True):

            #In this slot, a job gets created
            # with probability = arrival_probability
            if (random.random() < self.arrival_probability):

                #create a job and timestamp it
                job = Job(self.env.now)
                self.num_jobs_created += 1

                #wait for a delta amount of time
                yield (self.env.timeout(0.1))

                #check if there's place at the output buffer
                if (self.outp.can_put()):
                    #output the job
                    self.outp.put(job)
                    #print("T=", self.env.now+0.0, self.name,"output job",job,"to",self.outp)
                else:
                    self.num_jobs_lost += 1

                self.blocking_probability = float(self.num_jobs_lost) / float(
                    self.num_jobs_created)
                #wait till the end of the slot
                yield (self.env.timeout(0.9))

            else:
                #wait till the next slot
                yield (self.env.timeout(1))
Example No. 6
 def test_if_provide_a_job_with_invalid_words_and_valid_words_assert_fail(
         self):
     job_offer = {
         "content": "Full Stack Vue Developer and Ruby",
         "uid": "test"
     }
     self.assertFalse(Job(job_offer).isValid())
Example No. 7
    def list_jobs(self,
                  limit=30,
                  username=None,
                  application=None,
                  state=None,
                  created=None,
                  started=None,
                  ended=None):
        url = "{0}/api/jobs/list/".format(self._serverUrl)

        data = {
            'username': username,
            'application': application,
            'state': state
        }
        if created: data.update({'created': ';'.join(created)})
        if started: data.update({'started': ';'.join(started)})
        if ended: data.update({'ended': ';'.join(ended)})
        r = self.post(url, data=data)
        res = []

        for values in r.json():
            job = Job(self)
            job.load(values)
            res.append(job)
        return res
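Because created, started and ended are ';'.join()-ed, each date filter is expected to be an iterable of strings; a hedged usage sketch (the client object, field names and values are illustrative):

# `client` stands for whatever object exposes list_jobs().
jobs = client.list_jobs(username='alice',
                        state='running',
                        created=('2021-01-01', '2021-01-31'))
for job in jobs:
    print(job)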
Example No. 8
def main():
    '''
        Main example: construct and run a queue of jobs.
    '''
    # create queue
    job_queue = JobQueue("test")
    # start and end date
    start_date = date(2018, 1, 1)
    end_date = date(2018, 1, 11)
    # this would be a useful example of what to run
    cmdline = "loadDate -db TAQ -date {yyyymmdd}"
    # this is a less useful example, but serves for example purposes on linux at least
    cmdline = "echo Hello, today is {yyyymmdd}"
    num_threads = 4
    # overrides

    # this call will populate based on below params
    # can also just call add_job over and over as needed
    logging.basicConfig(filename='main.log',
                        level=logging.INFO,
                        format='%(asctime)s %(message)s',
                        datefmt='%Y-%m-%d %I:%M:%S %p')
    job_queue.populate(start_date, end_date, cmdline)

    # here's how to add one manually
    my_job = Job('99999999', 'echo Manually added job')
    job_queue.add(my_job)
    # create queue runner
    queue_runner = QueueRunner(job_queue, num_threads)
    print("beginning run of queue")
    # run queue until complete
    queue_runner.run()
    # log stats at end
    queue_runner.print_all_job_details()
    print("completed run of queue")
Example No. 9
    def ReturnJobList():
        pool = connectionPool.getInstance()
        connection_object = pool.connection_pool.get_connection() 



        commandPart1 = "SELECT jobs.job_id, jobs.job_type_id, jobs.address, jobs.details, jobs.cus_id, users.full_name, job_types.job_name \n"
        commandPart2 = "FROM jobs \n"
        commandPart3 = "INNER JOIN job_types ON jobs.job_type_id = job_types.job_type_id \n"
        commandPart4 = "INNER JOIN users ON jobs.cus_id = users.user_id\n"
        commandPart5 = "order by job_id;"

        command = commandPart1 + commandPart2 + commandPart3 + commandPart4 + commandPart5
        print(command)

        connection_object.cmd_query(command)
        job_Return = connection_object.get_rows()

        try:
            Job_Return = job_Return[0]
        except:
            connection_object.close()
            return -1

        connection_object.close()

        joblist = []

        for i in Job_Return:
            #print(i)
            nextJob = Job(int(i[0]), int(i[1]), i[2].decode("utf-8"),
                          i[3].decode("utf-8"), int(i[4]),
                          i[5].decode("utf-8"), i[6].decode("utf-8"))
            joblist.append(nextJob)

        return joblist
Example No. 10
    def ReturnJobByID( job_id):
        pool = connectionPool.getInstance()
        connection_object = pool.connection_pool.get_connection() 

        commandPart1 = "SELECT jobs.job_id, jobs.job_type_id, jobs.address, jobs.details, jobs.cus_id, users.full_name, job_types.job_name "
        commandPart2 = "FROM jobs "
        commandPart3 = "INNER JOIN job_types ON jobs.job_type_id = job_types.job_type_id "
        commandPart4 = "INNER JOIN users ON jobs.cus_id = users.user_id "
        commandPart5 = "where job_id = '{}';".format(job_id)


        command = commandPart1 + commandPart2 + commandPart3 + commandPart4 + commandPart5
        #print(command)

        connection_object.cmd_query(command)
        job_Return = connection_object.get_rows()
        #should only be one row so pull the first
        try:
            Job_Return = job_Return[0][0]
        except:
            connection_object.close()
            return -1

        Job_output = Job(int(Job_Return[0]), int(Job_Return[1]),
                         Job_Return[2].decode("utf-8"), Job_Return[3].decode("utf-8"),
                         int(Job_Return[4]), Job_Return[5].decode("utf-8"),
                         Job_Return[6].decode("utf-8"))
        connection_object.close()

        return Job_output
Example No. 11
def Main():

    scheduler = LocalSearchScheduler(4)
    #    scheduler.printStatus()
    with open(JOBS_INPUT_FILE) as jobs_file:
        reader = csv.reader(jobs_file, delimiter=',')
        for line in reader:
            job = Job(JobType.getJobTypeFromInt(int(line[0])), int(line[2]),
                      int(line[1]))
            scheduler.addJobToDict(job)
        scheduler.scheduleAllOnOneMachine()
    scheduler.printStatus()

    current_makespan = scheduler.makespan
    scheduler.moveJobs()
    tries = 0
    while tries < 10:
        current_makespan = scheduler.makespan
        scheduler.moveJobs()
        if not scheduler.isLegal():
            print("Ilegal schedule")

        if current_makespan <= scheduler.makespan:
            tries += 1
            print("next try")
            scheduler.printStatus()
Example No. 12
def add():
    if request.method == "GET":

        return render_template('backend/add.html',
                               job_category=JobCategory.getAllJobType())
    else:
        title = request.form['title']
        cat = request.form['category']
        desc = request.form['description']
        reqire = request.form['reqirement']
        month = request.form['element_4_1']
        day = request.form['element_4_2']
        year = request.form['element_4_3']
        dateLine = day + "/" + month + "/" + year
        hrName = request.form['name']
        hrPhone = request.form['phone']
        hrEmail = request.form['email']
        hrWeb = request.form['website']
        hrAddress = request.form['address']

        job = Job(title=title,
                  cat=cat,
                  desc=desc,
                  reqire=reqire,
                  dateLine=dateLine,
                  hrName=hrName,
                  hrPhone=hrPhone,
                  hrEmail=hrEmail,
                  hrWeb=hrWeb,
                  hrAddress=hrAddress)
        db.session.add(job)
        db.session.commit()
        return redirect("/admin_panel/add")
Example No. 13
def addJob(job_list):
    signature = {}
    signature['name'] = input("Enter Job Name: ")
    signature['job_length'] = input("Enter Job Length: ")
    job = Job(signature)
    priority = binary_search(job_list, job, 0, len(job_list) - 1)
    job.setPriority(priority)
    job_list.insert(priority, job)
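The binary_search helper is not defined in this snippet; a minimal sketch of one that fits the call above, assuming job_list stays sorted by a numeric job_length attribute and the returned value is the insertion index used as the priority:

def binary_search(job_list, job, low, high):
    # Hypothetical helper: index at which `job` keeps job_list sorted by job_length.
    if low > high:
        return low
    mid = (low + high) // 2
    if job_list[mid].job_length <= job.job_length:
        return binary_search(job_list, job, mid + 1, high)
    return binary_search(job_list, job, low, mid - 1)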
Example No. 14
def getSingleJobs(index, initJob):
    job = Job()
    job.number = index
    job.canStart = initJob[1]
    job.canEnd = initJob[2]
    job.weight = initJob[3]
    job.duration = initJob[0]
    return job
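The positional layout follows directly from the assignments above: initJob[0] is the duration, [1] canStart, [2] canEnd, [3] weight. Illustrative call with made-up values:

# duration 5, may run between time 0 and 20, weight 3
job = getSingleJobs(0, [5, 0, 20, 3])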
Example No. 15
    def add_request(self, request):
        """
        Adds parsing request to pool's jobs queue.

        @type  request: Request
        @param request: parsing request object
        """
        self.jobs_queue.put(Job(request))
Example No. 16
 def create_jobs(self, amount):
     padding = 40
     job_cache = []
     while len(self.jobs) < amount:
         rx = random.randint(padding, self.game_area.width - padding)
         ry = random.randint(padding, self.game_area.height - padding)
         if (rx, ry) not in job_cache:
             new_job = Job(utils.Point(rx, ry))
             self.jobs.append(new_job)
             job_cache.append((rx, ry))
Example No. 17
def test_add_job():
    a = Job("Dusting my Chihuahuas", "Bart", "from midnight till dawn",
            "central park's blood stone")
    JobCollection.add_job(a)
    assert len(JobCollection) == 1
    assert a in JobCollection

    a_copy = Job("Dusting my Chihuahuas", "Bart", "from midnight till dawn",
                 "central park's blood stone")
    assert a_copy in JobCollection

    b = Job("Doing math homework", "Daisy", "6-8 on weekdays",
            "Gatsby's house")
    JobCollection.add_job(b)
    assert len(JobCollection) == 2
    assert b in JobCollection

    JobCollection.add_job(a_copy)
    assert len(JobCollection) == 2
Example No. 18
    def generate_job_request(self):
        job = Job()  # create the job
        request = JobRequest(job=job)
        job.set_request_id(
            request.get_id()
        )  # set the request_id in the job (needed to identify postponed request)
        print("request:     start new job")
        self.job_count -= 1

        return request
Example No. 19
def isOverlapping(job, overlapping):
    for i in range(0, len(overlapping)):
        start = min(map(lambda x: x.canStart, overlapping[i]))
        end = max(map(lambda x: x.canEnd, overlapping[i]))
        mockJob = Job()
        mockJob.canEnd = end
        mockJob.canStart = start
        overlap = checkOverlap(mockJob, job)
        if -1 <= overlap <= 1:
            return i
    return -1
Example No. 20
    def create_job(self, job_id, blob, target):
        '''Creates a new Job object populated with all the goodness it needs to mine.'''

        if self._id is None:
            raise self.StateException('Not subscribed')

        return Job(subscription_id=self.id,
                   job_id=job_id,
                   blob=blob,
                   target=target,
                   proof_of_work=self.ProofOfWork)
Example No. 21
def convertToIndeedJob(link):
    jobSoup = BeautifulSoup(requests.get(link,
                                         headers={'user-agent': 'Chrome/63.0.3239.132'}).content, 'lxml')
    try:
        title = jobSoup.find('h3', class_="jobsearch-JobInfoHeader-title").text
        company = jobSoup.find('div', class_="icl-u-lg-mr--sm").text
        metadata = jobSoup.find('div', class_="jobsearch-JobMetadataHeader-item").text
        description = jobSoup.find('div', class_="jobsearch-jobDescriptionText").text
        jobList.append(Job(title, company, metadata, description, link))
    except:
        return
Example No. 22
    def getOffers(self):
        for job_offer in self.response.json():
            offer = {
                "uid": job_offer['url'],
                "content": job_offer['title'] + " " + job_offer['description']
            }

            if Job(offer).isValid():
                self.jobs.append(self.info(job_offer))

        return self.jobs if self.jobs else 'No job offers for ' + __name__
Example No. 23
def test():
    employee1 = Employee()
    employee1.setPeople("Pavel", 27.06, 21)
    employee1.setEmployee(65743821, 1.11, "programmer")

    employee2 = Employee()
    employee2.setPeople("Anton", 11.11, 25)
    employee2.setEmployee(999999, 3.11, "manager")

    task1 = Job()
    task1.setJob("difficult task", employee1, "good luck, MAN")

    task2 = Job()
    task2.setJob("easy task", employee2, "don't screw it up")

    project = Project()
    project.setProject("INTEL8080", "commercial", "active",
                       [str(task1), str(task2)])
    with open("test.txt", "w") as file:
        file.write(str(project))
    print(project)
Example No. 24
    def getJobs(self):
        query = "select * from jobs"
        self.cursor.execute(query)
        jobResults = self.cursor.fetchall()
        jobs = []

        for result in jobResults:
            job = Job(result[1], result[2], result[3], result[4], result[6],
                      result[5])
            jobs.append(job)

        return jobs
Example No. 25
 def __init__(self, problem_matrix):
     self.num_jobs = problem_matrix[0][0]
     self.num_machines = problem_matrix[0][1]
     self.num_operations = self.num_jobs * self.num_machines
     self.jobs = []
     for i in range(1, len(problem_matrix)):
         job = Job(i - 1, problem_matrix[i])
         self.jobs.append(job)
     if len(self.jobs) != self.num_jobs:
         print('Error: number of job lines is not consistent with the number of jobs')
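The constructor expects a header row followed by one row per job; a hedged sketch of the input shape (the enclosing class name and the per-job row contents are illustrative, since those rows are passed straight to Job):

problem_matrix = [
    [2, 3],              # header: 2 jobs, 3 machines
    [0, 5, 1, 7, 2, 4],  # row for job 0, handed verbatim to Job(0, ...)
    [1, 3, 2, 6, 0, 2],  # row for job 1, handed verbatim to Job(1, ...)
]
problem = SomeProblemClass(problem_matrix)  # hypothetical name for the class defining __init__ above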
Example No. 26
    def intro(self):
        print("Welcome to Scroll!")
        print("Here are the possible classes: \n")

        print(Job("warrior"))
        print(Job("wizard"))
        print(Job("bard"))

        MyJob = input("Which class do you want to be? ").lower()

        while Job(MyJob).dmg is None:
            print("Sorry, that isn't a possible class. \n")
            MyJob = input("Which class do you want to be? ").lower()

        MyJob = Job(MyJob)

        self.job = MyJob.name
        self.dmg = MyJob.dmg
        self.hp = MyJob.hp
        self.hpMax = MyJob.hp
        self.items = []
        for item in MyJob.items:
            if Item(item).mode == "weapon":
                self.weapon = item
            if Item(item).mode == "armor":
                self.armor = item
            if Item(item).mode == "consumable":
                self.consumables += [item]
                self.items += [item]

        print("\n")
        self.name = input("What is your character's name? ")
        print("\n")

        print("This is you: \n")
        print(self)

        print("Every turn you will type the action you want to preform.")
        print("Type 'start game' when you are ready to begin.")
        print("Type '(h)elp' to see all possible moves. \n")
Example No. 27
 def ParseDurations(lines, index):
     jobs = {}
     i = index
     while not (lines[i].startswith("***")):
         spl = lines[i].split()
         number = int(spl[0]) - 1
         duration = int(spl[2])
         tmp = []
         for j in range(3, len(spl)):
             tmp.append(int(spl[j]))
         jobs[number] = Job(number, duration, tmp, [], [], 0)
         i = i + 1
     return jobs
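For reference, a tiny illustrative input (made-up values) and what the parser above derives from it, assuming ParseDurations is callable as shown (it appears to be defined inside a class):

lines = [
    "1  1  3  2 0 0",   # job 1: duration 3, trailing ints become the third Job argument
    "2  1  5  0 4 1",   # job 2: duration 5
    "***",              # terminator line
]
jobs = ParseDurations(lines, 0)
# jobs[0] == Job(0, 3, [2, 0, 0], [], [], 0)
# jobs[1] == Job(1, 5, [0, 4, 1], [], [], 0)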
Example No. 28
def test_job_complex_1():
    task1 = ComputingTask(20)
    task2 = TransferTask(1000000)
    job = Job([task1, task2])
    request1 = job.try_step()
    assert request1 == ResourceUsage({'cpu_usage': 20})
    result = job.do_step(usage_response=ResourceUsage({'cpu_usage': 20}))
    assert result is False
    request2 = job.try_step()
    assert request2 == ResourceUsage({'network': 1000000})
    result = job.do_step(usage_response=ResourceUsage({'network': 1000000}))
    assert result is True
    request3 = job.try_step()
    assert request3 == ResourceUsage()
Example No. 29
def job_builder(pending_jobs_list, order, job_index, location_dict,
                completion_cb):
    """
    Job Builder
    Takes in an order consisting of a keyword, a priority and arguments, and creates a Job class instance
    containing Task class instances depending on the keyword and arguments.
    Then inserts/appends the Job into the Pending Jobs list depending on the priority.
    """
    index = "00" + str(job_index) if job_index < 10 else (
        "0" + str(job_index) if job_index < 100 else str(job_index))
    keyword = order[0]
    priority = order[1]
    rough_job = Job("job" + index,
                    completion_cb=completion_cb,
                    priority=priority,
                    keyword=keyword)

    if keyword == OrderKeyword.TRANSPORT.name:
        # Transport order, consists of moving somewhere, getting loaded, moving somewhere, getting unloaded.
        from_loc = order[2]
        to_loc = order[3]
        rough_job.add_task(RobotMoveBase(location_dict[from_loc]))
        rough_job.add_task(AwaitingLoadCompletion())
        rough_job.add_task(RobotMoveBase(location_dict[to_loc]))
        rough_job.add_task(AwaitingUnloadCompletion())
    elif keyword == OrderKeyword.MOVE.name:
        # Move order, consists of moving somewhere.
        to_loc = order[2]
        rough_job.add_task(RobotMoveBase(location_dict[to_loc]))
    elif keyword == OrderKeyword.LOAD.name:
        # Load order on the spot.
        rough_job.add_task(AwaitingLoadCompletion())
    elif keyword == OrderKeyword.UNLOAD.name:
        # Unload order on the spot.
        rough_job.add_task(AwaitingUnloadCompletion())

    # Loop over the current list of Pending Jobs with index, find the last spot in the list within the same priority section.
    for index, job in enumerate(pending_jobs_list):
        priority = job.priority.value
        if priority < rough_job.priority.value:
            print(NAME + "Inserting Rough Job (" + rough_job.id +
                  ") at position " + str(index))
            pending_jobs_list.insert(index, rough_job)
            break
    else:  # Looped over all jobs in the pending_jobs_list and it wasn't inserted, so just append to the end.
        print(NAME + "Appending Rough Job (" + rough_job.id + ") to end")
        pending_jobs_list.append(rough_job)

    return job_index + 1
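A hedged usage sketch; the priority enum, location names and callback below are illustrative stand-ins for whatever the surrounding code defines:

# Hypothetical order: (keyword, priority, from_location, to_location)
order = (OrderKeyword.TRANSPORT.name, Priority.HIGH, 'dock_a', 'shelf_3')
pending_jobs = []
next_index = job_builder(pending_jobs, order, 1, location_dict, on_job_done)
# pending_jobs now holds one Job ("job001") carrying four queued Tasks.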
Example No. 30
 def getPossibleChanges(self, job, overlaps):
     output = []
     for overlappingJob in overlaps:
         startSet = []
         endSet = []
         if overlappingJob.canStart + job.duration <= job.canEnd:
             mockStart = Job()
             mockStart.canStart = overlappingJob.canStart
             mockStart.canEnd = overlappingJob.canStart + job.duration
             for subJob in overlaps:
                 if -1 <= checkOverlap(mockStart, subJob) <= 1:
                     startSet.append(subJob)
         if overlappingJob.canEnd + job.duration <= job.canEnd:
             mockEnd = Job()
             mockEnd.canStart = overlappingJob.canEnd
             mockEnd.canEnd = overlappingJob.canEnd + job.duration
             for subJob in overlaps:
                 if -1 <= checkOverlap(mockEnd, subJob) <= 1:
                     endSet.append(subJob)
         if len(startSet) > 0 and not any(startSet == s for s, _ in output):
             output.append((startSet, overlappingJob.canStart))
         if len(endSet) > 0 and not any(endSet == s for s, _ in output):
             output.append((endSet, overlappingJob.canEnd))
     return output