def test_proper_simple_input(self):
    """
    Scheduler.generate_scripts for proper simple input
    :return:
    """
    job = Job('basic_valid.json')
    # TODO: stub out Job for this test instead of trusting the validator
    G.VALIDATOR.validate(job)

    ok_(self.scheduler.generate_scripts(job), "Generating script")

    # All generated artifacts live under the job's work directory.
    job_dir = os.path.join(self.scheduler.work_path, job.id())
    check_file(os.path.join(job_dir, 'pbs.sh'),
               ["#!/bin/sh",
                "one 30",
                "100 two",
                "nanana -20.1 nanana",
                "99.2",
                "[1.2, 2.1, 77.4]"])
    check_file(os.path.join(job_dir, 'subdir', 'bla.txt'),
               ["-- 1.2",
                "-- 2.1",
                "-- 77.4"])
    check_file(os.path.join(job_dir, 'input.txt'),
               ["1.2 m",
                "2.1 m",
                "77.4 m"])
Beispiel #2
0
 def __init__(self, file_id, commit_id, path, rev):
     """Initialize the base Job and record the file/commit being analyzed."""
     Job.__init__(self)
     # Location of the file under analysis.
     self.path = path
     self.rev = rev
     # Identifiers tying this job back to its file and commit records.
     self.file_id = file_id
     self.commit_id = commit_id
     # No authors known at construction time; presumably filled in later —
     # TODO confirm against the code that consumes this job.
     self.authors = None
    def calculate_times(self, queue, num_jobs, cpu_slice):
        """
        Run the priority scheduling algorithm over *queue*.

        Pops jobs from *queue* (highest priority first), charges CPU time in
        units of *cpu_slice*, and records waiting / completion / turnaround
        times on each job via the Job setters.

        :param queue: queue of jobs, sorted by priority (popped from the end).
        :param num_jobs: number of jobs being scheduled.
        :param cpu_slice: CPU time slice a job runs before being re-charged.
        :return: Priority.list_priority, the completed jobs in finish order.
        """
        # Total CPU time consumed so far; doubles as the finishing job's
        # completion timestamp.
        completionTime = 0
        # NOTE(review): the loop condition reads Priority.queue while jobs are
        # popped from the *queue* parameter — these must alias the same
        # container for the loop to terminate. Verify at the call site.
        while len(Priority.queue) > 0:
            running = queue.pop()
            print("JobId, Exec time, Priority", running.JobId,
                  running.execution_time, running.priority)
            # Time spent on earlier jobs is this job's waiting time.
            Job.set_waiting_time(running, completionTime)
            job_burst = Job.get_execution_time(running)
            while job_burst > 0:
                job_burst = job_burst - cpu_slice
                if job_burst < 0:
                    # Job finished mid-slice: charge only the time actually
                    # used. (Bug fix: cpu_slice is no longer overwritten with
                    # the unused remainder, which corrupted the slice size for
                    # every subsequent job.)
                    completionTime = completionTime + job_burst + cpu_slice
                else:
                    completionTime = completionTime + cpu_slice
            Job.set_completion_time(running, completionTime)
            # Turnaround is recorded as equal to the completion time.
            Job.set_turnaround_time(running, Job.get_completion_time(running))
            Priority.list_priority.append(running)

            print("Completion time", running.completion_time)
            print("Turnaround time", running.turnaroundTime)
            print("Waiting ", running.waiting_time)
        # NOTE(review): completionTime / num_jobs is an *average completion
        # time*, not jobs-per-unit-time; the CFS variant elsewhere computes
        # num_jobs / completionTime. TODO reconcile the two definitions.
        throughput = completionTime / num_jobs
        print(" Throughput ", completionTime, num_jobs, throughput)
        total_TurnarounTime = completionTime / num_jobs
        print("Average Turn around time ", total_TurnarounTime)
        return Priority.list_priority
Beispiel #4
0
 def dive(self, *paths):
     """
     Recursively queue *paths* for archiving.

     For each directory: every non-ignored regular file is added to the
     archive as one batch (priority -1), and any subdirectories are enqueued
     as a follow-up dive job (priority -2).

     :param paths: directory paths to scan.
     """
     for path in paths:
         entries = os.listdir(path)
         fileList = []
         dirList = []
         for entry in entries:
             entry = os.path.join(path, entry)
             if os.path.isfile(entry) and not self.archive.isIgnored(entry):
                 fileList.append(entry)
             if os.path.isdir(entry):
                 dirList.append(entry)
         # Idiomatic truthiness instead of the original "!= []" comparisons.
         if fileList:
             self.archive.addFiles(*fileList).setPriority(-1)
         if dirList:
             j = Job(self.dive, args=dirList)
             j.setPriority(-2)
             self.archive.jobs.enqueue(j)
 def test_proper_input_object_variable(self):
     """
     Scheduler.generate_scripts for proper simple input with object variable
     :return:
     """
     job = Job('test_valid_job.json')
     # TODO: stub out Job for this test instead of trusting the validator
     G.VALIDATOR.validate(job)
     ok_(self.scheduler.generate_scripts(job), "Generating script")
     # Lines the rendered pbs.sh script is expected to contain.
     expected_lines = ["#!/bin/sh",
                       "2.3",
                       "20150317 135200",
                       "34:",
                       "    B: 21 ?",
                       "    B: 30 ?",
                       "    B: 41 ?"]
     script_path = os.path.join(self.scheduler.work_path, job.id(), 'pbs.sh')
     check_file(script_path, expected_lines)
Beispiel #6
0
def executeJob(nearestJobName, nearestJob):
    """
    Run the named job if it is due (remainingSec == 0).

    Runs in a worker thread, so a fresh asyncio event loop is created for
    this thread, used to drive Job.executeJob, and always closed afterwards
    (bug fix: the original leaked the loop).

    :param nearestJobName: key of the job inside *nearestJob*.
    :param nearestJob: mapping of job name -> job metadata, including a
        "remainingSec" countdown field.
    """
    try:
        if int(nearestJob[nearestJobName]["remainingSec"]) == 0:
            log(nearestJobName + " Job Execution has been started!!")
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                loop.run_until_complete(Job.executeJob(nearestJobName))
            finally:
                # Always release the loop's resources, even if the job fails.
                loop.close()
    except Exception as e:
        # Best-effort boundary: a worker thread must never propagate.
        log(" Warning: Scheduler is free- No job for this instance" + str(e))
Beispiel #7
0
async def check_pending_notifications():
    """
    Scan the job queue and launch every job that is due.

    Rebuilds the queue via Job.prepareJobQueue, reads it back from
    __jobQueue__, and for each job whose "remainingSec" is 0 starts a daemon
    thread running executeJob so slow jobs do not block this coroutine.
    (Dead commented-out experimentation code from earlier versions removed.)
    """
    try:
        Job.prepareJobQueue()
        readyQueue = Json_evaluation.readJSON(filename=__jobQueue__)
        # Iterate items() once instead of re-indexing the dict per key.
        for nearestJobName, jobInfo in readyQueue.items():
            if jobInfo['remainingSec'] == 0:
                jobThread = threading.Thread(target=executeJob,
                                             args=(
                                                 nearestJobName,
                                                 readyQueue,
                                             ))
                # Daemon: don't keep the process alive for in-flight jobs.
                jobThread.daemon = True
                jobThread.start()
    except Exception as e:
        # Best-effort scheduler tick: report and carry on.
        print("#Warning: No Job  is Pending." + str(e))
Beispiel #8
0
 def __init__(self, hunks, path, rev):
     """Initialize the base Job and remember the hunks to inspect."""
     Job.__init__(self)
     # File location the hunks belong to.
     self.path = path
     self.rev = rev
     self.hunks = hunks
     # Empty at construction; presumably populated during processing —
     # TODO confirm against the consuming code.
     self.bug_revs = {}
Beispiel #9
0
    def calculate_times(self, queue, num_jobs, cpu_slice):
        """
        Run the priority scheduling algorithm and record per-job times.

        Pops jobs from *queue* in priority order, charges CPU time in units
        of *cpu_slice*, and stores waiting, completion and turnaround times
        on each job. Finally reports throughput and the average turnaround.

        :param queue: queue containing jobs, sorted according to priorities.
        :param num_jobs: number of jobs.
        :param cpu_slice: cpu time slice for which jobs can run.
        :return: Priority.list_priority, the completed jobs in finish order.
        """
        # Running total of CPU time consumed; also serves as the finishing
        # job's completion timestamp.
        completionTime = 0

        # NOTE(review): the loop condition inspects Priority.queue while jobs
        # are popped from the *queue* argument; both must refer to the same
        # container — verify at the call site.
        while len(
                Priority.queue) > 0:  # Run till the queue of jobs is not empty
            running = queue.pop()
            print("JobId:", running.JobId, "Execution time:",
                  running.execution_time, "Priority: ", running.priority)
            # Time already spent on earlier jobs is this job's waiting time.
            Job.set_waiting_time(running, completionTime)
            job_burst = Job.get_execution_time(running)
            while job_burst > 0:  # Run till execution time is still remaining
                job_burst = job_burst - cpu_slice
                if job_burst < 0:
                    # Finished mid-slice: charge only the portion used.
                    # Bug fix: cpu_slice is left untouched; previously it was
                    # overwritten with the unused remainder, shrinking the
                    # slice for every subsequent job.
                    completionTime = completionTime + job_burst + cpu_slice
                else:
                    completionTime = completionTime + cpu_slice

            # set completion time of running job
            Job.set_completion_time(running, completionTime)

            # turnaround is recorded as equal to the completion time
            Job.set_turnaround_time(running, Job.get_completion_time(running))

            # append the completed job to the result list
            Priority.list_priority.append(running)

            print("Completion time", running.completion_time)
            print("Turnaround time", running.turnaroundTime)
            print("Waiting ", running.waiting_time)
            print(
                "-----------------------------------------------------------------"
            )

        # throughput = jobs completed per unit of CPU time
        Priority.priority_throughput = num_jobs / completionTime
        print("Priority Completion time:", completionTime)
        print(" Throughput ", Priority.priority_throughput)

        # average turnaround time across all jobs
        total_TurnarounTime = completionTime / num_jobs
        print("Average Turn around time ", total_TurnarounTime)
        print(
            "-----------------------------------------------------------------"
        )
        return Priority.list_priority
Beispiel #10
0
 def __init__(self, hunks, path, rev):
     """Set up a job that will examine *hunks* of *path* at revision *rev*."""
     Job.__init__(self)
     # Mapping is empty until the job runs; presumably filled during
     # processing — TODO confirm.
     self.bug_revs = {}
     self.hunks = hunks
     self.path = path
     self.rev = rev
    def calculate_times(self, queue, num_jobs, cpu_slice):
        """
        This definition is responsible to run the CFS scheduling algorithm and
        calculate waiting time, completion time, turnaround time for each process and
        total turnaround time and throughput.
        :param queue: queue containing jobs, sorted according to execution.
        num_jobs: number of jobs
        cpu_slice: cpu time slice for which jobs can run.
        :return: CFS.cfs_jobs, the list of completed jobs.
        """
        completion_time = 0.0
        # Jobs still competing for CPU; the fair share grows as jobs finish.
        number_of_jobs = num_jobs
        # NOTE(review): the loop condition checks *queue* while jobs are
        # popped from CFS.queue — both must alias the same deque; verify at
        # the call site.
        while len(queue) > 0:
            finished = 0  # jobs that completed during this round
            # Each remaining job receives an equal share of the slice.
            temp_slice = cpu_slice / number_of_jobs
            for i in range(number_of_jobs):
                running = CFS.queue.pop()

                running.execution_time = running.execution_time - temp_slice
                running.cpu_burst = running.cpu_burst + temp_slice
                if running.execution_time > 0:
                    # Not done yet: rotate back to the far end of the queue.
                    CFS.queue.appendleft(running)
                    completion_time = completion_time + temp_slice
                elif running.execution_time <= 0:
                    finished = finished + 1
                    # Charge only the time actually used from this share.
                    completion_time = completion_time + (
                        temp_slice - abs(running.execution_time))

                    # set completion time of running job
                    Job.set_completion_time(running, completion_time)

                    # set turnaround time of running job
                    Job.set_turnaround_time(running,
                                            Job.get_completion_time(running))

                    # waiting time = completion time minus CPU actually used
                    waitingTime = Job.get_completion_time(
                        running) - running.cpu_burst

                    # set waiting time of running job
                    Job.set_waiting_time(running, waitingTime)

                    CFS.cfs_jobs.append(running)

                    print("JobId:", running.JobId)
                    print("Completion time", running.completion_time)
                    print("Turnaround time",
                          running.completion_time - running.arrival_time)
                    print("Waiting ", running.waiting_time)
                    print(
                        "-----------------------------------------------------------------"
                    )

            if finished > 0:
                number_of_jobs = number_of_jobs - finished

        print("CFS Completion time:", completion_time)

        CFS.cfs_throughput = num_jobs / completion_time
        # Bug fix: read the class attribute assigned on the line above
        # instead of self.cfs_throughput, which fails when this method is
        # invoked unbound (as the Job./Priority. call style suggests).
        print("Throughput ", CFS.cfs_throughput)

        # calculate total turnaround time
        total_TurnarounTime = completion_time / num_jobs

        print("Average Turn around time ", total_TurnarounTime)
        print(
            "-----------------------------------------------------------------"
        )

        return CFS.cfs_jobs
Beispiel #12
0
def submitForm(formName, masterName):
    """
    Persist a submitted admin form (job, connection, parameter, email, step).

    The form payload is a single key holding a JSON document; it is parsed,
    re-keyed by the value of *masterName*, and dispatched on *formName* to
    the matching persistence call.

    :param formName: which entity/action the form represents (e.g. 'Jobs',
        'JobUpdate', 'newCon', 'newStep', 'assigneParam', ...).
    :param masterName: field inside the payload whose value becomes the
        top-level storage key.
    :return: a status message string; "<h2>Success</h2>" as a fallback.
    """
    if request.method == 'POST':
        result_dict = request.form.to_dict(flat=True)
        # The entire JSON document arrives as the form *key* itself; the loop
        # keeps the last key seen (callers apparently post exactly one key).
        # NOTE(review): 'data' stays unbound if the form is empty — confirm
        # every caller posts at least one key.
        for k in result_dict.keys():
            data = k.replace("'", "\"")
        data = json.loads(data)
        # Re-key the payload by its master field (e.g. the job name).
        masterKey = data[masterName]
        data = {masterKey: data}
        try:
            if formName == 'Jobs':
                Job.setJob(data,
                           isNew=1,
                           path=__path__,
                           histoyPath=__historyPath__,
                           logPath=__logPath__)
                return "Job Added Successfully!!"
            elif formName == 'JobUpdate':
                Job.setJob(data,
                           isNew=0,
                           path=__path__,
                           histoyPath=__historyPath__,
                           logPath=__logPath__)
                return "Job updated Successfully!!"
            elif formName == 'newCon':
                insertConnection(data, path=__path__, logPath=__logPath__)
                return "Connection Created Successfully!!"
            elif formName == 'updateCon':
                insertConnection(data, path=__path__, logPath=__logPath__)
                return "Connection updated Successfully!!"
            elif formName == 'newParam':
                Parameter.addParam(data, path=__path__, logPath=__logPath__)
                return "Parameter created Successfully!!"
            elif formName == 'updateParam':
                Parameter.addParam(data, path=__path__, logPath=__logPath__)
                return "Parameter updated Successfully!!"
            elif formName == 'newEmail':
                Email.addSmpt(data, path=__path__, logPath=__logPath__)
                return "Email created Successfully!!"
            elif formName == 'updateEmail':
                Email.addSmpt(data, path=__path__, logPath=__logPath__)
                return "Email updated Successfully!!"
            elif formName == 'newStep':

                # Steps are stored under a compound "<jobName>|<stepName>" key.
                jobName = data[masterKey]["jobName"]
                data = {
                    str(data[masterKey]["jobName"] + "|" + data[masterKey]['stepName']):
                    data[masterKey]
                }
                Job.addStep(str(jobName),
                            data,
                            path=__path__,
                            logPath=__logPath__)
                return "Step Created Successfully under " + jobName + " !!"
            elif formName == 'manageStep':

                # Same compound key scheme as 'newStep'.
                jobName = data[masterKey]["jobName"]
                data = {
                    str(data[masterKey]["jobName"] + "|" + data[masterKey]['stepName']):
                    data[masterKey]
                }
                #print(data)
                Job.addStep(str(jobName),
                            data,
                            path=__path__,
                            logPath=__logPath__)
                return "Step Updated Successfully under " + jobName + " !!"
            elif formName == 'assigneParam':
                # Append (or set) a "name+option" entry on the step's
                # '|'-separated parameter string.
                stepData = Json_evaluation.getJsonByKey(key=masterKey,
                                                        filename=__stepsFile__,
                                                        path=__path__)
                print(stepData)
                if str(stepData["parameter"]) == "":
                    stepData["parameter"] = str(
                        data[masterKey]['paramName']) + str(
                            data[masterKey]['paramOption'])
                else:
                    stepData["parameter"] += "|" + str(
                        data[masterKey]['paramName']) + str(
                            data[masterKey]['paramOption'])
                stepData = {masterKey: stepData}
                Json_evaluation.updateJson(dict=stepData,
                                           filename=__stepsFile__,
                                           path=__path__)
                return "Parameter Assined Successfully!!"
        except Exception as e:
            # NOTE(review): failures only print; execution falls through to
            # the generic success page below, so errors report "Success".
            print(str(e))
        return "<h2>Success</h2>"
Beispiel #13
0
def getStepList(jobName):
    """Return the steps belonging to *jobName* as JSON; (message, 500) on error."""
    try:
        steps = Job.getStepsByJob(jobName, path=__path__)
        return jsonify(steps)
    except Exception as e:
        return str(e), 500
Beispiel #14
0
def getActiveJobs():
    """Return every active job as a JSON response; (message, 500) on error."""
    try:
        active = Job.getActiveJobs(path=__path__)
        return jsonify(active)
    except Exception as e:
        return str(e), 500