Example #1
def handle_output(self, job):
    """Download every output file registered for a finished job."""
    files = []
    for f in job["output_files"]:
        # Fetch the remote output file to a local file of the same name
        outputfile = mig.get_file(f, f)
        log(self.logfile, "Retrieved output file for job " + job["id"],
            self.debug_mode)
        files.append(outputfile)
    return files
Example #2
def download_result(job):
    """Download each result file of a job that exists on the MiG server."""
    dl_files = []
    for f in job["result_files"]:
        # Only fetch files that were actually produced on the server
        if mig.path_exists(f):
            dl_files.append(mig.get_file(f))

    return dl_files
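For context, a minimal usage sketch; the job dictionary below is hypothetical and only illustrates the "result_files" field the function reads:

job = {"result_files": ["out.txt", "stats.txt"]}  # hypothetical job record
local_copies = download_result(job)
print "Downloaded :", local_copies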
Example #3
import os
import time

# "mig" is the MiG interface module, imported at the top of the full example script.


def main():
    """
    Executes the bash file test_executable.sh in a grid job.
    Afterwards, the result is downloaded and printed to screen.
    """

    # mig.debug_mode_on() # uncomment to enable debug print outs
    # mig.local_mode_on() # uncomment to enable local mode execution

    mig.test_connection()  # Check if we can connect to the MiG server

    # The program we want to execute on the grid
    executable_file = "test_executable.sh"
    # The shell command to execute on the grid resource
    cmd = "./test_executable.sh > out.txt"
    # Create and submit the grid job
    job_id = mig.create_job(cmd,
                            output_files=["out.txt"],
                            executables=[executable_file])
    print "\nJob (ID : %s) submitted. \n\n" % job_id

    # Wait for the job to finish while monitoring the status
    polling_frequency = 10  # seconds
    while not mig.job_finished(job_id):
        job_info = mig.job_info(job_id)  # get an info dictionary
        print 'Grid job : %(ID)s \t %(STATUS)s ' % job_info
        time.sleep(polling_frequency)  # wait a while before polling again

    # Download the result file and print its contents
    output_file = mig.get_file("out.txt")
    f = open(output_file)
    print "Output file (%s) contains :\n%s\n" % (output_file, f.read())
    f.close()

    # Clean up
    os.remove(output_file)  # remove locally
    mig.remove(output_file)  # remove on the MiG server
    print "Output (" + output_file + ") deleted."
Example #4
import os
import time

# "mig" is the MiG interface module, imported at the top of the full example script.


def main():
    """
    Run five grid jobs executing the bash file parameter_sweep_script.sh with different input arguments.
    When a job has finished executing, the corresponding output file is downloaded.
    Finally, the output contents are printed.
    """

    # mig.debug_mode_on() # uncomment to enable debug print outs
    # mig.local_mode_on() # uncomment to enable local mode execution
    mig.test_connection()  # Check if we can connect to the MiG server

    input_values = range(5)  # Input parameters
    # The program we want to execute on grid resources
    executable_file = "parameter_sweep_script.sh"

    print "\nStarting grid jobs:\n"

    jobs = []
    for i in input_values:  # Start a job for each input
        output_file = "output%s.txt" % i  # The output file name
        # The shell command to start the script on the resource
        cmd = "./parameter_sweep_script.sh %i > %s" % (i, output_file)
        # Run the job resources on any vgrid
        resource_requirements = {"VGRID": "ANY"}
        # Start the grid job
        job_id = mig.create_job(cmd,
                                output_files=[output_file],
                                executables=[executable_file],
                                resource_specifications=resource_requirements)
        jobs.append((job_id, output_file))
        print "Job (ID : %s) submitted." % job_id
    print "\n\n"

    print "Monitor job status...\n"  # Now we wait for results

    finished_jobs = []
    while len(finished_jobs) < len(jobs):
        for job_id, output_file in jobs:
            job_info = mig.job_info(job_id)  # get an info dictionary
            print 'Grid job : %(ID)s \t %(STATUS)s ' % job_info
            if mig.job_finished(job_id) and job_id not in finished_jobs:
                # Download the output file from the server
                mig.get_file(output_file)
                finished_jobs.append(job_id)
                # Clean up the result file on the server
                mig.remove(output_file)

        time.sleep(10)  # Wait a few seconds before trying again
        print "\n\n"

    print "All jobs finished."
    # Clean up the result files and print out the contents
    print "Cleaning up."
    output_lines = []
    for _, output_file in jobs:
        fh = open(output_file)
        output_lines.append(" ".join(fh.readlines()))
        fh.close()
        os.remove(output_file)
        print "Output file (" + output_file + ") deleted."

    print "\n\nOutput contents : \n\n%s\n" % "\n".join(output_lines)
Example #5
import os
import time

# "mig" is the MiG interface module, imported at the top of the full example script.


def main():
    """
    Find the edit distance between entries in a reference file. First, divide the file into smaller blocks
    and create a grid job for each. levenshtein.py is used to process each input block.
    When a job has finished executing, the corresponding output file is downloaded.
    """

    # mig.debug_mode_on() # uncomment to enable debug print outs
    # mig.local_mode_on() # uncomment to enable local mode execution

    reference_file = "ref1000.txt"
    # The reference blocks. One for each job we want to run.
    block_files = create_blocks(reference_file, block_size=200)

    # These are static input files for each job.
    levenshtein_files = [
        "Levenshtein_ucs4.so", "Levenshtein_ucs2.so", "levenshtein.py",
        "Levenshtein_i686.so"
    ]

    # We need Python on the resource and accept any VGrid
    resource_requirements = {"RUNTIMEENVIRONMENT": "PYTHON-2",
                             "VGRID": "ANY"}

    jobs = []

    # Start a grid job for each block file
    for block_file in block_files:
        # Name the output after the block file, e.g. block_0.txt -> block_0_output.txt
        output_file = os.path.splitext(block_file)[0] + "_output.txt"
        cmd = "$PYTHON levenshtein.py %s > %s" % (block_file, output_file)
        # Each job needs the Levenshtein modules plus its own input block
        input_files = levenshtein_files + [block_file]
        job_id = mig.create_job(cmd,
                                input_files=input_files,
                                output_files=[output_file],
                                resource_specifications=resource_requirements)
        jobs.append((job_id, output_file))
        print "started job %s" % job_id

    jobs_done = 0
    # now wait for the results
    while len(jobs):
        print "Checking job status. jobs done : %i." % jobs_done
        try:
            # Iterate over a copy since finished jobs are removed inside the loop
            for job_id, result_file in jobs[:]:
                print "Checking job %s." % job_id
                if mig.job_finished(job_id):
                    if not os.path.exists("output"):
                        os.mkdir("output")
                    mig.get_file(result_file, "output/" + result_file)
                    jobs.remove((job_id, result_file))
                    jobs_done += 1

                    print "Job done. Downloaded result file %s." % result_file

            time.sleep(10)  # wait a little before polling
        except KeyboardInterrupt:
            job_ids = [x[0] for x in jobs]
            mig.cancel_jobs(job_ids)
            print "Cancelled jobs."
            break
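create_blocks is defined elsewhere in the original script. A minimal sketch of what such a helper could look like, assuming the reference file is simply split into chunks of block_size lines; the body is illustrative, not the original implementation.

def create_blocks(reference_file, block_size):
    """Split reference_file into chunks of block_size lines (illustrative sketch).

    Writes each chunk to its own file and returns the list of block file names.
    """
    fh = open(reference_file)
    lines = fh.readlines()
    fh.close()
    block_files = []
    for i in range(0, len(lines), block_size):
        block_name = "block_%i.txt" % (i / block_size)
        out = open(block_name, "w")
        out.writelines(lines[i:i + block_size])
        out.close()
        block_files.append(block_name)
    return block_files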