Exemplo n.º 1
0
                # NOTE(review): fragment of a larger job-processing function — the
                # branch matching the `elif` below starts above this view.
                # Persist the finished job's results (save=True writes them out).
                model.get_so_results(save=True)
                # Best-effort: archive the job directory; failure is logged, not fatal.
                try:
                    zip_up_dir(job)
                except:
                    # NOTE(review): bare `except` also swallows SystemExit and
                    # KeyboardInterrupt — `except Exception` would be safer.
                    logging.exception('Exception: could not zip up job directory for job ' + str(job.id))
                job.save()
                # Best-effort: notify the user by email; failure is logged, not fatal.
                try:
                    email_notify.send_email(job)
                except:
                    logging.exception('Exception: error sending email')
            elif job.job_type == 'SS':
                # Collate the results, and ship them off in a new condor job to be averaged.
                # The number of existing child CondorJobs records how many pieces
                # the task was split into.
                condor_jobs = models.CondorJob.objects.filter(parent=job)
                cj = model.prepare_ss_process_job(len(condor_jobs), job.runs, rank=job.rank)
                # Submit the new processing job to condor.
                condor_job_id = condor_submit(cj['spec_file'], username=str(job.user.username), results=True)
                # And store a new condor job in the database, queued ('Q') under the
                # queue id returned by condor_submit.
                condor_job = models.CondorJob(parent=job, spec_file=cj['spec_file'], std_output_file=cj['std_output_file'], std_error_file = cj['std_error_file'], log_file=cj['log_file'], job_output=cj['job_output'], queue_status='Q', queue_id=condor_job_id)
                condor_job.save()
                job.status='X' # Set the job status as processing on condor

                # Update the running count of condor jobs spawned for this job.
                # NOTE(review): `== None` — PEP 8 prefers `is None` (same result here).
                if job.condor_jobs == None:
                    job.condor_jobs = 1
                else:
                    job.condor_jobs += 1

                job.last_update=datetime.datetime.today()
                job.save()
Exemplo n.º 2
0
         # NOTE(review): fragment — this line sits inside a `try:` whose opening
         # statement is above this view. Best-effort archive of the job directory;
         # failure is logged, not fatal.
         zip_up_dir(job)
     except:
         # NOTE(review): bare `except` also swallows SystemExit and
         # KeyboardInterrupt — `except Exception` would be safer.
         logging.exception(
             'Exception: could not zip up job directory for job ' +
             str(job.id))
     job.save()
     # Best-effort: notify the user by email; failure is logged, not fatal.
     try:
         email_notify.send_email(job)
     except:
         logging.exception('Exception: error sending email')
 elif job.job_type == 'SS':
     # Collate the results, and ship them off in a new condor job to be averaged.
     # The number of existing child CondorJobs records how many pieces the task
     # was split into.
     condor_jobs = models.CondorJob.objects.filter(parent=job)
     cj = model.prepare_ss_process_job(len(condor_jobs),
                                       job.runs,
                                       rank=job.rank)
     # Submit the new processing job to condor.
     condor_job_id = condor_submit(cj['spec_file'],
                                   username=str(job.user.username),
                                   results=True)
     # And store a new condor job in the database, queued ('Q') under the
     # queue id returned by condor_submit.
     condor_job = models.CondorJob(
         parent=job,
         spec_file=cj['spec_file'],
         std_output_file=cj['std_output_file'],
         std_error_file=cj['std_error_file'],
         log_file=cj['log_file'],
         job_output=cj['job_output'],
         queue_status='Q',
         queue_id=condor_job_id)