def prepare_data(jobs):
    """Collate single-objective (SO) results for a set of jobs.

    jobs -- iterable of ``(job, variation)`` pairs; ``job.get_filename()``
            must locate a COPASI model file readable by ``CopasiModel``.

    Returns one string of JavaScript-style array rows, two per result
    (``['<name>_max',<variation>,<max_result>],`` then the ``_min`` row).
    """
    # Accumulate rows in a list and join once at the end: the original
    # repeated ``output += ...`` rebuilds the whole string on every
    # iteration (quadratic in the number of results).
    rows = []
    for job, variation in jobs:
        model = CopasiModel(job.get_filename())

        # NOTE(review): get_so_results() is defined elsewhere; assumed to
        # yield dicts with 'name', 'max_result' and 'min_result' keys, the
        # latter two already strings (they are concatenated, not str()'d).
        for result in model.get_so_results():
            variation_str = str(variation)
            rows.append("['" + result['name'] + "_max'," + variation_str +
                        "," + result['max_result'] + "],\n")
            rows.append("['" + result['name'] + "_min'," + variation_str +
                        "," + result['min_result'] + "],\n")

    return "".join(rows)
def prepare_data(jobs):
    """Build the collated SO result rows for every ``(job, variation)`` pair.

    Each result dict contributes two JavaScript-style rows -- one for its
    maximum and one for its minimum value -- of the form
    ``['<name>_max',<variation>,<max_result>],``.
    """
    chunks = []
    for job, variation in jobs:
        so_results = CopasiModel(job.get_filename()).get_so_results()
        variation_text = str(variation)
        for entry in so_results:
            # For each entry, emit the max row first, then the min row.
            for suffix, key in (("_max", "max_result"), ("_min", "min_result")):
                chunks.append("['" + entry['name'] + suffix + "'," +
                              variation_text + "," + entry[key] + "],\n")
    return "".join(chunks)
    #Collate the results

    # NOTE(review): everything below is unreachable -- it sits after the
    # ``return output`` of prepare_data() above, and the ``def`` line of
    # its real enclosing function is not visible in this chunk. It looks
    # like a mis-pasted fragment of a finished-job processing loop.

    #Get the list of jobs marked as finished, waiting for processing
    waiting = models.Job.objects.filter(status='W')
    for job in waiting:
        # NOTE(review): the next line is corrupted -- a logging call and an
        # ``if job.job_type == 'SO':`` test have been fused, with part of
        # the text replaced by '******' (apparently credential scrubbing).
        # As written it is a SyntaxError; restore from version control.
        logging.debug('Processing results for complete job ' + str(job.id) + ', User: '******'SO':
                #TODO: doesn't do validation step yet. This step should probably be condorised.
                #Mark the job as complete
                job.status='C'
                job.finish_time=datetime.datetime.today()
                job.last_update=datetime.datetime.today()

                # Persist the SO results to disk alongside the model.
                model.get_so_results(save=True)
                # Best-effort archive of the job directory; failure is
                # logged but does not block completion.
                try:
                    zip_up_dir(job)
                except:
                    logging.exception('Exception: could not zip up job directory for job ' + str(job.id))
                job.save()
                # Best-effort completion email; failure is only logged.
                try:
                    email_notify.send_email(job)
                except:
                    logging.exception('Exception: error sending email')
            elif job.job_type == 'SS':
                #Collate the results, and ship them off in a new condor job to be averaged
                #Use this to keep track of the number of jobs we split the task in to
                condor_jobs = models.CondorJob.objects.filter(parent=job)
                cj = model.prepare_ss_process_job(len(condor_jobs), job.runs, rank=job.rank)
                #Submit the new job to condor
    #Get the list of jobs marked as finished, waiting for processing
    # NOTE(review): this is an auto-formatted duplicate of the fragment
    # above -- also unreachable, also carrying the same '******'
    # corruption on the fused logging/if lines below, and cut off
    # mid-branch at the end of the visible file chunk. Remove one copy
    # once the original text is recovered from version control.
    waiting = models.Job.objects.filter(status='W')
    for job in waiting:
        # NOTE(review): the next two lines are corrupted -- a logging call
        # and an ``if job.job_type == 'SO':`` test were fused and part of
        # the text replaced by '******'. SyntaxError as written.
        logging.debug('Processing results for complete job ' + str(job.id) +
                      ', User: '******'SO':
                #TODO: doesn't do validation step yet. This step should probably be condorised.
                #Mark the job as complete
                job.status = 'C'
                job.finish_time = datetime.datetime.today()
                job.last_update = datetime.datetime.today()

                # Persist the SO results to disk alongside the model.
                model.get_so_results(save=True)
                # Best-effort archive of the job directory; failure is
                # logged but does not block completion.
                try:
                    zip_up_dir(job)
                except:
                    logging.exception(
                        'Exception: could not zip up job directory for job ' +
                        str(job.id))
                job.save()
                # Best-effort completion email; failure is only logged.
                try:
                    email_notify.send_email(job)
                except:
                    logging.exception('Exception: error sending email')
            elif job.job_type == 'SS':
                #Collate the results, and ship them off in a new condor job to be averaged
                #Use this to keep track of the number of jobs we split the task in to
                condor_jobs = models.CondorJob.objects.filter(parent=job)