Example #1
0
def _evaluate_model(data_form, layers_form, html_data, fit=True, user=None, run_info=None):
    """
        Submit a Refl1d fitting job.

        The fit problem is saved first, a fitting script is generated from the
        forms, and the script is submitted asynchronously through celery.

        :param data_form: validated reflectivity data form
        :param layers_form: validated layers formset
        :param html_data: HTML div from which the ASCII data is extracted
        :param fit: if True, fit the data; otherwise only evaluate the model
        :param user: Django user submitting the job. NOTE(review): despite the
                     None default, `user.username` is used unconditionally below,
                     so a real user appears to be required — confirm with callers.
        :param run_info: optional dict of run metadata; the 'proposal' entry
                         selects the output sub-directory
        :return: dict with the remote job id under 'job_id'
    """
    # Save the model first so constraints can be collected for it
    fit_problem = save_fit_problem(data_form, layers_form, None, user)
    constraint_list = Constraint.objects.filter(fit_problem=fit_problem)

    # Use only the file-name part of the data path for the output directory.
    # Narrowed from a bare except: only unexpected path values are guarded.
    try:
        base_name = os.path.split(data_form.cleaned_data['data_path'])[1]
    except Exception:
        base_name = data_form.cleaned_data['data_path']

    fit_dir = 'reflectivity_fits'
    if run_info is not None and 'proposal' in run_info:
        fit_dir = os.path.join(fit_dir, run_info['proposal'])

    ascii_data = extract_ascii_from_div(html_data)
    work_dir = os.path.join(settings.REFL1D_JOB_DIR, user.username)
    output_dir = os.path.join(work_dir, fit_dir, base_name)

    # Get fitter options for this user (created with defaults if missing)
    options = {}
    if user is not None:
        obj, _ = FitterOptions.objects.get_or_create(user=user)
        options = obj.get_dict()

    template = 'reflectivity_model.py.template'
    script = job_handling.create_model_file(data_form, layers_form, template=template,
                                            data_file=os.path.join(work_dir, '__data.txt'), ascii_data=ascii_data,
                                            output_dir=output_dir, fit=fit, options=options, constraints=constraint_list)

    server = Server.objects.get_or_create(title='Analysis', hostname=settings.JOB_HANDLING_HOST, port=settings.JOB_HANDLING_PORT)[0]

    # When the analysis host is local, run with the current interpreter
    python_path = settings.JOB_HANDLING_INTERPRETER
    if settings.JOB_HANDLING_HOST == 'localhost':
        python_path = sys.executable
    python2_interpreter = Interpreter.objects.get_or_create(name='python2', path=python_path)[0]
    server.interpreters.set([python2_interpreter])

    job = Job.objects.get_or_create(title=data_form.cleaned_data['data_path'],
                                    program=script,
                                    remote_directory=work_dir,
                                    remote_filename='fit_job.py',
                                    owner=user,
                                    interpreter=python2_interpreter,
                                    server=server)[0]
    submit_job_to_server.delay(
        job_pk=job.pk,
        password='',
        username=user.username,
        log_policy=LogPolicy.LOG_TOTAL,
        store_results='',
        # Clearer than `not settings.JOB_HANDLING_HOST == 'localhost'`
        remote=settings.JOB_HANDLING_HOST != 'localhost'
    )

    # Link the submitted remote job back to the fit problem
    fit_problem.remote_job = job
    fit_problem.save()
    return {'job_id': job.pk}
Example #2
0
    def post(self, request, *args, **kwargs):
        """
        Create and submit an example job to the configured example server.

        Builds (or reuses) the example interpreter and server records,
        generates a small demo program that writes ten files one second
        apart, then submits it asynchronously through celery.

        :return: HttpResponse containing 'success' once the job is queued
        """
        (interpreter, _) = Interpreter.objects.get_or_create(
            name='Python',
            path=settings.EXAMPLE_PYTHON_PATH,
            arguments=settings.EXAMPLE_PYTHON_ARGUMENTS,
        )

        (server, _) = Server.objects.get_or_create(
            title='Example Server',
            hostname=settings.EXAMPLE_SERVER_HOSTNAME,
            port=settings.EXAMPLE_SERVER_PORT,
        )

        logger.debug("Running job in {} using {}".format(server, interpreter))

        # Count jobs in the database; .count() issues a COUNT query instead
        # of fetching every row just to measure the queryset length.
        num_jobs = Job.objects.count()

        program = textwrap.dedent('''\
        from __future__ import print_function
        import time
        for i in range(10):
            with open('django_remote_submission_example_out_{}.txt'.format(i), 'wt') as f:
                print('Line {}'.format(i), file=f)
                print('Line {}'.format(i))
            time.sleep(1)
        ''')

        (job, _) = Job.objects.get_or_create(
            title='Example Job #{}'.format(num_jobs),
            program=program,
            remote_directory=settings.EXAMPLE_REMOTE_DIRECTORY,
            remote_filename=settings.EXAMPLE_REMOTE_FILENAME,
            owner=request.user,
            server=server,
            interpreter=interpreter,
        )

        submit_job_to_server.delay(
            job_pk=job.pk,
            password=settings.EXAMPLE_REMOTE_PASSWORD,
            username=settings.EXAMPLE_REMOTE_USER,
        )

        return HttpResponse('success')
Example #3
0
# Register both interpreters as available on this server.
server.interpreters.set([python2_interpreter, python3_interpreter])

# get_or_create returns an (instance, created) tuple; [0] keeps the Job.
job = Job.objects.get_or_create(
    title='My Job Title',
    program='print("hello world")',
    remote_directory='/tmp/',
    remote_filename='test.py',
    owner=request.user,
    server=server,
    interpreter=python2_interpreter,
)[0]

# Using delay calls celery:
modified_files = submit_job_to_server.delay(
    job_pk=job.pk,
    password=request.POST.get('password'),
    remote=False,
)

# Install the local SSH public key on the remote host (enables
# password-less submission on later runs).
copy_key_to_server(
    username=env.remote_user,
    password=env.remote_password,
    hostname=env.server_hostname,
    port=env.server_port,
    public_key_filename=None,  # finds it automatically
)

# Remove the previously installed key from the remote host.
# NOTE(review): this call is truncated in the source — the closing
# parenthesis (and possibly further arguments) are missing.
delete_key_from_server(
    username=env.remote_user,
    password=env.remote_password,
    hostname=env.server_hostname,
Example #4
0
def evaluate_simultaneous_fit(request, instrument, data_id, run_info):
    """
        Assemble all the information for co-refinement and submit the job.

        Builds a single fitting script from the parent fit problem, every
        appended (dependent) data set, and the user's simultaneous
        constraints, then submits it to the analysis server via celery.

        :param request: Django request; request.user owns the job
        :param instrument: instrument name of the parent data set
        :param data_id: run identifier of the parent data set
        :param run_info: optional dict of run metadata; the 'proposal' entry
                         selects the output sub-directory
        :return: dict with 'job_id' and a list of accumulated 'error_list'
    """
    error_list = []
    data_path, fit_problem = get_fit_problem(request, instrument, data_id)

    # Decide on an output directory: use only the file-name part of the path.
    # Narrowed from a bare except: only unexpected path values are guarded.
    try:
        base_name = os.path.split(data_path)[1]
    except Exception:
        base_name = data_path
    if run_info is not None and 'proposal' in run_info:
        fit_dir = os.path.join('reflectivity_fits', run_info['proposal'],
                               'simultaneous')
    else:
        fit_dir = os.path.join('reflectivity_fits', 'simultaneous')

    work_dir = os.path.join(settings.REFL1D_JOB_DIR, request.user.username)
    output_dir = os.path.join(work_dir, fit_dir, base_name)

    # Get fitter options for this user (created with defaults if missing)
    obj, _ = FitterOptions.objects.get_or_create(user=request.user)
    options = obj.get_dict()

    data_files = []
    expt_names = []
    data_ids = []

    # Process the parent data set
    script_models = "\n# run %s/%s #############################################################\n" % (
        instrument, data_id)
    script_part, data, expt_name, errors = _process_fit_problem(
        fit_problem, instrument, data_id, options, work_dir, output_dir)
    script_models += script_part
    data_files.append(data)
    expt_names.append(expt_name)
    error_list.extend(errors)
    data_ids.append(data_path)

    # Then the data sets appended to the parent data set
    #TODO: check is_active
    for item in SimultaneousModel.objects.filter(fit_problem=fit_problem):
        instrument_, data_id_ = parse_data_path(item.dependent_data)
        _, extra_fit = get_fit_problem(request, instrument_, data_id_)
        script_models += "\n# run %s/%s #############################################################\n" % (
            instrument_, data_id_)
        script_part, data, expt_name, errors = _process_fit_problem(
            extra_fit, instrument_, data_id_, options, work_dir, output_dir)
        script_models += script_part
        data_files.append(data)
        expt_names.append(expt_name)
        error_list.extend(errors)
        data_ids.append(item.dependent_data)

    # Now the constraints
    script_models += "\n# Constraints ##################################################################\n"
    for item in SimultaneousConstraint.objects.filter(fit_problem=fit_problem,
                                                      user=request.user):
        script_models += item.get_constraint(sample_name='sample') + '\n'

    data_script = job_handling.assemble_data_setup(data_files)
    job_script = job_handling.assemble_job(script_models, data_script,
                                           expt_names, data_ids, options,
                                           work_dir, output_dir)

    # Submit job
    server = Server.objects.get_or_create(title='Analysis',
                                          hostname=settings.JOB_HANDLING_HOST,
                                          port=settings.JOB_HANDLING_PORT)[0]

    # When the analysis host is local, run with the current interpreter
    python_path = settings.JOB_HANDLING_INTERPRETER
    if settings.JOB_HANDLING_HOST == 'localhost':
        python_path = sys.executable
    python2_interpreter = Interpreter.objects.get_or_create(
        name='python2', path=python_path)[0]
    server.interpreters.set([python2_interpreter])

    job = Job.objects.get_or_create(title=data_path,
                                    program=job_script,
                                    remote_directory=work_dir,
                                    remote_filename='fit_job.py',
                                    owner=request.user,
                                    interpreter=python2_interpreter,
                                    server=server)[0]
    submit_job_to_server.delay(
        job_pk=job.pk,
        password='',
        username=request.user.username,
        log_policy=LogPolicy.LOG_TOTAL,
        store_results='',
        # Clearer than `not settings.JOB_HANDLING_HOST == 'localhost'`
        remote=settings.JOB_HANDLING_HOST != 'localhost')

    # Link the submitted remote job back to the simultaneous fit record
    simul_fit, _ = SimultaneousFit.objects.get_or_create(
        user=request.user, fit_problem=fit_problem)
    simul_fit.remote_job = job
    simul_fit.save()

    return dict(job_id=job.pk, error_list=error_list)