Example 1
def job_publish(request, org_id, record_group_id, job_id):
    LOGGER.debug(request.POST)

    # capture entered publish set id
    publish_set_id = request.POST.get('publish_set_id', None)

    # override with pre-existing publish set id, if selected
    if request.POST.get('existing_publish_set_id', None) is not None:
        publish_set_id = request.POST.get('existing_publish_set_id')

    # get published subsets to include in
    published_subsets = request.POST.getlist('published_subsets', [])

    # get CombineJob
    cjob = CombineJob.get_combine_job(job_id)

    # init publish
    cjob.publish_bg_task(
        publish_set_id=publish_set_id,
        in_published_subsets=published_subsets)

    # set gms
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<p><strong>Publishing Job:</strong><br>%s<br><br><strong>Publish Set ID:</strong><br>%s</p><p><a href="%s"><button type="button" class="btn btn-outline-primary btn-sm">View Published Records</button></a></p>' % (
            cjob.job.name, publish_set_id, reverse('published')),
        'class': 'success'
    })

    return redirect('record_group',
                    org_id=cjob.job.record_group.organization.id,
                    record_group_id=cjob.job.record_group.id)
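A minimal sketch of exercising this view from a test case, modeled on the gm_delete test in Example 9; the URL path, the IDs, and the publish set value are assumptions, not taken from the project's routing:

def test_job_publish(self):
    # hypothetical route for job_publish; adjust to the project's URL conf
    response = self.client.post(
        '/combine/organization/1/record_group/1/job/1/publish',
        {'publish_set_id': 'test_publish_set'})
    # the view redirects to the record_group page on success
    self.assertEqual(response.status_code, 302)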
Example 2
def clone_jobs(request):
    LOGGER.debug('cloning jobs')

    job_ids = request.POST.getlist('job_ids[]')

    # get downstream toggle
    downstream_toggle = request.POST.get('downstream_clone_toggle', False)
    if downstream_toggle == 'true':
        downstream_toggle = True
    elif downstream_toggle == 'false':
        downstream_toggle = False

    # get rerun toggle
    rerun_on_clone = request.POST.get('rerun_on_clone', False)
    if rerun_on_clone == 'true':
        rerun_on_clone = True
    elif rerun_on_clone == 'false':
        rerun_on_clone = False

    # set of jobs to clone
    job_clone_set = set()

    # loop through job_ids and add
    for job_id in job_ids:
        cjob = CombineJob.get_combine_job(job_id)
        job_clone_set.add(cjob.job)

    # sort and run
    ordered_job_clone_set = sorted(list(job_clone_set), key=lambda j: j.id)

    # initiate Combine BG Task
    combine_task = CombineBackgroundTask(
        name="Clone Jobs",
        task_type='clone_jobs',
        task_params_json=json.dumps({
            'ordered_job_clone_set': [j.id for j in ordered_job_clone_set],
            'downstream_toggle': downstream_toggle,
            'rerun_on_clone': rerun_on_clone
        })
    )
    combine_task.save()

    # run celery task
    bg_task = tasks.clone_jobs.delay(combine_task.id)
    LOGGER.debug('firing bg task: %s', bg_task)
    combine_task.celery_task_id = bg_task.task_id
    combine_task.save()

    # set gms
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<strong>Cloning Job(s):</strong><br>%s<br><br>Including downstream? <strong>%s</strong><br><br>Refresh this page to update status of Jobs cloning. <button class="btn-sm btn-outline-primary" onclick="location.reload();">Refresh</button>' % (
            '<br>'.join([str(j.name) for j in ordered_job_clone_set]), downstream_toggle),
        'class': 'success'
    })

    # return, as requested via Ajax which will reload page
    return JsonResponse({'results': True})
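The 'true'/'false' string handling above reappears in Examples 3 and 14, while Example 6 delegates it to a bool_for_string helper. A minimal sketch of such a helper, assuming the same semantics as the inline version (values other than the two literal strings pass through unchanged):

def bool_for_string(value):
    # coerce the string toggles sent by the front-end to booleans
    if value == 'true':
        return True
    if value == 'false':
        return False
    return value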
Example 3
def stop_jobs(request):
    LOGGER.debug('stopping jobs')

    job_ids = request.POST.getlist('job_ids[]')
    LOGGER.debug(job_ids)

    # get downstream toggle
    downstream_toggle = request.POST.get('downstream_stop_toggle', False)
    if downstream_toggle == 'true':
        downstream_toggle = True
    elif downstream_toggle == 'false':
        downstream_toggle = False

    # set of jobs to stop
    job_stop_set = set()

    # loop through job_ids
    for job_id in job_ids:

        # get CombineJob
        cjob = CombineJob.get_combine_job(job_id)

        # if including downstream
        if downstream_toggle:

            # add rerun lineage for this job to set
            job_stop_set.update(cjob.job.get_downstream_jobs())

        # else, just job
        else:

            job_stop_set.add(cjob.job)

    # sort by job id
    ordered_job_stop_set = sorted(list(job_stop_set), key=lambda j: j.id)

    # loop through and stop each Job
    for job in ordered_job_stop_set:
        LOGGER.debug('stopping Job: %s', job)

        # stop job
        job.stop_job()

    # set gms
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<p><strong>Stopped Job(s):</strong><br>%s</p>' %
                ('<br>'.join([j.name for j in ordered_job_stop_set])),
        'class': 'danger'
    })

    # return
    return JsonResponse({'results': True})
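These Ajax views read job IDs with getlist('job_ids[]'); a sketch of driving stop_jobs from the Django test client, which encodes a list value as repeated form fields (the URL path is an assumption):

def test_stop_jobs(self):
    # hypothetical route for stop_jobs
    response = self.client.post(
        '/combine/jobs/stop',
        {'job_ids[]': [1, 2], 'downstream_stop_toggle': 'true'})
    self.assertEqual(response.json(), {'results': True})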
Example 4
def record_group_run_jobs(request, org_id, record_group_id):
    group = RecordGroup.objects.get(pk=int(record_group_id))
    jobs = group.all_jobs()
    tasks.rerun_jobs(jobs)
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<strong>Preparing to Rerun Job(s):</strong><br>%s' % '<br>'.join(
            [str(j.name) for j in jobs]),
        'class': 'success'
    })
    return redirect('organization', org_id=group.organization_id)
Example 5
def gm_delete(request):
    if request.method == 'POST':
        # get gm_id
        gm_id = request.POST.get('gm_id')

        # init GlobalMessageClient
        gmc = GlobalMessageClient(request.session)

        # delete by id
        results = gmc.delete_gm(gm_id)

        # return result as JSON
        return JsonResponse({'gm_id': gm_id, 'num_removed': results})

    # non-POST requests are not supported
    return HttpResponseNotAllowed(['POST'])
Example 6
def rerun_jobs(request):
    LOGGER.debug('re-running jobs')

    # get job ids
    job_ids = request.POST.getlist('job_ids[]')

    # get downstream toggle
    downstream_toggle = bool_for_string(
        request.POST.get('downstream_rerun_toggle', False))
    upstream_toggle = bool_for_string(
        request.POST.get('upstream_rerun_toggle', False))

    # set of jobs to rerun
    job_rerun_set = set()

    # loop through job_ids
    for job_id in job_ids:

        # get CombineJob
        cjob = CombineJob.get_combine_job(job_id)

        # if including downstream
        if downstream_toggle:
            # add rerun lineage for this job to set
            job_rerun_set.update(
                cjob.job.get_downstream_jobs(include_self=False))

        if upstream_toggle:
            job_rerun_set.update(
                cjob.job.get_upstream_jobs(include_self=False))

        # always include the job itself
        job_rerun_set.add(cjob.job)

    # sort and run
    ordered_job_rerun_set = sorted(list(job_rerun_set), key=lambda j: j.id)

    tasks.rerun_jobs(ordered_job_rerun_set)

    # set gms
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<strong>Preparing to Rerun Job(s):</strong><br>%s<br><br>Refresh this page to update status of Jobs rerunning. <button class="btn-sm btn-outline-primary" onclick="location.reload();">Refresh</button>'
                % '<br>'.join([str(j.name) for j in ordered_job_rerun_set]),
        'class': 'success'
    })

    # return, as requested via Ajax which will reload page
    return JsonResponse({'results': True})
Example 7
def record_group_stop_jobs(request, org_id, record_group_id):
    group = RecordGroup.objects.get(pk=int(record_group_id))
    jobs = group.all_jobs()
    for job in jobs:
        LOGGER.debug('stopping Job: %s', job)
        job.stop_job()

    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<p><strong>Stopped Job(s):</strong><br>%s</p>' % (
            '<br>'.join([j.name for j in jobs])),
        'class': 'danger'
    })

    return redirect('organization', org_id=group.organization_id)
Example 8
def job_unpublish(request, org_id, record_group_id, job_id):
    # get CombineJob
    cjob = CombineJob.get_combine_job(job_id)

    # init unpublish
    cjob.unpublish_bg_task()

    # set gms
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<p><strong>Unpublishing Job:</strong><br>%s</p><p><a href="%s"><button type="button" class="btn btn-outline-primary btn-sm">View Published Records</button></a></p>' % (
            cjob.job.name, reverse('published')),
        'class': 'success'
    })

    return redirect('record_group',
                    org_id=cjob.job.record_group.organization.id,
                    record_group_id=cjob.job.record_group.id)
Example 9
def test_gm_delete(self):
    gmc = GlobalMessageClient()
    gmc.load_most_recent_session()
    gm_dict = {"id": "test_msg", "msg": "test global message"}
    gmc.add_gm(gm_dict)
    response = self.client.post('/combine/gm/delete',
                                {"gm_id": "test_msg"})
    json_body = response.json()
    self.assertEqual(json_body['gm_id'], 'test_msg')
    self.assertEqual(json_body['num_removed'], 1)
Example 10
def export_tabular_data(request, export_source=None, job_id=None, subset=None):
    # get records per file
    records_per_file = request.POST.get('records_per_file', False)
    if records_per_file in ['', False]:
        records_per_file = 500

    # get mapped fields export type
    tabular_data_export_type = request.POST.get('tabular_data_export_type')

    # get archive type
    archive_type = request.POST.get('archive_type')

    # get fm config json
    fm_export_config_json = request.POST.get('fm_export_config_json')

    # export for single job
    if export_source == 'job':
        LOGGER.debug('exporting tabular data from Job')

        # retrieve job
        cjob = CombineJob.get_combine_job(int(job_id))

        # initiate Combine BG Task
        combine_task = CombineBackgroundTask(
            name='Export Tabular Data for Job: %s' % cjob.job.name,
            task_type='export_tabular_data',
            task_params_json=json.dumps({
                'job_id': cjob.job.id,
                'records_per_file': int(records_per_file),
                'tabular_data_export_type': tabular_data_export_type,
                'archive_type': archive_type,
                'fm_export_config_json': fm_export_config_json
            }))
        combine_task.save()

        # handle export output configurations
        combine_task = _handle_export_output(request, export_source,
                                             combine_task)

        # run celery task
        background_task = tasks.export_tabular_data.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # set gm
        gmc = GlobalMessageClient(request.session)
        target = "Job:</strong><br>%s" % cjob.job.name
        gmc.add_gm({
            'html': '<p><strong>Exporting Tabular Data for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>'
                    % (target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('job_details',
                        org_id=cjob.job.record_group.organization.id,
                        record_group_id=cjob.job.record_group.id,
                        job_id=cjob.job.id)

    # export for published
    if export_source == 'published':
        LOGGER.debug('exporting tabular data from published records')

        # initiate Combine BG Task
        combine_task = CombineBackgroundTask(
            name='Export Tabular Data for Published Records',
            task_type='export_tabular_data',
            task_params_json=json.dumps({
                'published': True,
                'subset': subset,
                'records_per_file': int(records_per_file),
                'tabular_data_export_type': tabular_data_export_type,
                'archive_type': archive_type,
                'fm_export_config_json': fm_export_config_json
            }))
        combine_task.save()

        # handle export output configurations
        combine_task = _handle_export_output(request, export_source,
                                             combine_task)

        # run celery task
        background_task = tasks.export_tabular_data.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # set gm
        gmc = GlobalMessageClient(request.session)
        target = ":</strong><br>Published Records"
        gmc.add_gm({
            'html': '<p><strong>Exporting Tabular Data for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>'
                    % (target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('published')
Example 11
def export_mapped_fields(request,
                         export_source=None,
                         job_id=None,
                         subset=None):
    # get mapped fields export type
    mapped_fields_export_type = request.POST.get('mapped_fields_export_type')

    # check for Kibana-style export flag (any submitted value enables it)
    kibana_style = request.POST.get('kibana_style', False)
    if kibana_style:
        kibana_style = True

    # get archive type
    archive_type = request.POST.get('archive_type')

    # get selected fields if present
    mapped_field_include = request.POST.getlist('mapped_field_include', False)

    # export for single job
    if export_source == 'job':
        LOGGER.debug('exporting mapped fields from Job')

        # retrieve job
        cjob = CombineJob.get_combine_job(int(job_id))

        # initiate Combine BG Task
        combine_task = CombineBackgroundTask(
            name='Export Mapped Fields for Job: %s' % cjob.job.name,
            task_type='export_mapped_fields',
            task_params_json=json.dumps({
                'job_id': cjob.job.id,
                'mapped_fields_export_type': mapped_fields_export_type,
                'kibana_style': kibana_style,
                'archive_type': archive_type,
                'mapped_field_include': mapped_field_include
            }))
        combine_task.save()

        # handle export output configurations
        combine_task = _handle_export_output(request, export_source,
                                             combine_task)

        # run celery task
        background_task = tasks.export_mapped_fields.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # set gm
        gmc = GlobalMessageClient(request.session)
        target = "Job:</strong><br>%s" % cjob.job.name
        gmc.add_gm({
            'html': '<p><strong>Exporting Mapped Fields for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>'
                    % (target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('job_details',
                        org_id=cjob.job.record_group.organization.id,
                        record_group_id=cjob.job.record_group.id,
                        job_id=cjob.job.id)

    # export for published
    if export_source == 'published':
        LOGGER.debug('exporting mapped fields from published records')

        # initiate Combine BG Task
        combine_task = CombineBackgroundTask(
            name='Export Mapped Fields for Published Records',
            task_type='export_mapped_fields',
            task_params_json=json.dumps({
                'published': True,
                'subset': subset,
                'mapped_fields_export_type': mapped_fields_export_type,
                'kibana_style': kibana_style,
                'archive_type': archive_type,
                'mapped_field_include': mapped_field_include
            }))
        combine_task.save()

        # handle export output configurations
        combine_task = _handle_export_output(request, export_source,
                                             combine_task)

        # run celery task
        background_task = tasks.export_mapped_fields.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # set gm
        gmc = GlobalMessageClient(request.session)
        target = ":</strong><br>Published Records"
        gmc.add_gm({
            'html': '<p><strong>Exporting Mapped Fields for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>'
                    % (target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('published')
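Examples 10 and 11 repeat the same five-step sequence per branch: build a CombineBackgroundTask, save it, pass it through _handle_export_output, fire the matching celery task, and record the celery task id. A sketch of a helper that factors this out; the helper itself is hypothetical, but every call it wraps appears in the views above:

def _start_export_task(request, export_source, name, task_type, task_params, celery_task):
    # persist the Combine BG task
    combine_task = CombineBackgroundTask(
        name=name,
        task_type=task_type,
        task_params_json=json.dumps(task_params))
    combine_task.save()

    # handle export output configurations
    combine_task = _handle_export_output(request, export_source, combine_task)

    # run celery task and record its id
    background_task = celery_task.delay(combine_task.id)
    LOGGER.debug('firing bg task: %s', background_task)
    combine_task.celery_task_id = background_task.task_id
    combine_task.save()
    return combine_task

Each branch would then reduce to a single call, e.g. _start_export_task(request, 'job', ..., tasks.export_tabular_data).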
Example 12
def stateio_import(request):
    """
        Import state
        """

    if request.method == 'GET':

        return render(request, 'core/stateio_import.html',
                      {'breadcrumbs': breadcrumb_parser(request)})

    if request.method == 'POST':

        # capture optional import name
        import_name = request.POST.get('import_name', None)
        if import_name == '':
            import_name = None
        LOGGER.debug('initializing import: %s', import_name)

        # handle filesystem location
        if request.POST.get('filesystem_location', None) not in ['', None]:
            export_path = request.POST.get('filesystem_location').strip()
            LOGGER.debug('importing state based on filesystem location: %s',
                         export_path)

        # handle URL
        elif request.POST.get('url_location', None) not in ['', None]:
            export_path = request.POST.get('url_location').strip()
            LOGGER.debug('importing state based on remote location: %s',
                         export_path)

        # handle file upload
        elif request.FILES.get('export_upload_payload', None) is not None:

            LOGGER.debug('handling file upload')

            # save file to disk
            payload = request.FILES.get('export_upload_payload', None)
            new_file = '/tmp/%s' % payload.name
            with open(new_file, 'wb') as file:
                file.write(payload.read())
                payload.close()

            # set export_path
            export_path = new_file
            LOGGER.debug('saved uploaded state to %s', export_path)

        # init export as bg task
        combine_task = StateIOClient.import_state_bg_task(
            import_name=import_name, export_path=export_path)

        # retrieve StateIO instance, use metadata for msg
        stateio_instance = StateIO.objects.get(
            id=combine_task.task_params['stateio_id'])

        # set gms
        gmc = GlobalMessageClient(request.session)
        gmc.add_gm({
            'html': '<p><strong>Importing State:</strong><br>%s</p><p>Refresh this page for updates: '
                    '<button class="btn-sm btn-outline-primary" onclick="location.reload();">Refresh</button></p>'
                    % stateio_instance.name,
            'class': 'success'
        })

        return redirect('stateio')
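Note that export_path is bound only inside the three source branches above; a POST that supplies none of them reaches import_state_bg_task with export_path undefined and raises NameError. A defensive variant, offered as an assumption about how the view could be hardened rather than as behavior present in the original:

        # initialize before the branches, then verify a source was provided
        export_path = None

        # ... filesystem / URL / upload branches assign export_path ...

        if export_path is None:
            return JsonResponse(
                {'error': 'no filesystem location, URL, or upload provided'},
                status=400)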
Example 13
def stateio_export(request):
    """
        Export state
        """

    if request.method == 'GET':
        # generate hierarchy_dict
        job_hierarchy = _stateio_prepare_job_hierarchy()

        # generate config scenarios
        config_scenarios = _stateio_prepare_config_scenarios()

        # return
        return render(
            request, 'core/stateio_export.html', {
                'job_hierarchy_json': json.dumps(job_hierarchy),
                'config_scenarios_json': json.dumps(config_scenarios),
                'breadcrumbs': breadcrumb_parser(request)
            })

    if request.method == 'POST':

        # capture optional export name
        export_name = request.POST.get('export_name', None)
        if export_name == '':
            export_name = None
        LOGGER.debug('initializing export: %s', export_name)

        # capture and parse jobs_hierarchy_ids
        jobs_hierarchy_ids = request.POST.getlist('jobs_hierarchy_ids[]')
        jobs = [
            int(obj.split('|')[-1]) for obj in jobs_hierarchy_ids
            if obj.startswith('job')
        ]
        record_groups = [
            int(obj.split('|')[-1]) for obj in jobs_hierarchy_ids
            if obj.startswith('record_group')
        ]
        orgs = [
            int(obj.split('|')[-1]) for obj in jobs_hierarchy_ids
            if obj.startswith('org')
        ]

        # capture and parse config_scenarios_ids
        config_scenarios_ids = [
            config_id
            for config_id in request.POST.getlist('config_scenarios_ids[]')
            if '|' in config_id
        ]

        # init export as bg task
        combine_task = StateIOClient.export_state_bg_task(
            export_name=export_name,
            jobs=jobs,
            record_groups=record_groups,
            orgs=orgs,
            config_scenarios=config_scenarios_ids  # preserve prefixes through serialization
        )

        # retrieve StateIO instance, use metadata for msg
        stateio_instance = StateIO.objects.get(
            id=combine_task.task_params['stateio_id'])

        # set gms
        gmc = GlobalMessageClient(request.session)
        gmc.add_gm({
            'html': '<p><strong>Exporting State:</strong><br>%s</p><p>Refresh this page for updates: '
                    '<button class="btn-sm btn-outline-primary" onclick="location.reload();">Refresh</button></p>'
                    % stateio_instance.name,
            'class': 'success'
        })

        # return
        return JsonResponse({'msg': 'success'})
Example 14
def delete_jobs(request):
    LOGGER.debug('deleting jobs')

    job_ids = request.POST.getlist('job_ids[]')
    LOGGER.debug(job_ids)

    # get downstream toggle
    downstream_toggle = request.POST.get('downstream_delete_toggle', False)
    if downstream_toggle == 'true':
        downstream_toggle = True
    elif downstream_toggle == 'false':
        downstream_toggle = False

    # set of jobs to delete
    job_delete_set = set()

    # loop through job_ids
    for job_id in job_ids:

        # get CombineJob
        cjob = CombineJob.get_combine_job(job_id)

        # if including downstream
        if downstream_toggle:

            # add delete lineage for this job to set
            job_delete_set.update(cjob.job.get_downstream_jobs())

        # else, just job
        else:

            job_delete_set.add(cjob.job)

    # sort and run
    ordered_job_delete_set = sorted(list(job_delete_set), key=lambda j: j.id)

    # loop through, mark each Job as deleting for the front-end, and fire its delete task
    for job in ordered_job_delete_set:
        LOGGER.debug('deleting Job: %s', job)

        # set job status to deleting
        job.name = "%s (DELETING)" % job.name
        job.deleted = True
        job.status = 'deleting'
        job.save()

        # initiate Combine BG Task
        combine_task = CombineBackgroundTask(
            name='Delete Job: %s' % job.name,
            task_type='delete_model_instance',
            task_params_json=json.dumps({
                'model': 'Job',
                'job_id': job.id
            })
        )
        combine_task.save()

        # run celery task
        bg_task = tasks.delete_model_instance.delay('Job', job.id)
        LOGGER.debug('firing bg task: %s', bg_task)
        combine_task.celery_task_id = bg_task.task_id
        combine_task.save()

    # set gms
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<p><strong>Deleting Job(s):</strong><br>%s</p><p>Refresh this page to update status of removing Jobs. <button class="btn-sm btn-outline-primary" onclick="location.reload();">Refresh</button></p>' % (
            '<br>'.join([j.name for j in ordered_job_delete_set])),
        'class': 'danger'
    })

    # return
    return JsonResponse({'results': True})
Example 15
def job_update(request, org_id, record_group_id, job_id):
    """
    Update Job in one of several ways:
        - re-map and index
        - run new / different validations
    """

    # retrieve job
    cjob = CombineJob.get_combine_job(int(job_id))

    # if GET, prepare form
    if request.method == 'GET':
        # get validation scenarios
        validation_scenarios = ValidationScenario.objects.all()

        # get field mappers
        field_mappers = FieldMapper.objects.all()
        orig_fm_config_json = cjob.job.get_fm_config_json()

        # get all bulk downloads
        bulk_downloads = DPLABulkDataDownload.objects.all()

        # get update type from GET params
        update_type = request.GET.get('update_type', None)

        # render page
        return render(request, 'core/job_update.html', {
            'cjob': cjob,
            'update_type': update_type,
            'validation_scenarios': validation_scenarios,
            'field_mappers': field_mappers,
            'bulk_downloads': bulk_downloads,
            'xml2kvp_handle': xml2kvp.XML2kvp(),
            'orig_fm_config_json': orig_fm_config_json,
            'breadcrumbs': breadcrumb_parser(request)
        })

    # if POST, submit job
    if request.method == 'POST':

        LOGGER.debug('updating job')
        LOGGER.debug(request.POST)

        # retrieve job
        cjob = CombineJob.get_combine_job(int(job_id))

        # get update type
        update_type = request.POST.get('update_type', None)
        LOGGER.debug('running job update: %s', update_type)

        # handle re-index
        if update_type == 'reindex':
            # get preferred metadata index mapper
            fm_config_json = request.POST.get('fm_config_json')

            # init re-index
            cjob.reindex_bg_task(fm_config_json=fm_config_json)

            # set gms
            gmc = GlobalMessageClient(request.session)
            gmc.add_gm({
                'html': '<p><strong>Re-Indexing Job:</strong><br>%s</p>'
                        '<p><a href="%s"><button type="button" '
                        'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                            cjob.job.name, reverse('bg_tasks')),
                'class': 'success'
            })

            return redirect('job_details',
                            org_id=cjob.job.record_group.organization.id,
                            record_group_id=cjob.job.record_group.id,
                            job_id=cjob.job.id)

        # handle new validations
        if update_type == 'validations':
            # get requested validation scenarios
            validation_scenarios = request.POST.getlist(
                'validation_scenario', [])

            # get validations
            validations = ValidationScenario.objects.filter(
                id__in=[int(vs_id) for vs_id in validation_scenarios])

            # init bg task
            cjob.new_validations_bg_task([vs.id for vs in validations])

            # set gms
            gmc = GlobalMessageClient(request.session)
            gmc.add_gm({
                'html': '<p><strong>Running New Validations for Job:</strong><br>%s<br>'
                        '<br><strong>Validation Scenarios:</strong><br>%s</p>'
                        '<p><a href="%s"><button type="button" '
                        'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                            cjob.job.name, '<br>'.join([vs.name for vs in validations]), reverse('bg_tasks')),
                'class': 'success'
            })

            return redirect('job_details',
                            org_id=cjob.job.record_group.organization.id,
                            record_group_id=cjob.job.record_group.id,
                            job_id=cjob.job.id)

        # handle validation removal
        if update_type == 'remove_validation':
            # get validation scenario to remove
            jv_id = request.POST.get('jv_id', False)

            # initiate Combine BG Task
            cjob.remove_validation_bg_task(jv_id)

            # set gms
            validation_scenario = JobValidation.objects.get(
                pk=int(jv_id)).validation_scenario
            gmc = GlobalMessageClient(request.session)
            gmc.add_gm({
                'html': '<p><strong>Removing Validation for Job:</strong><br>%s<br><br>'
                        '<strong>Validation Scenario:</strong><br>%s</p><p><a href="%s"><button type="button" '
                        'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                            cjob.job.name, validation_scenario.name, reverse('bg_tasks')),
                'class': 'success'
            })

            return redirect('job_details',
                            org_id=cjob.job.record_group.organization.id,
                            record_group_id=cjob.job.record_group.id,
                            job_id=cjob.job.id)

        # handle DPLA Bulk Data comparison
        if update_type == 'dbdm':
            # get DPLA Bulk Data Download to compare against
            dbdd_id = request.POST.get('dbdd', False)

            # initiate Combine BG Task
            cjob.dbdm_bg_task(dbdd_id)

            # set gms
            dbdd = DPLABulkDataDownload.objects.get(pk=int(dbdd_id))
            gmc = GlobalMessageClient(request.session)
            gmc.add_gm({
                'html': '<p><strong>Running DPLA Bulk Data comparison for Job:</strong><br>%s<br><br>'
                        '<strong>Bulk Data S3 key:</strong><br>%s</p><p><a href="%s"><button type="button" '
                        'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                            cjob.job.name, dbdd.s3_key, reverse('bg_tasks')),
                'class': 'success'
            })

            return redirect('job_details',
                            org_id=cjob.job.record_group.organization.id,
                            record_group_id=cjob.job.record_group.id,
                            job_id=cjob.job.id)

        # handle publish set update
        if update_type == 'publish_set':
            update_body = request.POST
            if update_body.get('publish_set_id', None):
                cjob.job.publish_set_id = update_body['publish_set_id']
            if update_body.get('existing_publish_set_id', None):
                cjob.job.publish_set_id = update_body['existing_publish_set_id']
            redirect_anchor = update_body.get('redirect_anchor', '')
            cjob.job.save()
            return redirect(reverse('job_details', args=[org_id, record_group_id, job_id]) + redirect_anchor)
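The POST branch above dispatches on update_type through a chain of if blocks, each ending in its own redirect. A dictionary dispatch is one idiomatic alternative; this sketch assumes each branch were extracted into its own handler function (the handler names are hypothetical):

UPDATE_HANDLERS = {
    'reindex': _handle_reindex,
    'validations': _handle_new_validations,
    'remove_validation': _handle_remove_validation,
    'dbdm': _handle_dbdm,
    'publish_set': _handle_publish_set,
}

handler = UPDATE_HANDLERS.get(update_type)
if handler is not None:
    return handler(request, cjob)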
Example 16
def most_recent_global_message():
    gmc = GlobalMessageClient()
    gmc.load_most_recent_session()
    gm = gmc.session['gms'][0]
    return gm
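A short usage sketch: most_recent_global_message returns the first entry in the most recently persisted session's 'gms' list, a dict with whatever keys were passed to add_gm (typically 'html' and 'class' in the views above):

gm = most_recent_global_message()
# e.g. {'html': '<p>...</p>', 'class': 'success'}
print(gm.get('html'), gm.get('class'))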