def record_group_delete(request, org_id, record_group_id):
    """
    Delete Record Group via background task.

    Marks the RecordGroup's name as deleting for the front-end, records a
    CombineBackgroundTask, and fires the celery delete task; the actual
    cascade removal happens in the background worker.

    NOTE: original docstring incorrectly said "Create new Record Group".

    Args:
        request: Django request object
        org_id: Organization id, used for the post-delete redirect
        record_group_id: pk of the RecordGroup to delete

    Returns:
        redirect to the organization page
    """

    # retrieve record group
    record_group = RecordGroup.objects.get(pk=record_group_id)

    # flag as deleting in the UI while the background task runs
    record_group.name = "%s (DELETING)" % record_group.name
    record_group.save()

    # initiate Combine BG Task
    combine_task = CombineBackgroundTask(
        name='Delete RecordGroup: %s' % record_group.name,
        task_type='delete_model_instance',
        task_params_json=json.dumps({
            'model': 'RecordGroup',
            'record_group_id': record_group.id
        })
    )
    combine_task.save()

    # run celery task and link its id back to the Combine task record
    bg_task = tasks.delete_model_instance.delay(
        'RecordGroup',
        record_group.id,
    )
    LOGGER.debug('firing bg task: %s', bg_task)
    combine_task.celery_task_id = bg_task.task_id
    combine_task.save()

    # redirect to organization page
    return redirect('organization', org_id=org_id)
def job_delete(request, org_id, record_group_id, job_id):
    """
    Delete a single Job via background task.

    Marks the Job as deleting for the front-end, records a
    CombineBackgroundTask, and fires the celery delete task.

    Args:
        request: Django request object
        org_id: Organization id (used as redirect fallback)
        record_group_id: RecordGroup id (unused here, part of URL signature)
        job_id: pk of the Job to delete

    Returns:
        redirect back to the referring page, or to the organization page
        when no Referer header is present
    """
    LOGGER.debug('deleting job by id: %s', job_id)

    # get job
    job = Job.objects.get(pk=job_id)

    # flag job as deleting for the front-end while the task runs
    job.name = "%s (DELETING)" % job.name
    job.deleted = True
    job.status = 'deleting'
    job.save()

    # initiate Combine BG Task
    combine_task = CombineBackgroundTask(
        name='Delete Job: %s' % job.name,
        task_type='delete_model_instance',
        task_params_json=json.dumps({
            'model': 'Job',
            'job_id': job.id
        })
    )
    combine_task.save()

    # run celery task and link its id back to the Combine task record
    bg_task = tasks.delete_model_instance.delay('Job', job.id)
    LOGGER.debug('firing bg task: %s', bg_task)
    combine_task.celery_task_id = bg_task.task_id
    combine_task.save()

    # redirect to the referring page; fall back to the organization page
    # when the Referer header is absent -- redirect(None) would raise
    referer = request.META.get('HTTP_REFERER')
    if referer:
        return redirect(referer)
    return redirect('organization', org_id=org_id)
def organization_delete(request, org_id):
    """
    Delete Organization via background task.

    Note: Through cascade deletes, would remove:
        - RecordGroup
            - Job
                - Record
    """

    # retrieve the Organization to remove
    organization = Organization.objects.get(pk=org_id)

    # visibly mark as deleting while the background task runs
    organization.name = "%s (DELETING)" % organization.name
    organization.save()

    # record a Combine background task for this delete
    combine_task = CombineBackgroundTask(
        name='Delete Organization: %s' % organization.name,
        task_type='delete_model_instance',
        task_params_json=json.dumps({
            'model': 'Organization',
            'org_id': organization.id
        })
    )
    combine_task.save()

    # fire the celery task and store its id on the Combine task
    bg_task = tasks.delete_model_instance.delay('Organization', organization.id, )
    LOGGER.debug('firing bg task: %s', bg_task)
    combine_task.celery_task_id = bg_task.task_id
    combine_task.save()

    return redirect('organizations')
def clone_jobs(request):
    """
    Clone a set of Jobs via background task.

    POST params:
        - job_ids[]: ids of Jobs to clone
        - downstream_clone_toggle: 'true'/'false', include downstream Jobs
        - rerun_on_clone: 'true'/'false', rerun Jobs after cloning

    Returns:
        JsonResponse {'results': True}; caller reloads the page via Ajax
    """
    LOGGER.debug('cloning jobs')

    job_ids = request.POST.getlist('job_ids[]')

    # parse toggles strictly to booleans: the front-end sends the strings
    # 'true'/'false'; previously any other non-empty string leaked through
    # as a truthy str into the task params JSON
    downstream_toggle = request.POST.get('downstream_clone_toggle', 'false') == 'true'
    rerun_on_clone = request.POST.get('rerun_on_clone', 'false') == 'true'

    # collect unique Jobs to clone
    job_clone_set = set()
    for job_id in job_ids:
        cjob = CombineJob.get_combine_job(job_id)
        job_clone_set.add(cjob.job)

    # order deterministically by id
    ordered_job_clone_set = sorted(list(job_clone_set), key=lambda j: j.id)

    # initiate Combine BG Task
    combine_task = CombineBackgroundTask(
        name="Clone Jobs",
        task_type='clone_jobs',
        task_params_json=json.dumps({
            'ordered_job_clone_set': [j.id for j in ordered_job_clone_set],
            'downstream_toggle': downstream_toggle,
            'rerun_on_clone': rerun_on_clone
        })
    )
    combine_task.save()

    # run celery task and link its id back to the Combine task record
    bg_task = tasks.clone_jobs.delay(combine_task.id)
    LOGGER.debug('firing bg task: %s', bg_task)
    combine_task.celery_task_id = bg_task.task_id
    combine_task.save()

    # set global message for the front-end
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<strong>Cloning Job(s):</strong><br>%s<br><br>Including downstream? <strong>%s</strong><br><br>Refresh this page to update status of Jobs cloning. <button class="btn-sm btn-outline-primary" onclick="location.reload();">Refresh</button>' % (
            '<br>'.join([str(j.name) for j in ordered_job_clone_set]),
            downstream_toggle),
        'class': 'success'
    })

    # return, as requested via Ajax which will reload page
    return JsonResponse({'results': True})
def export_tabular_data(request, export_source=None, job_id=None, subset=None):
    """
    Export tabular data for a single Job or for published Records.

    Args:
        request: Django request object (POST params configure the export)
        export_source: 'job' or 'published'
        job_id: Job id, required when export_source == 'job'
        subset: published subset identifier when export_source == 'published'

    Returns:
        redirect to the job details page or the published page; returns
        None (Django error) for an unrecognized export_source
    """

    # get records per file, defaulting when absent or blank
    records_per_file = request.POST.get('records_per_file', False)
    if records_per_file in ['', False]:
        records_per_file = 500

    # get mapped fields export type
    tabular_data_export_type = request.POST.get('tabular_data_export_type')

    # get archive type
    archive_type = request.POST.get('archive_type')

    # get fm config json
    fm_export_config_json = request.POST.get('fm_export_config_json')

    # export for single job
    if export_source == 'job':
        LOGGER.debug('exporting tabular data from Job')

        # retrieve job
        cjob = CombineJob.get_combine_job(int(job_id))

        # initiate Combine BG Task
        combine_task = CombineBackgroundTask(
            name='Export Tabular Data for Job: %s' % cjob.job.name,
            task_type='export_tabular_data',
            task_params_json=json.dumps({
                'job_id': cjob.job.id,
                'records_per_file': int(records_per_file),
                'tabular_data_export_type': tabular_data_export_type,
                'archive_type': archive_type,
                'fm_export_config_json': fm_export_config_json
            }))
        combine_task.save()

        # handle export output configurations
        combine_task = _handle_export_output(request, export_source, combine_task)

        # run celery task
        background_task = tasks.export_tabular_data.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # set gm
        gmc = GlobalMessageClient(request.session)
        target = "Job:</strong><br>%s" % cjob.job.name
        gmc.add_gm({
            'html': '<p><strong>Exporting Tabular Data for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                        target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('job_details',
                        org_id=cjob.job.record_group.organization.id,
                        record_group_id=cjob.job.record_group.id,
                        job_id=cjob.job.id)

    # export for published
    if export_source == 'published':
        LOGGER.debug('exporting tabular data from published records')

        # NOTE: removed a dead `PublishedRecords()` instantiation here that
        # was flagged "# TODO: not used" -- its result was discarded

        # initiate Combine BG Task
        combine_task = CombineBackgroundTask(
            name='Export Tabular Data for Published Records',
            task_type='export_tabular_data',
            task_params_json=json.dumps({
                'published': True,
                'subset': subset,
                'records_per_file': int(records_per_file),
                'tabular_data_export_type': tabular_data_export_type,
                'archive_type': archive_type,
                'fm_export_config_json': fm_export_config_json
            }))
        combine_task.save()

        # handle export output configurations
        combine_task = _handle_export_output(request, export_source, combine_task)

        # run celery task
        background_task = tasks.export_tabular_data.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # set gm
        gmc = GlobalMessageClient(request.session)
        target = ":</strong><br>Published Records"
        gmc.add_gm({
            'html': '<p><strong>Exporting Tabular Data for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                        target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('published')
def export_mapped_fields(request, export_source=None, job_id=None, subset=None):
    """
    Export mapped fields for a single Job or for published Records.

    Args:
        request: Django request object (POST params configure the export)
        export_source: 'job' or 'published'
        job_id: Job id, required when export_source == 'job'
        subset: published subset identifier when export_source == 'published'

    Returns:
        redirect to the job details page or the published page
    """

    # export format selected on the form
    mapped_fields_export_type = request.POST.get('mapped_fields_export_type')

    # Kibana-style flag: any non-empty value becomes True, falsy values
    # pass through unchanged
    kibana_style = request.POST.get('kibana_style', False)
    if kibana_style:
        kibana_style = True

    # archive format for the export output
    archive_type = request.POST.get('archive_type')

    # optional subset of mapped fields to include
    mapped_field_include = request.POST.getlist('mapped_field_include', False)

    # single-Job export
    if export_source == 'job':
        LOGGER.debug('exporting mapped fields from Job')

        cjob = CombineJob.get_combine_job(int(job_id))

        # record background task with export parameters
        combine_task = CombineBackgroundTask(
            name='Export Mapped Fields for Job: %s' % cjob.job.name,
            task_type='export_mapped_fields',
            task_params_json=json.dumps({
                'job_id': cjob.job.id,
                'mapped_fields_export_type': mapped_fields_export_type,
                'kibana_style': kibana_style,
                'archive_type': archive_type,
                'mapped_field_include': mapped_field_include
            }))
        combine_task.save()

        # apply export output configuration
        combine_task = _handle_export_output(request, export_source, combine_task)

        # fire celery task, then persist its id on the Combine task
        background_task = tasks.export_mapped_fields.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # notify user via global message
        gmc = GlobalMessageClient(request.session)
        target = "Job:</strong><br>%s" % cjob.job.name
        gmc.add_gm({
            'html': '<p><strong>Exporting Mapped Fields for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                        target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('job_details',
                        org_id=cjob.job.record_group.organization.id,
                        record_group_id=cjob.job.record_group.id,
                        job_id=cjob.job.id)

    # published-records export
    if export_source == 'published':
        LOGGER.debug('exporting mapped fields from published records')

        # record background task with export parameters
        combine_task = CombineBackgroundTask(
            name='Export Mapped Fields for Published Records',
            task_type='export_mapped_fields',
            task_params_json=json.dumps({
                'published': True,
                'subset': subset,
                'mapped_fields_export_type': mapped_fields_export_type,
                'kibana_style': kibana_style,
                'archive_type': archive_type,
                'mapped_field_include': mapped_field_include
            }))
        combine_task.save()

        # apply export output configuration
        combine_task = _handle_export_output(request, export_source, combine_task)

        # fire celery task, then persist its id on the Combine task
        background_task = tasks.export_mapped_fields.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # notify user via global message
        gmc = GlobalMessageClient(request.session)
        target = ":</strong><br>Published Records"
        gmc.add_gm({
            'html': '<p><strong>Exporting Mapped Fields for %s</p><p><a href="%s"><button type="button" '
                    'class="btn btn-outline-primary btn-sm">View Background Tasks</button></a></p>' % (
                        target, reverse('bg_tasks')),
            'class': 'success'
        })

        return redirect('published')
def job_reports_create_validation(request, org_id, record_group_id, job_id):
    """
    Generate job report based on validation results

    GET renders the report-configuration form; POST records a
    CombineBackgroundTask and fires the celery report task.
    """

    # retrieve job
    cjob = CombineJob.get_combine_job(int(job_id))

    # GET: prepare and render form
    if request.method == 'GET':

        # reuse cached mapped field analysis when present; otherwise
        # generate it for finished jobs
        job_details = cjob.job.job_details_dict
        if 'mapped_field_analysis' in job_details.keys():
            field_counts = job_details['mapped_field_analysis']
        elif cjob.job.finished:
            field_counts = cjob.count_indexed_fields()
            cjob.job.update_job_details(
                {'mapped_field_analysis': field_counts}, save=True)
        else:
            LOGGER.debug('job not finished, not setting')
            field_counts = {}

        return render(request, 'core/job_reports_create_validation.html', {
            'cjob': cjob,
            'field_counts': field_counts,
            'breadcrumbs': breadcrumb_parser(request)
        })

    # POST: generate report
    if request.method == 'POST':

        # derive report and task names, defaulting when none provided
        report_name = request.POST.get('report_name')
        if report_name == '':
            report_name = 'j_%s_validation_report' % cjob.job.id
            combine_task_name = "Validation Report: %s" % cjob.job.name
        else:
            combine_task_name = "Validation Report: %s" % report_name

        # gather POST params as Combine task params
        task_params = {
            'job_id': cjob.job.id,
            'report_name': report_name,
            'report_format': request.POST.get('report_format'),
            'compression_type': request.POST.get('compression_type'),
            'validation_scenarios': request.POST.getlist('validation_scenario', []),
            'mapped_field_include': request.POST.getlist('mapped_field_include', [])
        }

        # validation scenario ids arrive as strings; cast to int
        task_params['validation_scenarios'] = [
            int(vs_id) for vs_id in task_params['validation_scenarios']]

        # drop reserved fields from any mapped field request
        reserved_fields = ['record_id', 'db_id', 'oid', '_id']
        task_params['mapped_field_include'] = [
            field for field in task_params['mapped_field_include']
            if field not in reserved_fields]

        # record background task
        combine_task = CombineBackgroundTask(
            name=combine_task_name,
            task_type='validation_report',
            task_params_json=json.dumps(task_params)
        )
        combine_task.save()

        # fire celery task, then persist its id on the Combine task
        background_task = tasks.create_validation_report.delay(combine_task.id)
        LOGGER.debug('firing bg task: %s', background_task)
        combine_task.celery_task_id = background_task.task_id
        combine_task.save()

        # redirect to Background Tasks
        return redirect('bg_tasks')
def delete_jobs(request):
    """
    Delete a set of Jobs (optionally including downstream Jobs) via
    background tasks, one per Job.

    POST params:
        - job_ids[]: ids of Jobs to delete
        - downstream_delete_toggle: 'true'/'false', also delete downstream Jobs

    Returns:
        JsonResponse {'results': True}; caller reloads the page via Ajax
    """
    LOGGER.debug('deleting jobs')

    job_ids = request.POST.getlist('job_ids[]')
    LOGGER.debug(job_ids)

    # get downstream toggle
    downstream_toggle = request.POST.get('downstream_delete_toggle', False)
    if downstream_toggle == 'true':
        downstream_toggle = True
    elif downstream_toggle == 'false':
        downstream_toggle = False

    # set of jobs to delete
    job_delete_set = set()

    # loop through job_ids
    for job_id in job_ids:

        # get CombineJob
        cjob = CombineJob.get_combine_job(job_id)

        # if including downstream, add full delete lineage for this job
        if downstream_toggle:
            job_delete_set.update(cjob.job.get_downstream_jobs())

        # else, just job
        else:
            job_delete_set.add(cjob.job)

    # order deterministically by id
    ordered_job_delete_set = sorted(list(job_delete_set), key=lambda j: j.id)

    # loop through and update visible elements of Job for front-end
    for job in ordered_job_delete_set:
        LOGGER.debug('deleting Job: %s', job)

        # set job status to deleting
        job.name = "%s (DELETING)" % job.name
        job.deleted = True
        job.status = 'deleting'
        job.save()

        # initiate Combine BG Task
        # (task name normalized to match job_delete: was 'Delete Job: #%s',
        # with a stray '#' before the job name)
        combine_task = CombineBackgroundTask(
            name='Delete Job: %s' % job.name,
            task_type='delete_model_instance',
            task_params_json=json.dumps({
                'model': 'Job',
                'job_id': job.id
            })
        )
        combine_task.save()

        # run celery task and link its id back to the Combine task record
        bg_task = tasks.delete_model_instance.delay('Job', job.id, )
        LOGGER.debug('firing bg task: %s', bg_task)
        combine_task.celery_task_id = bg_task.task_id
        combine_task.save()

    # set global message for the front-end
    gmc = GlobalMessageClient(request.session)
    gmc.add_gm({
        'html': '<p><strong>Deleting Job(s):</strong><br>%s</p><p>Refresh this page to update status of removing Jobs. <button class="btn-sm btn-outline-primary" onclick="location.reload();">Refresh</button></p>' % (
            '<br>'.join([j.name for j in ordered_job_delete_set])),
        'class': 'danger'
    })

    # return
    return JsonResponse({'results': True})