def clusters_delete(cluster_id):
    """Terminate a cluster, optionally forcing deletion.

    When ``force`` is true in the request body, the name of the backing
    Heat stack is returned (HTTP 200) so the caller can track cleanup;
    otherwise an empty 204 response is returned.
    """
    request_body = u.request_data()
    force = request_body.get('force', False)
    cluster = api.get_cluster(cluster_id)
    stack_name = cluster.get('extra', {}).get('heat_stack_name', None)
    api.terminate_cluster(cluster_id, force=force)
    if not force:
        return u.render(res=None, status=204)
    return u.render({"stack_name": stack_name}, status=200)
def job_templates_list():
    """List job templates, normalizing tenant/project id fields."""
    query_args = u.get_request_args().to_dict()
    templates = api.get_job_templates(**query_args)
    for template in templates:
        u._replace_tenant_id_project_id(template)
        for binaries_key in ('mains', 'libs'):
            _replace_tenant_id_project_id_job_binary(template[binaries_key])
    return u.render(res=templates, name='job_templates')
def job_templates_update(job_templates_id, data):
    """Update a job template and normalize id fields in the response."""
    updated = api.update_job_template(job_templates_id, data).to_dict()
    u._replace_tenant_id_project_id(updated)
    for binaries_key in ('mains', 'libs'):
        _replace_tenant_id_project_id_job_binary(updated[binaries_key])
    return u.render({'job_template': updated})
def clusters_create(data):
    """Create a single cluster, or several when ``count`` is present.

    The v2 field ``plugin_version`` is mapped back to the legacy
    ``hadoop_version`` key expected by the backing API; this mapping can
    be dropped once APIv1 is removed.
    """
    data['hadoop_version'] = data.pop('plugin_version')
    if data.get('count') is None:
        result = api.create_cluster(data).to_wrapped_dict()
        u._replace_hadoop_version_plugin_version(result['cluster'])
        u._replace_tenant_id_project_id(result['cluster'])
    else:
        result = api.create_multiple_clusters(data)
        for wrapped in result['clusters']:
            u._replace_hadoop_version_plugin_version(wrapped['cluster'])
            u._replace_tenant_id_project_id(wrapped['cluster'])
    return u.render(result)
def clusters_delete(cluster_id, **kwargs):
    """Terminate a cluster; requires admin when ``all_tenants`` is set."""
    if kwargs.get('all_tenants'):
        # Cross-tenant deletion is an admin-only operation.
        context.ctx().check_admin()
    api.terminate_cluster(cluster_id)
    return u.render()
def clusters_scale(cluster_id, data):
    """Scale a cluster and normalize the rendered response fields."""
    result = u.to_wrapped_dict_no_render(api.scale_cluster, cluster_id, data)
    cluster = result['cluster']
    u._replace_hadoop_version_plugin_version(cluster)
    u._replace_tenant_id_project_id(cluster)
    _replace_tenant_id_project_id_provision_steps(cluster)
    return u.render(result)
def version_list():
    """Report the supported API versions.

    No authentication context is needed for this endpoint, so the
    request context is explicitly cleared.
    """
    context.set_ctx(None)
    versions = [{"id": "v1.0", "status": "CURRENT"}]
    return api_utils.render({"versions": versions})
def jobs_execute(data):
    """Execute a job and render it with APIv2 field names.

    The resulting job has ``oozie_job_id`` renamed to ``engine_job_id``
    before rendering (APIv1 keeps the old name).
    """
    result = {'job': api.execute_job(data)}
    # NOTE(review): dict.update / dict.pop are invoked unbound on the job
    # object, presumably to bypass overridden mutation methods on a
    # project Resource type (e.g. an immutable mapping) — confirm before
    # rewriting these as normal method calls.
    dict.update(result['job'],
                {'engine_job_id': result['job']['oozie_job_id']})
    dict.pop(result['job'], 'oozie_job_id')
    u._replace_tenant_id_project_id(result['job'])
    _replace_job_id_job_template_id(result['job'])
    return u.render(result)
def jobs_get(job_id):
    """Fetch a job execution; ``?refresh_status=true`` forces a refresh.

    The rendered job drops the legacy ``oozie_job_id`` key (APIv2).
    """
    data = u.get_request_args()
    # Query-string values are text; compare case-insensitively.
    refresh_status = six.text_type(
        data.get('refresh_status', 'false')).lower() == 'true'
    result = {'job': api.get_job_execution(job_id, refresh_status)}
    # NOTE(review): pop(..., force=True) is not plain-dict API — the job
    # object presumably is a project type with a custom pop(); confirm
    # before refactoring.
    result['job'].pop('oozie_job_id', force=True)
    u._replace_tenant_id_project_id(result['job'])
    _replace_job_id_job_template_id(result['job'])
    return u.render(result)
def job_types_get():
    """List job types, supporting repeated filter arguments.

    ``flat=False`` keeps each query arg as a list so filters of the form
    ``?type=Pig&type=Java`` work as expected.
    """
    request_args = u.get_request_args().to_dict(flat=False)
    if 'plugin_version' in request_args:
        # Map the v2 field name back to the legacy one the API expects.
        request_args['hadoop_version'] = request_args.pop('plugin_version')
    return u.render(job_types=api.get_job_types(**request_args))
def cluster_templates_update(cluster_template_id, data):
    """Update a cluster template, mapping plugin_version for the API."""
    if data.get('plugin_version'):
        data['hadoop_version'] = data.pop('plugin_version')
    result = u.to_wrapped_dict_no_render(
        api.update_cluster_template, cluster_template_id, data)
    template = result['cluster_template']
    u._replace_hadoop_version_plugin_version(template)
    u._replace_tenant_id_project_id(template)
    return u.render(result)
def cluster_templates_create(data):
    """Create a cluster template from the request body (APIv2).

    ``plugin_version`` is renamed to the legacy ``hadoop_version`` key;
    this can go away once APIv1 is removed.
    """
    data['hadoop_version'] = data.pop('plugin_version')
    result = api.create_cluster_template(data).to_wrapped_dict()
    template = result['cluster_template']
    u._replace_hadoop_version_plugin_version(template)
    u._replace_tenant_id_project_id(template)
    return u.render(result)
def cluster_templates_list():
    """List cluster templates with APIv2 field names."""
    request_args = u.get_request_args().to_dict()
    if 'plugin_version' in request_args:
        # Map the v2 filter name back to the legacy one.
        request_args['hadoop_version'] = request_args.pop('plugin_version')
    templates = api.get_cluster_templates(**request_args)
    for template in templates:
        u._replace_hadoop_version_plugin_version(template)
        u._replace_tenant_id_project_id(template)
    return u.render(res=templates, name='cluster_templates')
def make_json_error(ex):
    """Render any exception as a JSON error response.

    Werkzeug HTTP exceptions keep their own status code and description;
    anything else is reported as a 500 with the stringified exception.
    """
    if isinstance(ex, werkzeug_exceptions.HTTPException):
        status_code = ex.code
        description = ex.description
    else:
        status_code = 500
        description = str(ex)
    return api_utils.render({'error': status_code,
                             'error_message': description},
                            status=status_code)
def node_group_templates_list():
    """List node group templates with APIv2 field names."""
    request_args = u.get_request_args().to_dict()
    if 'plugin_version' in request_args:
        # Map the v2 filter name back to the legacy one.
        request_args['hadoop_version'] = request_args.pop('plugin_version')
    templates = api.get_node_group_templates(**request_args)
    for template in templates:
        u._replace_hadoop_version_plugin_version(template)
        u._replace_tenant_id_project_id(template)
    return u.render(res=templates, name="node_group_templates")
def cluster_template_export(cluster_template_id):
    """Export a cluster template as a downloadable JSON attachment."""
    content = u.to_wrapped_dict_no_render(
        api.export_cluster_template, cluster_template_id)
    template = content['cluster_template']
    u._replace_hadoop_version_plugin_version(template)
    u._replace_tenant_id_project_id(template)
    _cluster_template_export_helper(template)
    res = u.render(content)
    # Mark the response as a file download.
    res.headers.add('Content-Disposition', 'attachment',
                    filename='cluster_template.json')
    return res
def jobs_list(): result = api.job_execution_list(**u.get_request_args().to_dict()) # APIv2: renaming oozie_job_id -> engine_job_id # once APIv1 is deprecated this can be # removed for je in result: je.pop('oozie_job_id', force=True) u._replace_tenant_id_project_id(je) _replace_job_id_job_template_id(je) return u.render(res=result, name='jobs')
def clusters_get(cluster_id):
    """Fetch a cluster; ``?show_progress=true`` includes event details."""
    args = u.get_request_args()
    # Query-string values are text; compare case-insensitively.
    show_events = six.text_type(
        args.get('show_progress', 'false')).lower() == 'true'
    result = u.to_wrapped_dict_no_render(
        api.get_cluster, cluster_id, show_events)
    cluster = result['cluster']
    u._replace_hadoop_version_plugin_version(cluster)
    u._replace_tenant_id_project_id(cluster)
    _replace_tenant_id_project_id_provision_steps(cluster)
    return u.render(result)
def job_delete(job_id):
    """Delete a job and return an empty response."""
    api.delete_job(job_id)
    return u.render()
def clusters_get(cluster_id):
    """Return a single cluster wrapped for rendering."""
    cluster = api.get_cluster(cluster_id)
    return u.render(cluster.to_wrapped_dict())
def clusters_list():
    """List clusters matching the request's query arguments."""
    args = u.get_request_args().to_dict()
    clusters = api.get_clusters(**args)
    return u.render(clusters=[c.to_dict() for c in clusters])
def images_unset(image_id):
    """Unregister an image and return an empty response."""
    api.unregister_image(image_id)
    return u.render()
def cluster_templates_list():
    """List cluster templates matching the query arguments."""
    args = u.get_request_args().to_dict()
    templates = api.get_cluster_templates(**args)
    return u.render(cluster_templates=[t.to_dict() for t in templates])
def job_binary_internal_delete(job_binary_internal_id):
    """Delete an internal job binary and return an empty response."""
    api.delete_job_binary_internal(job_binary_internal_id)
    return u.render()
def data_sources_list():
    """List data sources matching the query arguments."""
    args = u.get_request_args().to_dict()
    sources = api.get_data_sources(**args)
    return u.render(data_sources=[ds.to_dict() for ds in sources])
def job_binary_internal_list():
    """List all internal job binaries."""
    binaries = api.get_job_binary_internals()
    return u.render(binaries=[j.to_dict() for j in binaries])
def job_binary_internal_get(job_binary_internal_id):
    """Return a single internal job binary wrapped for rendering."""
    binary = api.get_job_binary_internal(job_binary_internal_id)
    return u.render(binary.to_wrapped_dict())
def job_binary_internal_create(**values):
    """Create an internal job binary from the supplied values."""
    binary = api.create_job_binary_internal(values)
    return u.render(binary.to_wrapped_dict())
def job_binary_delete(job_binary_id):
    """Delete a job binary and return an empty response."""
    api.delete_job_binary(job_binary_id)
    return u.render()
def job_binary_create(data):
    """Create a job binary from the request body."""
    binary = api.create_job_binary(data)
    return u.render(binary.to_wrapped_dict())
def job_binary_internal_list():
    """List internal job binaries matching the query arguments."""
    args = u.get_request_args().to_dict()
    binaries = api.get_job_binary_internals(**args)
    return u.render(binaries=[j.to_dict() for j in binaries])
def job_binary_data(job_binary_id):
    """Return the raw data of a job binary.

    The backing call returns either a dict (rendered as JSON here) or a
    ready-made response object, which is passed through untouched.
    """
    data = api.get_job_binary_data(job_binary_id)
    # isinstance is the idiomatic type check and also accepts dict
    # subclasses, unlike the previous exact type comparison.
    if isinstance(data, dict):
        data = u.render(data)
    return data
def job_executions_list():
    """List job executions matching the query arguments."""
    args = u.get_request_args().to_dict()
    executions = [je.to_dict() for je in api.job_execution_list(**args)]
    return u.render(job_executions=executions)
def job_execute(job_id, data):
    """Launch a job and return the resulting execution."""
    execution = api.execute_job(job_id, data)
    return u.render(job_execution=execution.to_dict())
def cluster_templates_create(data):
    """Create a cluster template from the request body."""
    template = api.create_cluster_template(data)
    return u.render(template.to_wrapped_dict())
def job_config_hints_get(job_type):
    """Return configuration hints for the given job type."""
    hints = api.get_job_config_hints(job_type)
    return u.render(hints)
def job_list():
    """List all jobs."""
    return u.render(jobs=[job.to_dict() for job in api.get_jobs()])
def job_executions_list():
    """List all job executions."""
    executions = [je.to_dict() for je in api.job_execution_list()]
    return u.render(job_executions=executions)
def clusters_delete(cluster_id):
    """Terminate a cluster and return an empty response."""
    api.terminate_cluster(cluster_id)
    return u.render()
def job_executions_status(job_execution_id):
    """Return the status of a job execution."""
    execution = api.get_job_execution_status(job_execution_id)
    return u.render(execution.to_wrapped_dict())
def clusters_scale(cluster_id, data):
    """Scale a cluster and return the updated cluster."""
    cluster = api.scale_cluster(cluster_id, data)
    return u.render(cluster.to_wrapped_dict())
def image_tags_delete(image_id, data):
    """Remove tags from an image and return the updated image."""
    image = api.remove_image_tags(image_id, **data)
    return u.render(image.wrapped_dict)
def job_binary_update(job_binary_id, data):
    """Update a job binary and return the updated record."""
    binary = api.update_job_binary(job_binary_id, data)
    return u.render(binary.to_wrapped_dict())
def image_tags_add(image_id, data):
    """Add tags to an image and return the updated image."""
    image = api.add_image_tags(image_id, **data)
    return u.render(image.wrapped_dict)
def job_get(job_id):
    """Return a single job wrapped for rendering."""
    job = api.get_job(job_id)
    return u.render(job.to_wrapped_dict())
def job_executions_delete(job_execution_id):
    """Delete a job execution and return an empty response."""
    api.delete_job_execution(job_execution_id)
    return u.render()
def data_sources_list():
    """List all data sources."""
    sources = api.get_data_sources()
    return u.render(data_sources=[ds.to_dict() for ds in sources])
def data_source_register(data):
    """Register a data source from the request body."""
    source = api.register_data_source(data)
    return u.render(source.to_wrapped_dict())
def data_source_get(data_source_id):
    """Return a single data source wrapped for rendering."""
    source = api.get_data_source(data_source_id)
    return u.render(source.to_wrapped_dict())
def cluster_templates_list():
    """List cluster templates matching the query arguments."""
    args = u.get_request_args().to_dict()
    templates = api.get_cluster_templates(**args)
    return u.render(cluster_templates=[t.to_dict() for t in templates])
def data_source_delete(data_source_id):
    """Delete a data source and return an empty response."""
    api.delete_data_source(data_source_id)
    return u.render()
def cluster_templates_delete(cluster_template_id):
    """Delete a cluster template and return an empty response."""
    api.terminate_cluster_template(cluster_template_id)
    return u.render()
def job_executions_cancel(job_execution_id):
    """Cancel a job execution and return its updated state."""
    execution = api.cancel_job_execution(job_execution_id)
    return u.render(execution.to_wrapped_dict())
def cluster_templates_get(cluster_template_id):
    """Return a single cluster template wrapped for rendering."""
    template = api.get_cluster_template(cluster_template_id)
    return u.render(template.to_wrapped_dict())