def view_jobflow_with_ajax(context, request):
    """Serve AJAX requests against a job-flow resource.

    Dispatches on the ``type`` request parameter:

    - ``runjobflow``: start the job flow if the context allows it.
    - ``terminate``: terminate the context's known job flows.
    - ``getlog``: return S3 log key metadata as
      ``{'keys': [...], 'bucket_name': ...}``.
    - anything else: refresh and return the job-flow state for JSON.
    """
    action = request.params.get('type')
    resource = get_resource(context)
    emr_conn = get_emr_connection(resource)

    if action == 'runjobflow' and is_runnable_jobflow(context):
        run_jobflow(emr_conn, resource, context)

    jobflow_ids = get_jobflow_ids(context)
    if action == 'terminate' and jobflow_ids:
        # FIXME: enforce a termination against protection
        emr_conn.terminate_jobflows(jobflow_ids)

    if action == 'getlog':
        s3_conn = get_s3_connection(resource)
        keys_list = get_s3_log_keys(s3_conn, resource.log_uri, jobflow_ids)
        # The first result set carries the bucket; fall back to an
        # empty name when there are no log keys at all.
        bucket_name = keys_list[0].bucket.name if keys_list else u''
        keys = []
        for key in chain.from_iterable(keys_list):
            keys.append(dict(get_object_attributes(key, ['name', 'size'])))
        return {'keys': keys, 'bucket_name': bucket_name}

    # Default: refresh the job-flow state and hand it back.
    res_jobflows = get_jobflows_for_json(emr_conn, jobflow_ids)
    set_jobflow_state(context, res_jobflows)
    return res_jobflows
def view_download_file(context, request):
    """Download an S3 object named by the ``bucket``/``key`` request params.

    When a temporary file can be allocated for the object's size, the
    object is streamed back as an attachment; otherwise the response
    body tells the user to fetch the file themselves.

    Returns a 404 response when the key does not exist in the bucket
    (previously this crashed with ``AttributeError`` because
    ``bucket.lookup`` returns ``None`` for a missing key).
    """
    resource = get_resource(context)
    s3_conn = get_s3_connection(resource)
    bucket = s3_conn.get_bucket(request.params.get('bucket'))
    key_path = request.params.get('key')
    key = bucket.lookup(key_path)
    response = Response()
    if key is None:
        # ``key`` comes from an untrusted request parameter; guard the
        # missing-key case instead of dereferencing ``None.size``.
        response.status_int = 404
        response.text = u'No such key: {0}'.format(key_path)
        return response
    fp = get_temporary_file(key.size)
    if fp:
        key.get_contents_to_file(fp)
        fp.seek(0)  # rewind so app_iter streams from the beginning
        con_dis = 'attachment; filename={0}'.format(basename(key_path))
        response.content_disposition = con_dis
        response.app_iter = fp
    else:
        # No temporary file available (object too large to buffer).
        response.text = u'Download it yourself, '
        response.text += u'because of large file size: {0}'.format(key.size)
    return response