def test_post_acceptable_job(self, scheduled_jobs, get_json_happy_dict):
    """A well-formed job POST is accepted and published to the scanner
    queue; when the cluster lookup fails, ItemNotFound propagates."""
    payload = simplejson.dumps(get_json_happy_dict)
    scanner_queue = FakeSQS()
    response = self.post_job(scheduled_jobs, scanner_queue, payload)
    assert 'post_accepted' in response
    assert response['post_accepted']['result'] is True
    assert scanner_queue._published_msg == {'message': 'dummy'}
    # A failing cluster lookup must surface to the caller, not be swallowed.
    with mock.patch(
        'mycroft.models.aws_connections.TableConnection.get_connection'
    ), mock.patch(
        'mycroft.logic.job_actions.list_cluster_by_name',
        side_effect=ItemNotFound(),
    ), pytest.raises(ItemNotFound):
        post_job(scheduled_jobs, scanner_queue, payload)
def jobs(request):
    """
    Handle GET and POST requests to the ``/v1/jobs/`` endpoint.

    **GET /v1/jobs/** -- list all scheduled jobs from the ScheduledJobs
    dynamo table.

    *Example response*::

        [
            {'log_name': 'ad_click',
             'log_schema_version': 'initial',
             's3_log_uri': 'http://ad_click/schema.yaml?Signature=b&Expires=c&AccessKeyId=xxx',
             'start_date': '2014-05-01',
             'end_date': '',
             'contact_emails': ['dev1@example.com', 'dev2@example.com'],
             'redshift_id': 'abc123',
             'additional_arguments': '{"et_step": ["--force-et"]}'},
            ...
        ]

    ============ ===========
    Status Code  Description
    ============ ===========
    **200**      Success
    **500**      unknown exception
    ============ ===========

    **POST /v1/jobs/** -- create a new scheduled job from the JSON string
    in ``request.body`` and notify the "et" scanner queue.

    ============ ===========
    Status Code  Description
    ============ ===========
    **200**      Success
    **400**      bad hash_key: redshift_id, log_name, log_schema_version
                 and start_date must all be present
    **404**      invalid job parameters
    **500**      unknown exception
    ============ ===========

    * **Encoding type:** *application/json*

    :param request: framework request object; only ``method`` and ``body``
        are read here.
    :returns: a ``(status_code, payload)`` tuple, or ``None`` when the HTTP
        method is neither GET nor POST.
    """
    try:
        if request.method == "POST":
            return 200, post_job(
                TableConnection.get_connection("ScheduledJobs"),
                get_scanner_queue("et"),
                request.body,
            )
        elif request.method == "GET":
            return 200, list_all_jobs(
                TableConnection.get_connection("ScheduledJobs"))
    except PrimaryKeyError:
        # The dynamo hash key needs redshift_id, log_name,
        # log_schema_version and start_date to all be present.
        return 400, {"error": "bad hash_key"}
    except ValueError as e:
        if "ConditionalCheckFailedException" in repr(e):
            # A conditional-put failure almost always means the job already
            # exists.  Fixed: the message previously embedded a stray
            # backslash and indentation via a line continuation inside the
            # string literal.
            return 404, {
                "error": "ConditionalCheckFailed; possible duplicate job. "
                         "Delete existing job first"
            }
        return 404, {"error": repr(e)}
    except Exception as unknown_exception:
        return 500, {"error": repr(unknown_exception)}
def jobs(request):
    """
    Handle GET and POST requests to the ``/v1/jobs/`` endpoint.

    **GET /v1/jobs/** -- list all scheduled jobs from the ScheduledJobs
    dynamo table.

    ============ ===========
    Status Code  Description
    ============ ===========
    **200**      Success
    **500**      unknown exception
    ============ ===========

    **POST /v1/jobs/** -- create a new scheduled job from the JSON string
    in ``request.body`` and notify the "et" scanner queue.

    ============ ===========
    Status Code  Description
    ============ ===========
    **200**      Success
    **400**      bad hash_key: redshift_id, log_name, log_schema_version
                 and start_date must all be present
    **404**      invalid job parameters
    **500**      unknown exception
    ============ ===========

    * **Encoding type:** *application/json*

    :param request: framework request object; ``method`` and ``body`` are
        the only attributes read.
    :returns: ``(status_code, payload)`` tuple, or ``None`` for any HTTP
        method other than GET/POST.
    """
    try:
        method = request.method
        if method == "POST":
            jobs_table = TableConnection.get_connection('ScheduledJobs')
            return 200, post_job(
                jobs_table, get_scanner_queue('et'), request.body)
        if method == "GET":
            return 200, list_all_jobs(
                TableConnection.get_connection('ScheduledJobs'))
    except PrimaryKeyError:
        # Missing one of the hash-key fields.
        return 400, {'error': 'bad hash_key'}
    except ValueError as value_error:
        detail = repr(value_error)
        if "ConditionalCheckFailedException" in detail:
            # Conditional put failed -- most likely a duplicate job.
            return 404, {
                'error': "ConditionalCheckFailed; possible duplicate job. \ Delete existing job first"
            }
        return 404, {'error': detail}
    except Exception as unknown_exception:
        return 500, {'error': repr(unknown_exception)}
def post_job(self, scheduled_jobs, et_scanner_queue, input_string):
    """Call the module-level ``post_job`` with the dynamo connection and
    cluster lookup patched out, and return its result."""
    # Single with-statement replaces the nested pair; no AWS calls occur.
    with mock.patch(
        'mycroft.models.aws_connections.TableConnection.get_connection'
    ), mock.patch('mycroft.logic.job_actions.list_cluster_by_name'):
        return post_job(scheduled_jobs, et_scanner_queue, input_string)