def get_all_requests():
    """
    Returns all of the requests in the request_status table,
    newest update first, serialized to JSON-compatible objects.

    Raises:
        database.DbError: if the query fails.
    """
    sql = """
        SELECT
            request_id,
            request_group_id,
            granule_id,
            object_key,
            job_type,
            restore_bucket_dest,
            archive_bucket_dest,
            job_status,
            request_time,
            last_update_time,
            err_msg
        FROM
            request_status
        ORDER BY last_update_time desc """
    try:
        rows = database.single_query(sql, ())
        # Round-trip through json to coerce non-serializable values
        # (e.g. datetimes) via myconverter.
        result = json.loads(json.dumps(rows, default=myconverter))
    except database.DbError as err:
        print(str(err))
        # BUGFIX: the original swallowed the error and then hit
        # UnboundLocalError on `return result`; propagate the failure.
        raise
    return result
def get_all_requests():
    """
    Fetch every row of the request_status table, most recently
    updated first.

    Returns:
        The query result converted to JSON-compatible objects.

    Raises:
        DatabaseError: if the underlying query raises DbError.
    """
    sql = """
        SELECT
            request_id,
            request_group_id,
            granule_id,
            object_key,
            job_type,
            restore_bucket_dest,
            archive_bucket_dest,
            job_status,
            request_time,
            last_update_time,
            err_msg
        FROM
            request_status
        ORDER BY last_update_time desc """
    try:
        db_connect_info = get_dbconnect_info()
        query_rows = database.single_query(sql, db_connect_info, ())
        return result_to_json(query_rows)
    except DbError as err:
        LOGGER.exception(f"DbError: {str(err)}")
        raise DatabaseError(str(err))
def update_request_status_for_job(request_id, status, err_msg=None):
    """
    Update the job_status (and optionally err_msg) of one job row,
    stamping last_update_time with the current UTC time.

    Args:
        request_id: identifies the row to update; required.
        status: the new job_status value; required.
        err_msg: optional error text to store alongside the status.

    Raises:
        BadRequestError: if request_id or status is missing.
        DatabaseError: if the update fails.
    """
    # Both identifying arguments are mandatory.
    if request_id is None:
        raise BadRequestError("No request_id provided")
    if status is None:
        raise BadRequestError("A new status must be provided")

    now = get_utc_now_iso()
    sql = """
        UPDATE request_status
        SET job_status = %s,
            last_update_time = %s,
            err_msg = %s
        WHERE request_id = %s
    """
    try:
        db_connect_info = get_dbconnect_info()
        return database.single_query(
            sql, db_connect_info, (status, now, err_msg, request_id))
    except DbError as err:
        msg = f"DbError updating status for job {request_id} to {status}. {str(err)}"
        LOGGER.exception(msg)
        raise DatabaseError(str(err))
def get_job_by_request_id(request_id):
    """
    Look up a single request_status row by its request_id.

    Args:
        request_id: the primary identifier of the job row.

    Returns:
        The matching row(s) converted to JSON-compatible objects.

    Raises:
        DatabaseError: if the query fails.
    """
    sql = """
        SELECT
            request_id,
            request_group_id,
            granule_id,
            object_key,
            job_type,
            restore_bucket_dest,
            archive_bucket_dest,
            job_status,
            request_time,
            last_update_time,
            err_msg
        FROM
            request_status
        WHERE request_id = %s
        """
    try:
        db_connect_info = get_dbconnect_info()
        matching_rows = database.single_query(
            sql, db_connect_info, (request_id, ))
        return result_to_json(matching_rows)
    except DbError as err:
        LOGGER.exception(f"DbError: {str(err)}")
        raise DatabaseError(str(err))
def get_jobs_by_status(status, max_days_old=None):
    """
    Fetch request_status rows matching a job_status, optionally limited
    to rows updated within the last max_days_old days, newest first.

    Args:
        status: the job_status value to filter on; required.
        max_days_old: optional maximum age (in days) of last_update_time.

    Returns:
        The matching rows converted to JSON-compatible objects.

    Raises:
        BadRequestError: if status is missing.
        DatabaseError: if the query fails.
    """
    if status is None:
        raise BadRequestError("A status must be provided")

    sql = """
        SELECT
            request_id,
            request_group_id,
            granule_id,
            object_key,
            job_type,
            restore_bucket_dest,
            archive_bucket_dest,
            job_status,
            request_time,
            last_update_time,
            err_msg
        FROM
            request_status
        WHERE job_status = %s
        """
    orderby = """ order by last_update_time desc """
    try:
        db_connect_info = get_dbconnect_info()
        # Build the final query and parameter tuple in one place, then
        # run a single query for either branch.
        if max_days_old:
            age_filter = """ and last_update_time > CURRENT_DATE at time zone 'utc' - INTERVAL '%s' DAY"""
            query = sql + age_filter + orderby
            params = (status, max_days_old, )
        else:
            query = sql + orderby
            params = (status, )
        matching_rows = database.single_query(query, db_connect_info, params)
        return result_to_json(matching_rows)
    except DbError as err:
        LOGGER.exception(f"DbError: {str(err)}")
        raise DatabaseError(str(err))
def test_single_query(self):
    """
    Tests that single_query returns whatever the underlying query
    produces, for both a populated and an empty result set.
    """
    qresult = []
    row = self.build_row('key1', 'value2', 'value3')
    qresult.append(psycopg2.extras.RealDictRow(row))
    row = self.build_row('key2', 'value4', 'value5')
    qresult.append(psycopg2.extras.RealDictRow(row))
    empty_result = []
    # BUGFIX: the original replaced database.single_query with a Mock
    # and never restored it, leaking the mock into every later test.
    original_single_query = database.single_query
    database.single_query = Mock(side_effect=[qresult, empty_result])
    try:
        sql_stmt = 'Select * from mytable'
        rows = database.single_query(sql_stmt, self.dbconnect_info)
        self.assertEqual(qresult, rows)
        rows = database.single_query(sql_stmt, self.dbconnect_info)
        self.assertEqual(empty_result, rows)
        database.single_query.assert_called()
    finally:
        database.single_query = original_single_query
def test_single_query_secretsmanager(self):
    """
    Tests that single_query raises DbError with the expected message
    when the database host cannot be resolved.
    """
    dbconnect_info = {
        'db_host': 'my.db.host.gov',
        'db_port': 5432,
        'db_name': 'postgres',
        'db_user': '******',
        'db_pw': 'secret'
    }
    sql_stmt = 'Select * from mytable'
    exp_err = (
        'Database Error. could not translate host name "my.db.host.gov" to address: Unknown host\n'
    )
    try:
        database.single_query(sql_stmt, dbconnect_info)
        # BUGFIX: the original silently passed when no exception was
        # raised; the test must fail if DbError does not occur.
        self.fail("expected DbError to be raised")
    except DbError as err:
        self.assertEqual(exp_err, str(err))
def test_single_query_ssm(self):
    """
    Tests that single_query raises DbError with the expected message
    when connection parameters are fetched from SSM but the host
    cannot be resolved.
    """
    # BUGFIX: the original replaced boto3.client globally and never
    # restored it, leaking the mock into every later test.
    original_boto3_client = boto3.client
    boto3.client = Mock()
    try:
        s3_cli = boto3.client('ssm')
        s3_cli.get_parameter = Mock(side_effect=[
            os.environ["DATABASE_HOST"], os.environ["DATABASE_PW"]
        ])
        qresult = []
        row = self.build_row('key1', 'value2', 'value3')
        qresult.append(psycopg2.extras.RealDictRow(row))
        row = self.build_row('key2', 'value4', 'value5')
        qresult.append(psycopg2.extras.RealDictRow(row))
        sql_stmt = 'Select * from mytable'
        exp_err = ('Database Error. could not translate host name'
                   ' "my.db.host.gov" to address: Unknown host\n')
        try:
            database.single_query(sql_stmt, self.dbconnect_info)
            # BUGFIX: the original silently passed when no exception
            # was raised; fail explicitly if DbError does not occur.
            self.fail("expected DbError to be raised")
        except DbError as err:
            self.assertEqual(exp_err, str(err))
    finally:
        boto3.client = original_boto3_client
def delete_all_requests():
    """
    Remove every row from the request_status table.

    TODO: Currently this method is only used to facilitate testing,
    so unit tests may not be complete.

    Raises:
        DatabaseError: if the delete fails.
    """
    sql = """
        DELETE FROM request_status """
    try:
        db_connect_info = get_dbconnect_info()
        return database.single_query(sql, db_connect_info, ())
    except DbError as err:
        LOGGER.exception(f"DbError: {str(err)}")
        raise DatabaseError(str(err))
def delete_request(request_id):
    """
    Remove the request_status row identified by request_id.

    Args:
        request_id: identifies the row to delete; required.

    Raises:
        BadRequestError: if request_id is missing.
        DatabaseError: if the delete fails.
    """
    if request_id is None:
        raise BadRequestError("No request_id provided")
    sql = """
        DELETE FROM request_status
        WHERE request_id = %s
        """
    try:
        db_connect_info = get_dbconnect_info()
        return database.single_query(sql, db_connect_info, (request_id, ))
    except DbError as err:
        LOGGER.exception(f"DbError: {str(err)}")
        raise DatabaseError(str(err))
def get_jobs_by_request_group_id(request_group_id):
    """
    Fetch all request_status rows sharing a request_group_id,
    newest update first.

    Args:
        request_group_id: the group identifier to filter on; required.

    Returns:
        The matching rows converted to JSON-compatible objects.

    Raises:
        BadRequestError: if request_group_id is missing.
        DatabaseError: if the query fails.
    """
    if request_group_id is None:
        raise BadRequestError("A request_group_id must be provided")

    sql = """
        SELECT
            request_id,
            request_group_id,
            granule_id,
            object_key,
            job_type,
            restore_bucket_dest,
            archive_bucket_dest,
            job_status,
            request_time,
            last_update_time,
            err_msg
        FROM
            request_status
        WHERE request_group_id = %s
        """
    orderby = """ order by last_update_time desc """
    query = sql + orderby
    try:
        db_connect_info = get_dbconnect_info()
        group_rows = database.single_query(
            query, db_connect_info, (request_group_id, ))
        return result_to_json(group_rows)
    except DbError as err:
        LOGGER.exception(f"DbError: {str(err)}")
        raise DatabaseError(str(err))
def submit_request(data):
    """
    Takes the provided request data (as a dict) and attempts to update
    the database with a new request.

    Args:
        data: dict with required keys request_id, request_group_id,
            granule_id, object_key, job_type, job_status; optional keys
            request_time, last_update_time, restore_bucket_dest,
            archive_bucket_dest, err_msg. NOTE: missing optional keys
            are filled in with None on the caller's dict.

    Returns:
        The request_id of the inserted row.

    Raises:
        BadRequestError: if a required key is missing from the input.
        DatabaseError: if the insert fails.
    """
    # build and run the insert
    sql = """
        INSERT INTO request_status (
            request_id, request_group_id, granule_id,
            object_key, job_type,
            restore_bucket_dest, archive_bucket_dest,
            job_status, request_time,
            last_update_time, err_msg
        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
    # Timestamps might be provided; if not use the current UTC time.
    date = get_utc_now_iso()
    if "request_time" in data:
        rq_date = dateutil.parser.parse(data["request_time"])
    else:
        rq_date = date
    if "last_update_time" in data:
        lu_date = dateutil.parser.parse(data["last_update_time"])
    else:
        lu_date = date
    # Optional columns default to NULL (setdefault replaces the
    # repeated `if not "x" in data` boilerplate of the original).
    for optional_key in ("restore_bucket_dest", "archive_bucket_dest", "err_msg"):
        data.setdefault(optional_key, None)
    try:
        params = (
            data["request_id"],
            data["request_group_id"],
            data["granule_id"],
            data["object_key"],
            data["job_type"],
            data["restore_bucket_dest"],
            data["archive_bucket_dest"],
            data["job_status"],
            rq_date,
            lu_date,
            data["err_msg"],
        )
    except KeyError as err:
        raise BadRequestError(f"Missing {str(err)} in input data")
    try:
        dbconnect_info = get_dbconnect_info()
        database.single_query(sql, dbconnect_info, params)
    except DbError as err:
        LOGGER.exception(f"DbError: {str(err)}")
        raise DatabaseError(str(err))
    return data["request_id"]