def test_submit_request_error_status(self):
        """
        Tests that a job with 'error' status is written to the db and can be
        read back by request_id.
        """
        utc_now_exp = UTC_NOW_EXP_4
        requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
        requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID4])
        # Payload for an error job; no bucket destinations apply.
        data = {
            "request_id": REQUEST_ID4,
            "err_msg": "Error message goes here",
            "request_group_id": REQUEST_GROUP_ID_EXP_2,
            "granule_id": "granule_4",
            "object_key": "objectkey_4",
            "job_type": "restore",
            "job_status": "error",
            "request_time": utc_now_exp,
        }
        query_rows, inserted_row = create_insert_request(
            REQUEST_ID4, data["request_group_id"],
            data["granule_id"], data["object_key"], data["job_type"],
            None, None, data["job_status"], data["request_time"],
            None, data["err_msg"])
        # submit_request and the follow-up lookup consume these in order.
        database.single_query = Mock(
            side_effect=[query_rows, inserted_row, None, None])
        mock_ssm_get_parameter(4)
        try:
            requests_db.submit_request(data)
            database.single_query.assert_called_once()
        except requests_db.DatabaseError as err:
            self.fail(f"submit_request. {str(err)}")

        # The stored job should round-trip through get_job_by_request_id.
        try:
            fetched = requests_db.get_job_by_request_id(REQUEST_ID4)
            self.assertEqual(result_to_json(inserted_row), fetched)
        except requests_db.DatabaseError as err:
            self.fail(f"get_job_by_request_id. {str(err)}")
# Ejemplo n.º 2 (scraped-example separator; commented out — not Python code)
    def test_task_no_retries_env_var(self):
        """
        Test environment var RESTORE_REQUEST_RETRIES not set - use default.

        Deletes RESTORE_REQUEST_RETRIES before invoking the task so the
        default retry count is used, and restores it afterwards regardless
        of how the test exits.
        """
        # Removed here; unconditionally restored in the finally block below.
        del os.environ['RESTORE_REQUEST_RETRIES']
        exp_event = {}
        granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
        exp_event["input"] = {
            "granules": [{
                "granuleId": granule_id,
                "keys": [KEY1]
            }]
        }
        exp_event["config"] = {"glacier-bucket": "some_bucket"}

        # Patch boto3.client so both the 's3' and 'ssm' clients are mocks;
        # the single restore_object call succeeds.
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.head_object = Mock()
        s3_cli.restore_object = Mock(side_effect=[None])
        CumulusLogger.info = Mock()
        requests_db.request_id_generator = Mock(return_value=REQUEST_ID1)
        # Expected task output: one granule with one successfully
        # requested file.
        exp_gran = {}
        exp_gran['granuleId'] = granule_id
        exp_files = []

        exp_file = {}
        exp_file['key'] = FILE1
        exp_file['dest_bucket'] = PROTECTED_BUCKET
        exp_file['success'] = True
        exp_file['err_msg'] = ''
        exp_files.append(exp_file)

        exp_gran['files'] = exp_files
        qresult_1_inprogress, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, granule_id, FILE1, "restore",
            "some_bucket", "inprogress", UTC_NOW_EXP_1, None, None)
        database.single_query = Mock(side_effect=[qresult_1_inprogress])
        mock_ssm_get_parameter(1)
        try:
            try:
                result = request_files.task(exp_event, self.context)
            except request_files.RestoreRequestError as err:
                self.fail(str(err))
            self.assertEqual(exp_gran, result)

            boto3.client.assert_called_with('ssm')
            s3_cli.head_object.assert_called_with(Bucket='some_bucket',
                                                  Key=FILE1)
            restore_req_exp = {
                'Days': 5,
                'GlacierJobParameters': {
                    'Tier': 'Standard'
                }
            }
            s3_cli.restore_object.assert_called_with(
                Bucket='some_bucket',
                Key=FILE1,
                RestoreRequest=restore_req_exp)
            database.single_query.assert_called_once()
        finally:
            # Fix: the original restored the env var only on the success and
            # RestoreRequestError paths, so any other exception leaked test
            # state into later tests. finally restores it unconditionally.
            os.environ['RESTORE_REQUEST_RETRIES'] = '3'
    def test_handler_add(self):
        """
        Test successful with four keys returned.

        An 'add' request missing 'granule_id' or 'request_group_id' must
        raise BadRequestError; a complete request returns the inserted row.
        """
        utc_now_exp = UTC_NOW_EXP_1
        requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
        requests_db.request_id_generator = Mock(return_value=REQUEST_ID1)
        granule_id = 'granule_1'
        status = "error"
        req_err = "error submitting restore request"
        handler_input_event = {
            "function": "add",
            "error": req_err,
        }

        query_rows, inserted_row = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, granule_id, "object_key",
            "restore", "my_s3_bucket", status, utc_now_exp, None, req_err)
        database.single_query = Mock(side_effect=[query_rows, inserted_row])
        self.mock_ssm_get_parameter(2)

        # Missing 'granule_id' is rejected.
        with self.assertRaises(request_status.BadRequestError) as ctx:
            request_status.handler(handler_input_event, None)
        self.assertEqual("Missing 'granule_id' in input data",
                         str(ctx.exception))

        # Missing 'request_group_id' is rejected.
        handler_input_event["granule_id"] = granule_id
        with self.assertRaises(request_status.BadRequestError) as ctx:
            request_status.handler(handler_input_event, None)
        self.assertEqual("Missing 'request_group_id' in input data",
                         str(ctx.exception))

        # Complete request succeeds and returns the inserted row.
        handler_input_event["request_group_id"] = REQUEST_GROUP_ID_EXP_1
        try:
            result = request_status.handler(handler_input_event, None)
            self.assertEqual(result_to_json(inserted_row), result)
            database.single_query.assert_called()
        except (request_status.BadRequestError, requests_db.DbError) as err:
            self.fail(err)
    def test_submit_request_inprogress_status(self):
        """
        Tests that an inprogress job is written to the db and can be read
        back by request_id.
        """
        utc_now_exp = UTC_NOW_EXP_1
        requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
        requests_db.request_id_generator = Mock(side_effect=[REQUEST_ID1])
        # Payload for an 'inprogress' restore job with both bucket
        # destinations populated.
        data = {}
        data["request_id"] = REQUEST_ID1
        data["request_group_id"] = REQUEST_GROUP_ID_EXP_1
        data["granule_id"] = "granule_1"
        data["object_key"] = "thisisanobjectkey"
        data["job_type"] = "restore"
        data["restore_bucket_dest"] = "my_s3_bucket"
        data["archive_bucket_dest"] = "your_s3_bucket"
        data["job_status"] = "inprogress"
        data["request_time"] = utc_now_exp
        # Fix: job_status and archive_bucket_dest were passed in swapped
        # positions. The sibling error-status test's 11-arg call puts the two
        # bucket destinations (args 6-7) before job_status and request_time
        # (args 8-9); match that order here.
        qresult, exp_result = create_insert_request(
            REQUEST_ID1, data["request_group_id"], data["granule_id"],
            data["object_key"], data["job_type"],
            data["restore_bucket_dest"], data["archive_bucket_dest"],
            data["job_status"], data["request_time"], None, None)
        # submit_request and the follow-up lookup consume these in order.
        database.single_query = Mock(side_effect=[qresult, exp_result, None, None])
        mock_ssm_get_parameter(4)
        try:
            requests_db.submit_request(data)
            database.single_query.assert_called_once()
        except requests_db.DatabaseError as err:
            self.fail(f"submit_request. {str(err)}")

        try:
            result = requests_db.get_job_by_request_id(REQUEST_ID1)
            expected = result_to_json(exp_result)
            self.assertEqual(expected, result)
        except requests_db.DatabaseError as err:
            self.fail(f"get_job_by_request_id. {str(err)}")
# Ejemplo n.º 5 (scraped-example separator; commented out — not Python code)
    def test_task_one_granule_4_files_success(self):
        """
        Test four files for one granule - successful

        All four restore_object calls succeed, so the task result lists
        every file as successfully requested.
        """
        granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
        files = [KEY1, KEY2, KEY3, KEY4]
        input_event = {
            "input": {
                "granules": [{
                    "granuleId": granule_id,
                    "keys": files
                }]
            },
            "config": {
                "glacier-bucket": "my-dr-fake-glacier-bucket"
            }
        }

        # Patch boto3.client so both the 's3' and 'ssm' clients used by the
        # task are mocks; restore_object succeeds for all four files.
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.restore_object = Mock(side_effect=[None, None, None, None])
        s3_cli.head_object = Mock()
        CumulusLogger.info = Mock()
        # Canned db rows returned when each file's restore request is
        # inserted.
        qresult_1_inprogress, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, granule_id, files[0],
            "restore", "some_bucket", "inprogress", UTC_NOW_EXP_1, None, None)
        qresult_3_inprogress, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, granule_id, files[2],
            "restore", "some_bucket", "inprogress", UTC_NOW_EXP_1, None, None)
        qresult_4_inprogress, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, granule_id, files[3],
            "restore", "some_bucket", "inprogress", UTC_NOW_EXP_1, None, None)

        # Presumably the first generated id is the request group id and the
        # rest are per-file request ids — TODO confirm against
        # request_files.task.
        requests_db.request_id_generator = Mock(side_effect=[
            REQUEST_GROUP_ID_EXP_1, REQUEST_ID1, REQUEST_ID2, REQUEST_ID3,
            REQUEST_ID4
        ])
        # NOTE(review): qresult_1_inprogress is reused for the second file —
        # looks intentional, since the row contents are never asserted below.
        database.single_query = Mock(side_effect=[
            qresult_1_inprogress, qresult_1_inprogress, qresult_3_inprogress,
            qresult_4_inprogress
        ])
        mock_ssm_get_parameter(4)

        try:
            result = request_files.task(input_event, self.context)
        except requests_db.DatabaseError as err:
            self.fail(str(err))

        # The task must have checked every file in the glacier bucket...
        boto3.client.assert_called_with('ssm')
        s3_cli.head_object.assert_any_call(Bucket='my-dr-fake-glacier-bucket',
                                           Key=FILE1)
        s3_cli.head_object.assert_any_call(Bucket='my-dr-fake-glacier-bucket',
                                           Key=FILE2)
        s3_cli.head_object.assert_any_call(Bucket='my-dr-fake-glacier-bucket',
                                           Key=FILE3)
        s3_cli.head_object.assert_any_call(Bucket='my-dr-fake-glacier-bucket',
                                           Key=FILE4)
        restore_req_exp = {
            'Days': 5,
            'GlacierJobParameters': {
                'Tier': 'Standard'
            }
        }

        # ...and issued a Standard-tier, 5-day restore request for each.
        s3_cli.restore_object.assert_any_call(
            Bucket='my-dr-fake-glacier-bucket',
            Key=FILE1,
            RestoreRequest=restore_req_exp)
        s3_cli.restore_object.assert_any_call(
            Bucket='my-dr-fake-glacier-bucket',
            Key=FILE2,
            RestoreRequest=restore_req_exp)
        s3_cli.restore_object.assert_any_call(
            Bucket='my-dr-fake-glacier-bucket',
            Key=FILE3,
            RestoreRequest=restore_req_exp)
        s3_cli.restore_object.assert_called_with(
            Bucket='my-dr-fake-glacier-bucket',
            Key=FILE4,
            RestoreRequest=restore_req_exp)

        exp_gran = {}
        exp_gran['granuleId'] = granule_id

        exp_files = self.get_expected_files()
        exp_gran['files'] = exp_files
        self.assertEqual(exp_gran, result)
        database.single_query.assert_called()  #called 4 times
# Ejemplo n.º 6 (scraped-example separator; commented out — not Python code)
    def test_task_client_error_2_times(self):
        """
        Test two files, first successful, second has two errors, then success.

        Both files end up marked successful in the task result because the
        second file's restore succeeds on its final attempt.
        """
        exp_event = {}
        exp_event["config"] = {"glacier-bucket": "some_bucket"}
        gran = {}
        granule_id = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"
        gran["granuleId"] = granule_id
        keys = [KEY1, KEY2]
        gran["keys"] = keys
        exp_event["input"] = {"granules": [gran]}
        requests_db.request_id_generator = Mock(side_effect=[
            REQUEST_GROUP_ID_EXP_1, REQUEST_ID1, REQUEST_GROUP_ID_EXP_2,
            REQUEST_ID2, REQUEST_ID3
        ])
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.head_object = Mock()

        # restore_object call sequence: FILE1 succeeds immediately; FILE2
        # raises NoSuchBucket twice and then succeeds on the final attempt.
        s3_cli.restore_object = Mock(side_effect=[
            None,
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'),
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'), None
        ])
        CumulusLogger.info = Mock()
        CumulusLogger.error = Mock()
        # Expected task output: both files reported as successful.
        exp_gran = {}
        exp_gran['granuleId'] = granule_id
        exp_files = []

        exp_file = {}
        exp_file['key'] = FILE1
        exp_file['dest_bucket'] = PROTECTED_BUCKET
        exp_file['success'] = True
        exp_file['err_msg'] = ''
        exp_files.append(exp_file)

        exp_file = {}
        exp_file['key'] = FILE2
        exp_file['dest_bucket'] = PROTECTED_BUCKET
        exp_file['success'] = True
        exp_file['err_msg'] = ''
        exp_files.append(exp_file)

        exp_gran['files'] = exp_files

        # Canned db rows consumed by single_query in order (see side_effect
        # below). NOTE(review): qresult2 is built with keys[0] although the
        # errors occur on keys[1] — presumably harmless since the row
        # contents are not asserted; verify against requests_db usage.
        qresult1, _ = create_insert_request(REQUEST_ID1,
                                            REQUEST_GROUP_ID_EXP_1, granule_id,
                                            keys[0], "restore", "some_bucket",
                                            "inprogress", UTC_NOW_EXP_1, None,
                                            None)
        qresult2, _ = create_insert_request(REQUEST_ID2,
                                            REQUEST_GROUP_ID_EXP_1, granule_id,
                                            keys[0], "restore", "some_bucket",
                                            "error", UTC_NOW_EXP_1, None,
                                            "'Code': 'NoSuchBucket'")
        qresult3, _ = create_insert_request(REQUEST_ID3,
                                            REQUEST_GROUP_ID_EXP_1, granule_id,
                                            keys[1], "restore", "some_bucket",
                                            "inprogress", UTC_NOW_EXP_1, None,
                                            None)
        database.single_query = Mock(
            side_effect=[qresult1, qresult2, qresult2, qresult3])
        mock_ssm_get_parameter(4)

        result = request_files.task(exp_event, self.context)
        self.assertEqual(exp_gran, result)

        boto3.client.assert_called_with('ssm')
        s3_cli.restore_object.assert_any_call(Bucket='some_bucket',
                                              Key=FILE1,
                                              RestoreRequest={
                                                  'Days': 5,
                                                  'GlacierJobParameters': {
                                                      'Tier': 'Standard'
                                                  }
                                              })
        database.single_query.assert_called()  # 4 times
# Ejemplo n.º 7 (scraped-example separator; commented out — not Python code)
    def test_task_client_error_3_times(self):
        """
        Test three files, two successful, one errors on all retries and fails.

        The task is expected to raise RestoreRequestError whose message
        embeds the per-file results (exp_err below).
        """
        keys = [KEY1, KEY3, KEY4]

        exp_event = {}
        exp_event["config"] = {"glacier-bucket": "some_bucket"}
        gran = {}
        gran["granuleId"] = "MOD09GQ.A0219114.N5aUCG.006.0656338553321"

        gran["keys"] = keys
        exp_event["input"] = {"granules": [gran]}

        requests_db.request_id_generator = Mock(side_effect=[
            REQUEST_GROUP_ID_EXP_1, REQUEST_ID1, REQUEST_GROUP_ID_EXP_3,
            REQUEST_ID2, REQUEST_ID3, REQUEST_ID4
        ])
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.head_object = Mock()
        # restore_object call sequence: first call succeeds, then errors and
        # a success are interleaved so one file fails on every attempt —
        # exact mapping of calls to files/retries depends on the task's
        # retry loop; TODO confirm against request_files.task.
        s3_cli.restore_object = Mock(side_effect=[
            None,
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'), None,
            ClientError({'Error': {
                'Code': 'NoSuchBucket'
            }}, 'restore_object'),
            ClientError({'Error': {
                'Code': 'NoSuchKey'
            }}, 'restore_object')
        ])
        CumulusLogger.info = Mock()
        CumulusLogger.error = Mock()
        exp_gran = {}
        exp_gran['granuleId'] = gran["granuleId"]

        exp_files = self.get_exp_files_3_errs()

        exp_gran['files'] = exp_files
        exp_err = f"One or more files failed to be requested. {exp_gran}"
        # Canned db rows consumed by single_query in order (see side_effect
        # below). NOTE(review): the qresult_3_* rows are built with FILE2
        # even though keys[1] is KEY3 — presumably harmless since the row
        # contents are not asserted; verify against requests_db usage.
        qresult_1_inprogress, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, gran["granuleId"], FILE1,
            "restore", "some_bucket", "inprogress", UTC_NOW_EXP_1, None, None)
        qresult_1_error, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_1, gran["granuleId"], FILE1,
            "restore", "some_bucket", "error", UTC_NOW_EXP_1, None,
            "'Code': 'NoSuchBucket'")
        qresult_3_inprogress, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_3, gran["granuleId"], FILE2,
            "restore", "some_bucket", "inprogress", UTC_NOW_EXP_1, None, None)
        qresult_3_error, _ = create_insert_request(
            REQUEST_ID1, REQUEST_GROUP_ID_EXP_3, gran["granuleId"], FILE2,
            "restore", "some_bucket", "error", UTC_NOW_EXP_1, None,
            "'Code': 'NoSuchBucket'")
        database.single_query = Mock(side_effect=[
            qresult_1_inprogress, qresult_1_error, qresult_3_inprogress,
            qresult_1_error, qresult_3_error
        ])
        mock_ssm_get_parameter(5)
        try:
            request_files.task(exp_event, self.context)
            self.fail("RestoreRequestError expected")
        except request_files.RestoreRequestError as err:
            self.assertEqual(exp_err, str(err))

        boto3.client.assert_called_with('ssm')
        s3_cli.head_object.assert_any_call(Bucket='some_bucket', Key=FILE1)
        s3_cli.restore_object.assert_any_call(Bucket='some_bucket',
                                              Key=FILE1,
                                              RestoreRequest={
                                                  'Days': 5,
                                                  'GlacierJobParameters': {
                                                      'Tier': 'Standard'
                                                  }
                                              })
        database.single_query.assert_called()  # 5 times