def test_handler_db_read_err(self):
     """
     Test copy lambda with error reading db.
     """
     boto3.client = Mock()
     s3_cli = boto3.client('s3')
     s3_cli.copy_object = Mock(side_effect=[None])
     exp_request_ids = [REQUEST_ID7]
     _, exp_result = create_select_requests(exp_request_ids)
     time.sleep = Mock(side_effect=None)
     exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
     database.single_query = Mock(side_effect=[
         requests_db.DatabaseError(exp_err),
         requests_db.DatabaseError(exp_err)
     ])
     mock_ssm_get_parameter(2)
     try:
         copy_files_to_archive.handler(self.handler_input_event, None)
         self.fail("expected CopyRequestError")
     except copy_files_to_archive.CopyRequestError as err:
         exp_result = [{
             'success': False,
             'source_bucket': 'my-dr-fake-glacier-bucket',
             'source_key': self.exp_file_key1
         }]
         exp_err = f"File copy failed. {exp_result}"
         self.assertEqual(exp_err, str(err))
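A note on the mocking pattern above: when unittest.mock receives a list as side_effect, each successive call returns the next item, and any exception instance in the list is raised instead of returned. A generic illustration (not project code):

from unittest.mock import Mock

mock_query = Mock(side_effect=[[{'job_status': 'inprogress'}], RuntimeError('boom')])
result = mock_query()    # first call returns [{'job_status': 'inprogress'}]
# mock_query()           # second call raises RuntimeError('boom');
#                        # a third call would raise StopIteration (the list is exhausted)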
Example #2
    def test_handler_db_read_err(self,
                                 mock_sleep: MagicMock,
                                 mock_boto3_client: MagicMock,
                                 mock_database_single_query):
        """
        Test copy lambda with error reading db.
        """
        os.environ['COPY_RETRIES'] = '1'
        os.environ['COPY_RETRY_SLEEP_SECS'] = '0'

        mock_s3_cli = mock_boto3_client('s3')
        mock_s3_cli.copy_object = Mock(side_effect=[None])
        exp_request_ids = [REQUEST_ID7]
        _, exp_result = create_select_requests(exp_request_ids)
        exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
        mock_database_single_query.side_effect = [requests_db.DatabaseError(exp_err),
                                                  requests_db.DatabaseError(exp_err)]
        mock_secretsmanager_get_parameter(2)
        try:
            copy_files_to_archive.handler(self.handler_input_event, None)
            self.fail("expected CopyRequestError")
        except copy_files_to_archive.CopyRequestError as err:
            exp_result = [{'success': False,
                           'source_bucket': 'my-dr-fake-glacier-bucket',
                           'source_key': self.exp_file_key1}]
            exp_err = f"File copy failed. {exp_result}"
            self.assertEqual(exp_err, str(err))
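Examples that accept mock_sleep, mock_boto3_client, and mock_database_single_query parameters rely on @patch decorators that this listing does not show. A minimal sketch of how such a stack might look (the patch targets are assumptions; unittest.mock applies decorators bottom-up, so the bottom-most @patch supplies the first mock argument after self):

from unittest import TestCase
from unittest.mock import MagicMock, patch

class TestCopyFilesToArchive(TestCase):
    # Decorators apply bottom-up: the bottom-most @patch supplies the first
    # mock argument after self (mock_sleep), and so on upward.
    @patch('database.single_query')   # assumed target for mock_database_single_query
    @patch('boto3.client')            # assumed target for mock_boto3_client
    @patch('time.sleep')              # assumed target for mock_sleep
    def test_handler_db_read_err(self,
                                 mock_sleep: MagicMock,
                                 mock_boto3_client: MagicMock,
                                 mock_database_single_query: MagicMock):
        ...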
Example #3
    def test_handler_two_records_one_fail_one_success(self):
        """
        Test copy lambda with two files, one successful copy, one failed copy.
        """
        exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
        exp_file2_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.txt'

        exp_rec_2 = create_copy_event2()
        self.handler_input_event["Records"].append(exp_rec_2)
        exp_err_msg = ("An error occurred (AccessDenied) when calling "
                       "the copy_object operation: Unknown")
        exp_error = (
            "File copy failed. [{'success': False, "
            f"'source_bucket': '{self.exp_src_bucket}', "
            f"'source_key': '{exp_file2_key}', "
            f"'request_id': '{REQUEST_ID3}', "
            f"'target_bucket': '{self.exp_target_bucket}', "
            f"'err_msg': '{exp_err_msg}'"
            "}, {'success': True, "
            f"'source_bucket': '{self.exp_src_bucket}', "
            f"'source_key': '{exp_file_key}', "
            f"'request_id': '{REQUEST_ID4}', "
            f"'target_bucket': '{self.exp_target_bucket}', 'err_msg': ''"
            "}]")
        self.create_test_requests()
        boto3.client = Mock()
        mock_ssm_get_parameter(13)
        s3_cli = boto3.client('s3')
        s3_cli.copy_object = Mock(side_effect=[
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object'),
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object'),
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object'), None
        ])
        print_rows("begin")
        row = requests_db.get_job_by_request_id(REQUEST_ID3)
        self.assertEqual("inprogress", row[0]['job_status'])
        row = requests_db.get_job_by_request_id(REQUEST_ID4)
        self.assertEqual("inprogress", row[0]['job_status'])
        try:
            copy_files_to_archive.handler(self.handler_input_event, None)
            self.fail("expected CopyRequestError")
        except copy_files_to_archive.CopyRequestError as ex:
            self.assertEqual(exp_error, str(ex))

        print_rows("end")
        row = requests_db.get_job_by_request_id(REQUEST_ID3)
        self.assertEqual("error", row[0]['job_status'])
        self.assertEqual(exp_err_msg, row[0]['err_msg'])
        row = requests_db.get_job_by_request_id(REQUEST_ID4)
        self.assertEqual("complete", row[0]['job_status'])
Example #4
    def test_handler_one_file_fail_3x(self,
                                      mock_sleep: MagicMock,
                                      mock_boto3_client: MagicMock,
                                      mock_database_single_query: MagicMock):
        """
        Test copy lambda with one failed copy after 3 retries.
        """
        retry_sleep_seconds = 18
        os.environ['COPY_RETRY_SLEEP_SECS'] = str(retry_sleep_seconds)
        copy_retries = 2
        os.environ['COPY_RETRIES'] = str(copy_retries)
        mock_s3_cli = mock_boto3_client('s3')
        mock_s3_cli.copy_object = Mock(side_effect=[ClientError({'Error': {'Code': 'AccessDenied'}},
                                                                'copy_object'),
                                                    ClientError({'Error': {'Code': 'AccessDenied'}},
                                                                'copy_object'),
                                                    ClientError({'Error': {'Code': 'AccessDenied'}},
                                                                'copy_object')])
        mock_s3_cli.head_object = Mock()
        exp_error = "File copy failed. [{'success': False, " \
                    f"'source_bucket': '{self.exp_src_bucket}', " \
                    f"'source_key': '{self.exp_file_key1}', " \
                    f"'request_id': '{REQUEST_ID7}', " \
                    f"'target_bucket': '{self.exp_target_bucket}', " \
                    "'err_msg': 'An error occurred (AccessDenied) when calling " \
                    "the copy_object operation: Unknown'}]"
        exp_upd_result = []

        exp_request_ids = [REQUEST_ID7, REQUEST_ID4]
        _, exp_result = create_select_requests(exp_request_ids)

        # todo: The file under test does not call single_query. Remove this and other references in test code.
        mock_database_single_query.side_effect = [exp_result,
                                                  exp_result,
                                                  exp_upd_result,
                                                  exp_result,
                                                  exp_upd_result]

        mock_secretsmanager_get_parameter(5)
        try:
            copy_files_to_archive.handler(self.handler_input_event, None)
            self.fail("expected CopyRequestError")
        except copy_files_to_archive.CopyRequestError as ex:
            self.assertEqual(exp_error, str(ex))
        mock_boto3_client.assert_has_calls([call('secretsmanager')])
        mock_s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                                   CopySource={'Bucket': self.exp_src_bucket,
                                                               'Key': self.exp_file_key1},
                                                   Key=self.exp_file_key1)
        self.assertEqual(copy_retries, mock_sleep.call_count, 'Should sleep once between each attempt.')
        mock_sleep.assert_has_calls([call(retry_sleep_seconds)] * copy_retries)
        mock_database_single_query.assert_called()
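The retry tests set COPY_RETRIES and COPY_RETRY_SLEEP_SECS, then count copy_object calls and sleeps. The behaviour they exercise corresponds roughly to the loop below (a sketch under assumed names, not the actual copy_files_to_archive implementation):

import os
import time

from botocore.exceptions import ClientError


def copy_with_retries(s3_cli, source_bucket, source_key, target_bucket):
    """Hypothetical retry loop: COPY_RETRIES extra attempts, one sleep between attempts."""
    retries = int(os.environ.get('COPY_RETRIES', '2'))
    sleep_secs = float(os.environ.get('COPY_RETRY_SLEEP_SECS', '0'))
    err_msg = ''
    for attempt in range(retries + 1):
        try:
            s3_cli.copy_object(Bucket=target_bucket,
                               CopySource={'Bucket': source_bucket, 'Key': source_key},
                               Key=source_key)
            return {'success': True, 'err_msg': ''}
        except ClientError as err:
            err_msg = str(err)
            if attempt < retries:
                time.sleep(sleep_secs)
    return {'success': False, 'err_msg': err_msg}

With copy_retries = 2 this makes three copy_object attempts and sleeps twice, which is what the assertions above count.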
    def test_handler_one_file_fail_3x(self):
        """
        Test copy lambda with one failed copy after 3 retries.
        """
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.copy_object = Mock(side_effect=[
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object'),
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object'),
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object')
        ])
        s3_cli.head_object = Mock()
        exp_error = "File copy failed. [{'success': False, " \
                    f"'source_bucket': '{self.exp_src_bucket}', " \
                    f"'source_key': '{self.exp_file_key1}', " \
                    f"'request_id': '{REQUEST_ID7}', " \
                    f"'target_bucket': '{self.exp_target_bucket}', " \
                    "'err_msg': 'An error occurred (AccessDenied) when calling " \
                    "the copy_object operation: Unknown'}]"
        exp_upd_result = []

        exp_request_ids = [REQUEST_ID7, REQUEST_ID4]
        _, exp_result = create_select_requests(exp_request_ids)

        database.single_query = Mock(side_effect=[
            exp_result, exp_result, exp_upd_result, exp_result, exp_upd_result
        ])
        mock_ssm_get_parameter(5)
        try:
            copy_files_to_archive.handler(self.handler_input_event, None)
            self.fail("expected CopyRequestError")
        except copy_files_to_archive.CopyRequestError as ex:
            self.assertEqual(exp_error, str(ex))
        boto3.client.assert_called_with('ssm')
        s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                              CopySource={
                                                  'Bucket':
                                                  self.exp_src_bucket,
                                                  'Key': self.exp_file_key1
                                              },
                                              Key=self.exp_file_key1)
        database.single_query.assert_called()
Example #6
 def test_handler_one_file_success(self):
     """
     Test copy lambda with one file, expecting successful result.
     """
     del os.environ['COPY_RETRY_SLEEP_SECS']
     del os.environ['COPY_RETRIES']
     boto3.client = Mock()
     s3_cli = boto3.client('s3')
     s3_cli.copy_object = Mock(side_effect=[None])
     self.create_test_requests()
     mock_ssm_get_parameter(6)
     print_rows("begin")
     row = requests_db.get_job_by_request_id(REQUEST_ID3)
     self.assertEqual("inprogress", row[0]['job_status'])
     result = copy_files_to_archive.handler(self.handler_input_event, None)
     os.environ['COPY_RETRIES'] = '2'
     os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
     boto3.client.assert_called_with('ssm')
     exp_result = [{
         "success": True,
         "source_bucket": self.exp_src_bucket,
         "source_key": self.exp_file_key1,
         "request_id": REQUEST_ID3,
         "target_bucket": self.exp_target_bucket,
         "err_msg": ""
     }]
     self.assertEqual(exp_result, result)
     print_rows("end")
     row = requests_db.get_job_by_request_id(REQUEST_ID3)
     self.assertEqual("complete", row[0]['job_status'])
Example #7
    def test_handler_one_file_success(self,
                                      mock_sleep: MagicMock,
                                      mock_boto3_client: MagicMock,
                                      mock_database_single_query: MagicMock):
        """
        Test copy lambda with one file, expecting successful result.
        """
        os.environ['COPY_RETRIES'] = '2'
        os.environ['COPY_RETRY_SLEEP_SECS'] = '0'
        exp_src_bucket = 'my-dr-fake-glacier-bucket'

        mock_s3_cli = mock_boto3_client('s3')
        mock_s3_cli.copy_object = Mock(side_effect=[None])
        exp_upd_result = []
        exp_request_ids = [REQUEST_ID7]
        _, exp_result = create_select_requests(exp_request_ids)
        mock_database_single_query.side_effect = [exp_result, exp_upd_result]
        mock_secretsmanager_get_parameter(2)
        result = copy_files_to_archive.handler(self.handler_input_event, None)
        mock_boto3_client.assert_has_calls([call('secretsmanager')])
        mock_s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                                   CopySource={'Bucket': exp_src_bucket,
                                                               'Key': self.exp_file_key1},
                                                   Key=self.exp_file_key1)
        exp_result = [{"success": True,
                       "source_bucket": self.exp_src_bucket,
                       "source_key": self.exp_file_key1,
                       "request_id": REQUEST_ID7,
                       "target_bucket": self.exp_target_bucket,
                       "err_msg": ""}]
        self.assertEqual(exp_result, result)
        self.assertEqual(mock_sleep.call_count, 0, "There should be no sleeps on happy path.")
        mock_database_single_query.assert_called()
 def test_handler_db_update_err(self):
     """
     Test copy lambda with error updating db.
     """
     boto3.client = Mock()
     s3_cli = boto3.client('s3')
     s3_cli.copy_object = Mock(side_effect=[None])
     exp_request_ids = [REQUEST_ID7]
     _, exp_result = create_select_requests(exp_request_ids)
     time.sleep = Mock(side_effect=None)
     exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
     database.single_query = Mock(side_effect=[
         exp_result,
         requests_db.DatabaseError(exp_err),
         requests_db.DatabaseError(exp_err)
     ])
     mock_ssm_get_parameter(3)
     result = copy_files_to_archive.handler(self.handler_input_event, None)
     exp_result = [{
         'success': True,
         'source_bucket': 'my-dr-fake-glacier-bucket',
         'source_key': self.exp_file_key1,
         'request_id': REQUEST_ID7,
         'target_bucket': PROTECTED_BUCKET,
         'err_msg': ''
     }]
     self.assertEqual(exp_result, result)
 def test_handler_no_object_key_in_event(self):
     """
     Test copy lambda with missing "object" key in input event.
     """
     boto3.client = Mock()
     s3_cli = boto3.client('s3')
     s3_cli.copy_object = Mock(side_effect=[None])
     mydict = self.handler_input_event["Records"][0]["s3"]["object"]
     mydict.pop('key')
     exp_err = f'event record: "{self.handler_input_event["Records"][0]}" does not contain a ' \
               f'value for Records["s3"]["object"]["key"]'
     try:
         copy_files_to_archive.handler(self.handler_input_event, None)
         self.fail("expected CopyRequestError")
     except copy_files_to_archive.CopyRequestError as ex:
         self.assertEqual(exp_err, str(ex))
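The handler pulls the object key (and, per the standard S3 event layout, the bucket name) out of each record, which is exactly what test_handler_no_object_key_in_event breaks. A minimal event of the shape these tests manipulate (values are illustrative):

handler_input_event = {
    "Records": [{
        "s3": {
            "bucket": {"name": "my-dr-fake-glacier-bucket"},
            "object": {"key": "dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf"}
        }
    }]
}
# Deleting the "key" entry, as the test above does, should make the handler
# raise CopyRequestError for that record.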
 def test_handler_one_file_success(self):
     """
     Test copy lambda with one file, expecting successful result.
     """
     del os.environ['COPY_RETRY_SLEEP_SECS']
     del os.environ['COPY_RETRIES']
     boto3.client = Mock()
     s3_cli = boto3.client('s3')
     s3_cli.copy_object = Mock(side_effect=[None])
     exp_upd_result = []
     exp_request_ids = [REQUEST_ID7]
     _, exp_result = create_select_requests(exp_request_ids)
     database.single_query = Mock(side_effect=[exp_result, exp_upd_result])
     mock_ssm_get_parameter(2)
     result = copy_files_to_archive.handler(self.handler_input_event, None)
     os.environ['COPY_RETRIES'] = '2'
     os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
     boto3.client.assert_called_with('ssm')
     s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                           CopySource={
                                               'Bucket':
                                               self.exp_src_bucket,
                                               'Key': self.exp_file_key1
                                           },
                                           Key=self.exp_file_key1)
     exp_result = [{
         "success": True,
         "source_bucket": self.exp_src_bucket,
         "source_key": self.exp_file_key1,
         "request_id": REQUEST_ID7,
         "target_bucket": self.exp_target_bucket,
         "err_msg": ""
     }]
     self.assertEqual(exp_result, result)
     database.single_query.assert_called()
Example #11
 def test_handler_one_file_retry2_success(self):
     """
     Test copy lambda with one failed copy attempt, second attempt successful.
     """
     del os.environ['COPY_RETRY_SLEEP_SECS']
     del os.environ['COPY_RETRIES']
     time.sleep(1)
     boto3.client = Mock()
     s3_cli = boto3.client('s3')
     s3_cli.copy_object = Mock(side_effect=[
         ClientError({'Error': {
             'Code': 'AccessDenied'
         }}, 'copy_object'), None
     ])
     self.create_test_requests()
     mock_ssm_get_parameter(6)
     print_rows("begin")
     row = requests_db.get_job_by_request_id(REQUEST_ID3)
     self.assertEqual("inprogress", row[0]['job_status'])
     result = copy_files_to_archive.handler(self.handler_input_event, None)
     os.environ['COPY_RETRIES'] = '2'
     os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
     exp_result = [{
         "success": True,
         "source_bucket": self.exp_src_bucket,
         "source_key": self.exp_file_key1,
         "request_id": REQUEST_ID3,
         "target_bucket": self.exp_target_bucket,
         "err_msg": ""
     }]
     self.assertEqual(exp_result, result)
     row = requests_db.get_job_by_request_id(REQUEST_ID3)
     self.assertEqual("complete", row[0]['job_status'])
     print_rows("end")
Example #12
    def test_handler_one_file_fail_3x(self):
        """
        Test copy lambda with one failed copy after 3 retries.
        """
        exp_err_msg = ("An error occurred (AccessDenied) when calling "
                       "the copy_object operation: Unknown")
        exp_error = ("File copy failed. [{'success': False, "
                     f"'source_bucket': '{self.exp_src_bucket}', "
                     f"'source_key': '{self.exp_file_key1}', "
                     f"'request_id': '{REQUEST_ID3}', "
                     f"'target_bucket': '{self.exp_target_bucket}', "
                     f"'err_msg': '{exp_err_msg}'"
                     "}]")
        self.create_test_requests()
        utc_now_exp = requests_db.get_utc_now_iso()
        requests_db.get_utc_now_iso = Mock(return_value=utc_now_exp)
        boto3.client = Mock()
        mock_ssm_get_parameter(7)
        print_rows("begin")

        s3_cli = boto3.client('s3')
        s3_cli.copy_object = Mock(side_effect=[
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object'),
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object'),
            ClientError({'Error': {
                'Code': 'AccessDenied'
            }}, 'copy_object')
        ])
        s3_cli.head_object = Mock()
        row = requests_db.get_job_by_request_id(REQUEST_ID3)
        self.assertEqual("inprogress", row[0]['job_status'])
        self.assertEqual(None, row[0]['err_msg'])

        try:
            copy_files_to_archive.handler(self.handler_input_event, None)
            self.fail("expected CopyRequestError")
        except copy_files_to_archive.CopyRequestError as ex:
            self.assertEqual(exp_error, str(ex))
        print_rows("end")
        row = requests_db.get_job_by_request_id(REQUEST_ID3)
        self.assertEqual("error", row[0]['job_status'])
        self.assertEqual(exp_err_msg, row[0]['err_msg'])
    def test_handler_two_records_success(self):
        """
        Test copy lambda with two files, expecting successful result.
        """
        os.environ['DEVELOP_TESTS'] = "False"
        exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.copy_object = Mock(side_effect=[None, None])
        exp_upd_result = []
        exp_request_ids = [REQUEST_ID4, REQUEST_ID7]
        _, exp_result = create_select_requests(exp_request_ids)
        database.single_query = Mock(side_effect=[
            exp_result, exp_upd_result, exp_result, exp_upd_result
        ])
        mock_ssm_get_parameter(4)
        exp_rec_2 = create_copy_event2()
        self.handler_input_event["Records"].append(exp_rec_2)
        result = copy_files_to_archive.handler(self.handler_input_event, None)

        boto3.client.assert_called_with('ssm')
        exp_result = [{
            "success": True,
            "source_bucket": self.exp_src_bucket,
            "source_key": self.exp_file_key1,
            "request_id": REQUEST_ID7,
            "target_bucket": self.exp_target_bucket,
            "err_msg": ""
        }, {
            "success": True,
            "source_bucket": self.exp_src_bucket,
            "source_key": exp_file_key,
            "request_id": REQUEST_ID7,
            "target_bucket": self.exp_target_bucket,
            "err_msg": ""
        }]
        self.assertEqual(exp_result, result)

        s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                           CopySource={
                                               'Bucket': self.exp_src_bucket,
                                               'Key': self.exp_file_key1
                                           },
                                           Key=self.exp_file_key1)
        s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                           CopySource={
                                               'Bucket': self.exp_src_bucket,
                                               'Key': exp_file_key
                                           },
                                           Key=exp_file_key)
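Note the switch from copy_object.assert_called_with in the single-file tests to assert_any_call here: assert_called_with only inspects the most recent call, while assert_any_call matches any call in the mock's history, which is what a two-record test needs. For instance:

from unittest.mock import Mock

m = Mock()
m(Key='first')
m(Key='second')
m.assert_any_call(Key='first')       # passes: matches any call in the history
m.assert_called_with(Key='second')   # passes: checks only the most recent call
# m.assert_called_with(Key='first')  # would raise AssertionError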
 def test_handler_one_file_retry2_success(self):
     """
     Test copy lambda with one failed copy attempt, second attempt successful.
     """
     del os.environ['COPY_RETRY_SLEEP_SECS']
     del os.environ['COPY_RETRIES']
     time.sleep(1)
     boto3.client = Mock()
     s3_cli = boto3.client('s3')
     s3_cli.copy_object = Mock(side_effect=[
         ClientError({'Error': {
             'Code': 'AccessDenied'
         }}, 'copy_object'), None
     ])
     exp_request_ids = [REQUEST_ID7, REQUEST_ID4]
     _, exp_result = create_select_requests(exp_request_ids)
     exp_upd_result = []
     database.single_query = Mock(side_effect=[
         exp_result, exp_upd_result, exp_result, exp_upd_result
     ])
     mock_ssm_get_parameter(4)
     result = copy_files_to_archive.handler(self.handler_input_event, None)
     os.environ['COPY_RETRIES'] = '2'
     os.environ['COPY_RETRY_SLEEP_SECS'] = '1'
     boto3.client.assert_called_with('ssm')
     exp_result = [{
         "success": True,
         "source_bucket": self.exp_src_bucket,
         "source_key": self.exp_file_key1,
         "request_id": REQUEST_ID7,
         "target_bucket": self.exp_target_bucket,
         "err_msg": ""
     }]
     self.assertEqual(exp_result, result)
     s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                           CopySource={
                                               'Bucket':
                                               self.exp_src_bucket,
                                               'Key': self.exp_file_key1
                                           },
                                           Key=self.exp_file_key1)
     database.single_query.assert_called()
Example #15
    def test_handler_two_records_success(self):
        """
        Test copy lambda with two files, expecting successful result.
        """
        exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
        boto3.client = Mock()
        s3_cli = boto3.client('s3')
        s3_cli.copy_object = Mock(side_effect=[None, None])
        exp_rec_2 = create_copy_event2()
        self.handler_input_event["Records"].append(exp_rec_2)
        self.create_test_requests()
        mock_ssm_get_parameter(10)
        print_rows("begin")
        row = requests_db.get_job_by_request_id(REQUEST_ID3)
        self.assertEqual("inprogress", row[0]['job_status'])
        row = requests_db.get_job_by_request_id(REQUEST_ID4)
        self.assertEqual("inprogress", row[0]['job_status'])
        result = copy_files_to_archive.handler(self.handler_input_event, None)

        boto3.client.assert_called_with('ssm')
        exp_result = [{
            "success": True,
            "source_bucket": self.exp_src_bucket,
            "source_key": self.exp_file_key1,
            "request_id": REQUEST_ID3,
            "target_bucket": self.exp_target_bucket,
            "err_msg": ""
        }, {
            "success": True,
            "source_bucket": self.exp_src_bucket,
            "source_key": exp_file_key,
            "request_id": REQUEST_ID4,
            "target_bucket": PROTECTED_BUCKET,
            "err_msg": ""
        }]
        self.assertEqual(exp_result, result)

        print_rows("end")
        row = requests_db.get_job_by_request_id(REQUEST_ID3)
        self.assertEqual("complete", row[0]['job_status'])
        row = requests_db.get_job_by_request_id(REQUEST_ID4)
        self.assertEqual("complete", row[0]['job_status'])
Example #16
    def test_handler_two_records_success(self,
                                         mock_boto3_client: MagicMock,
                                         mock_database_single_query: MagicMock):
        """
        Test copy lambda with two files, expecting successful result.
        """
        exp_file_key = 'dr-glacier/MOD09GQ.A0219114.N5aUCG.006.0656338553321.hdf'
        mock_s3_cli = mock_boto3_client('s3')
        mock_s3_cli.copy_object = Mock(side_effect=[None, None])
        exp_upd_result = []
        exp_request_ids = [REQUEST_ID4, REQUEST_ID7]
        _, exp_result = create_select_requests(exp_request_ids)
        mock_database_single_query.side_effect = [exp_result, exp_upd_result,
                                                  exp_result, exp_upd_result]
        mock_secretsmanager_get_parameter(4)
        exp_rec_2 = create_copy_event2()
        self.handler_input_event["Records"].append(exp_rec_2)
        result = copy_files_to_archive.handler(self.handler_input_event, None)

        mock_boto3_client.assert_has_calls([call('secretsmanager')])
        exp_result = [{"success": True, "source_bucket": self.exp_src_bucket,
                       "source_key": self.exp_file_key1,
                       "request_id": REQUEST_ID7,
                       "target_bucket": self.exp_target_bucket,
                       "err_msg": ""},
                      {"success": True, "source_bucket": self.exp_src_bucket,
                       "source_key": exp_file_key,
                       "request_id": REQUEST_ID7,
                       "target_bucket": self.exp_target_bucket,
                       "err_msg": ""}]
        self.assertEqual(exp_result, result)

        mock_s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                                CopySource={'Bucket': self.exp_src_bucket,
                                                            'Key': self.exp_file_key1},
                                                Key=self.exp_file_key1)
        mock_s3_cli.copy_object.assert_any_call(Bucket=self.exp_target_bucket,
                                                CopySource={'Bucket': self.exp_src_bucket,
                                                            'Key': exp_file_key},
                                                Key=exp_file_key)
Example #17
    def test_handler_one_file_retry_success(self,
                                            mock_sleep: MagicMock,
                                            mock_boto3_client: MagicMock,
                                            mock_database_single_query):
        """
        Test copy lambda with one failed copy attempt, second attempt successful.
        """
        retry_sleep_seconds = 13
        os.environ['COPY_RETRY_SLEEP_SECS'] = str(retry_sleep_seconds)
        copy_retries = 2
        os.environ['COPY_RETRIES'] = str(copy_retries)
        mock_s3_cli = mock_boto3_client('s3')
        mock_s3_cli.copy_object = Mock(side_effect=[ClientError({'Error': {'Code': 'AccessDenied'}}, 'copy_object'),
                                                    None])
        exp_request_ids = [REQUEST_ID7, REQUEST_ID4]
        _, exp_result = create_select_requests(exp_request_ids)
        exp_upd_result = []
        mock_database_single_query.side_effect = [exp_result,
                                                  exp_upd_result,
                                                  exp_result,
                                                  exp_upd_result]

        mock_secretsmanager_get_parameter(4)
        result = copy_files_to_archive.handler(self.handler_input_event, None)
        # todo: The file under test does not call boto3.client('secretsmanager'). Remove this and other references in test code.
        mock_boto3_client.assert_has_calls([call('secretsmanager')])
        exp_result = [{"success": True, "source_bucket": self.exp_src_bucket,
                       "source_key": self.exp_file_key1,
                       "request_id": REQUEST_ID7,
                       "target_bucket": self.exp_target_bucket,
                       "err_msg": ""}]
        self.assertEqual(exp_result, result)
        mock_s3_cli.copy_object.assert_called_with(Bucket=self.exp_target_bucket,
                                                   CopySource={'Bucket': self.exp_src_bucket,
                                                               'Key': self.exp_file_key1},
                                                   Key=self.exp_file_key1)
        self.assertEqual(1, mock_sleep.call_count, 'Should sleep once between each attempt.')
        mock_sleep.assert_has_calls([call(retry_sleep_seconds)])
        mock_database_single_query.assert_called()
Example #18
 def test_handler_db_update_err(self,
                                mock_boto3_client: MagicMock,
                                mock_database_single_query: MagicMock):
     """
     Test copy lambda with error updating db.
     # todo: Expand on situation and expected behavior. My best attempt is below.
     Error when updating status in db. Should not retry, and should be considered completed.
     """
     s3_cli_mock = mock_boto3_client('s3')
     s3_cli_mock.copy_object = Mock(side_effect=[None])
     exp_request_ids = [REQUEST_ID7]
     _, exp_result = create_select_requests(exp_request_ids)
     exp_err = 'Database Error. Internal database error, please contact LP DAAC User Services'
     mock_database_single_query.side_effect = [exp_result, requests_db.DatabaseError(exp_err),
                                               requests_db.DatabaseError(exp_err)]
     mock_secretsmanager_get_parameter(3)
     result = copy_files_to_archive.handler(self.handler_input_event, None)
     exp_result = [{'success': True, 'source_bucket': 'my-dr-fake-glacier-bucket',
                    'source_key': self.exp_file_key1,
                    'request_id': REQUEST_ID7,
                    'target_bucket': PROTECTED_BUCKET, 'err_msg': ''}]
     self.assertEqual(exp_result, result)