def add_request(event):
    """
    Adds a request to the database
    """
    try:
        granule_id = event['granule_id']
    except KeyError:
        raise BadRequestError("Missing 'granule_id' in input data")
    try:
        request_group_id = event['request_group_id']
    except KeyError:
        raise BadRequestError("Missing 'request_group_id' in input data")
    try:
        status = event['status']
    except KeyError:
        status = "error"

    data = {}
    data["request_id"] = requests_db.request_id_generator()
    data["request_group_id"] = request_group_id
    data["granule_id"] = granule_id
    data["object_key"] = "my_test_filename"
    data["job_type"] = "restore"
    data["restore_bucket_dest"] = "my_test_bucket"
    data["job_status"] = status
    if status == "error":
        data["err_msg"] = "error message goes here"
    request_id = requests_db.submit_request(data)
    result = requests_db.get_job_by_request_id(request_id)
    return result
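
# A minimal usage sketch for add_request (assumptions: requests_db and
# BadRequestError come from this module's imports; the event values are
# illustrative placeholders, and example_add_request_call is a hypothetical
# helper, not part of the original code).
def example_add_request_call():
    # 'status' is optional; add_request defaults it to "error" when absent.
    event = {
        "granule_id": "granule_1",
        "request_group_id": requests_db.request_id_generator(),
        "status": "inprogress",
    }
    return add_request(event)
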
def test_request_id_generator(self):
    """
    Tests the request_id_generator function
    """
    requests_db.request_id_generator = Mock(
        return_value=REQUEST_GROUP_ID_EXP_1)
    self.assertEqual(REQUEST_GROUP_ID_EXP_1,
                     requests_db.request_id_generator())
def test_no_db_connect(self):
    """
    Tests a database connection failure
    """
    # Make request_id_generator deterministic by mocking uuid.uuid4.
    uuid.uuid4 = Mock(side_effect=[REQUEST_ID1])
    # Point the code at a database that does not exist so the connection fails.
    os.environ["DATABASE_NAME"] = "noexist"
    data = {}
    data["request_id"] = REQUEST_ID1
    data["request_group_id"] = requests_db.request_id_generator()
    data["granule_id"] = "granule_1"
    data["object_key"] = "thisisanobjectkey"
    data["job_type"] = "restore"
    data["restore_bucket_dest"] = "my_s3_bucket"
    data["job_status"] = "inprogress"
    mock_ssm_get_parameter(1)
    exp_err = 'Database Error. FATAL:  database "noexist" does not exist\n'
    database.single_query = Mock(side_effect=[requests_db.DbError(
        exp_err)])
    try:
        requests_db.submit_request(data)
        self.fail("expected DatabaseError")
    except requests_db.DatabaseError as err:
        # The DbError from the database layer should surface as a DatabaseError
        # with the same message, and the query should be attempted exactly once.
        self.assertEqual(exp_err, str(err))
        database.single_query.assert_called_once()
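
# A minimal sketch of the same mocking done with unittest.mock.patch
# (assumptions: the test methods above belong to a unittest.TestCase subclass,
# requests_db is importable by that name, and REQUEST_GROUP_ID_EXP_1 is a
# module-level fixture). patch restores the real attribute after the test,
# unlike the direct Mock assignments above, which persist across tests.
import unittest
from unittest.mock import patch

class TestRequestIdGeneratorSketch(unittest.TestCase):
    @patch("requests_db.request_id_generator")
    def test_request_id_generator_patched(self, mock_generator):
        mock_generator.return_value = REQUEST_GROUP_ID_EXP_1
        self.assertEqual(REQUEST_GROUP_ID_EXP_1,
                         requests_db.request_id_generator())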
Example #4
def process_granules(s3, gran, glacier_bucket, exp_days):        # pylint: disable=invalid-name
    """Call restore_object for the files in the granule's recover_files list
        Args:
            s3 (object): An instance of boto3 s3 client
            gran (dict): A granule with a 'granuleId' and a 'recover_files' list
            glacier_bucket (string): The S3 glacier bucket name
            exp_days (int): The number of days the restored file will be accessible
        Returns:
            gran: the updated granule, indicating whether the restore request for each
                  file was successful, including an error message for any that were not.
        Raises:
            RestoreRequestError: If one or more files could not be requested.
    """
    try:
        retries = int(os.environ['RESTORE_REQUEST_RETRIES'])
    except KeyError:
        retries = 3

    try:
        retry_sleep_secs = float(os.environ['RESTORE_RETRY_SLEEP_SECS'])
    except KeyError:
        retry_sleep_secs = 0

    try:
        retrieval_type = os.environ['RESTORE_RETRIEVAL_TYPE']
        if retrieval_type not in ('Standard', 'Bulk', 'Expedited'):
            msg = (f"Invalid RESTORE_RETRIEVAL_TYPE: '{retrieval_type}'"
                   " defaulting to 'Standard'")
            LOGGER.info(msg)
            retrieval_type = 'Standard'
    except KeyError:
        retrieval_type = 'Standard'
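    # Hypothetical example configuration for the three environment variables read
    # above (illustrative values only, not taken from any particular deployment):
    #   RESTORE_REQUEST_RETRIES=3
    #   RESTORE_RETRY_SLEEP_SECS=2
    #   RESTORE_RETRIEVAL_TYPE=Standard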

    attempt = 1
    request_group_id = requests_db.request_id_generator()
    granule_id = gran['granuleId']
    while attempt <= retries:
        for afile in gran['recover_files']:
            if not afile['success']:
                try:
                    obj = {}
                    obj["request_group_id"] = request_group_id
                    obj["granule_id"] = granule_id
                    obj["glacier_bucket"] = glacier_bucket
                    obj["key"] = afile['key']
                    obj["dest_bucket"] = afile['dest_bucket']
                    obj["days"] = exp_days
                    request_id = restore_object(s3, obj, attempt, retries, retrieval_type)
                    afile['success'] = True
                    afile['err_msg'] = ''
                    LOGGER.info("restore {} from {} attempt {} successful. Job: {}",
                                afile["key"], glacier_bucket, attempt, request_id)
                except ClientError as err:
                    afile['err_msg'] = str(err)

        if all(afile['success'] for afile in gran['recover_files']):
            # Every file has been requested successfully; skip the remaining attempts.
            break
        attempt = attempt + 1
        if attempt <= retries:
            time.sleep(retry_sleep_secs)

    for afile in gran['recover_files']:
        # if any file failed, the whole granule will fail
        if not afile['success']:
            LOGGER.error("One or more files failed to be requested from {}. {}",
                         glacier_bucket, gran)
            raise RestoreRequestError(f'One or more files failed to be requested. {gran}')
    return gran
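
# A minimal usage sketch for process_granules (assumptions: the granule shape is
# inferred from the field accesses above; bucket names, the object key, and the
# expiration days are illustrative placeholders; example_process_granules_call
# is a hypothetical helper, not part of the original code).
import boto3

def example_process_granules_call():
    s3 = boto3.client("s3")
    gran = {
        "granuleId": "granule_1",
        "recover_files": [
            {"key": "some/path/file1.h5",
             "dest_bucket": "my-restore-bucket",
             "success": False,
             "err_msg": ""},
        ],
    }
    # Raises RestoreRequestError if any file still has success == False
    # after the configured number of attempts.
    return process_granules(s3, gran, "my-glacier-bucket", exp_days=5)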