    def test_create_delete_batch(self, is_mca, num_rows):
        constants.IS_MCA = is_mca
        method = constants.Method.DELETE
        batch_id = test_utils.BATCH_NUMBER
        item_rows, expected_batch, _, _ = test_utils.generate_test_data(
            method, num_rows)

        actual_batch, _, _ = batch_creator.create_batch(
            batch_id, item_rows, method)

        self.assertEqual(expected_batch, actual_batch)

    def test_create_batch_returns_batch_to_item_id_dict(self):
        constants.IS_MCA = True
        method = constants.Method.INSERT
        batch_id = test_utils.BATCH_NUMBER
        item_rows, _, _, _ = test_utils.generate_test_data(
            method, test_utils.MULTIPLE_ITEM_COUNT)

        _, _, batch_to_item_id_dict = batch_creator.create_batch(
            batch_id, item_rows, method)

        self.assertEqual(test_utils.MULTIPLE_ITEM_COUNT,
                         len(batch_to_item_id_dict))
        self.assertEqual('test id', batch_to_item_id_dict.get(0))
        self.assertEqual('test id', batch_to_item_id_dict.get(1))

    def test_create_batch_returns_skipped_items_when_merchant_id_missing(self):
        constants.IS_MCA = True
        method = constants.Method.INSERT
        batch_number = test_utils.BATCH_NUMBER
        remove_merchant_ids = True
        item_rows, _, _, _ = test_utils.generate_test_data(
            method, test_utils.MULTIPLE_ITEM_COUNT, remove_merchant_ids)

        _, skipped_item_ids, _ = batch_creator.create_batch(
            batch_number, item_rows, method)

        self.assertEqual(test_utils.MULTIPLE_ITEM_COUNT, len(skipped_item_ids))
        self.assertEqual('test id', skipped_item_ids[0])
        self.assertEqual('test id', skipped_item_ids[1])
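

# A minimal sketch (an assumption, not part of the original tests): the first
# test above takes (is_mca, num_rows) arguments, which is the shape of a test
# driven by absl's parameterized decorator. The class name and the concrete
# parameter tuples below are illustrative only.
from absl.testing import parameterized


class BatchCreatorParameterizedSketch(parameterized.TestCase):

    @parameterized.parameters((True, 1), (True, 2), (False, 1), (False, 2))
    def test_accepts_is_mca_and_num_rows(self, is_mca, num_rows):
        # Each tuple above is unpacked into (is_mca, num_rows) for one run.
        self.assertIsInstance(is_mca, bool)
        self.assertGreaterEqual(num_rows, 1)
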
Example #4
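
# A hedged sketch (not part of the original snippet) of the imports the
# handler below appears to rely on. The standard-library, typing, and flask
# imports follow directly from the calls in the function body. Mapping
# errors.HttpError to googleapiclient.errors is an assumption based on the
# .resp.status / .resp.reason attributes used below. batch_creator, constants,
# content_api_client, process_result, result_recorder and upload_task are
# project-local modules referenced by the snippet, and OPERATION_TO_METHOD is
# a module-level mapping defined elsewhere in the same module.
import http
import json
import logging
import socket
from typing import Tuple

import flask
from googleapiclient import errors

import batch_creator
import constants
import content_api_client
import process_result
import result_recorder
import upload_task

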
def _run_process(
        operation: constants.Operation) -> Tuple[str, http.HTTPStatus]:
    """Handles tasks pushed from Task Queue.

    When the initiator enqueues tasks to Task Queue, this method is called.
    It extracts the necessary information from the Task Queue message and
    then performs the following steps:
    - Loads the items to process from BigQuery.
    - Converts the items into a batch that can be sent to Content API for
      Shopping.
    - Sends the batch to Content API for Shopping (Merchant Center).
    - Records the results of the Content API for Shopping call.

    Args:
        operation: Type of operation to perform on the items.

    Returns:
        The result of the HTTP request as a message and status code.
    """
    request_body = json.loads(flask.request.data.decode('utf-8'))
    task = upload_task.UploadTask.from_json(request_body)

    if task.batch_size == 0:
        return 'OK', http.HTTPStatus.OK

    batch_number = int(task.start_index / task.batch_size) + 1
    logging.info(
        '%s started. Batch #%d info: start_index: %d, batch_size: %d, '
        'initiation timestamp: %s', operation.value, batch_number,
        task.start_index, task.batch_size, task.timestamp)

    try:
        items = _load_items_from_bigquery(operation, task)
    except errors.HttpError:
        return 'Error loading items from BigQuery', http.HTTPStatus.INTERNAL_SERVER_ERROR

    result = process_result.ProcessResult([], [], [])
    try:
        if not items:
            logging.error(
                'Batch #%d, operation %s: 0 items loaded from BigQuery, so '
                'the batch was not sent to Content API. start_index: %d, '
                'batch_size: %d, initiation timestamp: %s', batch_number,
                operation.value, task.start_index, task.batch_size,
                task.timestamp)
            return 'No items to process', http.HTTPStatus.OK

        method = OPERATION_TO_METHOD.get(operation)

        # Creates batch from items loaded from BigQuery
        original_batch, skipped_item_ids, batch_id_to_item_id = batch_creator.create_batch(
            batch_number, items, method)

        # Optimizes batch via Shoptimizer for upsert/prevent_expiring operations
        if (operation != constants.Operation.DELETE
                and constants.SHOPTIMIZER_API_INTEGRATION_ON):
            batch_to_send_to_content_api = _create_optimized_batch(
                original_batch, batch_number, operation)
        else:
            batch_to_send_to_content_api = original_batch

        # Sends batch of items to Content API for Shopping
        api_client = content_api_client.ContentApiClient()
        successful_item_ids, item_failures = api_client.process_items(
            batch_to_send_to_content_api, batch_number, batch_id_to_item_id,
            method)

        result = process_result.ProcessResult(
            successfully_processed_item_ids=successful_item_ids,
            content_api_failures=item_failures,
            skipped_item_ids=skipped_item_ids)
    except errors.HttpError as http_error:
        error_status_code = http_error.resp.status
        error_reason = http_error.resp.reason
        result = _handle_content_api_error(error_status_code, error_reason,
                                           batch_number, http_error, items,
                                           operation, task)
        return error_reason, error_status_code
    except socket.timeout as timeout_error:
        error_status_code = http.HTTPStatus.REQUEST_TIMEOUT
        error_reason = 'Socket timeout'
        result = _handle_content_api_error(error_status_code, error_reason,
                                           batch_number, timeout_error, items,
                                           operation, task)
        return error_reason, error_status_code
    else:
        logging.info(
            'Batch #%d with operation %s and initiation timestamp %s '
            'successfully processed %s items, failed to process %s items '
            'and skipped %s items.', batch_number, operation.value,
            task.timestamp, result.get_success_count(),
            result.get_failure_count(), result.get_skipped_count())
    finally:
        recorder = result_recorder.ResultRecorder.from_service_account_json(
            constants.GCP_SERVICE_ACCOUNT_PATH,
            constants.DATASET_ID_FOR_MONITORING,
            constants.TABLE_ID_FOR_RESULT_COUNTS_MONITORING,
            constants.TABLE_ID_FOR_ITEM_RESULTS_MONITORING)
        recorder.insert_result(operation.value, result, task.timestamp,
                               batch_number)
    return 'OK', http.HTTPStatus.OK
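

# A minimal usage sketch (an assumption, not part of the original snippet):
# _run_process reads flask.request directly, so it is shaped like the shared
# body of several Flask views. A typical wiring registers one thin POST route
# per constants.Operation value. The route paths and the UPSERT member are
# assumptions inferred from the comments above; only Operation.DELETE appears
# in the code itself.
app = flask.Flask(__name__)


@app.route('/batch/upsert', methods=['POST'])
def _start_upsert() -> Tuple[str, http.HTTPStatus]:
    # Delegates to the shared handler with the operation baked in.
    return _run_process(constants.Operation.UPSERT)


@app.route('/batch/delete', methods=['POST'])
def _start_delete() -> Tuple[str, http.HTTPStatus]:
    return _run_process(constants.Operation.DELETE)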