Example #1
0
def data_acquisition_download(config):
    """Download data for a specific query configuration and write it to files.

    Args:
        config: argparse arguments passed by the user. Must provide `verbose`, `hostname`,
            `username`, `password`, `query_from_timestamp`, `query_to_timestamp`,
            `destination_folder`, and `additional_REST_params`.

    Returns:
        None.
    """

    bosdyn.client.util.setup_logging(config.verbose)
    sdk = bosdyn.client.create_standard_sdk('DataAcquisitionDownloadExample')
    robot = sdk.create_robot(config.hostname)
    robot.authenticate(config.username, config.password)

    query_params = None
    try:
        # Parse the user-supplied RFC 3339 strings into protobuf Timestamps.
        from_timestamp = Timestamp()
        from_timestamp.FromJsonString(config.query_from_timestamp)
        to_timestamp = Timestamp()
        to_timestamp.FromJsonString(config.query_to_timestamp)
        query_params = data_acquisition_store_pb2.DataQueryParams(
            time_range=data_acquisition_store_pb2.TimeRangeQuery(from_timestamp=from_timestamp,
                                                                 to_timestamp=to_timestamp))
    except ValueError as val_err:
        print("Value Exception:\n" + str(val_err))
        # Bug fix: previously execution fell through and called download_data_REST with
        # query_params=None, issuing a malformed request. Bail out instead.
        return

    download_data_REST(query_params, config.hostname, robot.user_token,
        config.destination_folder, config.additional_REST_params)
Example #2
0
def acquire_get_status_and_save(acquisition_request, capability_name, action_name, group_name,
                                data_acq_client, data_store_client, verbose):
    """Helper function which issues an acquisition request and checks that its data is saved to the data store, and the
    GetStatus RPC eventually respond with a status "complete".

    Args:
        acquisition_request (AcquisitionRequestList): The acquisition request proto message to send to the AcquireData RPC.
        capability_name (string): The name of the data capability being acquired.
        action_name (string): The action name for the acquisition request's CaptureActionId.
        group_name (string): The group name for the acquisition request's CaptureActionId.
        data_acq_client (DataAcquisitionClient): The client for the data acquisition service running on robot.
        data_store_client (DataAcquisitionStoreClient): The client for the data acquisition store service running on robot.
        verbose (boolean): Print additional logging information on failure.

    Returns:
        A boolean indicating that the acquisition request received a "complete" status for the GetStatus RPC and
        an action id for the acquisition can be found in the data store.
    """
    # Make a request for this data capability and check that it completes successfully.
    acquire_success, acquired_request_id = acquire_data_and_check_errors(
        acquisition_request, data_acq_client, capability_name, action_name, group_name, verbose)
    if not acquire_success:
        # Exit early if the AcquireData RPC did not succeed and did not return a request_id.
        return False
    success = monitor_status_until_complete_or_failed(acquired_request_id, data_acq_client,
                                                      capability_name, action_name, verbose)

    if success:
        # If the GetStatus responds with "Complete", then check the data store for the action id.
        action_id = data_acquisition_pb2.CaptureActionId(action_name=action_name,
                                                         group_name=group_name)
        query_params = data_acquisition_store_pb2.DataQueryParams(
            action_ids=data_acquisition_store_pb2.ActionIdQuery(action_ids=[action_id]))
        try:
            saved_capture_actions = data_store_client.list_capture_actions(query_params)
            if len(saved_capture_actions) == 0:
                # Nothing saved with a matching action and group name!
                _LOGGER.error(
                    "The request %s for data '%s' with action_name '%s' did NOT save to the data "
                    "acquisition store or returned prematurely with a STATUS_COMPLETE in the GetStatus RPC.",
                    acquired_request_id, capability_name, action_name)
                if verbose:
                    # Bug fix: the format string was missing a %s placeholder, so the query
                    # params were silently dropped from the log output.
                    _LOGGER.info("ListCaptureAction RPC's query parameters: %s", query_params)
                return False
        except ResponseError as err:
            # Bug fix: this previously referenced an undefined name `request_id`, which
            # raised a NameError whenever the ListCaptureActions RPC failed.
            _LOGGER.error(
                "Exception raised when checking if request %s for data '%s' with action_name '%s' was "
                "saved in the data acquisition store.", acquired_request_id, capability_name,
                action_name)
            if verbose:
                log_debug_information(err)
            return False
    else:
        # The GetStatus checks failed in some way.
        return False

    # The acquisition request went through, the GetStatus RPC responded with "status complete" and the data was
    # successfully found in the data store service.
    return True
def make_time_query_params(start_time_secs, end_time_secs, robot):
    """Create time-based query params for the download request.

    Args:
        start_time_secs(float): The start time for the download data range, in local time.
        end_time_secs(float): The end time for the download range, in local time.
        robot (Robot): The robot object, used to acquire timesync and convert the
                       times to robot time.
    Returns:
        The query params (data_acquisition_store_pb2.DataQueryParams) for the time-range download.
    """
    # Convert the local-clock times into the robot's clock using the timesync estimate.
    from_timestamp = robot.time_sync.robot_timestamp_from_local_secs(
        start_time_secs)
    to_timestamp = robot.time_sync.robot_timestamp_from_local_secs(
        end_time_secs)
    # Echo the converted range so the user can see what will be downloaded.
    print(from_timestamp.ToJsonString(), to_timestamp.ToJsonString())
    query_params = data_acquisition_store_pb2.DataQueryParams(
        time_range=data_acquisition_store_pb2.TimeRangeQuery(
            from_timestamp=from_timestamp, to_timestamp=to_timestamp))
    return query_params
def make_time_query_params_from_group_name(group_name, data_store_client):
    """Create time-based query params for the download request using the group name.

    Args:
        group_name(string): The group name for the data to be downloaded.
        data_store_client(DataAcquisitionStoreClient): The data store client, used to get the
                                                       action ids for the group name.

    Returns:
        The query params (data_acquisition_store_pb2.DataQueryParams) for the time-range download,
        or None if the capture actions could not be listed or no usable timestamps were found.
    """
    # Look up every capture action saved under this group name.
    action_id = data_acquisition_pb2.CaptureActionId(group_name=group_name)
    query_params = data_acquisition_store_pb2.DataQueryParams(
        action_ids=data_acquisition_store_pb2.ActionIdQuery(
            action_ids=[action_id]))
    saved_capture_actions = []
    try:
        saved_capture_actions = data_store_client.list_capture_actions(
            query_params)
    except Exception as err:
        _LOGGER.error(
            "Failed to list the capture action ids for group_name %s: %s",
            group_name, err)
        return None

    # Filter all the CaptureActionIds for the start/end time. These end times are already in
    # the robots clock and do not need to be converted using timesync.
    # Each entry is a (seconds-as-float, Timestamp proto) pair for easy comparison.
    start_time = (None, None)
    end_time = (None, None)
    for action_id in saved_capture_actions:
        timestamp = action_id.timestamp
        time_secs = timestamp.seconds + timestamp.nanos / 1e9
        if time_secs == 0:
            # The plugin captures don't seem to set a timestamp, so ignore them when determining
            # the start/end times for what to download.
            continue
        if start_time[0] is None or time_secs < start_time[0]:
            start_time = (time_secs, timestamp)
        if end_time[0] is None or time_secs > end_time[0]:
            end_time = (time_secs, timestamp)

    # Bug fix: the previous check `if not (start_time and end_time)` could never fire
    # because both names are always truthy non-empty tuples; test the stored seconds
    # value instead so we don't fall through to comparing None below.
    if start_time[0] is None or end_time[0] is None:
        _LOGGER.error(
            "Could not find a start/end time from the list of capture action ids: %s",
            saved_capture_actions)
        return None

    # Ensure the timestamps are ordered correctly.
    assert start_time[0] <= end_time[0]

    # Adjust the start/end time by a few seconds each to give buffer room.
    start_time[1].seconds -= 3
    end_time[1].seconds += 3

    _LOGGER.info(
        "Downloading data with a start time  of %s seconds and end time of %s seconds.",
        start_time[0], end_time[0])

    # Make the download data request with a time query parameter.
    query_params = data_acquisition_store_pb2.DataQueryParams(
        time_range=data_acquisition_store_pb2.TimeRangeQuery(
            from_timestamp=start_time[1], to_timestamp=end_time[1]))
    return query_params
Example #5
0
def process_request(data_acq_client,
                    hostname,
                    robot,
                    acquisition_requests,
                    group_name,
                    action_name,
                    metadata=None):
    """Send acquisition request, retrieve the acquired data and write it to files.

    Args:
        data_acq_client: DataAcquisition client for sending the acquisition requests.
        hostname(string): Hostname of the robot.
        robot(bosdyn.client.robot): Robot instance.
        acquisition_requests: Acquisition requests to include in request message.
        group_name: Group name for the acquisitions.
        action_name: Action name for the acquisitions.
        metadata: Metadata to include in the request message.

    Returns:
        None. Exits the process with status 1 on an unrecoverable request timeout.
    """
    print("\n-----------------------------------")
    # Record a generous lower bound (5 minutes ago, in robot time) for the later
    # time-range download query.
    from_timestamp = robot.time_sync.robot_timestamp_from_local_secs(
        time.time() - 300)

    # Make the acquire data request. This will return our current request id.
    request_id, action_id = issue_acquire_data_request(data_acq_client,
                                                       acquisition_requests,
                                                       group_name, action_name,
                                                       metadata)

    if not request_id:
        # The AcquireData request failed for some reason. No need to attempt to
        # monitor the status.
        return

    # Monitor the status of the data acquisition. Loops until the request completes,
    # times out, or the GetStatus RPC itself errors; other statuses keep polling.
    while True:
        get_status_response = None
        try:
            get_status_response = data_acq_client.get_status(request_id)
        except ResponseError as err:
            print("Exception: " + str(err))
            break
        print("Request " + str(request_id) + " status: " +
              data_acquisition_pb2.GetStatusResponse.Status.Name(
                  get_status_response.status))
        if get_status_response.status == data_acquisition_pb2.GetStatusResponse.STATUS_COMPLETE:
            # Acquisition finished: download everything captured in the surrounding
            # 10-minute window via the REST endpoint.
            to_timestamp = robot.time_sync.robot_timestamp_from_local_secs(
                time.time() + 300)
            print(from_timestamp.ToJsonString(), to_timestamp.ToJsonString())
            query_params = data_acquisition_store_pb2.DataQueryParams(
                time_range=data_acquisition_store_pb2.TimeRangeQuery(
                    from_timestamp=from_timestamp, to_timestamp=to_timestamp))
            download_data_REST(query_params, hostname, robot.user_token)
            break
        if get_status_response.status == data_acquisition_pb2.GetStatusResponse.STATUS_TIMEDOUT:
            print("Unrecoverable request timeout: {}".format(
                get_status_response))
            exit(1)
        time.sleep(0.2)
Example #6
0
def test_downloading_all_data_via_REST(data_store_client, group_name, robot_hostname, robot, destination_folder):
    """Check that all of the data can successfully be downloaded via the REST endpoint.

    Args:
        data_store_client (DataAcquisitionStoreClient): The client for the data acquisition store service running on robot.
        group_name (string): The group name for all the acquisitions during the test, such that we can determine
                             which data to download.
        robot_hostname (string): The hostname for the robot used for testing.
        robot (Robot): The SDK robot object created for the robot used for testing.
        destination_folder (string): The filepath for where the downloaded data will be saved.

    Returns:
        Boolean indicating if the REST download succeeded or not.
    """
    # Need a start and end timestamp for the capture actions with a matching group_name.
    action_id = data_acquisition_pb2.CaptureActionId(group_name=group_name)
    query_params = data_acquisition_store_pb2.DataQueryParams(
        action_ids=data_acquisition_store_pb2.ActionIdQuery(action_ids=[action_id]))
    saved_capture_actions = []
    try:
        saved_capture_actions = data_store_client.list_capture_actions(query_params)
    except Exception as err:
        _LOGGER.error("Failed to list the capture action ids for group_name %s: %s", group_name, err)
        _LOGGER.info("ListCaptureAction RPC's query parameters: %s", query_params)
        return False

    # Filter all the CaptureActionIds for the start/end time.
    # Each entry is a (seconds-as-float, Timestamp proto) pair for easy comparison.
    start_time = (None, None)
    end_time = (None, None)
    for action_id in saved_capture_actions:
        _LOGGER.info("Saved Capture Action ID: %s", action_id)
        timestamp = action_id.timestamp
        time_secs = timestamp.seconds + timestamp.nanos / 1e9
        if time_secs == 0:
            # The plugin captures don't seem to set a timestamp, so ignore them when determining
            # the start/end times for what to download.
            continue
        if start_time[0] is None or time_secs < start_time[0]:
            start_time = (time_secs, timestamp)
        if end_time[0] is None or time_secs > end_time[0]:
            end_time = (time_secs, timestamp)

    # Bug fix: the previous check `if not (start_time and end_time)` could never fire
    # because both names are always truthy non-empty tuples; test the stored seconds
    # value instead so we don't fall through to comparing None below.
    if start_time[0] is None or end_time[0] is None:
        _LOGGER.error("Could not find a start/end time from the list of capture action ids: %s", saved_capture_actions)
        return False

    # Ensure the timestamps are ordered correctly.
    assert start_time[0] <= end_time[0]

    # Adjust the start/end time by a few seconds each to give buffer room.
    start_time[1].seconds -= 3
    end_time[1].seconds += 3

    _LOGGER.info("Downloading data with a start time  of %s seconds and end time of %s seconds.", start_time[0], end_time[0])

    # Make the download data request with a time query parameter.
    query_params = data_acquisition_store_pb2.DataQueryParams(
        time_range=data_acquisition_store_pb2.TimeRangeQuery(from_timestamp=start_time[1],
                                                                to_timestamp=end_time[1]))
    success = download_data_REST(query_params, robot_hostname, robot.user_token, destination_folder)
    return success