Example #1
import base64
import json
import sys

import telemetry_pb2
from google.protobuf.json_format import MessageToJson


def main():
    for line in sys.stdin:
        deviceJson = json.loads(line.strip())
        #print(deviceJson)

        if "telemetry_data" not in deviceJson:
            print("No telemetry_data")
            print(deviceJson)
            continue

        telemetry_data = deviceJson["telemetry_data"]

        try:
            s = base64.b64decode(telemetry_data)
            #print(s)
        except Exception as e:
            print("Failed b64 decoding:", e)
            continue

        try:
            d = telemetry_pb2.Telemetry()
#	    d = telemetry_top_pb2.TelemetryStream() <----- JunOS Native Telemetry
            d.ParseFromString(s)
        except Exception as e:
            print("Failed GPB parsing:", len(telemetry_data), e)
            continue

        try:
            jsonStrTelemetry = MessageToJson(d)
            print(jsonStrTelemetry)
        except Exception as e:
            print("Failed Conversion to JSON:", len(telemetry_data), e)
Example #2
 def listenToClient(self, client, address):
     while True:
         try:
             data = client.recv(12)
             if data:
                 print('getting some data')
                 # Set the response to echo back the received data
                 header = data
                 msg_type, encode_type, msg_version, flags, msg_length = struct.unpack(
                     '>hhhhi', header)
                 msg_data = b''
                 print(msg_type, encode_type, msg_version, flags, msg_length)
                 if encode_type == 1:
                     while len(msg_data) < msg_length:
                         msg_data += client.recv(msg_length - len(msg_data))
                     gpb_parser = telemetry_pb2.Telemetry()
                     gpb_data = gpb_parser.ParseFromString(msg_data)
                     if gpb_parser.encoding_path == 'Cisco-IOS-XR-infra-statsd-oper:infra-statistics/interfaces/interface/latest/generic-counters':
                         row_key = ifstatsbag_generic_pb2.ifstatsbag_generic_KEYS(
                         )
                         row_data = ifstatsbag_generic_pb2.ifstatsbag_generic(
                         )
                         for new_row in gpb_parser.data_gpb.row:
                             row_data.ParseFromString(new_row.content)
                             row_key.ParseFromString(new_row.keys)
                             if kafka_msg:
                                 kafka_msg_parse = create_kafka_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data)
                                 producer.send_messages(
                                     b'ios_xr_interface_counters',
                                     kafka_msg_parse)
                                 logging.info(
                                     'Write {} to kafka topic'.format(
                                         gpb_parser.encoding_path))
                             elif influx_msg:
                                 influx_msg_parse = create_influx_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data)
                                 client_influxdb.write_points(
                                     [influx_msg_parse], time_precision='s')
                                 logging.info('Write {} to InfluxDB'.format(
                                     gpb_parser.encoding_path))
                             else:
                                 print('Row_key:{}\nRow_data:{}'.format(
                                     row_key, row_data))
             else:
                 raise error('Client disconnected')
         except Exception as e:
             print('Error: {}'.format(e))
             client.close()
             return False
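The 12-byte header unpacked with struct.unpack('>hhhhi', header) above consists of four big-endian 16-bit fields (msg_type, encode_type, msg_version, flags) followed by a 32-bit payload length. A sender-side framing sketch under that layout; the concrete values for msg_type, msg_version and flags are placeholders, since only encode_type == 1 (GPB) and the length are interpreted by the receiver above:

import struct

import telemetry_pb2


def frame_message(telemetry_msg, msg_type=1, encode_type=1, msg_version=1, flags=0):
    # Serialize the Telemetry message and prepend the 12-byte header that
    # listenToClient() expects.
    payload = telemetry_msg.SerializeToString()
    header = struct.pack('>hhhhi', msg_type, encode_type, msg_version, flags,
                         len(payload))
    return header + payload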
Example #3
    def MdtDialout(self, message, context):

        grpcPeerStr = context.peer()

        grpcPeer = {}
        (grpcPeerProto, grpcPeer['telemetry_node'],
         grpcPeer['telemetry_node_port']) = grpcPeerStr.split(":")
        jsonTelemetryNode = json.dumps(grpcPeer)
        print(jsonTelemetryNode)

        for new_msg in message:
            telemetry_msg = telemetry_pb2.Telemetry()
            telemetry_msg.ParseFromString(new_msg.data)
            #print(telemetry_msg)
            #print(type(telemetry_msg))
            #print(telemetry_msg.data_gpb.row[0].content)
            jsonStrTelemetry = MessageToJson(telemetry_msg)
            dictTelemetry = json.loads(jsonStrTelemetry)

            #print telemetry json message

            print(jsonStrTelemetry)
            print("Message Length {}".format(len(jsonStrTelemetry)))
            print("=" * 40)

            print(dictTelemetry["encodingPath"])
            if "dataGpb" in dictTelemetry:
                print("Message in GPB compact mode")

            if "dataGpbkv" in dictTelemetry:
                print("message in GPB-kv mode")

            # Select which GPB-compact pb2 module to use, based on the encoding
            # path and whether the message carries dataGpb or dataGpbkv.
            if (dictTelemetry["encodingPath"] == "Cisco-IOS-XR-shellutil-oper:system-time/uptime"
                    and "dataGpb" in dictTelemetry):

                # TODO: iterate over data_gpb.row instead of only taking the first row
                gpb_compact_content = telemetry_msg.data_gpb.row[0].content
                Telemetry_row_content = uptime_pb2.system_uptime()
                Telemetry_row_content.ParseFromString(gpb_compact_content)

                print(Telemetry_row_content)
                print("=" * 40)

            #json_dict = proto_to_dict(Telemetry_row_content)
            #print(json_dict)

        # Returning nothing should also work if we only receive the telemetry stream.
        return cisco_grpc_dialout_pb2.MdtDialoutArgs()
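For context, a dial-out handler such as MdtDialout above is normally registered with a gRPC server. A minimal sketch, assuming the service stubs were generated into cisco_grpc_dialout_pb2_grpc with a gRPCMdtDialout service (both the module and class names depend on how the .proto was compiled):

from concurrent import futures

import grpc
import cisco_grpc_dialout_pb2_grpc


class TelemetryCollector(cisco_grpc_dialout_pb2_grpc.gRPCMdtDialoutServicer):
    # The MdtDialout(self, message, context) handler shown above goes here.
    pass


def serve(port=57500):
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    cisco_grpc_dialout_pb2_grpc.add_gRPCMdtDialoutServicer_to_server(
        TelemetryCollector(), server)
    server.add_insecure_port('[::]:{}'.format(port))
    server.start()
    server.wait_for_termination()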
Example #4
    def MdtDialout(self, message, context):

        grpcPeerStr = context.peer()

        grpcPeer = {}
        (grpcPeerProto, grpcPeer['telemetry_node'],
         grpcPeer['telemetry_node_port']) = grpcPeerStr.split(":")
        jsonTelemetryNode = json.dumps(grpcPeer)
        print(jsonTelemetryNode)

        for new_msg in message:
            telemetry_msg = telemetry_pb2.Telemetry()
            telemetry_msg.ParseFromString(new_msg.data)
            jsonStrTelemetry = MessageToJson(telemetry_msg)
            #dictTelemetry = json.loads(jsonStrTelemetry)

            #print telemetry json message
            print(jsonStrTelemetry)
        return cisco_grpc_dialout_pb2.MdtDialoutArgs()
Example #5
 def getsubscription(self, sub_id, unmarshal=True):
     """Telemetry subscription function
         :param sub_id: Subscription ID
         :type: string
         :return: Returns discrete values emitted by telemetry stream
         :rtype: JSON formatted string
     """
     sub_args = mdt_grpc_dialin_pb2.CreateSubsArgs(ReqId=1,
                                                   encode=3,
                                                   subidstr=sub_id)
     stream = self._stub.CreateSubs(sub_args,
                                    timeout=self._timeout,
                                    metadata=self._metadata)
     for segment in stream:
         if not unmarshal:
             yield segment
         else:
             # Go straight for telemetry data
             telemetry_pb = telemetry_pb2.Telemetry()
             telemetry_pb.ParseFromString(segment.data)
             # Return in JSON format instead of protobuf.
             yield MessageToJson(telemetry_pb)
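A brief usage sketch for getsubscription(); the client object and subscription name below are hypothetical:

# 'client' stands for whatever object exposes getsubscription(), and 'Sub1'
# for a subscription ID configured on the router; both are assumptions.
for json_msg in client.getsubscription('Sub1'):
    print(json_msg)  # one JSON-formatted Telemetry message per stream segment

# With unmarshal=False the raw gRPC segments are yielded instead, and the
# caller parses them with telemetry_pb2.Telemetry().ParseFromString(segment.data).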
Example #6
    def MdtDialout(self, message, context):

        grpcPeerStr = context.peer()


        grpcPeer = {}
        (grpcPeerProto, grpcPeer['telemetry_node'], grpcPeer['telemetry_node_port']) = grpcPeerStr.split(":")
        jsonTelemetryNode = json.dumps(grpcPeer)
        print(jsonTelemetryNode)

        for new_msg in message:
            telemetry_msg = telemetry_pb2.Telemetry()
            telemetry_msg.ParseFromString(new_msg.data)
            #print("RAW message")
            print(telemetry_msg)
            print("="*100)
            #print(type(telemetry_msg))
            #print(telemetry_msg.data_gpb.row[0].content)
            jsonStrTelemetry = MessageToJson(telemetry_msg)
            dictTelemetry = json.loads(jsonStrTelemetry)

            #print telemetry json message

            print(jsonStrTelemetry)
            print("Message Length {}".format(len(jsonStrTelemetry)))
            print("="*40)

            print(dictTelemetry["encodingPath"])
            if "dataGpb" in dictTelemetry:
                print("Message in GPB compact mode")

            if "dataGpbkv" in dictTelemetry:
                print("message in GPB-kv mode")

            # Select which GPB-compact pb2 module to use, based on the encoding
            # path and whether the message carries dataGpb or dataGpbkv.
            if dictTelemetry["encodingPath"] == "Cisco-IOS-XR-shellutil-oper:system-time/uptime" and "dataGpb" in dictTelemetry:

                # TODO: iterate over data_gpb.row instead of only taking the first row
                gpb_compact_content = telemetry_msg.data_gpb.row[0].content
                Telemetry_row_content = uptime_pb2.system_uptime()
                Telemetry_row_content.ParseFromString(gpb_compact_content)

                print(Telemetry_row_content)
                print("="*40)

            #if dictTelemetry["encodingPath"] == "analytics:test_query" and "dataGpb" in dictTelemetry:
            if dictTelemetry["encodingPath"] == "analytics:dcnminitITL" and "dataGpb" in dictTelemetry:
                '''
                MDS 9700-series 32G line card SAN Analytics feature:
                the encoding path should be the predefined push-analytics query name.
                '''

                # TODO: iterate over data_gpb.row instead of only taking the first row
                gpb_compact_content = telemetry_msg.data_gpb.row[0].content
                #gpb_compact_content = telemetry_msg.data_gpb.row[0]
                Telemetry_row_content = fabric_telemetry_pb2.FlowRecordsTable()


                #Telemetry_row_content = fabric_telemetry_pb2.FlowRecordRow()

                Telemetry_row_content.ParseFromString(gpb_compact_content)

                fabric_jsonStrTelemetry = MessageToJson(Telemetry_row_content)

                print(fabric_jsonStrTelemetry)
                print("=" * 40)

            if dictTelemetry["encodingPath"] == "show_stats_fc2/2" and "dataGpb" in dictTelemetry:
                '''
                MDS 9700-series 32G line card SAN Analytics feature:
                the encoding path should be the predefined push-analytics query name.
                '''

                # TODO: iterate over data_gpb.row instead of only taking the first row
                gpb_compact_content = telemetry_msg.data_gpb.row[0].content
                # gpb_compact_content = telemetry_msg.data_gpb.row[0]
                Telemetry_row_content = fabric_telemetry_pb2.FlowRecordsTable()

                # Telemetry_row_content = fabric_telemetry_pb2.FlowRecordRow()

                Telemetry_row_content.ParseFromString(gpb_compact_content)

                fabric_jsonStrTelemetry = MessageToJson(Telemetry_row_content)

                print(fabric_jsonStrTelemetry)
                print("=" * 40)

            #json_dict = proto_to_dict(Telemetry_row_content)
            #print(json_dict)


        # Returning nothing should also work if we only receive the telemetry stream.
        return cisco_grpc_dialout_pb2.MdtDialoutArgs()
Example #7
import grpc
from grpc.beta import implementations
from google.protobuf.json_format import MessageToJson

import ems_grpc_pb2
import telemetry_pb2

host = '10.75.58.60'
port = 57400
options = 'ems.cisco.com'

ca_cert = 'ems.pem'  # credential file copied (scp) from the device
creds = open(ca_cert).read()

target = '%s:%d' % (host, port)
creds = implementations.ssl_channel_credentials(
    creds.encode('utf-8'))  # the credentials must be passed as bytes
channel = grpc.secure_channel(target, creds, ((
    'grpc.ssl_target_name_override',
    options,
), ))
channel = implementations.Channel(channel)

stub = ems_grpc_pb2.beta_create_gRPCConfigOper_stub(channel)
sub_id = 'test_sub'  # Telemetry MDT subscription
sub_args = ems_grpc_pb2.CreateSubsArgs(ReqId=1, encode=3, subidstr=sub_id)

timeout = float(100000)
metadata = [('username', 'cisco'), ('password', 'cisco')]

stream = stub.CreateSubs(sub_args, timeout=timeout, metadata=metadata)

for segment in stream:
    telemetry_pb = telemetry_pb2.Telemetry()
    t = telemetry_pb.ParseFromString(segment.data)
    # Print Json Message
    print(MessageToJson(telemetry_pb))
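The implementations module used above comes from the long-deprecated gRPC beta API. A sketch of the equivalent setup on the stable grpc API, reusing the same host, port and certificate; the ems_grpc_pb2_grpc module and gRPCConfigOperStub names are assumptions about how ems_grpc.proto was compiled:

creds = grpc.ssl_channel_credentials(open('ems.pem', 'rb').read())
channel = grpc.secure_channel(
    '%s:%d' % (host, port), creds,
    options=(('grpc.ssl_target_name_override', 'ems.cisco.com'),))
# Assumes the gRPC stubs were generated into ems_grpc_pb2_grpc.
stub = ems_grpc_pb2_grpc.gRPCConfigOperStub(channel)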


Example #8
import socket
import time

import telemetry_pb2

# Bind a UDP socket as the telemetry receive server
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('0.0.0.0', 57501))

count = 0

start_time = time.time()


while True:
    count += 1
    buf, addr = sock.recvfrom(65535)
    Telemetry_content = telemetry_pb2.Telemetry()

    print(buf.hex())
    print(buf)
    print("Message Length {}".format(len(buf)))
    #print(len(str(buf)))
    # Handle telemetry UDP GPB-kv messages from NX-OS

    if buf[0:1] == b'\x01':  # check the binary data; there is no official document for this header
        print("Telemetry GPB message from NX OS")
        Telemetry_content.ParseFromString(buf[6:])
        print('Node: ' + Telemetry_content.node_id_str)
        print('IP Address (source port): ' + str(addr))
        print('Encoding Path: ' + Telemetry_content.encoding_path)
        Top_Fields_List = Telemetry_content.data_gpbkv[0].fields
        #print(Top_Fields_List)
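A sketch of walking the GPB-kv field tree collected in Top_Fields_List above. It assumes the TelemetryField oneof is named value_by_type, as in Cisco's published telemetry.proto; adjust the name if your compiled proto differs:

def flatten_fields(fields, prefix=""):
    # Recursively flatten nested TelemetryField messages into a flat
    # {"path/to/leaf": value} dictionary.
    result = {}
    for field in fields:
        name = "{}/{}".format(prefix, field.name) if prefix else field.name
        if field.fields:  # nested container: recurse into its children
            result.update(flatten_fields(field.fields, name))
        else:
            which = field.WhichOneof("value_by_type")
            result[name] = getattr(field, which) if which else None
    return result

# e.g. inside the receive loop above: print(flatten_fields(Top_Fields_List))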
Example #9
    def MdtDialout(self, message, context):

        grpcPeerStr = context.peer()

        grpcPeer = {}
        (grpcPeerProto, grpcPeer['telemetry_node'],
         grpcPeer['telemetry_node_port']) = grpcPeerStr.split(":")
        jsonTelemetryNode = json.dumps(grpcPeer)
        print(jsonTelemetryNode)

        for new_msg in message:
            telemetry_msg = telemetry_pb2.Telemetry()
            telemetry_msg.ParseFromString(new_msg.data)
            #print("RAW message")
            #print(telemetry_msg)
            print("=" * 100)
            #print(type(telemetry_msg))
            #print(telemetry_msg.data_gpb.row[0].content)
            jsonStrTelemetry = MessageToJson(telemetry_msg)
            dictTelemetry = json.loads(jsonStrTelemetry)

            #print telemetry json message

            print(jsonStrTelemetry)
            print("Message Length {}".format(len(jsonStrTelemetry)))
            print("=" * 40)

            print(dictTelemetry["encodingPath"])
            if "dataGpb" in dictTelemetry:
                print("Message in GPB compact mode")

            if "dataGpbkv" in dictTelemetry:
                print("message in GPB-kv mode")

            # Select which GPB-compact pb2 module to use, based on the encoding
            # path and whether the message carries dataGpb or dataGpbkv.
            if (dictTelemetry["encodingPath"] == "Cisco-IOS-XR-shellutil-oper:system-time/uptime"
                    and "dataGpb" in dictTelemetry):

                # TODO: iterate over data_gpb.row instead of only taking the first row
                gpb_compact_content = telemetry_msg.data_gpb.row[0].content
                Telemetry_row_content = uptime_pb2.system_uptime()
                Telemetry_row_content.ParseFromString(gpb_compact_content)

                print(Telemetry_row_content)
                print("=" * 40)

            if (dictTelemetry["encodingPath"] == "analytics:test_query"
                    and "dataGpb" in dictTelemetry):
                '''
                MDS 9700-series 32G line card SAN Analytics feature:
                the encoding path should be the predefined push-analytics query name.

                So far, NX-OS 8.4.1 only supports gRPC with GPB/GPB-kv encoding,
                decoded here with the fabric_telemetry.proto file.

                MDS 9710 sample configuration:
                 
                telemetry
                    sensor-group 1
                    path analytics:test_query
                    path show_stats_fc2/1
                    path show_stats_fc2/2
                    sensor-group 2
                    path analytics:dcnminitITL
                    destination-group 1
                    ip address 10.79.98.77 port 50051 protocol gRPC encoding GPB-compact
                    destination-group 2
                    ip address 10.124.2.116 port 57500 protocol gRPC encoding GPB-compact
                    subscription 1
                    snsr-grp 1 sample-interval 30000
                    dst-grp 1
                    subscription 2
                    snsr-grp 2 sample-interval 30000
                    dst-grp 2
                    
                sw-core1-9710# sh analytics query all
                Total queries:2
                ============================
                Query Name      :test_query
                Query String    :select all from fc-scsi.port
                Query Type      :periodic, interval 30
                
                Query Name      :dcnminitITL
                Query String    :select port, vsan, app_id, initiator_id, target_id, lun, active_io_read_count, active_io_write_count, total_read_io_count, total_write
                _io_count, total_time_metric_based_read_io_count, total_time_metric_based_write_io_count,total_read_io_time, total_write_io_time, total_read_io_initiat
                ion_time, total_write_io_initiation_time,total_read_io_bytes, total_write_io_bytes, total_time_metric_based_read_io_bytes, total_time_metric_based_writ
                e_io_bytes, read_io_rate, write_io_rate, read_io_bandwidth, write_io_bandwidth,read_io_size_min, read_io_size_max, write_io_size_min, write_io_size_max
                ,read_io_completion_time_min, read_io_completion_time_max, write_io_completion_time_min, write_io_completion_time_max,read_io_initiation_time_max, writ
                e_io_initiation_time_max, read_io_aborts, write_io_aborts,read_io_failures, write_io_failures, read_io_timeouts, write_io_timeouts from fc-scsi.scsi_in
                itiator_itl_flow
                Query Type      :periodic, interval 30
                Query Options   :differential
                    
                '''

                # TODO: iterate over data_gpb.row instead of only taking the first row
                gpb_compact_content = telemetry_msg.data_gpb.row[0].content
                #gpb_compact_content = telemetry_msg.data_gpb.row[0]
                Telemetry_row_content = fabric_telemetry_pb2.FlowRecordsTable()

                #Telemetry_row_content = fabric_telemetry_pb2.FlowRecordRow()

                Telemetry_row_content.ParseFromString(gpb_compact_content)

                fabric_jsonStrTelemetry = MessageToJson(Telemetry_row_content)

                print(fabric_jsonStrTelemetry)
                print("=" * 40)

            #json_dict = proto_to_dict(Telemetry_row_content)
            #print(json_dict)

        # Returning nothing should also work if we only receive the telemetry stream.
        return cisco_grpc_dialout_pb2.MdtDialoutArgs()
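The commented-out proto_to_dict() calls in the examples above can be covered by the standard protobuf json_format helper; a minimal sketch, assuming Telemetry_row_content is a parsed protobuf message as in the handler above:

from google.protobuf.json_format import MessageToDict

# Converts any parsed protobuf message into a plain Python dict.
json_dict = MessageToDict(Telemetry_row_content)
print(json_dict)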
Example #10
 def listenToClient(self, client, address):
     while True:
         try:
             data = client.recv(12)
             if data:
                 header = data
                 msg_type, encode_type, msg_version, flags, msg_length = struct.unpack(
                     '>hhhhi', header)
                 msg_data = b''
                 if encode_type == 1:
                     while len(msg_data) < msg_length:
                         msg_data += client.recv(msg_length - len(msg_data))
                     gpb_parser = telemetry_pb2.Telemetry()
                     gpb_data = gpb_parser.ParseFromString(msg_data)
                     if gpb_parser.encoding_path == 'Cisco-IOS-XR-infra-statsd-oper:infra-statistics/interfaces/interface/latest/generic-counters':
                         row_key = ifstatsbag_generic_pb2.ifstatsbag_generic_KEYS(
                         )
                         row_data = ifstatsbag_generic_pb2.ifstatsbag_generic(
                         )
                         for new_row in gpb_parser.data_gpb.row:
                             row_data.ParseFromString(new_row.content)
                             row_key.ParseFromString(new_row.keys)
                             if kafka_msg:
                                 kafka_msg_parse = create_kafka_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data)
                                 producer.send_messages(
                                     b'ios_xr_interface_counters',
                                     kafka_msg_parse)
                                 logging.info('Write %s to kafka topic' %
                                              gpb_parser.encoding_path)
                             elif influx_msg:
                                 influx_msg_parse = create_influx_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data, new_row.timestamp)
                                 client_influxdb.write_points(
                                     [influx_msg_parse],
                                     time_precision='ms')
                                 logging.info(
                                     'Write interface counters from %s to InfluxDB'
                                     % address[0])
                             else:
                                 print('Row_key:{}\nRow_data:{}'.format(
                                     row_key, row_data))
                     elif gpb_parser.encoding_path == 'Cisco-IOS-XR-pfi-im-cmd-oper:interfaces/interface-xr/interface':
                         row_key = ifstatsbag_generic_pb2.ifstatsbag_generic_KEYS(
                         )
                         row_data = im_cmd_info_pb2.im_cmd_info()
                         for new_row in gpb_parser.data_gpb.row:
                             row_data.ParseFromString(new_row.content)
                             row_key.ParseFromString(new_row.keys)
                             if kafka_msg:
                                 kafka_msg_parse = create_kafka_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data)
                                 producer.send_messages(
                                     b'ios_xr_interface_info',
                                     kafka_msg_parse)
                                 logging.info(
                                     'Write {} to kafka topic'.format(
                                         gpb_parser.encoding_path))
                             elif influx_msg:
                                 influx_msg_parse = create_influx_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data, new_row.timestamp)
                                 client_influxdb.write_points(
                                     [influx_msg_parse],
                                     time_precision='ms')
                                 logging.info(
                                     'Write interface cmd oper from %s to InfluxDB'
                                     % address[0])
                             else:
                                 print('Row_key:{}\nRow_data:{}'.format(
                                     row_key, row_data))
                     elif gpb_parser.encoding_path == 'Cisco-IOS-XR-snmp-agent-oper:snmp/if-indexes/if-index':
                         row_data = snmp_agen_oper_if_index_pb2.snmp_ifindex_ifname(
                         )
                         row_key = snmp_agen_oper_if_index_pb2.snmp_ifindex_ifname_KEYS(
                         )
                         for new_row in gpb_parser.data_gpb.row:
                             row_data.ParseFromString(new_row.content)
                             row_key.ParseFromString(new_row.keys)
                             if kafka_msg:
                                 kafka_msg_parse = create_kafka_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data, new_row.timestamp)
                                 producer.send_messages(
                                     b'ios_xr_interface_snmp',
                                     kafka_msg_parse)
                                 logging.info(
                                     'Write {} to kafka topic'.format(
                                         gpb_parser.encoding_path))
                             elif influx_msg:
                                 influx_msg_parse = create_influx_message(
                                     gpb_parser.encoding_path,
                                     gpb_parser.node_id_str, row_key,
                                     row_data, new_row.timestamp)
                                 client_influxdb.write_points(
                                     [influx_msg_parse],
                                     time_precision='ms')
                                 logging.info(
                                     'Write interface snmp from %s to InfluxDB'
                                     % address[0])
                             else:
                                 print('Row_key:{}\nRow_data:{}'.format(
                                     row_key, row_data))
                 client.send(b"ack")
             else:
                 raise error('Client disconnected')
         except Exception as e:
             print('Error: {}'.format(e))
             client.close()
             return False
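A minimal sketch of the accept loop that would hand client sockets to listenToClient(); the TelemetryTCPServer class name and listening port are assumptions for illustration:

import socket
import threading


class TelemetryTCPServer(object):
    # The listenToClient(self, client, address) handler shown above is assumed
    # to be a method of this class.

    def __init__(self, host='0.0.0.0', port=5432):
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock.bind((host, port))

    def listen(self):
        # Spawn one thread per TCP client; each thread runs listenToClient().
        self.sock.listen(5)
        while True:
            client, address = self.sock.accept()
            threading.Thread(target=self.listenToClient,
                             args=(client, address)).start()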
Example #11
    def run(self):
        server_ip = "192.168.123.12"
        server_port = "57777"
        xr_user = "******"
        xr_passwd = "root"
        print("Using GRPC Server IP(%s) Port(%s)" % (server_ip, server_port))

        channel = grpc.insecure_channel(
            str(server_ip) + ":" + str(server_port))

        stub = mdt_grpc_dialin_pb2_grpc.gRPCConfigOperStub(channel)

        metadata = [('username', xr_user), ('password', xr_passwd)]
        Timeout = 3600 * 24 * 365

        sub_args = mdt_grpc_dialin_pb2.CreateSubsArgs(ReqId=99,
                                                      encode=2,
                                                      subidstr='IPV6')
        stream = stub.CreateSubs(sub_args, timeout=Timeout, metadata=metadata)
        print("##########1")
        for segment in stream:
            telemetry_pb = telemetry_pb2.Telemetry()
            telemetry_pb.ParseFromString(segment.data)

            telemetry_gpb_table = telemetry_pb2.TelemetryGPBTable()
            telemetry_gpb_table.CopyFrom(telemetry_pb.data_gpb)

            gpb_rows = []
            print("##########2")
            while telemetry_gpb_table.row:
                gpb_row_dict = {}
                gpb_row_dict["keys"] = {}
                gpb_row_dict["content"] = {}

                telemetry_gpb_row = telemetry_pb2.TelemetryRowGPB()
                telemetry_gpb_row.CopyFrom(telemetry_gpb_table.row.pop())
                gpb_row_dict["timestamp"] = telemetry_gpb_row.timestamp

                ipv6_nd_neighbor_entry_keys = ipv6_nd_neighbor_entry_KEYS()
                ipv6_nd_neighbor_entry_keys.ParseFromString(
                    telemetry_gpb_row.keys)

                ipv6_nd_neighbor_entry_content = ipv6_nd_neighbor_entry()
                ipv6_nd_neighbor_entry_content.ParseFromString(
                    telemetry_gpb_row.content)

                print("1: {}".format(ipv6_nd_neighbor_entry_content))
                print("2: {}".format(ipv6_nd_neighbor_entry_keys))

                content_dump = MessageToJson(ipv6_nd_neighbor_entry_content)
                keys_dump = MessageToJson(ipv6_nd_neighbor_entry_keys)

                print("3: {}".format(content_dump))
                print("4: {}".format(keys_dump))

                gpb_row_dict["content"].update(yaml.safe_load(content_dump))
                gpb_row_dict["keys"].update(yaml.safe_load(keys_dump))

                gpb_rows.append(gpb_row_dict)
                self.sensor_service.dispatch(
                    trigger='telemetry_collector.event2', payload=gpb_rows)