Example 1
def gmetric_read(header_msg, data_msg):
    header = Unpacker(header_msg)
    data = Unpacker(data_msg)
    values = dict()
    header.unpack_int()
    values['HOSTNAME'] = str(header.unpack_string().decode('ascii'))
    values['NAME'] = str(header.unpack_string().decode('ascii'))
    values['SPOOFENABLED'] = header.unpack_int()
    values['TYPE'] = str(header.unpack_string().decode('ascii'))
    values['NAME'] = str(header.unpack_string().decode('ascii'))  # metric name is repeated inside the metadata block
    values['UNITS'] = str(header.unpack_string().decode('ascii'))
    values['SLOPE'] = slope_int2str[header.unpack_int()]
    values['TMAX'] = header.unpack_uint()
    values['DMAX'] = header.unpack_uint()
    if header.unpack_int() == 1:
        header.unpack_string()
        values['GROUP'] = str(header.unpack_string().decode('ascii'))
    # Actual data in the second packet
    data.unpack_int()
    values['HOSTNAME'] = str(data.unpack_string().decode('ascii'))
    values['NAME'] = str(data.unpack_string().decode('ascii'))
    values['SPOOFENABLED'] = data.unpack_int()
    data.unpack_string()
    values['VAL'] = str(data.unpack_string().decode('ascii'))
    header.done()
    data.done()
    return values
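The packing side is symmetric: xdrlib's Packer writes fields in the same order the reader consumes them. Below is a minimal sketch of that idea, derived from the unpack sequence in gmetric_read() above; the packet-type ids, slope value, and sample field values are assumptions chosen for illustration, and slope_int2str is assumed to map the packed slope integer back to a string (note that xdrlib itself is deprecated and removed in Python 3.13).

from xdrlib import Packer

def pack_gmetric_pair():
    """Hypothetical helper: build a (header_msg, data_msg) pair in the
    field order gmetric_read() above expects."""
    header = Packer()
    header.pack_int(128)             # metadata packet id (assumed value)
    header.pack_string(b'host1')     # HOSTNAME
    header.pack_string(b'load_one')  # NAME
    header.pack_int(0)               # SPOOFENABLED
    header.pack_string(b'float')     # TYPE
    header.pack_string(b'load_one')  # NAME again, inside the metadata block
    header.pack_string(b'')          # UNITS
    header.pack_int(3)               # SLOPE (assumed to mean 'both')
    header.pack_uint(60)             # TMAX
    header.pack_uint(0)              # DMAX
    header.pack_int(1)               # one extra metadata element follows
    header.pack_string(b'GROUP')     # extra element key
    header.pack_string(b'load')      # extra element value -> values['GROUP']

    data = Packer()
    data.pack_int(133)               # value packet id (assumed value)
    data.pack_string(b'host1')       # HOSTNAME
    data.pack_string(b'load_one')    # NAME
    data.pack_int(0)                 # SPOOFENABLED
    data.pack_string(b'%f')          # format string (skipped by the reader)
    data.pack_string(b'0.42')        # VAL
    return header.get_buffer(), data.get_buffer()

# header_msg, data_msg = pack_gmetric_pair()
# gmetric_read(header_msg, data_msg)  # -> {'HOSTNAME': 'host1', 'VAL': '0.42', ...}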
Example 2
    def test_AVP(self):

        a1 = AVP(1, b"user")

        a1.setMandatory(True)
        assert a1.isMandatory()
        a1.setMandatory(False)
        assert not a1.isMandatory()

        a1.setPrivate(True)
        assert a1.isPrivate()
        a1.setPrivate(False)
        assert not a1.isPrivate()

        a1 = AVP(1, b"user")
        e_sz1 = a1.encodeSize()
        p = Packer()
        e_sz2 = a1.encode(p)
        assert e_sz1 == 12
        assert e_sz2 == 12
        assert e_sz1 == e_sz2

        u = Unpacker(p.get_buffer())

        d_sz1 = AVP.decodeSize(u, e_sz2)
        assert d_sz1 == e_sz2

        a2 = AVP()
        assert a2.decode(u, d_sz1)

        assert a1.code == a2.code
        assert a1.vendor_id == a2.vendor_id

        #same as above, but requires padding
        a1 = AVP(1, b"user")
        e_sz1 = a1.encodeSize()
        p = Packer()
        e_sz2 = a1.encode(p)
        assert e_sz1 == e_sz2

        u = Unpacker(p.get_buffer())

        d_sz1 = AVP.decodeSize(u, e_sz2)
        assert d_sz1 == e_sz2

        a2 = AVP()
        assert a2.decode(u, d_sz1)

        assert a1.code == a2.code
        assert a1.vendor_id == a2.vendor_id
Example 3
    def read(self):
        fext = os.path.splitext(self.file)[-1]
        assert fext == ".trr"
        fp = open(self.file, "rb")
        self.data = data = fp.read()
        self.coords = []
        self.v = {}
        self.f = {}
        self.headers = []
        self.up = Unpacker(data)
        curpos = self.up.get_position()
        datasize = len(data)
        nframe = 0
        # each frame begins with a header
        while curpos < datasize:
            #print "current position:", curpos
            h = self.readHeader(nframe)
            self.headers.append(h)
            self.readData(nframe)
            nframe = nframe + 1
            curpos = self.up.get_position()
        #print "end of readTraj, cur position: %d, datasize: %d" % (self.up.get_position(), datasize)
        self.nframes = nframe
        if self.nframes:
            return 1
        else:
            return 0
Example 4
    def __init__(self, unpacker: Unpacker):
        self.counter_format = None
        self.counter = None

        self.counter_format = unpacker.unpack_uint()
        counter_data = unpacker.unpack_opaque()
        unpacker_counter_data = Unpacker(counter_data)

        if self.counter_format == SFlowCounterRecord.COUNTER_DATA_GENERIC_INTERFACE:
            self.counter = GenericInterfaceCounters(unpacker_counter_data)
        elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_ETHERNET_INTERFACE:
            self.counter = EthernetInterfaceCounters(unpacker_counter_data)
        elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_TOKEN_RING:
            self.counter = TokenRingCounters(unpacker_counter_data)
        elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_VG_INTERFACE:
            self.counter = VgInterfaceCounters(unpacker_counter_data)
        elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_VLAN:
            self.counter = VlanCounters(unpacker_counter_data)
        elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_PROCESSOR:
            self.counter = ProcessorCounters(unpacker_counter_data)
        else:
            logging.debug('read_counter_record: Unimplemented data_format (%d)' %
                          self.counter_format)
Example 5
def read_sample_record(up, sample_datagram):

    # Unpack sample_record structure
    #    data_format sample_type;
    #       Specifies the type of sample data
    #    opaque sample_data<>;
    #       A structure corresponding to the sample_type

    sample_type = up.unpack_uint()

    sample_data = up.unpack_opaque()
    up_sample_data = Unpacker(sample_data)
    ret = None

    if sample_type == SAMPLE_DATA_FLOW_RECORD:
        logging.warning("sample_type: %d is a flow sample, "
                        "which is not currently utilized." % sample_type)
    elif sample_type == SAMPLE_DATA_COUNTER_RECORD:
        sample = read_counter_sample(up_sample_data, sample_datagram)
        if len(sample) != 0:
            ret = sample
    else:
        logging.warning("sample_type: %d is not supported by the current agent. "
                        "Contact Zhang Song for further development." % sample_type)

    # Check if whole data block was unpacked
    #up_sample_data.done()
    return ret
Example 6
    def parse(self, raw_data):
        packet = SFlowPacket()
        data = Unpacker(raw_data)

        # sFlow version (2|4|5)
        packet.version = data.unpack_uint()
        if packet.version != 5:
            logging.error("Only support version 5.")
            raise RuntimeError("Only support version 5.")
        logging.debug("Get version {0}".format(packet.version))

        # IP version of the Agent/Switch (1=v4|2=v6)
        packet.agent_ip_version = data.unpack_uint()
        if packet.agent_ip_version != 1:
            logging.error("Only support IPv4.")
            raise RuntimeError("Only support IPv4.")

        # Agent IP address (v4=4byte|v6=16byte)
        packet.agent_ip_address = ntohl(data.unpack_uint())

        # sub agent id
        packet.sub_agent_id = data.unpack_uint()

        # datagram sequence number
        packet.datagram_sequence_num = data.unpack_uint()

        # switch uptime in ms
        packet.switch_uptime = data.unpack_uint()

        # how many samples in datagram
        packet.sample_amount = data.unpack_uint()

        self._parse_samples(packet, data)

        return packet
Example 7
 def decode(cls, data):
     '''Deserialize the data and return an object.'''
     with _convert_exceptions():
         xdr = Unpacker(data)
         ret = cls.decode_xdr(xdr)
         xdr.done()
         return ret
Example 8
 def __init__(self, topology, trajFileName):
     # since we need to be able to do seeks, can't use osOpen
     # which might return an unseekable stream
     self.topology = topology
     from OpenSave import osUncompressedPath
     path = osUncompressedPath(trajFileName)
     import os
     self.trajFileSize = os.stat(path).st_size
     self.traj = open(path, "rb")
     from xdrlib import Unpacker
     self.fileString = FileString(self.traj, 0, self.trajFileSize)
     self.xdr = Unpacker(self.fileString)
     self.crdStarts = []
     while True:
         replyobj.status("Reading frame %d header\n" %
                         (len(self.crdStarts) + 1))
         try:
             crdStart, endFrame = self._readHeader()
          except ValueError as e:
             raise ValueError("Frame %d: %s" %
                              (len(self.crdStarts) + 1, str(e)))
         if endFrame > self.trajFileSize:
             if not self.crdStarts:
                 raise ValueError("Computed size of"
                                  " first frame (%d) greater than"
                                  " trajectory file size (%s)" %
                                  (endFrame, self.trajFileSize))
             replyobj.warning("Truncated trajectory file;"
                              " skipping last partial frame.\n")
         else:
             self.crdStarts.append(crdStart)
         if endFrame == self.trajFileSize:
             break
         self.xdr.set_position(endFrame)
Example 9
    def __init__(self,
                 atomicAbundance: AtomicAbundance = None,
                 kuruczPfPath: str = None):
        if atomicAbundance is None:
            atomicAbundance = DefaultAtomicAbundance
        self.atomicAbundance = atomicAbundance
        if kuruczPfPath is None:
            kuruczPfPath = get_data_path() + 'pf_Kurucz.input'
        with open(kuruczPfPath, 'rb') as f:
            s = f.read()
        u = Unpacker(s)

        # NOTE(cmo): Each of these terms is simply in flat lists indexed by Atomic Number Z-1
        self.Tpf = np.array(u.unpack_array(u.unpack_double))
        stages = []
        pf = []
        ionpot = []
        for i in range(99):
            z = u.unpack_int()
            stages.append(u.unpack_int())
            pf.append(
                np.array(
                    u.unpack_farray(stages[-1] * self.Tpf.shape[0],
                                    u.unpack_double)).reshape(
                                        stages[-1], self.Tpf.shape[0]))
            ionpot.append(
                np.array(u.unpack_farray(stages[-1], u.unpack_double)))

        ionpot = [i * Const.HC / Const.CM_TO_M for i in ionpot]
        pf = [np.log(p) for p in pf]
        self.pf = pf
        self.ionpot = ionpot
Example 10
def read_datagram(addr, data):
    """Yield all record (flow and counter records) from the sFlow v5
    datagram given by up, which is expected to be an xdrlib.Unpacker
    object."""

    up = Unpacker(data)

    version = up.unpack_int()
    if not version == 5:
        hexdump_bytes(data)
        raise Exception()

    af = up.unpack_int()
    if af == 1:  # IPv4
        agent_address = ntohl(up.unpack_uint())
    else:
        raise Exception()

    sf = Datagram(addr, agent_address)

    sub_agent_id = up.unpack_uint()
    sequence_number = up.unpack_uint()
    uptime = up.unpack_uint()
    nb_sample_records = up.unpack_uint()

    # Iterating over sample records
    for i in range(nb_sample_records):
        try:
            yield read_sample_record(up, sf)
        except EOFError:
            stderr.write("read_sample_datagram: EOFError reading sample_record, "
                         "premature end of data stream, skipping record\n")
            up.set_position(len(up.get_buffer()))
            break
Example 11
def unpack_reply(response,
                 myxid=None,
                 myreply_stat=MSG_ACCEPTED,
                 myverf=NULL_AUTH,
                 myaccept_stat=SUCCESS,
                 myreject_stat=None,
                 myauth_stat=None):
    """Unpacks an RPC reply and returns a variable-length arg list
    of the same form as the argument to pack_reply, but for SUCCESS also
    returns an xdrlib.Unpacker as the final element of the list
    that the caller can use to unpack the results of the call.

    If values are given for any myXXX arguments, checks that those
    values match the unpacked XXX values.  Default myXXX values assume
    success with no authentication.
    
    Raises UnpackException on any errors or mismatches.
    """
    u = Unpacker(response)
    msg = RPCProto.unpack_rpc_msg(u)
    check(myxid, msg.xid, "xid")
    if msg.body.mtype == RPCProto.CALL:
        raise UnpackException("Expected reply, but got call")
    reply = msg.body.rbody
    check(myreply_stat, reply.stat, "reply_stat")
    retval = [msg.xid, reply.stat]
    if reply.stat == RPCProto.MSG_ACCEPTED:
        check(myverf, reply.areply.verf, "verf")
        retval.append(reply.areply.verf)
        accept_stat = reply.areply.reply_data.stat
        check(myaccept_stat, accept_stat, "accept_stat")
        retval.append(accept_stat)
        if accept_stat == RPCProto.SUCCESS:
            retval.append(u)
        elif accept_stat == RPCProto.PROG_MISMATCH:
            retval.append(reply.areply.reply_data.mismatch_info.low)
            retval.append(reply.areply.reply_data.mismatch_info.high)
        elif (accept_stat == RPCProto.PROG_UNAVAIL
              or accept_stat == RPCProto.PROC_UNAVAIL
              or accept_stat == RPCProto.GARBAGE_ARGS
              or accept_stat == RPCProto.SYSTEM_ERR):
            pass
        else:
            raise UnpackException("unknown accept_stat: %u" % accept_stat)
    elif reply.stat == RPCProto.MSG_DENIED:
        reject_stat = reply.rreply.stat
        check(myreject_stat, reject_stat, "reject_stat")
        retval.append(reject_stat)
        if reject_stat == RPCProto.RPC_MISMATCH:
            retval.append(reply.rreply.mismatch_info.low)
            retval.append(reply.rreply.mismatch_info.high)
        elif reject_stat == RPCProto.AUTH_ERROR:
            check(myauth_stat, reply.rreply.astat, "auth_stat")
            retval.append(reply.rreply.astat)
        else:
            raise UnpackException("unknown reject_stat: %u" % reject_stat)
    else:
        raise UnpackException("unknown reply_stat: %u" % reply.stat)
    return retval
Example 12
    def __init__(self, kuruczPfPath: Optional[str]=None, metallicity: float=0.0, 
                        abundances: Dict=None, abundDex: bool=True):
        if set(AtomicWeights.keys()) != set(AtomicAbundances.keys()):
            raise ValueError('AtomicWeights and AtomicAbundances keys differ (Problem keys: %s)' % repr(set(AtomicWeights.keys()) - set(AtomicAbundances.keys())))

        self.indices = OrderedDict(zip(AtomicWeights.keys(), range(len(AtomicWeights))))

        # Convert abundances and overwrite any provided secondary abundances
        self.abund = deepcopy(AtomicAbundances)
        if self.abund['H '] == 12.0:
            for k, v in self.abund.items():
                self.abund[k] = 10**(v - 12.0)

        if abundances is not None:
            if abundDex:
                for k, v in abundances.items():
                    abundances[k] = 10**(v - 12.0)
            for k, v in abundances.items():
                self.abund[k] = v

        metallicity = 10**metallicity
        for k, v in self.abund.items():
            if k != 'H ':
                self.abund[k] = v*metallicity

        kuruczPfPath = get_data_path() + 'pf_Kurucz.input' if kuruczPfPath is None else kuruczPfPath
        with open(kuruczPfPath, 'rb') as f:
            s = f.read()
        u = Unpacker(s)

        self.Tpf = np.array(u.unpack_array(u.unpack_double))
        ptIndex = [] # Index in the periodic table (fortran based, so +1) -- could be used for validation
        stages = []
        pf = []
        ionpot = []
        for i in range(len(AtomicWeights)):
            ptIndex.append(u.unpack_int())
            stages.append(u.unpack_int())
            pf.append(np.array(u.unpack_farray(stages[-1] * self.Tpf.shape[0], u.unpack_double)).reshape(stages[-1], self.Tpf.shape[0]))
            ionpot.append(np.array(u.unpack_farray(stages[-1], u.unpack_double)))

        ionpot = [i * Const.HC / Const.CM_TO_M for i in ionpot]
        pf = [np.log(p) for p in pf]

        totalAbund = 0.0
        avgWeight = 0.0
        self.elements: List[Element] = []
        for k, v in AtomicWeights.items():
            i = self.indices[k]
            ele = Element(k, v, self.abund[k], ionpot[i], self.Tpf, pf[i])
            self.elements.append(ele)
            totalAbund += ele.abundance
            avgWeight += ele.abundance * ele.weight

        self.totalAbundance = totalAbund
        self.weightPerH = avgWeight
        self.avgMolWeight = avgWeight / totalAbund
Example 13
 def __init__(self, topology):
     from OpenSave import osOpen
     topFile = osOpen(topology, 'rb')
     import os
     self.topFileSize = os.stat(topology).st_size
     from xdrlib import Unpacker
     self.fileString = FileString(topFile, 0, self.topFileSize)
     self.xdr = Unpacker(self.fileString)
     version = self._readHeader()
     self._readTopology(version)
Example 14
def _unittest():
    mh = MessageHeader()

    assert mh.version == 1

    mh.setRequest(True)
    assert mh.isRequest()
    mh.setRequest(False)
    assert not mh.isRequest()

    mh.setProxiable(True)
    assert mh.isProxiable()
    mh.setProxiable(False)
    assert not mh.isProxiable()

    mh.setError(True)
    assert mh.isError()
    mh.setError(False)
    assert not mh.isError()

    mh.setRetransmit(True)
    assert mh.isRetransmit()
    mh.setRetransmit(False)
    assert not mh.isRetransmit()

    mh.setRequest(True)
    mh.setProxiable(True)
    mh.setRetransmit(True)
    mh.command_code = 42
    mh.hop_by_hop_identifier = 17
    mh.end_to_end_identifier = 117

    mh2 = MessageHeader()

    mh2.prepareResponse(mh)
    assert not mh2.isRequest()
    assert mh2.isProxiable()
    assert not mh2.isRetransmit()
    assert mh2.command_code == mh.command_code
    assert mh2.hop_by_hop_identifier == mh.hop_by_hop_identifier
    assert mh2.end_to_end_identifier == mh.end_to_end_identifier

    p = Packer()
    ml = mh.encodeSize()
    mh.encode(p, ml)
    mh3 = MessageHeader()
    u = Unpacker(p.get_buffer())
    #u.reset(p.get_buffer())
    mh3.decode(u)
    assert mh3.version == 1
    assert mh3.version == mh.version
    assert mh3.command_flags == mh.command_flags
    assert mh3.command_code == mh.command_code
    assert mh3.hop_by_hop_identifier == mh.hop_by_hop_identifier
    assert mh3.end_to_end_identifier == mh.end_to_end_identifier
Example 15
    def __init__(self, packet, data):
        super(CounterSample, self).__init__(packet, data)
        self.format = FORMAT_COUNTER_SAMPLE

        self.sequence_num = None
        self.source_id = None
        self.record_amount = None
        self.records = []

        sample_data = Unpacker(data.unpack_opaque())
        self._parse(packet, sample_data)
Example 16
def gmetric_read(msg):
    unpacker = Unpacker(msg)
    values = dict()
    unpacker.unpack_int()
    values['TYPE'] = unpacker.unpack_string()
    values['NAME'] = unpacker.unpack_string()
    values['VAL'] = unpacker.unpack_string()
    values['UNITS'] = unpacker.unpack_string()
    values['SLOPE'] = slope_int2str[unpacker.unpack_int()]
    values['TMAX'] = unpacker.unpack_uint()
    values['DMAX'] = unpacker.unpack_uint()
    unpacker.done()
    return values
Example 17
 def getAVPs(self):
     """Returns a copy of the embedded AVPs in a list"""
     avps = []
     u = Unpacker(self.payload)
     bytes_left = len(self.payload)
     while bytes_left != 0:
         sz = AVP.decodeSize(u, bytes_left)
         if sz == 0:
             raise InvalidAVPLengthError(self)
         a = AVP(1, "")
         a.decode(u, sz)
         avps.append(a)
         bytes_left -= sz
     return avps
Example 18
    def handle(self):
        data = self.request[0]

        unpacker = Unpacker(data)
        type = unpacker.unpack_int()
        if type not in GANGLIA_DECODE: return

        host = unpacker.unpack_string()
        name = unpacker.unpack_string()
        unpacker.unpack_int()  # spoof boolean
        unpacker.unpack_string()  # format string
        value = GANGLIA_DECODE[type](unpacker)
        unpacker.done()

        graphite.record_stat(name, value)
Example 19
    def __init__(self, packet, data):
        super(FlowSample, self).__init__(packet, data)
        self.format = FORMAT_FLOW_SAMPLE

        self.sequence_number = None
        self.source_id = None
        self.sampling_rate = None
        self.sample_pool = None
        self.drops = None
        self.input_if = None
        self.output_if = None
        self.record_amount = None
        self.records = []

        sample_data = Unpacker(data.unpack_opaque())
        self._parse(packet, sample_data)
Example 20
def read_pf(path):
    with open(path, 'rb') as f:
        s = f.read()
    u = Unpacker(s)
    Tpf = u.unpack_array(u.unpack_double)
    ptis = []
    stages = []
    pf = []
    ionpot = []
    for i in range(len(AtomicWeights)):
        ptis.append(u.unpack_int())
        stages.append(u.unpack_int())
        pf.append(u.unpack_farray(stages[-1] * len(Tpf), u.unpack_double))
        ionpot.append(u.unpack_farray(stages[-1], u.unpack_double))

    return {'Tpf': Tpf, 'stages': stages, 'pf': pf, 'ionpot': ionpot}
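For testing, the same on-disk layout can be produced with xdrlib's Packer. The sketch below is a hypothetical write_pf() helper derived from the unpack calls in read_pf() above (it is not the original writer); it assumes the caller supplies one (pti, stages, pf, ionpot) block per element that read_pf() will iterate over.

from xdrlib import Packer

def write_pf(path, Tpf, ptis, stages, pf, ionpot):
    """Write partition-function data in the layout read_pf() expects.

    For element i, pf[i] must contain stages[i] * len(Tpf) doubles and
    ionpot[i] must contain stages[i] doubles.
    """
    p = Packer()
    p.pack_array(Tpf, p.pack_double)                 # temperature grid
    for pti, nstage, pf_i, ionpot_i in zip(ptis, stages, pf, ionpot):
        p.pack_int(pti)                              # periodic-table index
        p.pack_int(nstage)                           # number of ionization stages
        p.pack_farray(nstage * len(Tpf), pf_i, p.pack_double)
        p.pack_farray(nstage, ionpot_i, p.pack_double)
    with open(path, 'wb') as f:
        f.write(p.get_buffer())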
Example 21
    def __init__(self, sample_data):
        super(CounterRecord, self).__init__(sample_data)
        self.format = sample_data.unpack_uint()
        record_data = Unpacker(sample_data.unpack_opaque())

        if self.format == FORMAT_COUNTER_RECORD_GENERIC:
            self._parse_generic(record_data)
        elif self.format == FORMAT_COUNTER_RECORD_ETHERNET:
            self._parse_ethernet(record_data)
        elif self.format == FORMAT_COUNTER_RECORD_TOKENRING:
            self._parse_tokenring(record_data)
        elif self.format == FORMAT_COUNTER_RECORD_100BASEVG:
            self._parse_100basevg(record_data)
        elif self.format == FORMAT_COUNTER_RECORD_VLAN:
            self._parse_vlan(record_data)
        elif self.format == FORMAT_COUNTER_RECORD_PROCESS:
            self._parse_process(record_data)
Example 22
 def narrow(avp):
     """Convert generic AVP to AVP_Float64
     Raises: InvalidAVPLengthError
     """
     avps = []
     u = Unpacker(avp.payload)
     bytes_left = len(avp.payload)
     while bytes_left != 0:
         sz = AVP.decodeSize(u, bytes_left)
         if sz == 0:
             raise InvalidAVPLengthError(avp)
         a = AVP(1, "")
         a.decode(u, sz)
         avps.append(a)
         bytes_left -= sz
     a = AVP_Grouped(avp.code, avps, avp.vendor_id)
     a.flags = avp.flags
     return a
Example 23
def read_flow_record(up, sample):
    """Reads a 'struct flow_record' (p. 29)"""

    flow_format = up.unpack_uint()
    flow_data = up.unpack_opaque()
    up_flow_data = Unpacker(flow_data)

    if flow_format == FLOW_DATA_RAW_HEADER:
        res = FlowRecord(sample, read_sampled_header(up_flow_data))
    elif flow_format == FLOW_DATA_ETHERNET_HEADER:
        res = FlowRecord(sample, read_sampled_ethernet(up_flow_data))
    elif flow_format == FLOW_DATA_IPV4_HEADER:
        res = FlowRecord(sample, read_sampled_ipv4(up_flow_data))
    else:
        res = 'read_flow_record:Unknown data_format (%d)' % flow_format

    up_flow_data.done()
    return res
Example 24
    def __init__(self, sample_data):
        super(FlowRecord, self).__init__(sample_data)
        self.parsed = True
        self.format = sample_data.unpack_uint()
        record_data = Unpacker(sample_data.unpack_opaque())

        if self.format == FORMAT_FLOW_RECORD_RAW_PACKET:
            self._parse_raw_packet(record_data)
        elif self.format == FORMAT_FLOW_RECORD_ETHERNET_FRAME:
            self._parse_ethernet_frame(record_data)
        elif self.format == FORMAT_FLOW_RECORD_IPv4:
            self._parse_ipv4(record_data)
        elif self.format == FORMAT_FLOW_RECORD_EXTENDED_SWITCH:
            self._parse_extended_switch(record_data)
        else:
            logging.warn("Format {0} is not supported now.".format(
                self.format))
            self.parsed = False
Example 25
    def __init__(self, unpacker: Unpacker):
        self.flow_format = None
        self.flow = None

        self.flow_format = unpacker.unpack_uint()
        flow_data = unpacker.unpack_opaque()
        unpacker_flow_data = Unpacker(flow_data)

        if self.flow_format == SFlowFlowRecord.FLOW_DATA_RAW_HEADER:
            self.flow = FlowDataRawHeader(unpacker_flow_data)
        elif self.flow_format == SFlowFlowRecord.FLOW_DATA_ETHERNET_HEADER:
            self.flow = FlowDataEthernetHeader(unpacker_flow_data)
        elif self.flow_format == SFlowFlowRecord.FLOW_DATA_IPV4_HEADER:
            self.flow = FlowDataIPv4Header(unpacker_flow_data)
        elif self.flow_format == SFlowFlowRecord.FLOW_DATA_EXT_SWITCH:
            self.flow = FlowDataExtSwitch(unpacker_flow_data)
        else:
            logging.debug('read_flow_record:Unimplemented data_format (%d)' % self.flow_format)
Example 26
 def datagramReceived(self, datagram, address):
     values = dict()
     unpacker = Unpacker(datagram)
     packet_type = unpacker.unpack_uint()
     if packet_type == 128:
         self.unpack_meta(unpacker)
         return
     elif packet_type == 136:
         #unpack_metareq function works, but serves no purpose right now
         #commented out unless anyone comes up with a good reason to respond
         #to metadata requests.
         #self.unpack_metareq(unpacker)
         return
     elif 128 < packet_type < 136:
         self.unpack_data(unpacker, packet_type, address)
         return
     else:
         return
Example 27
def unpack_call(request,
                myprog=None,
                myvers=None,
                mycred=NULL_AUTH,
                myverf=NULL_AUTH):
    """Unpacks an RPC call message from request.

    Returns (xid, prog, vers, proc, cred, verf, u) if okay,
    where u is an xdrlib.Unpacker.
    otherwise raises either UnpackException or ReplyException.
    If myXXX is not None, checks that XXX == myXXX.
    Assumes AUTH_NONE for cred and verf; override with mycred and myverf.
    """
    if len(request) < 24:
        raise UnpackException("Packet too short (%d bytes)" % len(request))
    u = Unpacker(request)
    msg = RPCProto.unpack_rpc_msg(u)
    if msg.body.mtype == RPCProto.REPLY:
        raise UnpackException("Expected call, but got reply")
    call = msg.body.cbody
    check(RPCProto.RPC_VERSION, call.rpcvers, "RPC version",
          lambda: pack_reply(msg.xid, RPCProto.MSG_DENIED, RPCProto.RPC_MISMATCH,
                             RPCProto.RPC_VERSION, RPCProto.RPC_VERSION).get_buffer())
    check(myprog, call.prog, "program",
          lambda: pack_reply(msg.xid, RPCProto.MSG_ACCEPTED, NULL_AUTH,
                             RPCProto.PROG_UNAVAIL).get_buffer())
    check(myvers, call.vers, "version",
          lambda: pack_reply(msg.xid, RPCProto.MSG_ACCEPTED, NULL_AUTH,
                             RPCProto.PROG_MISMATCH, myvers, myvers).get_buffer())
    check(mycred, call.cred, "cred",
          lambda: pack_reply(msg.xid, RPCProto.MSG_DENIED, RPCProto.AUTH_ERROR,
                             RPCProto.AUTH_BADCRED).get_buffer())
    check(myverf, call.verf, "verf",
          lambda: pack_reply(msg.xid, RPCProto.MSG_DENIED, RPCProto.AUTH_ERROR,
                             RPCProto.AUTH_BADVERF).get_buffer())
    return (msg.xid, call.prog, call.vers, call.proc, call.cred, call.verf, u)
Example 28
def read_counter_record(up, sample):

    # Unpack counter_record structure
    #     data_format counter_format;     The format of counter_data
    #     opaque counter_data<>;          A block of counters uniquely defined by the counter_format.

    counter_format = up.unpack_uint()
    counter_data = up.unpack_opaque()
    up_counter_data = Unpacker(counter_data)

    if counter_format == COUNTER_DATA_GENERIC:
        return CounterRecord(sample, read_if_counters(up_counter_data))
    elif counter_format == COUNTER_DATA_ETHERNET:
        return CounterRecord(sample, read_ethernet_counters(up_counter_data))
    elif counter_format == COUNTER_DATA_TOKENRING:
        return CounterRecord(sample, read_tokenring_counters(up_counter_data))
    elif counter_format == COUNTER_DATA_VG:
        return CounterRecord(sample, read_vg_counters(up_counter_data))
    elif counter_format == COUNTER_DATA_VLAN:
        return CounterRecord(sample, read_vlan_counters(up_counter_data))
    else:
        return 'read_counter_record:Unknown data_format (%d)' % counter_format
Example 29
def read_sample_record(up, sample_datagram):

    # Unpack sample_record structure
    #    data_format sample_type;
    #       Specifies the type of sample data
    #    opaque sample_data<>;
    #       A structure corresponding to the sample_type

    sample_type = up.unpack_uint()

    sample_data = up.unpack_opaque()
    up_sample_data = Unpacker(sample_data)

    if sample_type == SAMPLE_DATA_FLOW_RECORD:
        ret = read_flow_sample(up_sample_data, sample_datagram)
    elif sample_type == SAMPLE_DATA_COUNTER_RECORD:
        ret = read_counter_sample(up_sample_data, sample_datagram)
    else:
        raise Exception("Unknown sample_type (%d)" % sample_type)

    # Check if whole data block was unpacked
    up_sample_data.done()
    return ret
Example 30
 def from_xdr_bytes(cls, xdr: bytes) -> "RevokeSponsorshipResult":
     unpacker = Unpacker(xdr)
     return cls.unpack(unpacker)