def unpack(cls, unpacker: Unpacker) -> "AccountEntry": account_id = AccountID.unpack(unpacker) balance = Int64.unpack(unpacker) seq_num = SequenceNumber.unpack(unpacker) num_sub_entries = Uint32.unpack(unpacker) inflation_dest = AccountID.unpack(unpacker) if unpacker.unpack_uint() else None flags = Uint32.unpack(unpacker) home_domain = String32.unpack(unpacker) thresholds = Thresholds.unpack(unpacker) length = unpacker.unpack_uint() signers = [] for _ in range(length): signers.append(Signer.unpack(unpacker)) ext = AccountEntryExt.unpack(unpacker) return cls( account_id=account_id, balance=balance, seq_num=seq_num, num_sub_entries=num_sub_entries, inflation_dest=inflation_dest, flags=flags, home_domain=home_domain, thresholds=thresholds, signers=signers, ext=ext, )
class Trajectory:
    def __init__(self, topology, trajFileName):
        # since we need to be able to do seeks, can't use osOpen
        # which might return an unseekable stream
        self.topology = topology
        from OpenSave import osUncompressedPath
        path = osUncompressedPath(trajFileName)
        import os
        self.trajFileSize = os.stat(path).st_size
        self.traj = open(path, "rb")
        from xdrlib import Unpacker
        self.fileString = FileString(self.traj, 0, self.trajFileSize)
        self.xdr = Unpacker(self.fileString)
        self.crdStarts = []
        while True:
            replyobj.status("Reading frame %d header\n" % (
                len(self.crdStarts) + 1))
            try:
                crdStart, endFrame = self._readHeader()
            except ValueError, e:
                raise ValueError("Frame %d: %s" % (
                    len(self.crdStarts) + 1, str(e)))
            if endFrame > self.trajFileSize:
                if not self.crdStarts:
                    raise ValueError("Computed size of"
                        " first frame (%d) greater than"
                        " trajectory file size (%s)"
                        % (endFrame, self.trajFileSize))
                replyobj.warning("Truncated trajectory file;"
                    " skipping last partial frame.\n")
            else:
                self.crdStarts.append(crdStart)
            if endFrame == self.trajFileSize:
                break
            self.xdr.set_position(endFrame)
def _parse_raw_sflow_counter_sample(self, unpacker: Unpacker):
    self.sequence_number = unpacker.unpack_uint()
    self.source_id = unpacker.unpack_uint()

    counters_in_sample = unpacker.unpack_uint()
    for _ in range(counters_in_sample):
        self.counters.append(SFlowCounterRecord(unpacker))
def __init__(self, topology, trajFileName):
    # since we need to be able to do seeks, can't use osOpen
    # which might return an unseekable stream
    self.topology = topology
    from OpenSave import osUncompressedPath
    path = osUncompressedPath(trajFileName)
    import os
    self.trajFileSize = os.stat(path).st_size
    self.traj = open(path, "rb")
    from xdrlib import Unpacker
    self.fileString = FileString(self.traj, 0, self.trajFileSize)
    self.xdr = Unpacker(self.fileString)
    self.crdStarts = []
    while True:
        replyobj.status("Reading frame %d header\n" % (
            len(self.crdStarts) + 1))
        try:
            crdStart, endFrame = self._readHeader()
        except ValueError, e:
            raise ValueError("Frame %d: %s" % (
                len(self.crdStarts) + 1, str(e)))
        if endFrame > self.trajFileSize:
            if not self.crdStarts:
                raise ValueError("Computed size of"
                    " first frame (%d) greater than"
                    " trajectory file size (%s)"
                    % (endFrame, self.trajFileSize))
            replyobj.warning("Truncated trajectory file;"
                " skipping last partial frame.\n")
        else:
            self.crdStarts.append(crdStart)
        if endFrame == self.trajFileSize:
            break
        self.xdr.set_position(endFrame)
def __init__(self, unpacker: Unpacker):
    self.counter_format = None
    self.counter = None

    self.counter_format = unpacker.unpack_uint()
    counter_data = unpacker.unpack_opaque()
    unpacker_counter_data = Unpacker(counter_data)

    if self.counter_format == SFlowCounterRecord.COUNTER_DATA_GENERIC_INTERFACE:
        self.counter = GenericInterfaceCounters(unpacker_counter_data)
    elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_ETHERNET_INTERFACE:
        self.counter = EthernetInterfaceCounters(unpacker_counter_data)
    elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_TOKEN_RING:
        self.counter = TokenRingCounters(unpacker_counter_data)
    elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_VG_INTERFACE:
        self.counter = VgInterfaceCounters(unpacker_counter_data)
    elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_VLAN:
        self.counter = VlanCounters(unpacker_counter_data)
    elif self.counter_format == SFlowCounterRecord.COUNTER_DATA_PROCESSOR:
        self.counter = ProcessorCounters(unpacker_counter_data)
    else:
        logging.debug('read_flow_record:Unimplemented data_format (%d)'
                      % self.counter_format)
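# A standalone sketch (not from the sFlow sources in this collection) of the
# pattern the record and sample classes above and below rely on: an XDR
# opaque<> field is unpacked as raw bytes and then parsed with a second,
# nested Unpacker scoped to just that record body.
from xdrlib import Packer, Unpacker

def _nested_opaque_example():
    inner = Packer()
    inner.pack_uint(42)                     # some record field
    outer = Packer()
    outer.pack_uint(1)                      # record format tag
    outer.pack_opaque(inner.get_buffer())   # record body as opaque<>

    up = Unpacker(outer.get_buffer())
    fmt = up.unpack_uint()                  # 1
    body = Unpacker(up.unpack_opaque())     # nested unpacker over the opaque bytes
    assert (fmt, body.unpack_uint()) == (1, 42)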
def decode(cls, data):
    '''Deserialize the data and return an object.'''
    with _convert_exceptions():
        xdr = Unpacker(data)
        ret = cls.decode_xdr(xdr)
        xdr.done()
        return ret
def unpack(cls, unpacker: Unpacker) -> "ClaimPredicate": type = ClaimPredicateType.unpack(unpacker) if type == ClaimPredicateType.CLAIM_PREDICATE_UNCONDITIONAL: return cls(type) if type == ClaimPredicateType.CLAIM_PREDICATE_AND: length = unpacker.unpack_uint() and_predicates = [] for _ in range(length): and_predicates.append(ClaimPredicate.unpack(unpacker)) return cls(type, and_predicates=and_predicates) if type == ClaimPredicateType.CLAIM_PREDICATE_OR: length = unpacker.unpack_uint() or_predicates = [] for _ in range(length): or_predicates.append(ClaimPredicate.unpack(unpacker)) return cls(type, or_predicates=or_predicates) if type == ClaimPredicateType.CLAIM_PREDICATE_NOT: # not_predicate is optional. not_predicate = ClaimPredicate.unpack( unpacker) if unpacker.unpack_uint() else None if not_predicate is None: raise ValueError("not_predicate should not be None.") return cls(type, not_predicate=not_predicate) if type == ClaimPredicateType.CLAIM_PREDICATE_BEFORE_ABSOLUTE_TIME: abs_before = Int64.unpack(unpacker) if abs_before is None: raise ValueError("abs_before should not be None.") return cls(type, abs_before=abs_before) if type == ClaimPredicateType.CLAIM_PREDICATE_BEFORE_RELATIVE_TIME: rel_before = Int64.unpack(unpacker) if rel_before is None: raise ValueError("rel_before should not be None.") return cls(type, rel_before=rel_before) return cls(type)
def read(self):
    fext = os.path.splitext(self.file)[-1]
    assert fext == ".trr"
    fp = open(self.file, "rb")
    self.data = data = fp.read()
    self.coords = []
    self.v = {}
    self.f = {}
    self.up = Unpacker(data)
    curpos = self.up.get_position()
    datasize = len(data)
    nframe = 0
    # each frame begins with a header
    while curpos < datasize:
        #print "current position:", curpos
        h = self.readHeader(nframe)
        self.headers.append(h)
        self.readData(nframe)
        nframe = nframe + 1
        curpos = self.up.get_position()
    #print "end of readTraj, cur position : %d, datazize: %d" % (self.up.get_position(), datasize)
    self.nframes = nframe
    if self.nframes:
        return 1
    else:
        return 0
def is_tag_full(wrapped_message):
    unpacker = Unpacker(wrapped_message)
    try:
        unpacker.unpack_uint()
        unpacker.unpack_string()
    except EOFError:
        return False
    return True
def get_tlv(wrapped_message):
    unpacker = Unpacker(wrapped_message)
    tag = unpacker.unpack_uint()
    message = unpacker.unpack_string()
    pos = unpacker.get_position()
    buff = unpacker.get_buffer()
    rest = buff[pos:]
    return tag, message, rest
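# A minimal round-trip sketch (not part of the original sources) of the layout
# that is_tag_full() and get_tlv() above appear to assume: a uint tag followed
# by an XDR string, with any trailing bytes returned as the unconsumed rest.
from xdrlib import Packer

def _pack_tlv_example():
    packer = Packer()
    packer.pack_uint(7)               # tag
    packer.pack_string(b"payload")    # XDR string: length word + padded bytes
    wrapped = packer.get_buffer() + b"extra"
    assert is_tag_full(wrapped)
    tag, message, rest = get_tlv(wrapped)
    assert (tag, message, rest) == (7, b"payload", b"extra")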
def __init__(self, topology):
    from OpenSave import osOpen
    topFile = osOpen(topology, 'rb')
    import os
    self.topFileSize = os.stat(topology).st_size
    from xdrlib import Unpacker
    self.fileString = FileString(topFile, 0, self.topFileSize)
    self.xdr = Unpacker(self.fileString)
    version = self._readHeader()
    self._readTopology(version)
def unpack(cls, unpacker: Unpacker) -> "SetOptionsOp": inflation_dest = AccountID.unpack( unpacker) if unpacker.unpack_uint() else None clear_flags = Uint32.unpack( unpacker) if unpacker.unpack_uint() else None set_flags = Uint32.unpack(unpacker) if unpacker.unpack_uint() else None master_weight = Uint32.unpack( unpacker) if unpacker.unpack_uint() else None low_threshold = Uint32.unpack( unpacker) if unpacker.unpack_uint() else None med_threshold = Uint32.unpack( unpacker) if unpacker.unpack_uint() else None high_threshold = Uint32.unpack( unpacker) if unpacker.unpack_uint() else None home_domain = String32.unpack( unpacker) if unpacker.unpack_uint() else None signer = Signer.unpack(unpacker) if unpacker.unpack_uint() else None return cls( inflation_dest=inflation_dest, clear_flags=clear_flags, set_flags=set_flags, master_weight=master_weight, low_threshold=low_threshold, med_threshold=med_threshold, high_threshold=high_threshold, home_domain=home_domain, signer=signer, )
def parse(self, raw_data):
    packet = SFlowPacket()
    data = Unpacker(raw_data)

    # sFlow version (2|4|5)
    packet.version = data.unpack_uint()
    if packet.version != 5:
        logging.error("Only support version 5.")
        raise RuntimeError("Only support version 5.")
    logging.debug("Get version {0}".format(packet.version))

    # IP version of the Agent/Switch (1=v4|2=v6)
    packet.agent_ip_version = data.unpack_uint()
    if packet.agent_ip_version != 1:
        logging.error("Only support IPv4.")
        raise RuntimeError("Only support IPv4.")

    # Agent IP address (v4=4byte|v6=16byte)
    packet.agent_ip_address = ntohl(data.unpack_uint())
    # sub agent id
    packet.sub_agent_id = data.unpack_uint()
    # datagram sequence number
    packet.datagram_sequence_num = data.unpack_uint()
    # switch uptime in ms
    packet.switch_uptime = data.unpack_uint()
    # how many samples in datagram
    packet.sample_amount = data.unpack_uint()

    self._parse_samples(packet, data)
    return packet
def unpack(cls, unpacker: Unpacker) -> "SCPNomination": quorum_set_hash = Hash.unpack(unpacker) length = unpacker.unpack_uint() votes = [] for _ in range(length): votes.append(Value.unpack(unpacker)) length = unpacker.unpack_uint() accepted = [] for _ in range(length): accepted.append(Value.unpack(unpacker)) return cls(quorum_set_hash=quorum_set_hash, votes=votes, accepted=accepted,)
def unpack(cls, unpacker: Unpacker) -> "SCPQuorumSet": threshold = Uint32.unpack(unpacker) length = unpacker.unpack_uint() validators = [] for _ in range(length): validators.append(PublicKey.unpack(unpacker)) length = unpacker.unpack_uint() inner_sets = [] for _ in range(length): inner_sets.append(SCPQuorumSet.unpack(unpacker)) return cls(threshold=threshold, validators=validators, inner_sets=inner_sets,)
def gmetric_read(msg):
    unpacker = Unpacker(msg)
    values = dict()
    unpacker.unpack_int()
    values['TYPE'] = unpacker.unpack_string()
    values['NAME'] = unpacker.unpack_string()
    values['VAL'] = unpacker.unpack_string()
    values['UNITS'] = unpacker.unpack_string()
    values['SLOPE'] = slope_int2str[unpacker.unpack_int()]
    values['TMAX'] = unpacker.unpack_uint()
    values['DMAX'] = unpacker.unpack_uint()
    unpacker.done()
    return values
def gmetric_read(msg):
    unpacker = Unpacker(msg)
    values = dict()
    unpacker.unpack_int()
    values["TYPE"] = unpacker.unpack_string()
    values["NAME"] = unpacker.unpack_string()
    values["VAL"] = unpacker.unpack_string()
    values["UNITS"] = unpacker.unpack_string()
    values["SLOPE"] = slope_int2str[unpacker.unpack_int()]
    values["TMAX"] = unpacker.unpack_uint()
    values["DMAX"] = unpacker.unpack_uint()
    unpacker.done()
    return values
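# A hedged sketch (not from the original projects) of the packing side that the
# two gmetric_read() variants above would decode. The leading int (the gmetric
# message type) and the SLOPE code are assumptions based only on the field
# order read above; SLOPE is looked up through the module's slope_int2str
# mapping, assumed to map small integer codes to slope names.
from xdrlib import Packer

def gmetric_write_example():
    packer = Packer()
    packer.pack_int(0)               # message type; gmetric_read discards it
    packer.pack_string(b"uint32")    # TYPE
    packer.pack_string(b"cpu_num")   # NAME
    packer.pack_string(b"8")         # VAL
    packer.pack_string(b"CPUs")      # UNITS
    packer.pack_int(0)               # SLOPE code
    packer.pack_uint(60)             # TMAX
    packer.pack_uint(0)              # DMAX
    return packer.get_buffer()       # suitable as the msg argument above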
def test_AVP(self):
    a1 = AVP(1, b"user")
    a1.setMandatory(True)
    assert a1.isMandatory()
    a1.setMandatory(False)
    assert not a1.isMandatory()
    a1.setPrivate(True)
    assert a1.isPrivate()
    a1.setPrivate(False)
    assert not a1.isPrivate()

    a1 = AVP(1, b"user")
    e_sz1 = a1.encodeSize()
    p = Packer()
    e_sz2 = a1.encode(p)
    assert e_sz1 == 12
    assert e_sz2 == 12
    assert e_sz1 == e_sz2
    u = Unpacker(p.get_buffer())
    d_sz1 = AVP.decodeSize(u, e_sz2)
    assert d_sz1 == e_sz2
    a2 = AVP()
    assert a2.decode(u, d_sz1)
    assert a1.code == a2.code
    assert a1.vendor_id == a2.vendor_id

    # same as above, but requires padding
    a1 = AVP(1, b"user")
    e_sz1 = a1.encodeSize()
    p = Packer()
    e_sz2 = a1.encode(p)
    assert e_sz1 == e_sz2
    u = Unpacker(p.get_buffer())
    d_sz1 = AVP.decodeSize(u, e_sz2)
    assert d_sz1 == e_sz2
    a2 = AVP()
    assert a2.decode(u, d_sz1)
    assert a1.code == a2.code
    assert a1.vendor_id == a2.vendor_id
def unpack(self, u: xdrlib.Unpacker):
    last = u.unpack_uint() + u.get_position()
    if u.get_position() < last:
        self.time_A = struct.unpack('>Q', u.unpack_fopaque(8))[0]
    if u.get_position() < last:
        self.time_B = struct.unpack('>Q', u.unpack_fopaque(8))[0]
    u.set_position(last)
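# Side note (a sketch, not from the original source): decoding 8 fixed opaque
# bytes with struct's big-endian '>Q' format, as above, is equivalent to
# Unpacker.unpack_uhyper(), since XDR hypers are big-endian 64-bit values.
import struct
import xdrlib

def _uhyper_equivalence_example():
    p = xdrlib.Packer()
    p.pack_uhyper(0x0102030405060708)
    buf = p.get_buffer()
    via_struct = struct.unpack('>Q', xdrlib.Unpacker(buf).unpack_fopaque(8))[0]
    via_xdr = xdrlib.Unpacker(buf).unpack_uhyper()
    assert via_struct == via_xdr == 0x0102030405060708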
def _length_from_bytes(four_bytes):
    """
    The RPC standard calls for the length of a message to be sent as the
    least significant 31 bits of an XDR encoded unsigned integer.  The most
    significant bit encodes a True/False bit which indicates that this
    message will be the last.
    """
    from xdrlib import Unpacker
    unpacker = Unpacker(four_bytes)
    val = unpacker.unpack_uint()
    unpacker.done()
    if val < 2**31:
        return (val, False)
    return (val - 2**31, True)
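# A minimal sketch (an assumption, not part of the original module) of the
# matching sender side for _length_from_bytes(): RPC record marking ORs the
# fragment length with the high bit when the fragment is the last one.
from xdrlib import Packer

def _record_mark_example(length, is_last):
    packer = Packer()
    packer.pack_uint(length | (2**31 if is_last else 0))
    return packer.get_buffer()

# _length_from_bytes(_record_mark_example(300, True)) == (300, True)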
def handle(self):
    data = self.request[0]
    unpacker = Unpacker(data)
    type = unpacker.unpack_int()
    if type not in GANGLIA_DECODE:
        return

    host = unpacker.unpack_string()
    name = unpacker.unpack_string()
    unpacker.unpack_int()     # spoof boolean
    unpacker.unpack_string()  # format string
    value = GANGLIA_DECODE[type](unpacker)
    unpacker.done()

    graphite.record_stat(name, value)
def unpack(cls, unpacker: Unpacker) -> "LedgerSCPMessages": ledger_seq = Uint32.unpack(unpacker) length = unpacker.unpack_uint() messages = [] for _ in range(length): messages.append(SCPEnvelope.unpack(unpacker)) return cls(ledger_seq=ledger_seq, messages=messages,)
def read_sample_record(up, sample_datagram):
    # Unpack sample_record structure
    #    data_format sample_type;   Specifies the type of sample data
    #    opaque sample_data<>;      A structure corresponding to the sample_type

    sample_type = up.unpack_uint()
    sample_data = up.unpack_opaque()
    up_sample_data = Unpacker(sample_data)

    ret = None
    if sample_type == SAMPLE_DATA_FLOW_RECORD:
        logging.warning("sample_type: %d is flow type sample, do not "
                        "utilize it currently." % sample_type)
    elif sample_type == SAMPLE_DATA_COUNTER_RECORD:
        sample = read_counter_sample(up_sample_data, sample_datagram)
        if len(sample) != 0:
            ret = sample
    else:
        logging.warning("sample_type: %d is not supported by current agent. "
                        "Contact Zhang Song for further development" % sample_type)

    # Check if whole data block was unpacked
    #up_sample_data.done()
    return ret
def unpack(cls, unpacker: Unpacker) -> "LedgerEntryChanges": length = unpacker.unpack_uint() ledger_entry_changes = [] for _ in range(length): ledger_entry_changes.append(LedgerEntryChange.unpack(unpacker)) return cls(ledger_entry_changes)
def unpack(cls, unpacker: Unpacker) -> "SCPHistoryEntryV0": length = unpacker.unpack_uint() quorum_sets = [] for _ in range(length): quorum_sets.append(SCPQuorumSet.unpack(unpacker)) ledger_messages = LedgerSCPMessages.unpack(unpacker) return cls(quorum_sets=quorum_sets, ledger_messages=ledger_messages,)
def unpack(cls, unpacker: Unpacker) -> "PeerStatList": length = unpacker.unpack_uint() peer_stat_list = [] for _ in range(length): peer_stat_list.append(PeerStats.unpack(unpacker)) return cls(peer_stat_list)
def unpack_reply(response, myxid=None, myreply_stat=MSG_ACCEPTED,
                 myverf=NULL_AUTH, myaccept_stat=SUCCESS,
                 myreject_stat=None, myauth_stat=None):
    """Unpacks an RPC reply and returns a variable-length arg list of the
    same form as the argument to pack_reply, but for SUCCESS also returns
    an xdrlib.Unpacker as the final element of the list that the caller
    can use to unpack the results of the call.

    If values are given for any myXXX arguments, checks that those values
    match the unpacked XXX values.  Default myXXX values assume success
    with no authentication.

    Raises UnpackException on any errors or mismatches.
    """
    u = Unpacker(response)
    msg = RPCProto.unpack_rpc_msg(u)
    check(myxid, msg.xid, "xid")
    if msg.body.mtype == RPCProto.CALL:
        raise UnpackException("Expected reply, but got call")
    reply = msg.body.rbody
    check(myreply_stat, reply.stat, "reply_stat")
    retval = [msg.xid, reply.stat]
    if reply.stat == RPCProto.MSG_ACCEPTED:
        check(myverf, reply.areply.verf, "verf")
        retval.append(reply.areply.verf)
        accept_stat = reply.areply.reply_data.stat
        check(myaccept_stat, accept_stat, "accept_stat")
        retval.append(accept_stat)
        if accept_stat == RPCProto.SUCCESS:
            retval.append(u)
        elif accept_stat == RPCProto.PROG_MISMATCH:
            retval.append(reply.areply.reply_data.mismatch_info.low)
            retval.append(reply.areply.reply_data.mismatch_info.high)
        elif (accept_stat == RPCProto.PROG_UNAVAIL or
              accept_stat == RPCProto.PROC_UNAVAIL or
              accept_stat == RPCProto.GARBAGE_ARGS or
              accept_stat == RPCProto.SYSTEM_ERR):
            pass
        else:
            raise UnpackException("unknown accept_stat: %u" % accept_stat)
    elif reply.stat == RPCProto.MSG_DENIED:
        reject_stat = reply.rreply.stat
        check(myreject_stat, reject_stat, "reject_stat")
        retval.append(reject_stat)
        if reject_stat == RPCProto.RPC_MISMATCH:
            retval.append(reply.rreply.mismatch_info.low)
            retval.append(reply.rreply.mismatch_info.high)
        elif reject_stat == RPCProto.AUTH_ERROR:
            check(myauth_stat, reply.rreply.astat, "auth_stat")
            retval.append(reply.rreply.astat)
        else:
            raise UnpackException("unknown reject_stat: %u" % reject_stat)
    else:
        raise UnpackException("unknown reply_stat: %u" % reply.stat)
    return retval
def unpack(cls, unpacker: Unpacker) -> "SCPStatementPrepare": quorum_set_hash = Hash.unpack(unpacker) ballot = SCPBallot.unpack(unpacker) prepared = SCPBallot.unpack( unpacker) if unpacker.unpack_uint() else None prepared_prime = SCPBallot.unpack( unpacker) if unpacker.unpack_uint() else None n_c = Uint32.unpack(unpacker) n_h = Uint32.unpack(unpacker) return cls( quorum_set_hash=quorum_set_hash, ballot=ballot, prepared=prepared, prepared_prime=prepared_prime, n_c=n_c, n_h=n_h, )
def unpack(cls, unpacker: Unpacker) -> "Operation": source_account = (MuxedAccount.unpack(unpacker) if unpacker.unpack_uint() else None) body = OperationBody.unpack(unpacker) return cls( source_account=source_account, body=body, )
def unpack(cls, unpacker: Unpacker) -> "TransactionResultSet": length = unpacker.unpack_uint() results = [] for _ in range(length): results.append(TransactionResultPair.unpack(unpacker)) return cls( results=results, )
def unpack(cls, unpacker: Unpacker) -> "ManageDataOp": data_name = String64.unpack(unpacker) data_value = DataValue.unpack( unpacker) if unpacker.unpack_uint() else None return cls( data_name=data_name, data_value=data_value, )
def __init__(self, unpacker: Unpacker):
    self.sequence_number = None
    self.source_id = None
    self.counters = []

    sample_data = unpacker.unpack_opaque()
    unpacker_sample_data = Unpacker(sample_data)
    self._parse_raw_sflow_counter_sample(unpacker_sample_data)
def _parse_raw_sflow_datagram(self, unpacker: Unpacker):
    self.sflow_version = unpacker.unpack_int()
    if not self.sflow_version == 5:
        logging.debug("Unimplemented sFlow version: {}".format(self.sflow_version))
        # TODO: read remainder if needed
        return

    self.agent_ip_version = unpacker.unpack_int()
    if self.agent_ip_version == 1:
        self.agent_ip_address = ntohl(unpacker.unpack_uint())
    # TODO: implement other versions
    else:
        logging.debug("Unimplemented agent IP version: {}".format(self.agent_ip_version))
        return

    self.sub_agent_id = unpacker.unpack_uint()
    self.sequence_number = unpacker.unpack_uint()
    self.switch_uptime = unpacker.unpack_uint()

    samples_in_datagram = unpacker.unpack_uint()
    for _ in range(samples_in_datagram):
        try:
            self.samples.append(SFlowSample(unpacker))
        except Exception as e:
            logging.warning("Bad sample")
            raise e
def _parse_raw_sflow_datagram(self, unpacker: Unpacker):
    self.sflow_version = unpacker.unpack_int()
    if not self.sflow_version == 5:
        logging.debug("Unimplemented sFlow version: {}".format(
            self.sflow_version))
        # TODO: read remainder if needed
        return

    self.agent_ip_version = unpacker.unpack_int()
    if self.agent_ip_version == 1:
        self.agent_ip_address = ntohl(unpacker.unpack_uint())
    # TODO: implement other versions
    else:
        logging.debug("Unimplemented agent IP version: {}".format(
            self.agent_ip_version))
        return

    self.sub_agent_id = unpacker.unpack_uint()
    self.sequence_number = unpacker.unpack_uint()
    self.switch_uptime = unpacker.unpack_uint()

    samples_in_datagram = unpacker.unpack_uint()
    for _ in range(samples_in_datagram):
        try:
            self.samples.append(SFlowSample(unpacker))
        except Exception as e:
            logging.warning("Bad sample")
            raise e
def __init__(self, atomicAbundance: AtomicAbundance = None, kuruczPfPath: str = None):
    if atomicAbundance is None:
        atomicAbundance = DefaultAtomicAbundance
    self.atomicAbundance = atomicAbundance

    kuruczPfPath = get_data_path() + 'pf_Kurucz.input' if kuruczPfPath is None else kuruczPfPath
    with open(kuruczPfPath, 'rb') as f:
        s = f.read()
    u = Unpacker(s)

    # NOTE(cmo): Each of these terms is simply in flat lists indexed by Atomic Number Z-1
    self.Tpf = np.array(u.unpack_array(u.unpack_double))
    stages = []
    pf = []
    ionpot = []
    for i in range(99):
        z = u.unpack_int()
        stages.append(u.unpack_int())
        pf.append(
            np.array(
                u.unpack_farray(stages[-1] * self.Tpf.shape[0],
                                u.unpack_double)).reshape(stages[-1], self.Tpf.shape[0]))
        ionpot.append(
            np.array(u.unpack_farray(stages[-1], u.unpack_double)))

    ionpot = [i * Const.HC / Const.CM_TO_M for i in ionpot]
    pf = [np.log(p) for p in pf]
    self.pf = pf
    self.ionpot = ionpot
def __init__(self, unpacker: Unpacker):
    # struct counters_sample_expanded
    self.sequence_number = None
    self.source_id = {}
    self.counters = []

    sample_data = unpacker.unpack_opaque()
    unpacker_sample_data = Unpacker(sample_data)
    self._parse_raw_expanded_sflow_counter_sample(unpacker_sample_data)
def unpack(cls, unpacker: Unpacker) -> "InflationResult": code = InflationResultCode.unpack(unpacker) if code == InflationResultCode.INFLATION_SUCCESS: length = unpacker.unpack_uint() payouts = [] for _ in range(length): payouts.append(InflationPayout.unpack(unpacker)) return cls(code, payouts=payouts) raise ValueError("Invalid code.")
def __init__(self, unpacker: Unpacker):
    self.flow_format = None
    self.flow = None

    self.flow_format = unpacker.unpack_uint()
    flow_data = unpacker.unpack_opaque()
    unpacker_flow_data = Unpacker(flow_data)

    if self.flow_format == SFlowFlowRecord.FLOW_DATA_RAW_HEADER:
        self.flow = FlowDataRawHeader(unpacker_flow_data)
    elif self.flow_format == SFlowFlowRecord.FLOW_DATA_ETHERNET_HEADER:
        self.flow = FlowDataEthernetHeader(unpacker_flow_data)
    elif self.flow_format == SFlowFlowRecord.FLOW_DATA_IPV4_HEADER:
        self.flow = FlowDataIPv4Header(unpacker_flow_data)
    elif self.flow_format == SFlowFlowRecord.FLOW_DATA_EXT_SWITCH:
        self.flow = FlowDataExtSwitch(unpacker_flow_data)
    else:
        logging.debug('read_flow_record:Unimplemented data_format (%d)'
                      % self.flow_format)
def read_flow_record(up, sample):
    """Reads a 'struct flow_record' (p. 29)"""
    flow_format = up.unpack_uint()
    flow_data = up.unpack_opaque()
    up_flow_data = Unpacker(flow_data)

    if flow_format == FLOW_DATA_RAW_HEADER:
        res = FlowRecord(sample, read_sampled_header(up_flow_data))
    elif flow_format == FLOW_DATA_ETHERNET_HEADER:
        res = FlowRecord(sample, read_sampled_ethernet(up_flow_data))
    elif flow_format == FLOW_DATA_IPV4_HEADER:
        res = FlowRecord(sample, read_sampled_ipv4(up_flow_data))
    else:
        res = 'read_flow_record:Unknown data_format (%d)' % flow_format

    up_flow_data.done()
    return res
def datagramReceived(self, datagram, address):
    values = dict()
    unpacker = Unpacker(datagram)
    packet_type = unpacker.unpack_uint()

    if packet_type == 128:
        self.unpack_meta(unpacker)
        return
    elif packet_type == 136:
        # unpack_metareq function works, but serves no purpose right now;
        # commented out unless anyone comes up with a good reason to respond
        # to metadata requests.
        #self.unpack_metareq(unpacker)
        return
    elif 128 < packet_type < 136:
        self.unpack_data(unpacker, packet_type, address)
        return
    else:
        return
def _parse_raw_sflow_flow_sample(self, unpacker: Unpacker):
    self.sequence_number = unpacker.unpack_uint()
    self.source_id = unpacker.unpack_uint()
    self.sampling_rate = unpacker.unpack_uint()
    self.sample_pool = unpacker.unpack_uint()
    self.drops = unpacker.unpack_uint()
    self.input_if = unpacker.unpack_uint()
    self.output_if = unpacker.unpack_uint()

    flows_in_sample = unpacker.unpack_uint()
    for _ in range(flows_in_sample):
        self.flows.append(SFlowFlowRecord(unpacker))
def __init__(self, unpacker: Unpacker):
    self.type = None
    self.sample = None

    self.type = unpacker.unpack_uint()
    if self.type == SFlowSample.SAMPLE_DATA_FLOW_SAMPLE:
        self.sample = SFlowFlowSample(unpacker)
    elif self.type == SFlowSample.SAMPLE_DATA_COUNTER_SAMPLE:
        self.sample = SFlowCounterSample(unpacker)
    elif self.type == SFlowSample.SAMPLE_DATA_EXPANDED_FLOW_SAMPLE:
        self.sample = SFlowExpandedFlowSample(unpacker)
    elif self.type == SFlowSample.SAMPLE_DATA_EXPANDED_COUNTER_SAMPLE:
        self.sample = SFlowExpandedCounterSample(unpacker)
    else:
        logging.debug("Unknown data format: {}".format(self.type))
        logging.debug(unpacker.unpack_opaque())
def __init__(self, unpacker: Unpacker):
    self.sequence_number = None
    self.source_id = None
    self.sampling_rate = None
    self.sample_pool = None
    self.drops = None
    self.input_if = None
    self.output_if = None
    self.flows = []

    sample_data = unpacker.unpack_opaque()
    unpacker_sample_data = Unpacker(sample_data)
    self._parse_raw_sflow_flow_sample(unpacker_sample_data)
def read_sample_record(up, sample_datagram):
    # Unpack sample_record structure
    #    data_format sample_type;   Specifies the type of sample data
    #    opaque sample_data<>;      A structure corresponding to the sample_type

    sample_type = up.unpack_uint()
    sample_data = up.unpack_opaque()
    up_sample_data = Unpacker(sample_data)

    if sample_type == SAMPLE_DATA_FLOW_RECORD:
        return read_flow_sample(up_sample_data, sample_datagram)
    elif sample_type == SAMPLE_DATA_COUNTER_RECORD:
        return read_counter_sample(up_sample_data, sample_datagram)
    else:
        raise Exception()

    # Check if whole data block was unpacked
    up_sample_data.done()
def getvalue(self):
    if isinstance(self.var, SequenceType):
        out = []
        mark = self._unpack_uint()
        while mark == 1509949440:
            var = self.var
            # Create a structure with the sequence vars:
            self.var = StructureType(name=self.var.name)
            self.var.update(var)
            out.append(self.getvalue())
            self.var = var
            mark = self._unpack_uint()
    elif isinstance(self.var, StructureType):
        out = []
        for child in self.var.walk():
            var = self.var
            self.var = child
            out.append(self.getvalue())
            self.var = var
        out = tuple(out)
    else:
        # Get data length.
        n = 1
        if getattr(self.var, 'shape', False):
            n = self._unpack_uint()
            if self.var.type not in [Url, String]:
                self._unpack_uint()

        # Bytes are treated differently.
        if self.var.type == Byte:
            out = self._unpack_bytes(n)
            out = numpy.array(out, self.var.type.typecode)
        # As are strings...
        elif self.var.type in [Url, String]:
            out = self._unpack_string(n)
            out = numpy.array(out, self.var.type.typecode)
        else:
            i = self._pos
            self._pos = j = i + (n * self.var.type.size)
            #dtype = ">%s%s" % (self.var.type.typecode, self.var.type.size)
            #out = numpy.fromstring(self._buf[i:j], dtype=dtype)
            if self.var.type.typecode == 'i':
                un = Unpacker(self._buf[i:j])
                out = un.unpack_farray(n, un.unpack_int)
            elif (self.var.type.typecode == 'f') and (self.var.type.size == 8):
                un = Unpacker(self._buf[i:j])
                out = un.unpack_farray(n, un.unpack_double)
            elif (self.var.type.typecode == 'f') and (self.var.type.size == 4):
                un = Unpacker(self._buf[i:j])
                out = un.unpack_farray(n, un.unpack_float)
            else:
                print "type", self.var.type.typecode

    #print out
    return out
def read_datagram(addr, data):
    """Yield all records (flow and counter records) from the sFlow v5
    datagram given by up, which is expected to be an xdrlib.Unpacker
    object."""
    up = Unpacker(data)

    version = up.unpack_int()
    if not version == 5:
        hexdump_bytes(data)
        raise Exception()

    af = up.unpack_int()
    if af == 1:  # IPv4
        agent_address = ntohl(up.unpack_uint())
    else:
        raise Exception()

    sf = Datagram(addr, agent_address)

    sub_agent_id = up.unpack_uint()
    sequence_number = up.unpack_uint()
    uptime = up.unpack_uint()
    nb_sample_records = up.unpack_uint()

    # Iterating over sample records
    for i in range(nb_sample_records):
        try:
            return read_sample_record(up, sf)
        except EOFError:
            stderr.write("read_sample_datagram: EOFError reading sample_record, "
                         "Premature end of data stream, Skipping record\n")
            up.set_position(len(up.get_buffer()))
            break
def __parse_header(self, header):
    """
    Parses a header and stores information contained in it as necessary

    Returns (counterparty pub key sexp, symmetric key)

    throws Error
    """
    assert type(header) == type('')
    try:
        hash = sha(header).digest()
        cached = self.__cached_headers.get(hash)
        if cached is not None:
            return cached
        u = Unpacker(header)
        # messages start with the hash of the recipient's public id
        recipient_id = u.unpack_fstring(SIZE_OF_UNIQS)
        if recipient_id != self.__my_public_key_id:
            raise Error, 'message not intended for me'
        # unpack PK encrypted public key
        encrypted_key = u.unpack_string()
        self.__key.set_value_string(encrypted_key)
        self.__key.decrypt()  # PKop
        decrypted = self.__key.get_value()
        try:
            symmetric_key = cryptutil.oaep_decode(decrypted[1:])  # Leave off the initial 0 byte.  ### XX check whether it really is 0 and raise bad-encoding error if not. --Zooko 2000-07-29
        except cryptutil.OAEPError, le:
            raise Error, 'bad encryption -- pad badding: padded: %s, Error: %s' % (`decrypted`, `le.args`)
        iv = u.unpack_fstring(8)
        # first half of the MAC  XXX A.K.A. the key? --Zooko 2000-07-29
        prefix = header[:u.get_position()]
        # all data except the symmetric key and recipient, encrypted
        encrypted = u.unpack_string()
        u.done()
        decrypted = tripledescbc.new(symmetric_key).decrypt(iv, encrypted)
        u = Unpacker(decrypted)
        # the full public key of the sender
        sender_key = u.unpack_string()
        full_key = MojoKey.makePublicRSAKeyForCommunicating(modval.new(sender_key, HARDCODED_RSA_PUBLIC_EXPONENT))
        full_key_id = idlib.make_id(full_key, 'broker')
        # the session id for messages sent 'here'
        id_in = _mix_counterparties(full_key_id, self.__my_public_key_id, u.unpack_fstring(SIZE_OF_UNIQS))
        # the session id for messages sent 'there'
        id_out = _mix_counterparties(full_key_id, self.__my_public_key_id, u.unpack_fstring(SIZE_OF_UNIQS))
        # check that the pk encrypted symmetric key used to send this message is the same was generated properly
        strl = u.unpack_fstring(SIZE_OF_UNIQS)
        sr = HashRandom.SHARandom(_mix_counterparties(full_key_id, self.__my_public_key_id, strl))
        spaml = sr.get(SIZE_OF_SYMMETRIC_KEYS)
        if symmetric_key != spaml:
            raise Error, 'improperly generated key'
        # the second half of what's in the MAC  XXX A.K.A. the message? --Zooko 2000-07-29
        end = decrypted[:u.get_position()]
        # the signature of everything
        signature = u.unpack_fstring(len(sender_key))
        u.done()
        # debugprint("------ ------ ------ ------ hmachish(key=%s, message=%s)\n" % (`symmetric_key`, `end`))
        summary = cryptutil.hmacish(key=symmetric_key, message=end)
        x = modval.new(sender_key, HARDCODED_RSA_PUBLIC_EXPONENT, signature)
        x.undo_signature()  # PKop
        signed_value = x.get_value()
        try:
            thingie = cryptutil.oaep_decode(signed_value[1:])  # Leave off the initial 0 byte.  ### XX check whether it really is 0 and raise bad-encoding error if not. --Zooko 2000-07-29
        except cryptutil.OAEPError, le:
            raise Error, 'bad encryption -- pad badding: padded: %s, Error: %s' % (`signed_value`, `le.args`)
def _parse_raw_ethernet_interface_counters(self, unpacker: Unpacker):
    # Unpack ethernet_counters structure
    #     unsigned int dot3_stats_alignment_errors;
    #     unsigned int dot3_stats_fcs_errors;
    #     unsigned int dot3_stats_single_collision_frames;
    #     unsigned int dot3_stats_multiple_collision_frames;
    #     unsigned int dot3_stats_sqe_test_errors;
    #     unsigned int dot3_stats_deferred_transmissions;
    #     unsigned int dot3_stats_late_collisions;
    #     unsigned int dot3_stats_excessive_collisions;
    #     unsigned int dot3_stats_internal_mac_transmit_errors;
    #     unsigned int dot3_stats_carrier_sense_errors;
    #     unsigned int dot3_stats_frame_too_longs;
    #     unsigned int dot3_stats_internal_mac_receive_errors;
    #     unsigned int dot3_stats_symbol_errors;

    self.dot3_stats_alignment_errors = unpacker.unpack_uint()
    self.dot3_stats_fcs_errors = unpacker.unpack_uint()
    self.dot3_stats_single_collision_frames = unpacker.unpack_uint()
    self.dot3_stats_multiple_collision_frames = unpacker.unpack_uint()
    self.dot3_stats_sqe_test_errors = unpacker.unpack_uint()
    self.dot3_stats_deferred_transmissions = unpacker.unpack_uint()
    self.dot3_stats_late_collisions = unpacker.unpack_uint()
    self.dot3_stats_excessive_collisions = unpacker.unpack_uint()
    self.dot3_stats_internal_mac_transmit_errors = unpacker.unpack_uint()
    self.dot3_stats_carrier_sense_errors = unpacker.unpack_uint()
    self.dot3_stats_frame_too_longs = unpacker.unpack_uint()
    self.dot3_stats_internal_mac_receive_errors = unpacker.unpack_uint()
    self.dot3_stats_symbol_errors = unpacker.unpack_uint()
def run(self):
    while self.server_running:
        potential_read = [self.server_socket]
        if self.client is not None:
            potential_read.append(self.client)
        try:
            ready_to_read, ready_to_write, in_erro = select.select(
                potential_read, [], [])
            if self.server_socket in ready_to_read:
                conn, addr = self.server_socket.accept()
                self.client = conn
                print('New connection from ', addr)
            elif self.client in ready_to_read:
                # self.client.recv_into(self.buffer, 512)
                recv = self.client.recv(128)
                self.buffer += recv
                if len(recv) == 0:
                    print('Disconnection from client')
                    self.client.close()
                    self.client = None
                    self.buffer = ''
                    continue

                unpack = Unpacker(self.buffer)
                if len(self.buffer) >= unpack.unpack_int():
                    unpack.set_position(0)
                    size = unpack.unpack_int()
                    cmd = unpack.unpack_int()
                    if cmd == ServerMouseController.PACKET_MOVE:
                        # Mouse move control
                        x = unpack.unpack_float()
                        y = unpack.unpack_float()
                        print(size, cmd, x, y)
                        self.mouse_controller.move(
                            self.mouse_controller.position()[0] - x,
                            self.mouse_controller.position()[1] - y)
                    elif cmd == ServerMouseController.PACKET_CLICK:
                        # Mouse click control
                        button = unpack.unpack_int()
                        nb_click = unpack.unpack_int()
                        print(size, cmd, button, nb_click)
                        self.mouse_controller.click(
                            self.mouse_controller.position()[0],
                            self.mouse_controller.position()[1],
                            button, nb_click)
                    elif cmd == ServerMouseController.PACKET_SCROLL:
                        # Mouse scrolling
                        x = unpack.unpack_float()
                        y = unpack.unpack_float()
                        print(size, cmd, x, y)
                        self.mouse_controller.scroll(
                            vertical=int(y), horizontal=int(x))
                    self.buffer = self.buffer[unpack.get_position():]
        except select.error as e:
            print(e)

    if self.client is not None:
        self.client.close()
    self.server_socket.close()
    print('Server stop')
def _parse_raw_generic_interface_counters(self, unpacker: Unpacker):
    # Unpack Generic Interface Counters
    #     unsigned int ifIndex;
    #     unsigned int ifType;
    #     unsigned hyper ifSpeed;
    #     unsigned int ifDirection;     derived from MAU MIB (RFC 2668)
    #                                   0 = unknown, 1 = full-duplex, 2 = half-duplex,
    #                                   3 = in, 4 = out
    #     unsigned int ifStatus;        bit field with the following bits assigned
    #                                   bit 0 = ifAdminStatus (0 = down, 1 = up)
    #                                   bit 1 = ifOperStatus (0 = down, 1 = up)
    #     unsigned hyper ifInOctets;
    #     unsigned int ifInUcastPkts;
    #     unsigned int ifInMulticastPkts;
    #     unsigned int ifInBroadcastPkts;
    #     unsigned int ifInDiscards;
    #     unsigned int ifInErrors;
    #     unsigned int ifInUnknownProtos;
    #     unsigned hyper ifOutOctets;
    #     unsigned int ifOutUcastPkts;
    #     unsigned int ifOutMulticastPkts;
    #     unsigned int ifOutBroadcastPkts;
    #     unsigned int ifOutDiscards;
    #     unsigned int ifOutErrors;
    #     unsigned int ifPromiscuousMode;

    self.if_index = unpacker.unpack_uint()
    self.if_type = unpacker.unpack_uint()
    self.if_speed = unpacker.unpack_uhyper()
    self.if_direction = unpacker.unpack_uint()
    self.if_status = unpacker.unpack_uint()
    self.if_in_octets = unpacker.unpack_uhyper()
    self.if_in_ucasts = unpacker.unpack_uint()
    self.if_in_mcasts = unpacker.unpack_uint()
    self.if_in_bcasts = unpacker.unpack_uint()
    self.if_in_discards = unpacker.unpack_uint()
    self.if_in_errors = unpacker.unpack_uint()
    self.if_in_unknown_protos = unpacker.unpack_uint()
    self.if_out_octets = unpacker.unpack_uhyper()
    self.if_out_ucasts = unpacker.unpack_uint()
    self.if_out_mcasts = unpacker.unpack_uint()
    self.if_out_bcasts = unpacker.unpack_uint()
    self.if_out_discards = unpacker.unpack_uint()
    self.if_out_errors = unpacker.unpack_uint()
    self.if_promiscuous_mode = unpacker.unpack_uint()
def parse(self, wired_string):
    """
    @returns (counterparty_pub_key_sexp, cleartext,)

    @raises SessionInvalidated if the incoming message was an "invalidate session" message \000\000\000\002.
    @raises UnknownSession error if the incoming message did not identify a known session key.

    @precondition `wired_string' must be a string.: type(wired_string) == types.StringType: "wired_string: %s :: %s" % (hr(wired_string), hr(type(wired_string)))

    @postcondition `counterparty_pub_key_sexp' is a public key.: MojoKey.publicRSAKeyForCommunicationSecurityIsWellFormed(counterparty_pub_key_sexp): "counterparty_pub_key_sexp: %s" % hr(counterparty_pub_key_sexp)
    """
    assert type(wired_string) == types.StringType, "precondition: `wired_string' must be a string." + " -- " + "wired_string: %s :: %s" % (hr(wired_string), hr(type(wired_string)))

    session = None
    try:
        u = Unpacker(wired_string)
        mtype = u.unpack_fstring(4)
        if mtype == '\000\000\000\000':
            # a message with a full PK header
            header = u.unpack_string()
            iv = u.unpack_fstring(8)
            prefix = wired_string[:u.get_position()]
            encrypted = u.unpack_string()
            u.done()
            counterparty_pub_key_sexp, symmetric_key = self._session_keeper.parse_header(header)
            decrypted = tripledescbc.new(symmetric_key).decrypt(iv, encrypted)
            u = Unpacker(decrypted)
            message = u.unpack_string()
            mac = u.unpack_fstring(SIZE_OF_UNIQS)
            u.done()
            # debugprint("------ ------ ------ ------ hmachish(key=%s, message=%s)\n" % (`symmetric_key`, `message`))
            maccomp = cryptutil.hmacish(key=symmetric_key, message=message)
            if mac != maccomp:
                raise Error, 'incorrect MAC'
            return (counterparty_pub_key_sexp, message)
        elif mtype == '\000\000\000\001':
            # a message using an already established session id
            session = u.unpack_fstring(SIZE_OF_UNIQS)
            iv = u.unpack_fstring(8)
            prefix = wired_string[:u.get_position()]
            encrypted = u.unpack_string()
            u.done()
            counterparty_pub_key_sexp, symmetric_key, want_ack = self._session_keeper.get_session_info(session)
            decrypted = tripledescbc.new(symmetric_key).decrypt(iv, encrypted)
            u = Unpacker(decrypted)
            message = u.unpack_string()
            mac = u.unpack_fstring(SIZE_OF_UNIQS)
            u.done()
            counterparty_id = idlib.make_id(counterparty_pub_key_sexp, 'broker')
            # debugprint("------ ------ ------ ------ hmachish(key=%s, message=%s)\n" % (`symmetric_key`, `message`))
            maccomp = cryptutil.hmacish(key=symmetric_key, message=message)
            if mac != maccomp:
                raise Error, 'incorrect MAC'
            if want_ack:
                self._session_keeper.got_ack(counterparty_id)
            return (counterparty_pub_key_sexp, message)
        elif mtype == '\000\000\002\002':
            # a short "message" invalidating an outgoing session id
            bad_session_id_out = u.unpack_fstring(SIZE_OF_UNIQS)
            unverified_counterparty_id = u.unpack_fstring(SIZE_OF_UNIQS)
            self._session_keeper.invalidate_session(bad_session_id_out, unverified_counterparty_id)
            raise SessionInvalidated, 'session_id %s with %s invalidated' % (`bad_session_id_out`, idlib.to_ascii(unverified_counterparty_id))
        else:
            raise Error, 'unsupported message type'
    except (modval.Error, tripledescbc.Error, xdrlib.Error, EOFError), le:
        debugprint("got error in mesgen.parse(): %s", args=(le,), v=4, vs="debug")
        if session is not None:
            raise UnknownSession(session, self.get_id())
        else:
            raise Error, le