def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
    """Parse a raw OFPFlowMod message from *buf* into a message object.

    Delegates common-header parsing to the base class, then unpacks the
    fixed flow-mod fields, the (8-byte padded) match structure, and the
    trailing list of instructions that fills the rest of the message.
    """
    msg = super(OFPFlowMod, cls).parser(datapath, version, msg_type,
                                        msg_len, xid, buf)
    # Fixed flow-mod fields start right after the common OpenFlow header.
    offset = ofproto.OFP_HEADER_SIZE
    (msg.cookie, msg.cookie_mask, msg.table_id, msg.command,
     msg.idle_timeout, msg.hard_timeout, msg.priority, msg.buffer_id,
     msg.out_port, msg.out_group, msg.flags) = struct.unpack_from(
        ofproto.OFP_FLOW_MOD_PACK_STR0, msg.buf, offset)
    offset += struct.calcsize(ofproto.OFP_FLOW_MOD_PACK_STR0)
    msg.match = ofproto_parser.OFPMatch.parser(msg.buf, offset)
    # The match structure is padded to an 8-byte boundary on the wire.
    padding_length = utils.round_up(msg.match.length, 8) - msg.match.length
    offset += msg.match.length + padding_length
    msg.instructions = []
    # Everything between the match and msg_len is a run of instructions.
    while msg_len > offset:
        inst = ofproto_parser.OFPInstruction.parser(msg.buf, offset)
        msg.instructions.append(inst)
        offset += inst.len
    return msg
def parser(cls, buf, offset):
    """Parse one per-flow report record starting at *offset* in *buf*.

    Wire layout: a 56-byte fixed header (record length, table id, flow
    cookie, durations, and new/total packet+byte counters), then an
    8-byte-aligned match structure, then instructions filling whatever
    remains of the record.  Returns a class instance with ``length``
    set to the full record size so callers can advance their offset.
    """
    _offset = offset  # record start; used to size the instruction area
    #LOG.info("Offset = %d" %offset)
    (length, table_id, flow_cookie, duration_sec, duration_nsec,
     new_match_packets, new_match_bytes, total_match_packets,
     total_match_bytes) = struct.unpack_from("!HB5xQIIQQQQ", buf, offset)
    offset += 56  # size of the fixed header unpacked above
    #LOG.info("single flow, length = %d" %length)
    match = ofproto_parser.OFPMatch.parser(buf, offset)
    # Commented out to match the other debug traces: unconditionally
    # logging every parsed match at INFO level was debug leftover.
    #LOG.info(match)
    offset += utils.round_up(match.length, 8)  # match is 8-byte padded
    instructions = []
    # Whatever remains of the record after header + match is instructions.
    inst_length = length - (offset - _offset)
    #LOG.info("Length of instructions = %d" %inst_length)
    while inst_length > 0:
        inst = ofproto_parser.OFPInstruction.parser(buf, offset)
        offset += inst.len
        inst_length -= inst.len
        instructions.append(inst)
    #LOG.info(instructions)
    single_flow = cls(table_id, flow_cookie, duration_sec, duration_nsec,
                      new_match_packets, new_match_bytes,
                      total_match_packets, total_match_bytes,
                      match, instructions)
    single_flow.length = length
    return single_flow
def external_processing(self, field_type, url, control_port, data_port,
                        lib_id, lib_options):
    """Build the packed external-processing attribute blob.

    :param field_type: one of 'URL', 'Delay', 'Jitter', 'Loss'
    :param url: search value string, at most 40 characters (zero padded)
    :param control_port: control port number, packed as one byte
    :param data_port: data port number, packed as one byte
    :param lib_id: library identifier, packed as one byte
    :param lib_options: integer bitmask, encoded as a 9-byte field
    :returns: the concatenated packed bytes (56 bytes total)
    :raises ValueError: unknown field type, or *url* longer than 40 chars

    The original if-chain left ``epb_search_field_type`` undefined for
    an unrecognized *field_type* (NameError at the pack call) and
    silently produced an oversized value field for long URLs; both now
    fail with an explicit ValueError.
    """
    # Map the symbolic field type to the low byte of its 3-byte wire code.
    field_codes = {'URL': 1, 'Delay': 2, 'Jitter': 3, 'Loss': 4}
    try:
        code = field_codes[field_type]
    except KeyError:
        raise ValueError('unknown field_type: %r' % (field_type,))
    epb_search_field_type = struct.pack("!3B", 0, 0, code)

    expected_size = 40
    size = len(url)
    if size > expected_size:
        raise ValueError('url longer than %d characters: %r'
                         % (expected_size, url))
    epb_search_field_len = struct.pack("!B", expected_size)
    # Value bytes, zero padded out to the fixed 40-byte field.
    pad_len = expected_size - size
    epb_search_value = struct.pack("!" + ("B" * size), *map(ord, url))
    epb_search_value += struct.pack("!" + ("B" * pad_len), *([0] * pad_len))

    control_port = struct.pack("!B", control_port)
    data_port = struct.pack("!B", data_port)
    lib_id = struct.pack("!B", lib_id)

    # Library Options is a 9-byte (72-bit) attribute: zero-extend the
    # binary representation to 72 bits, then split it 8 + 32 + 32.
    lib_options_binary = '{0:08b}'.format(lib_options).ljust(72, '0')
    first_byte = int(lib_options_binary[:8], 2)
    next_four_bytes = int(lib_options_binary[8:40], 2)
    final_four_bytes = int(lib_options_binary[40:], 2)
    lib_options = struct.pack("!BII", first_byte,
                              next_four_bytes, final_four_bytes)

    return (control_port + data_port + lib_id + lib_options +
            epb_search_field_type + epb_search_field_len + epb_search_value)
def parser(cls, datapath, buf, offset, report_reason, event_type, event_id):
    """Parse a flow-stats timer-trigger report from *buf*.

    Unpacks the 24-byte report header, the 8-byte-aligned match, then a
    sequence of per-flow records until the buffer is exhausted or a
    record fails to parse.

    NOTE(review): the parsed header, match, and ``single_flows`` are
    built but never returned (the function yields None) — presumably
    unfinished; confirm against callers before relying on the result.

    Fixes: replaced the Python-2-only ``except Exception, e:`` with the
    portable form (``e`` was unused — its only uses are commented out)
    and dropped the dead local ``_offset``.
    """
    assert event_type == EVT_FLOW_STATS_TIMER_TRIGGER
    length = len(buf)
    #LOG.info("Event %d Message length = %d" %(event_id,length) )
    #LOG.info("Offset = %d" %offset)
    (table_id, out_port, out_group,
     interval_sec, interval_msec) = struct.unpack_from(
        OFP13_FLOW_TIMER_REPORT_HEADER_PACK_STR, buf, offset)
    offset += 24  # size of the fixed report header
    match = ofproto_parser.OFPMatch.parser(buf, offset)
    offset += utils.round_up(match.length, 8)  # match is 8-byte padded
    single_flows = []
    while offset < length:
        #LOG.info("One flow")
        try:
            single_flow = SingleFlowReport.parser(buf, offset)
            single_flows.append(single_flow)
            offset += single_flow.length
        except Exception:
            # Best-effort parse: stop at the first malformed record.
            #LOG.info("Error in parsing single flows: %s" %e)
            #LOG.info( utils.hex_array(buf[offset:]) )
            break
def parser(cls, buf, offset):
    """Parse one per-flow report record starting at *offset* in *buf*.

    Record layout on the wire: 56-byte fixed header (record length,
    table id, flow cookie, durations, and new/total packet+byte
    counters), an 8-byte-aligned match, then instructions filling the
    rest of the record.  Returns a class instance with ``length`` set
    so the caller can advance past the record.
    """
    _offset = offset  # record start; needed to size the instruction area
    #LOG.info("Offset = %d" %offset)
    (length, table_id, flow_cookie, duration_sec, duration_nsec,
     new_match_packets, new_match_bytes, total_match_packets,
     total_match_bytes) = struct.unpack_from("!HB5xQIIQQQQ", buf, offset)
    offset += 56  # size of the fixed header unpacked above
    #LOG.info("single flow, length = %d" %length)
    match = ofproto_parser.OFPMatch.parser(buf, offset)
    # Commented out for consistency with the other debug traces:
    # logging every parsed match at INFO level was debug leftover.
    #LOG.info(match)
    offset += utils.round_up(match.length, 8)  # match is 8-byte padded
    instructions = []
    # The instruction area is whatever the header + match did not consume.
    inst_length = length - (offset - _offset)
    #LOG.info("Length of instructions = %d" %inst_length)
    while inst_length > 0:
        inst = ofproto_parser.OFPInstruction.parser(buf, offset)
        offset += inst.len
        inst_length -= inst.len
        instructions.append(inst)
    #LOG.info(instructions)
    single_flow = cls(table_id, flow_cookie, duration_sec, duration_nsec,
                      new_match_packets, new_match_bytes,
                      total_match_packets, total_match_bytes,
                      match, instructions)
    single_flow.length = length
    return single_flow
def serialize(self, buf, offset):
    """Write this conjunction action into *buf* at *offset*.

    The subtype payload (clause, n_clauses, id) is staged in a scratch
    buffer so the action length can be rounded up to an 8-byte boundary
    before the base class emits the experimenter header.
    """
    body = bytearray()
    msg_pack_into(NXActionConjunction._fmt_str, body, 0,
                  self.clause, self.n_clauses, self.id)
    # Payload begins after the experimenter header and the NX subtype word.
    body_start = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXActionConjunction, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(body), buf, offset + body_start, bytes(body))
def serialize(self, buf, offset):
    """Write this resubmit-table action into *buf* at *offset*.

    Stages the (in_port, table_id) payload first so the total action
    length can be 8-byte aligned before the header is emitted.
    """
    payload = bytearray()
    msg_pack_into(NXActionResubmitTable._fmt_str, payload, 0,
                  self.in_port, self.table_id)
    # Everything before the payload: experimenter header + subtype word.
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(prefix_len + len(payload), 8)
    super(NXActionResubmitTable, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(payload), buf, offset + prefix_len,
                  bytes(payload))
def serialize(self, buf, offset):
    """Write this Ericsson experimenter action into *buf* at *offset*.

    Emits the base experimenter header in place, packs the subtype word
    immediately after it, then appends the body bytes to the buffer.
    """
    body = self.serialize_body()
    # Prefix size used for length accounting: experimenter header plus
    # this action's own subtype format.
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(self._fmt_str))
    self.len = utils.round_up(prefix_len + len(body), 8)  # 8-byte aligned
    super(ofpp.NXAction, self).serialize(buf, offset)
    msg_pack_into(EricssonAction._fmt_str, buf,
                  offset + ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE,
                  self.subtype)
    buf += body
def serialize(self, buf, offset):
    """Write this unknown NX action into *buf* at *offset*.

    The raw payload (if any) is appended verbatim after the
    experimenter header emitted by the base class.
    """
    payload = bytearray() if self.data is None else self.data
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    # Record the 8-byte-aligned total length before emitting the header.
    self.len = utils.round_up(prefix_len + len(payload), 8)
    super(NXActionUnknown, self).serialize(buf, offset)
    buf += payload
def serialize(self, buf, offset):
    """Serialize this opaque NX action.

    Header goes out via the base class; the stored raw bytes are then
    appended to the buffer as-is.
    """
    raw = self.data
    if raw is None:
        raw = bytearray()
    base_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(base_len + len(raw), 8)  # 8-byte aligned
    super(NXActionUnknown, self).serialize(buf, offset)
    buf += raw
def serialize(self, buf, offset):
    """Write this NX action into *buf* at *offset*.

    Layout: experimenter header (base class), then the NX subtype word,
    then the subclass body from serialize_body(), appended at the end.
    """
    body = self.serialize_body()
    subtype_at = ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
    body_start = subtype_at + struct.calcsize(NXAction._fmt_str)
    # Total action length is padded to a multiple of 8 bytes.
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXAction, self).serialize(buf, offset)
    msg_pack_into(NXAction._fmt_str, buf, offset + subtype_at, self.subtype)
    buf += body
def serialize(self, buf, offset):
    """Write this ct() action into *buf* at *offset*.

    The fixed conntrack fields are packed first; the nested actions are
    then serialized back-to-back into the same scratch buffer, and the
    total is 8-byte aligned before the header is emitted.
    """
    body = bytearray()
    msg_pack_into(NXActionCT._fmt_str, body, 0,
                  self.flags, self.zone_src, self.zone_ofs_nbits,
                  self.recirc_table, self.alg)
    for action in self.actions:
        action.serialize(body, len(body))
    body_start = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXActionCT, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(body), buf, offset + body_start, bytes(body))
def serialize(self, buf, offset):
    """Write this resubmit-table action into *buf* at *offset*.

    The (in_port, table_id) pair is staged in a scratch buffer, the
    8-byte-aligned total length is recorded, the base class emits the
    experimenter header, and the payload is packed in after it.
    """
    body = bytearray()
    msg_pack_into(NXActionResubmitTable._fmt_str, body, 0,
                  self.in_port, self.table_id)
    body_start = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXActionResubmitTable, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(body), buf, offset + body_start, bytes(body))
def serialize(self, buf, offset):
    """Write this conjunction action into *buf* at *offset*.

    (clause, n_clauses, id) are staged in a scratch buffer; the total
    length is rounded up to 8 bytes, the base class writes the
    experimenter header, and the payload is packed after it.
    """
    payload = bytearray()
    msg_pack_into(NXActionConjunction._fmt_str, payload, 0,
                  self.clause, self.n_clauses, self.id)
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(prefix_len + len(payload), 8)
    super(NXActionConjunction, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(payload), buf, offset + prefix_len,
                  bytes(payload))
def serialize(self, buf, offset):
    """Write this learn() action into *buf* at *offset*.

    Packs the fixed learn parameters, appends each flow-mod spec's
    encoding, then records the 8-byte-aligned total length before the
    base class emits the experimenter header.
    """
    body = bytearray()
    msg_pack_into(NXActionLearn._fmt_str, body, 0,
                  self.idle_timeout, self.hard_timeout, self.priority,
                  self.cookie, self.flags, self.table_id,
                  self.fin_idle_timeout, self.fin_hard_timeout)
    for spec in self.specs:
        body += spec.serialize()
    body_start = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXActionLearn, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(body), buf, offset + body_start, bytes(body))
def serialize(self, buf, offset):
    """Write this reg_load action into *buf* at *offset*.

    The destination OXM header is serialized first to obtain its
    numeric id; the bit offset/width pair is folded into ofs_nbits;
    the payload is then packed after the experimenter header.
    """
    header_buf = bytearray()
    oxm = ofp.oxm_from_user_header(self.dst)
    ofp.oxm_serialize_header(oxm, header_buf, 0)
    (dst_num,) = struct.unpack_from('!I', six.binary_type(header_buf), 0)
    # ofs_nbits: offset in the high bits, (nbits - 1) in the low six.
    ofs_nbits = (self.ofs << 6) + self.nbits - 1
    body = bytearray()
    msg_pack_into(NXActionRegLoad._fmt_str, body, 0,
                  ofs_nbits, dst_num, self.value)
    body_start = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXActionRegLoad, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(body), buf, offset + body_start, bytes(body))
def serialize(self, buf, offset):
    """Write this reg_move action into *buf* at *offset*.

    Packs the bit count and the source/destination bit offsets, then
    the OXM headers of the source and destination fields, in that
    order, before aligning and emitting the experimenter header.
    """
    body = bytearray()
    msg_pack_into(NXActionRegMove._fmt_str, body, 0,
                  self.n_bits, self.src_ofs, self.dst_ofs)
    # Source field OXM header.
    src_oxm = ofp.oxm_from_user_header(self.src_field)
    ofp.oxm_serialize_header(src_oxm, body, len(body))
    # Destination field OXM header.
    dst_oxm = ofp.oxm_from_user_header(self.dst_field)
    ofp.oxm_serialize_header(dst_oxm, body, len(body))
    body_start = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXActionRegMove, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(body), buf, offset + body_start, bytes(body))
def serialize(self, buf, offset):
    """Write this ct(nat) action into *buf* at *offset*.

    The optional range parameters are serialized first because the
    range_present bitmap in the fixed part depends on which of them
    are set.  Wire order of the candidates below matters.
    """
    optional_data = b''
    range_present = 0
    # (value-is-set?, bitmap flag, encoder, value) in wire order.
    candidates = (
        (self.range_ipv4_min != '',
         nicira_ext.NX_NAT_RANGE_IPV4_MIN,
         type_desc.IPv4Addr, self.range_ipv4_min),
        (self.range_ipv4_max != '',
         nicira_ext.NX_NAT_RANGE_IPV4_MAX,
         type_desc.IPv4Addr, self.range_ipv4_max),
        (self.range_ipv6_min != '',
         nicira_ext.NX_NAT_RANGE_IPV6_MIN,
         type_desc.IPv6Addr, self.range_ipv6_min),
        (self.range_ipv6_max != '',
         nicira_ext.NX_NAT_RANGE_IPV6_MAX,
         type_desc.IPv6Addr, self.range_ipv6_max),
        (self.range_proto_min is not None,
         nicira_ext.NX_NAT_RANGE_PROTO_MIN,
         type_desc.Int2, self.range_proto_min),
        (self.range_proto_max is not None,
         nicira_ext.NX_NAT_RANGE_PROTO_MAX,
         type_desc.Int2, self.range_proto_max),
    )
    for present, flag, codec, value in candidates:
        if present:
            range_present |= flag
            optional_data += codec.from_user(value)
    body = bytearray()
    msg_pack_into(NXActionNAT._fmt_str, body, 0,
                  self.flags, range_present)
    msg_pack_into('!%ds' % len(optional_data), body, len(body),
                  optional_data)
    body_start = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(body_start + len(body), 8)
    super(NXActionNAT, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(body), buf, offset + body_start, bytes(body))
def serialize(self, buf, offset):
    """Write this ct() action into *buf* at *offset*.

    Fixed conntrack fields first, then the nested actions serialized
    into the same scratch buffer; total length is 8-byte aligned.
    """
    payload = bytearray()
    msg_pack_into(NXActionCT._fmt_str, payload, 0,
                  self.flags, self.zone_src, self.zone_ofs_nbits,
                  self.recirc_table, self.alg)
    for act in self.actions:
        act.serialize(payload, len(payload))
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(prefix_len + len(payload), 8)
    super(NXActionCT, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(payload), buf, offset + prefix_len,
                  bytes(payload))
def serialize(self, buf, offset):
    """Write this reg_load action into *buf* at *offset*.

    Serializes the destination OXM header to recover its numeric id,
    folds offset/width into ofs_nbits, then packs the payload after
    the experimenter header emitted by the base class.
    """
    scratch = bytearray()
    oxm_num = ofp.oxm_from_user_header(self.dst)
    ofp.oxm_serialize_header(oxm_num, scratch, 0)
    (dst_num,) = struct.unpack_from('!I', six.binary_type(scratch), 0)
    # High bits hold the offset; low six bits hold (nbits - 1).
    ofs_nbits = (self.ofs << 6) + self.nbits - 1
    payload = bytearray()
    msg_pack_into(NXActionRegLoad._fmt_str, payload, 0,
                  ofs_nbits, dst_num, self.value)
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(prefix_len + len(payload), 8)
    super(NXActionRegLoad, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(payload), buf, offset + prefix_len,
                  bytes(payload))
def serialize_body(self):
    """Serialize the bundle action body (everything after the NX header).

    Slaves are 16-bit port numbers; the slave list is padded so the
    action stays 8-byte aligned (n_slaves rounded up to a multiple of
    four 2-byte entries).  Returns the packed body as a bytearray.
    """
    # ofs_nbits: start offset in the high bits, width-1 in the low six.
    ofs_nbits = (self.start << 6) + (self.end - self.start)
    data = bytearray()
    slave_offset = (nicira_ext.NX_ACTION_BUNDLE_0_SIZE -
                    nicira_ext.NX_ACTION_HEADER_0_SIZE)
    self.n_slaves = len(self.slaves)
    for s in self.slaves:
        msg_pack_into('!H', data, slave_offset, s)
        slave_offset += 2
    pad_len = (utils.round_up(self.n_slaves, 4) - self.n_slaves)
    if pad_len != 0:
        # pad_len counts 2-byte slave slots, so pad pad_len * 2 bytes.
        # (The original "'%dx' % pad_len * 2" parenthesized as
        # ("'%dx' % pad_len") * 2 — e.g. '3x3x' — which only
        # accidentally pads the same number of bytes as '6x'.)
        msg_pack_into('%dx' % (pad_len * 2), data, slave_offset)
    msg_pack_into(self._fmt_str, data, 0,
                  self.algorithm, self.fields, self.basis,
                  self.slave_type, self.n_slaves,
                  ofs_nbits, self.dst)
    return data
def serialize(self, buf, offset):
    """Write this reg_move action into *buf* at *offset*.

    Payload order: bit count, source offset, destination offset,
    then the source-field and destination-field OXM headers.
    """
    payload = bytearray()
    msg_pack_into(NXActionRegMove._fmt_str, payload, 0,
                  self.n_bits, self.src_ofs, self.dst_ofs)
    # OXM header of the source field, then of the destination field.
    for user_field in (self.src_field, self.dst_field):
        oxm_num = ofp.oxm_from_user_header(user_field)
        ofp.oxm_serialize_header(oxm_num, payload, len(payload))
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(prefix_len + len(payload), 8)
    super(NXActionRegMove, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(payload), buf, offset + prefix_len,
                  bytes(payload))
def serialize_body(self):
    """Serialize the OXM field-list body of this NoviFlow action.

    Concatenates the OXM headers of ``self.fields``, pads the whole
    action to an 8-byte boundary, records the final size in
    ``self.len``, and returns the packed body (fields_num + OXM
    headers + padding).

    Fix: the old broad ``except Exception: print(e)`` swallowed
    struct.pack errors and returned a zeroed buffer (corrupt wire
    data); errors now propagate to the caller.
    """
    oxm_buffer = bytearray()
    for field in self.fields:
        field_buf = bytearray()
        n = ofp.oxm_from_user_header(field)
        ofp.oxm_serialize_header(n, field_buf, 0)
        oxm_buffer.extend(field_buf)
    # type, len, experimenter, customer, reserved, novi_action_type,
    # fields_num occupy the first 13 bytes of the action.
    prefix_len = 13
    oxm_len = len(oxm_buffer)
    pad_len = utils.round_up(prefix_len + oxm_len, 8) - prefix_len - oxm_len
    self.len = prefix_len + oxm_len + pad_len
    buf = bytearray(1 + oxm_len + pad_len)  # 1 for fields_num
    fmt = self._fmt_str + str(oxm_len) + 's' + str(pad_len) + 'x'
    struct.pack_into(fmt, buf, 0, self.field_num, oxm_buffer)
    return buf
def __init__(self, algorithm, fields, basis, slave_type, n_slaves,
             start, end, dst, slaves):
    """Initialize the bundle action base.

    ``slaves`` must be a list or tuple of integer port numbers; each
    slave occupies two bytes on the wire, and the action length is
    padded up to an 8-byte boundary.
    """
    super(_NXActionBundleBase, self).__init__()
    # Fixed part plus 2 bytes per slave, rounded to a multiple of 8.
    self.len = utils.round_up(
        nicira_ext.NX_ACTION_BUNDLE_0_SIZE + len(slaves) * 2, 8)
    self.algorithm = algorithm
    self.fields = fields
    self.basis = basis
    self.slave_type = slave_type
    self.n_slaves = n_slaves
    self.start = start
    self.end = end
    self.dst = dst
    assert isinstance(slaves, (list, tuple))
    assert all(isinstance(s, six.integer_types) for s in slaves)
    self.slaves = slaves
def serialize(self, buf, offset):
    """Write this learn() action into *buf* at *offset*.

    Fixed learn parameters first, each flow-mod spec's encoding
    appended after them, and the total 8-byte aligned before the base
    class emits the experimenter header.
    """
    payload = bytearray()
    msg_pack_into(NXActionLearn._fmt_str, payload, 0,
                  self.idle_timeout, self.hard_timeout, self.priority,
                  self.cookie, self.flags, self.table_id,
                  self.fin_idle_timeout, self.fin_hard_timeout)
    for spec in self.specs:
        payload += spec.serialize()
    prefix_len = (ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE
                  + struct.calcsize(NXAction._fmt_str))
    self.len = utils.round_up(prefix_len + len(payload), 8)
    super(NXActionLearn, self).serialize(buf, offset)
    msg_pack_into('!%ds' % len(payload), buf, offset + prefix_len,
                  bytes(payload))
def parser(cls, datapath, buf, offset, report_reason, event_type, event_id):
    """Parse a flow-stats timer-trigger report from *buf*.

    Unpacks the 24-byte report header, the 8-byte-aligned match, then
    per-flow records until the buffer ends or a record fails to parse.

    NOTE(review): ``single_flows`` and the unpacked header are built
    but never returned (the function yields None) — presumably
    unfinished; confirm against callers before relying on the result.

    Fixes: replaced the Python-2-only ``except Exception, e:`` with the
    portable form (``e`` was unused — its only uses are commented out)
    and dropped the dead local ``_offset``.
    """
    assert event_type == EVT_FLOW_STATS_TIMER_TRIGGER
    length = len(buf)
    #LOG.info("Event %d Message length = %d" %(event_id,length) )
    #LOG.info("Offset = %d" %offset)
    (table_id, out_port, out_group,
     interval_sec, interval_msec) = struct.unpack_from(
        OFP13_FLOW_TIMER_REPORT_HEADER_PACK_STR, buf, offset)
    offset += 24  # size of the fixed report header
    match = ofproto_parser.OFPMatch.parser(buf, offset)
    offset += utils.round_up(match.length, 8)  # match is 8-byte padded
    single_flows = []
    while offset < length:
        #LOG.info("One flow")
        try:
            single_flow = SingleFlowReport.parser(buf, offset)
            single_flows.append(single_flow)
            offset += single_flow.length
        except Exception:
            # Best-effort parse: stop at the first malformed record.
            #LOG.info("Error in parsing single flows: %s" %e)
            #LOG.info( utils.hex_array(buf[offset:]) )
            break