def pack(self):
    """Packs the object and returns a buffer representing the packed object.

    Returns:
        bytearray: the packed bytes, zero-padded up to the type's declared
        ``byte_size`` when one is defined and larger than the computed size.

    Raises:
        InstructBufferError: if packing any field fails (chained onto the
        original error via ``chain_exceptions``).
    """
    fields = self._all_fields()
    ctx = PackContext(self, fields)
    for field in fields:
        if field.pack_if.deref(ctx):
            try:
                ctx.output_buffer.set(field.pack_ref.deref(ctx),
                                      field.pack_absolute_position_ref.deref(ctx))
            except Exception:
                # Was a bare `except:` — narrowed so SystemExit/KeyboardInterrupt
                # are not swallowed and re-wrapped as buffer errors.
                raise chain_exceptions(InstructBufferError("Pack error occured", ctx,
                                                           type(self), field.attr_name()))
    result = bytearray(ctx.output_buffer.get())
    # We want to support the user defining the buffer's fixed byte size but not using it all:
    static_byte_size = type(self).byte_size
    if static_byte_size:
        static_byte_size = int(math.ceil(static_byte_size))
        assert len(result) <= static_byte_size, \
            ("in type {0} computed pack size is {1} but declared byte size is {2} - perhaps you manually defined " +
             "the byte size in the type but the actual size is bigger?").format(type(self), len(result), static_byte_size)
        if len(result) < static_byte_size:
            # Zero-pad to the declared fixed size.
            result += bytearray(static_byte_size - len(result))
    return result
def pack(self):
    """Packs the object and returns a buffer representing the packed object.

    Returns:
        bytearray: the packed bytes; padded with zero bytes to the declared
        ``byte_size`` of the type when that is set and exceeds the actual size.

    Raises:
        InstructBufferError: when a field fails to pack (the underlying
        exception is chained via ``chain_exceptions``).
    """
    fields = self._all_fields()
    ctx = PackContext(self, fields)
    for field in fields:
        if field.pack_if.deref(ctx):
            try:
                ctx.output_buffer.set(
                    field.pack_ref.deref(ctx),
                    field.pack_absolute_position_ref.deref(ctx))
            except Exception:
                # Narrowed from a bare `except:` so process-control exceptions
                # (SystemExit, KeyboardInterrupt) propagate untouched.
                raise chain_exceptions(
                    InstructBufferError("Pack error occured", ctx,
                                        type(self), field.attr_name()))
    result = bytearray(ctx.output_buffer.get())
    # We want to support the user defining the buffer's fixed byte size but not using it all:
    static_byte_size = type(self).byte_size
    if static_byte_size:
        static_byte_size = int(math.ceil(static_byte_size))
        assert len(result) <= static_byte_size, \
            ("in type {0} computed pack size is {1} but declared byte size is {2} - perhaps you manually defined " +
             "the byte size in the type but the actual size is bigger?").format(type(self), len(result), static_byte_size)
        if len(result) < static_byte_size:
            # Pad the remainder with zero bytes.
            result += bytearray(static_byte_size - len(result))
    return result
def create_from_stream(self, stream, context=EMPTY_CONTEXT):
    """Read exactly ``self.size`` bytes from *stream* and unpack one value.

    Raises:
        NotEnoughDataError: if the stream yields fewer than ``self.size`` bytes.
        InstructError: if ``struct.unpack`` rejects the data (chained via
        ``chain_exceptions``).
    """
    raw = stream.read(self.size)
    if len(raw) < self.size:
        raise NotEnoughDataError(expected=self.size, actually_read=len(raw))
    try:
        (value,) = struct.unpack(self.format_string, raw)
    except struct.error:
        raise chain_exceptions(InstructError("Unpacking error occurred"))
    return value
def create_from_stream(self, stream, context=EMPTY_CONTEXT):
    """Read ``self.size`` bytes from *stream* and unpack a single value.

    Raises:
        NotEnoughDataError: if fewer than ``self.size`` bytes were available.
        InstructError: if ``struct.unpack`` fails (chained via
        ``chain_exceptions``).
    """
    packed_value = stream.read(self.size)
    if len(packed_value) < self.size:
        raise NotEnoughDataError(expected=self.size, actually_read=len(packed_value))
    try:
        return struct.unpack(self.format_string, packed_value)[0]
    except struct.error:
        # Dropped the unused `as e` binding — chain_exceptions picks up the
        # active exception itself; this also matches the sibling variant.
        raise chain_exceptions(InstructError("Unpacking error occurred"))
def calc_byte_size(cls, class_name, fields):
    """Compute the static byte size for *fields*.

    Returns:
        The maximal stop position across all field ranges, or ``None`` if any
        field's pack/unpack position is not static (i.e. size is dynamic).

    Raises:
        InstructBufferError: if evaluating a field's position fails (chained
        via ``chain_exceptions``).
    """
    ctx = PackContext(None, fields)
    positions = SequentialRangeList()
    # We avoid a list comprehension here so we'll know which field raised an error.
    for field in fields:
        try:
            if not (field.pack_absolute_position_ref.is_static()
                    and field.unpack_absolute_position_ref.is_static()):
                return None
            positions.extend(field.pack_absolute_position_ref.deref(ctx))
        except Exception:
            # Narrowed from a bare `except:` — don't convert SystemExit or
            # KeyboardInterrupt into an InstructBufferError.
            raise chain_exceptions(
                InstructBufferError("Error while calculating static byte size",
                                    ctx, class_name, field.attr_name()))
    return positions.max_stop()
def calc_byte_size(cls, class_name, fields):
    """Return the static byte size implied by *fields*, or ``None``.

    ``None`` means at least one field's position is dynamic, so no static
    size exists. Otherwise the result is the maximal stop position over the
    fields' pack ranges.

    Raises:
        InstructBufferError: on any error while dereferencing a field's
        position (chained via ``chain_exceptions``).
    """
    ctx = PackContext(None, fields)
    positions = SequentialRangeList()
    # We avoid a list comprehension here so we'll know which field raised an error.
    for field in fields:
        try:
            if not (field.pack_absolute_position_ref.is_static() and
                    field.unpack_absolute_position_ref.is_static()):
                return None
            positions.extend(field.pack_absolute_position_ref.deref(ctx))
        except Exception:
            # Was a bare `except:`; narrowed so process-control exceptions
            # are not re-wrapped.
            raise chain_exceptions(
                InstructBufferError(
                    "Error while calculating static byte size", ctx, class_name,
                    field.attr_name()))
    return positions.max_stop()
def unpack(self, buffer):
    """Unpacks the object's fields from buffer.

    Fields whose ``unpack_if`` condition is false are set to ``None``.

    Returns:
        The result of ``self.calc_byte_size(ctx)`` — presumably the number of
        bytes consumed (defined elsewhere; confirm against ``calc_byte_size``).

    Raises:
        InstructBufferError: if unpacking any field fails (chained via
        ``chain_exceptions``).
    """
    fields = self._all_fields()
    ctx = UnpackContext(self, fields, buffer)
    for field in fields:
        try:
            if field.unpack_if.deref(ctx):
                # TODO: get rid of unpack_after once we use dependencies as we should.
                for prev_field in field.unpack_after:
                    prev_field.unpack_value_ref.deref(ctx)
                field.unpack_value_ref.deref(ctx)
            else:
                setattr(self, field.attr_name(), None)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed and re-raised as buffer errors.
            raise chain_exceptions(
                InstructBufferError("Unpack error occurred", ctx, type(self),
                                    field.attr_name()))
    return self.calc_byte_size(ctx)
def unpack(self, buffer):
    """Unpacks the object's fields from buffer.

    Skipped fields (``unpack_if`` false) get their attribute set to ``None``.

    Returns:
        ``self.calc_byte_size(ctx)`` (defined elsewhere in the class).

    Raises:
        InstructBufferError: when any field fails to unpack (the original
        exception is chained via ``chain_exceptions``).
    """
    fields = self._all_fields()
    ctx = UnpackContext(self, fields, buffer)
    for field in fields:
        try:
            if field.unpack_if.deref(ctx):
                # TODO: get rid of unpack_after once we use dependencies as we should.
                for prev_field in field.unpack_after:
                    prev_field.unpack_value_ref.deref(ctx)
                field.unpack_value_ref.deref(ctx)
            else:
                setattr(self, field.attr_name(), None)
        except Exception:
            # Was a bare `except:`; narrowed to Exception so process-control
            # exceptions propagate untouched.
            raise chain_exceptions(
                InstructBufferError("Unpack error occurred", ctx, type(self), field.attr_name()))
    return self.calc_byte_size(ctx)
def write_to_stream(self, obj, stream, context=EMPTY_CONTEXT):
    """Pack *obj* using ``self.format_string`` and write the bytes to *stream*.

    Raises:
        InstructError: if ``struct.pack`` rejects *obj* (chained via
        ``chain_exceptions``).
    """
    try:
        stream.write(struct.pack(self.format_string, obj))
    except struct.error:
        # Dropped the unused `as e` binding; chain_exceptions reads the
        # active exception itself.
        raise chain_exceptions(InstructError("Packing error occurred"))
def write_to_stream(self, obj, stream, context=EMPTY_CONTEXT):
    """Serialize *obj* per ``self.format_string`` and write it to *stream*.

    Raises:
        InstructError: on a ``struct.error`` while packing (chained via
        ``chain_exceptions``).
    """
    try:
        packed = struct.pack(self.format_string, obj)
        stream.write(packed)
    except struct.error:
        raise chain_exceptions(InstructError("Packing error occurred"))