def parse_dict(data):
    """Recursively parse binary FOURCC element data into a nested dict.

    Container elements (type 0) are descended into recursively. "STRM"
    containers may repeat, so they are accumulated into a list under the
    "STRM" key. Leaf elements are converted via parse_value, falling back
    to the raw bytes when conversion raises ValueError.
    """
    result = {}
    for element in construct.GreedyRange(FOURCC).parse(data):
        key = element.key.decode('ascii')
        if element.type == 0:
            if key == "STRM":
                result.setdefault("STRM", []).append(parse_dict(element.data))
            else:
                result[key] = parse_dict(element.data)
        else:
            try:
                result[key] = parse_value(element)
            except ValueError:
                result[key] = element.data
    return result
def get_snap_coarse_channel(c, fpgas = [], pol = 0, channel = -1, setup_snap = True):
    """
    Get data from a specific coarse channel - straight out of the FFT into the
    snap block, NOT via the buffer block.
    Returns a list of the data from only that polarisation.
    """
    if channel == -1:
        raise RuntimeError('Cannot get data from unspecified channel.')
    if len(fpgas) == 0:
        fpgas = c.ffpgas
    if setup_snap:
        # Route the coarse-FFT debug stream to the snap block, then request
        # the specific channel (two channels per snap word, hence >> 1).
        corr_functions.write_masked_register(fpgas, register_fengine_control,
            debug_snap_select = snap_fengine_debug_select['coarse_72'])
        corr_functions.write_masked_register(fpgas, register_fengine_coarse_control,
            debug_pol_select = pol, debug_specify_chan = 1, debug_chan = channel >> 1)
    snap_data = snap.snapshots_get(fpgas = fpgas, dev_names = snap_debug, wait_period = 3)
    unpacker = construct.GreedyRange(snap_fengine_debug_coarse_fft)
    # Odd channels come out of the d1 fields, even channels out of d0.
    field = 'd1' if channel & 1 else 'd0'
    rd = []
    for raw in snap_data['data']:
        rd.append([bin2fp(word[field + '_r'], 18, 17) +
                   (1j * bin2fp(word[field + '_i'], 18, 17))
                   for word in unpacker.parse(raw)])
    return rd
def get_snap_ct(c, fpgas = [], offset = -1, setup_snap = True):
    """
    Read and return data from the corner turner. Both pols are returned.

    Each parsed snap word carries four consecutive 4.3 fixed-point complex
    samples per polarisation (fields p00..p03 and p10..p13); the eight
    copy-pasted append statements were collapsed into an indexed loop.
    """
    if len(fpgas) == 0:
        fpgas = c.ffpgas
    if setup_snap:
        corr_functions.write_masked_register(fpgas, register_fengine_control,
            debug_snap_select = snap_fengine_debug_select['ct_64'])
    snap_data = snap.snapshots_get(fpgas = fpgas, dev_names = snap_debug,
        wait_period = 3, offset = offset)
    repeater = construct.GreedyRange(snap_fengine_debug_ct)
    rd = []
    for d in snap_data['data']:
        fdata_p0 = []
        fdata_p1 = []
        for a in repeater.parse(d):
            for idx in range(4):
                fdata_p0.append(bin2fp(a['p0%i_r' % idx], 4, 3) +
                                (1j * bin2fp(a['p0%i_i' % idx], 4, 3)))
                fdata_p1.append(bin2fp(a['p1%i_r' % idx], 4, 3) +
                                (1j * bin2fp(a['p1%i_i' % idx], 4, 3)))
        rd.append([fdata_p0, fdata_p1])
    return rd
def parse(data):
    """Split *data* into deserialized records.

    The bytes before the first 0x00 marker form a VarInt length table; each
    length selects the next consecutive slice of the payload that follows
    the marker.
    """
    marker = data.index(0)
    lengths = c.GreedyRange(c.VarInt).parse(data[:marker])
    records = []
    offset = marker + 1
    for length in lengths:
        records.append(deserialize(data[offset:offset + length]))
        offset += length
    return records
class TStructTest(cst.TContainerMixin):
    # Nested record type: three signed bytes per entry.
    @dataclasses.dataclass
    class Entry(cst.TContainerMixin):
        id: int = cst.sfield(cs.Int8sb)
        width: int = cst.sfield(cs.Int8sb)
        height: int = cst.sfield(cs.Int8sb)
    # Greedily parse Entry records until the stream is exhausted; cnt is not
    # stored in the binary form but computed from the parsed entry count.
    entries: t.List[Entry] = cst.sfield(cs.GreedyRange(cst.TStruct(Entry)))
    cnt: int = cst.sfield(cs.Computed(lambda ctx: len(ctx.entries)))
def recursive(data, parents=tuple()):
    """Depth-first generator over parsed FOURCC elements.

    Yields (element, parent_keys) tuples, where parent_keys is the tuple of
    container keys on the path to the element. Container elements (type 0)
    are descended into rather than yielded themselves.
    """
    for element in construct.GreedyRange(FOURCC).parse(data):
        if element.type != 0:
            yield (element, parents)
        else:
            yield from recursive(element.data, parents + (element.key, ))
class GridElement(object):
    """GridElement.

    Contains one tropo delay (mean and stddev), plus STEC residuals (mean
    and stddev) for each satellite at the grid point.

    Parameters
    ----------
    index : int
      Index of the grid point
    tropo_delay_correction : TroposphericDelayCorrection
      Wet and hydrostatic vertical delays (mean, stddev)
    stec_residuals : array
      STEC residuals for each satellite (mean, stddev)

    """
    # Payload layout: u16 index, fixed tropo sub-struct, then a greedy run
    # of per-satellite STEC residual sub-structs.
    _parser = construct.Embedded(
        construct.Struct(
            'index' / construct.Int16ul,
            'tropo_delay_correction' / construct.Struct(TroposphericDelayCorrection._parser),
            construct.GreedyRange('stec_residuals' / construct.Struct(STECResidual._parser)),
        ))
    __slots__ = [
        'index',
        'tropo_delay_correction',
        'stec_residuals',
    ]

    def __init__(self, payload=None, **kwargs):
        if payload:
            self.from_binary(payload)
        else:
            self.index = kwargs.pop('index')
            self.tropo_delay_correction = kwargs.pop('tropo_delay_correction')
            self.stec_residuals = kwargs.pop('stec_residuals')

    def __repr__(self):
        return fmt_repr(self)

    def from_binary(self, d):
        """Parse binary payload d into this object's slot attributes."""
        p = GridElement._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Serialize this object back to its binary representation.

        Fixed two bugs: the field dict previously read from an undefined
        name ``obj`` (guaranteed NameError), and serialization called the
        nonexistent ``GridElement.build``. Fields are now read from ``self``
        and built with the class parser.
        """
        d = dict([(k, getattr(self, k)) for k in self.__slots__])
        return GridElement._parser.build(d)
def get_snap_feng_10gbe(c, fpgas = [], offset = -1, man_trigger = False, man_valid = False):
    """Capture and unpack the F-engine 10GbE TX debug snap for the given FPGAs."""
    if len(fpgas) == 0:
        fpgas = c.ffpgas
    corr_functions.write_masked_register(fpgas, register_fengine_control,
        debug_snap_select = snap_fengine_debug_select['gbetx0_128'])
    snap_data = snap.snapshots_get(fpgas = fpgas, dev_names = snap_debug,
        wait_period = 3, offset = offset, man_trig = man_trigger,
        man_valid = man_valid, circular_capture = False)
    unpacker = construct.GreedyRange(snap_fengine_gbe_tx)
    rd = []
    for raw in snap_data['data']:
        words = unpacker.parse(raw)
        for word in words:
            # Derive/normalise status fields that are not in the packed format.
            word['link_down'] = not word['link_up']
            word['hdr_valid'] = False
            word['mrst'] = False
            word['sync'] = False
        rd.append(words)
    return rd
def parse_list(data, parent=None):
    """Recursively parse FOURCC element data into a nested list.

    Container elements (type 0) are parsed recursively into sub-lists; leaf
    elements are converted via parse_value, falling back to the raw bytes on
    ValueError.

    Fixes: the ``parent`` parameter defaulted to a shared mutable list (the
    classic mutable-default pitfall); since it is never read, it now defaults
    to None. A leftover debug ``print`` of each leaf key was also removed.
    """
    elements = construct.GreedyRange(FOURCC).parse(data)
    out_list = []
    for element in elements:
        if element.type == 0:
            out_list.append(parse_list(element.data, out_list))
        else:
            try:
                value = parse_value(element)
            except ValueError:
                value = element.data
            out_list.append(value)
    return out_list
def get_snap_fine_fft(c, fpgas = [], offset = -1, setup_snap = True):
    """Read both polarisations of fine-FFT output from the debug snap block."""
    if len(fpgas) == 0:
        fpgas = c.ffpgas
    if setup_snap:
        corr_functions.write_masked_register(fpgas, register_fengine_control,
            debug_snap_select = snap_fengine_debug_select['fine_128'])
    snap_data = snap.snapshots_get(fpgas = fpgas, dev_names = snap_debug,
        wait_period = 3, offset = offset)
    unpacker = construct.GreedyRange(snap_fengine_debug_fine_fft)
    rd = []
    for raw in snap_data['data']:
        words = unpacker.parse(raw)
        p0 = [bin2fp(w['p0_r'], fine_fft_bitwidth, 17) +
              (1j * bin2fp(w['p0_i'], fine_fft_bitwidth, 17)) for w in words]
        p1 = [bin2fp(w['p1_r'], fine_fft_bitwidth, 17) +
              (1j * bin2fp(w['p1_i'], fine_fft_bitwidth, 17)) for w in words]
        rd.append([p0, p1])
    return rd
def get_snap_adc(c, fpgas = [], wait_period = 3):
    """
    Read raw samples from the ADC snap block.
    2 pols, each one 4 parallel samples f8.7. So 64-bits total.
    """
    raw = snap.snapshots_get(fpgas = fpgas, dev_names = snap_adc, wait_period = wait_period)
    unpacker = construct.GreedyRange(snap_fengine_adc)
    rv = []
    for index, d in enumerate(raw['data']):
        data = [[], []]
        for word in unpacker.parse(d):
            # Fields are named d<pol>_<sample>: 2 pols x 4 parallel samples.
            for pol in (0, 1):
                data[pol].extend(bin2fp(word['d%i_%i' % (pol, sample)])
                                 for sample in range(4))
        rv.append({'fpga_index': index, 'data': data})
    return rv
def get_snap_adc_DUMB(c, fpgas = [], wait_period = 3):
    """
    Read raw samples from the ADC snap block.
    2 pols, each one 4 parallel samples f8.7. So 64-bits total.

    Fixes: deprecated numpy.fromstring replaced with numpy.frombuffer (the
    supported zero-copy equivalent for binary data); an unused
    construct.GreedyRange local was removed.
    """
    raw = snap.snapshots_get(fpgas = fpgas, dev_names = snap_adc, wait_period = wait_period)
    rv = []
    for index, d in enumerate(raw['data']):
        data = [[], []]
        od = numpy.frombuffer(d, dtype = numpy.int8)
        # Each 8-byte snap word interleaves 4 samples of pol0 then 4 of pol1.
        for ctr in range(0, len(od), 8):
            data[0].extend(od[ctr:ctr + 4])
            data[1].extend(od[ctr + 4:ctr + 8])
        data = [numpy.array(data[0], dtype = numpy.int8),
                numpy.array(data[1], dtype = numpy.int8)]
        rv.append({'fpga_index': index, 'data': data})
    return rv
def _fpga_snap_quant(fpga = None, offset = -1, wbc_compat = False, debug_data = None):
    '''
    Get quantiser snap data from only one f-engine FPGA.
    NB: Assumes the quantiser has already been selected in the control register.
    Returns a snapshot of quantised data in one of two formats, depending on
    the wbc_compat argument. Either way, it's data for both pols.
    debug_data is data from the snap.snapshots_get function

    Fixes: None comparisons use ``is`` (PEP 8), and deprecated
    numpy.fromstring was replaced with numpy.frombuffer.
    '''
    def _log(msg):
        fpga._logger.debug('_fpga_snap_quant: %s' % msg)
    if fpga is None:
        raise RuntimeError('Please provide the FPGA from which to read the quantised data.')
    if debug_data is None:
        _log('reading snap data at offset %i.' % offset)
        snap_data = snap.snapshots_get(fpgas = [fpga], dev_names = snap_debug,
            wait_period = 3, offset = offset)['data'][0]
    else:
        _log('using debug data, not fresh snap data.')
        snap_data = debug_data['data'][0]
    _log('unpacking data.')
    data = [[], []]
    if not wbc_compat:
        repeater = construct.GreedyRange(snap_fengine_debug_quant)
        unpacked = repeater.parse(snap_data)
        for ctr in unpacked:
            p0c = bin2fp(ctr['p0_r'], 4, 3) + (1j * bin2fp(ctr['p0_i'], 4, 3))
            p1c = bin2fp(ctr['p1_r'], 4, 3) + (1j * bin2fp(ctr['p1_i'], 4, 3))
            data[0].append(p0c)
            data[1].append(p1c)
    else:
        # remember that the data is 16-bit padded up to 128-bit because of the
        # one debug snap block, so only 2 of every 16 bytes are valid data
        unpacked = numpy.frombuffer(snap_data, dtype = numpy.uint8)
        for ctr in range(14, len(unpacked), 16):
            pol0_r_bits = (unpacked[ctr] & ((2**8) - (2**4))) >> 4
            pol0_i_bits = (unpacked[ctr] & ((2**4) - (2**0)))
            pol1_r_bits = (unpacked[ctr+1] & ((2**8) - (2**4))) >> 4
            pol1_i_bits = (unpacked[ctr+1] & ((2**4) - (2**0)))
            # sign-extend the 4-bit two's-complement nibbles via int8 shifts
            data[0].append(float(((numpy.int8(pol0_r_bits << 4) >> 4))) +
                           (1j * float(((numpy.int8(pol0_i_bits << 4) >> 4)))))
            data[1].append(float(((numpy.int8(pol1_r_bits << 4) >> 4))) +
                           (1j * float(((numpy.int8(pol1_i_bits << 4) >> 4)))))
    _log('returning %i complex values for each pol.' % len(data[0]))
    return data
def get_snap_coarse_fft(c, fpgas = [], pol = 0, setup_snap = True):
    """
    Read and return data from the coarse FFT.
    Returns a list of the data from only that polarisation.
    """
    if len(fpgas) == 0:
        fpgas = c.ffpgas
    if setup_snap:
        corr_functions.write_masked_register(fpgas, register_fengine_control,
            debug_snap_select = snap_fengine_debug_select['coarse_72'])
        corr_functions.write_masked_register(fpgas, register_fengine_coarse_control,
            debug_pol_select = pol, debug_specify_chan = 0)
    snap_data = snap.snapshots_get(fpgas = fpgas, dev_names = snap_debug, wait_period = 3)
    unpacker = construct.GreedyRange(snap_fengine_debug_coarse_fft)
    rd = []
    for raw in snap_data['data']:
        # Each snap word holds two consecutive channels (fields d0 and d1).
        samples = []
        for word in unpacker.parse(raw):
            samples.extend(
                bin2fp(word['d%i_r' % b], 18, 17) + (1j * bin2fp(word['d%i_i' % b], 18, 17))
                for b in (0, 1))
        rd.append(samples)
    return rd
def get_snap_buffer_pfb(c, fpgas = [], pol = 0, setup_snap = True, pfb = False):
    '''This DOESN'T EXIST in regular F-engines. Only in specific debug versions.
    '''
    if len(fpgas) == 0:
        fpgas = c.ffpgas
    if setup_snap:
        # Either the PFB output or the buffer output feeds the same 72-bit
        # debug snap; the two share the coarse-FFT word format.
        select = 'pfb_72' if pfb else 'buffer_72'
        corr_functions.write_masked_register(fpgas, register_fengine_control,
            debug_snap_select = snap_fengine_debug_select[select])
        corr_functions.write_masked_register(fpgas, register_fengine_coarse_control,
            debug_pol_select = pol)
    snap_data = snap.snapshots_get(fpgas = fpgas, dev_names = snap_debug, wait_period = 3)
    unpacker = construct.GreedyRange(snap_fengine_debug_coarse_fft)
    rd = []
    for raw in snap_data['data']:
        rd.append([bin2fp(word['d%i_r' % pol], 18, 17) +
                   (1j * bin2fp(word['d%i_i' % pol], 18, 17))
                   for word in unpacker.parse(raw)])
    return rd
class MsgSsrGridDefinition(SBP):
    """SBP class for message MSG_SSR_GRID_DEFINITION (0x05F5).

    You can have MSG_SSR_GRID_DEFINITION inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    Based on the 3GPP proposal R2-1906781 which is in turn based on
    OMA-LPPe-ValidityArea from OMA-TS-LPPe-V2_0-20141202-C

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    header : GridDefinitionHeader
      Header of a Gridded Correction message
    rle_list : array
      Run Length Encode list of quadrants that contain valid data.
      The spec describes the encoding scheme in detail, but
      essentially the index of the quadrants that contain transitions
      between valid and invalid (and vice versa) are encoded as u8
      integers.
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload layout: fixed header sub-struct followed by a greedy run of
    # u8 RLE entries that consumes the rest of the payload.
    _parser = construct.Struct(
        'header' / construct.Struct(GridDefinitionHeader._parser),
        construct.GreedyRange('rle_list' / construct.Int8ul),
    )
    __slots__ = [
        'header',
        'rle_list',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgSsrGridDefinition, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgSsrGridDefinition, self).__init__()
            self.msg_type = SBP_MSG_SSR_GRID_DEFINITION
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.header = kwargs.pop('header')
            self.rle_list = kwargs.pop('rle_list')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

        """
        d = json.loads(s)
        return MsgSsrGridDefinition.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgSsrGridDefinition(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.

        """
        p = MsgSsrGridDefinition._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

        """
        c = containerize(exclude_fields(self))
        self.payload = MsgSsrGridDefinition._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.

        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgSsrGridDefinition._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgSsrGridDefinition, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
class MsgSsrStecCorrection(SBP):
    """SBP class for message MSG_SSR_STEC_CORRECTION (0x05EB).

    You can have MSG_SSR_STEC_CORRECTION inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    The STEC per space vehicle, given as polynomial approximation for
    a given grid. This should be combined with
    MSG_SSR_GRIDDED_CORRECTION message to get the state space
    representation of the atmospheric delay. It is typically
    equivalent to the QZSS CLAS Sub Type 8 messages

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    header : STECHeader
      Header of a STEC message
    stec_sat_list : array
      Array of STEC information for each space vehicle
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload layout: fixed STEC header followed by a greedy run of
    # per-satellite STEC sub-structs consuming the rest of the payload.
    _parser = construct.Struct(
        'header' / construct.Struct(STECHeader._parser),
        construct.GreedyRange('stec_sat_list' / construct.Struct(STECSatElement._parser)),
    )
    __slots__ = [
        'header',
        'stec_sat_list',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgSsrStecCorrection, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgSsrStecCorrection, self).__init__()
            self.msg_type = SBP_MSG_SSR_STEC_CORRECTION
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.header = kwargs.pop('header')
            self.stec_sat_list = kwargs.pop('stec_sat_list')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

        """
        d = json.loads(s)
        return MsgSsrStecCorrection.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgSsrStecCorrection(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.

        """
        p = MsgSsrStecCorrection._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

        """
        c = containerize(exclude_fields(self))
        self.payload = MsgSsrStecCorrection._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.

        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgSsrStecCorrection._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgSsrStecCorrection, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
class MsgSsrPhaseBiases(SBP):
    """SBP class for message MSG_SSR_PHASE_BIASES (0x05E6).

    You can have MSG_SSR_PHASE_BIASES inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    The precise phase biases message contains the biases to be added
    to the carrier phase of the corresponding signal to get corrected
    carrier phase measurement, as well as the satellite yaw angle to
    be applied to compute the phase wind-up correction. It is
    typically an equivalent to the 1265 RTCM message types

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    time : GPSTimeSec
      GNSS reference time of the correction
    sid : GnssSignal
      GNSS signal identifier (16 bit)
    update_interval : int
      Update interval between consecutive corrections. Encoded
      following RTCM DF391 specification.
    iod_ssr : int
      IOD of the SSR correction. A change of Issue Of Data SSR is
      used to indicate a change in the SSR generating configuration
    dispersive_bias : int
      Indicator for the dispersive phase biases property.
    mw_consistency : int
      Consistency indicator for Melbourne-Wubbena linear combinations
    yaw : int
      Satellite yaw angle
    yaw_rate : int
      Satellite yaw angle rate
    biases : array
      Phase biases corrections for a satellite being tracked.
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload layout: fixed scalar fields followed by a greedy run of
    # per-signal phase bias sub-structs consuming the rest of the payload.
    _parser = construct.Struct(
        'time' / construct.Struct(GPSTimeSec._parser),
        'sid' / construct.Struct(GnssSignal._parser),
        'update_interval' / construct.Int8ul,
        'iod_ssr' / construct.Int8ul,
        'dispersive_bias' / construct.Int8ul,
        'mw_consistency' / construct.Int8ul,
        'yaw' / construct.Int16ul,
        'yaw_rate' / construct.Int8sl,
        construct.GreedyRange('biases' / construct.Struct(PhaseBiasesContent._parser)),
    )
    __slots__ = [
        'time',
        'sid',
        'update_interval',
        'iod_ssr',
        'dispersive_bias',
        'mw_consistency',
        'yaw',
        'yaw_rate',
        'biases',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgSsrPhaseBiases, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgSsrPhaseBiases, self).__init__()
            self.msg_type = SBP_MSG_SSR_PHASE_BIASES
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.time = kwargs.pop('time')
            self.sid = kwargs.pop('sid')
            self.update_interval = kwargs.pop('update_interval')
            self.iod_ssr = kwargs.pop('iod_ssr')
            self.dispersive_bias = kwargs.pop('dispersive_bias')
            self.mw_consistency = kwargs.pop('mw_consistency')
            self.yaw = kwargs.pop('yaw')
            self.yaw_rate = kwargs.pop('yaw_rate')
            self.biases = kwargs.pop('biases')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

        """
        d = json.loads(s)
        return MsgSsrPhaseBiases.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgSsrPhaseBiases(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.

        """
        p = MsgSsrPhaseBiases._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

        """
        c = containerize(exclude_fields(self))
        self.payload = MsgSsrPhaseBiases._parser.build(c)
        return self.pack()

    def into_buffer(self, buf, offset):
        """Produce a framed/packed SBP message into the provided buffer and offset.

        """
        self.payload = containerize(exclude_fields(self))
        self.parser = MsgSsrPhaseBiases._parser
        self.stream_payload.reset(buf, offset)
        return self.pack_into(buf, offset, self._build_payload)

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgSsrPhaseBiases, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
# Scratch/debug script: repeatedly arm the snap blocks and parse the debug
# bram off a live FPGA. Order of the register pokes matters (arm, trigger,
# then read).
# Read current status/counters off the FPGA.
snap_stat1=fpga.read_uint('snap_debug_status')
snap_statadc=fpga.read_uint('adc_snap1_status')
pps_count=fpga.read_uint('pps_count')
clock_freq=fpga.read_uint('clk_frequency')
# Arm the ADC snap block: pulse its control line 0 -> 1 -> 0.
fpga.write_int('adc_snap1_ctrl',0)
fpga.write_int('adc_snap1_ctrl',1)
fpga.write_int('adc_snap1_ctrl',0)
fpga.write_int('snap_debug_ctrl',1) #bring this high to trigger capture
fpga.write_int('snap_debug_ctrl',0) #and take it low again
time.sleep(1.0)
# snap_stat2=fpga.read_uint('snap_debug_status')
# a=readBram(fpga,'snap_debug_bram',8*1024)
# print 'a',a
# a_0=struct.unpack('>16384b',a)
# NOTE(review): 'adc' is not defined in this visible scope -- presumably the
# raw bytes read from the ADC snap bram; confirm before running.
adc_0=struct.unpack('>4096b',adc)
repeater = construct.GreedyRange(snap_fengine_debug_coarse_fft)
bram_dmp=dict()
bram_dmp['data']=[]
bram_dmp['data'].append(fpga.read('snap_debug_bram',8192))
# print bram_dmp['data']
d=bram_dmp['data'][0]
tt=repeater.parse(d)
val=numpy.zeros(512)
# Re-capture and re-parse the debug bram 100 times, re-triggering each pass.
for i in range(0,100):
    rd=[]
    bram_dmp['data']=[]
    bram_dmp['data'].append(fpga.read('snap_debug_bram',8192))
    d=bram_dmp['data'][0]
    tt=repeater.parse(d)
    fpga.write_int('snap_debug_ctrl',1) #bring this high to trigger capture
    fpga.write_int('snap_debug_ctrl',0) #and take it low again
class MsgUserData(SBP):
    """SBP class for message MSG_USER_DATA (0x0800).

    You can have MSG_USER_DATA inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    This message can contain any application specific user data up to a
    maximum length of 255 bytes per message.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    contents : array
      User data payload
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload layout: the entire payload is a greedy run of u8 bytes.
    _parser = construct.Struct(
        construct.GreedyRange('contents' / construct.Int8ul),
    )
    __slots__ = [
        'contents',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgUserData, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgUserData, self).__init__()
            self.msg_type = SBP_MSG_USER_DATA
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.contents = kwargs.pop('contents')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

        """
        d = json.loads(s)
        return MsgUserData.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgUserData(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.

        """
        p = MsgUserData._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

        """
        c = containerize(exclude_fields(self))
        self.payload = MsgUserData._parser.build(c)
        return self.pack()

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgUserData, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
def ping_to_sl2(in_path: Path, out_path: Path):
    """Convert a ping2 packet capture file into a Lowrance SL2 sidescan file.

    PROFILE6 packets become sidescan frames (power resampled to 3200 bytes);
    NMEA0183 RMC sentences update heading/speed/position carried into the
    next frame. Each frame is written as soon as sounded data is available.
    """
    ping2_packets = c.GreedyRange(ping_schema).parse_file(in_path)
    # Frame state shared across the loop; mutated in place per packet.
    sl2_data = dict(
        flags={},
        channel_type=ChannelType.SidescanComposite,
        frame_offset=0,
        previous_frame_size=0,
        frequency=Frequency.KHz455,
        altitude_ft=0.0,
        course_over_ground_radians=0.0,
        heading_radians=0.0,
        keel_depth_feet=0.0,
        water_depth_feet=0.0,
        water_speed_knots=0.0,
        water_temperature_c=0.0,
        gps_speed_knots=0.0,
        northing=0,
        easting=0,
    )
    first_timestamp_msec = None
    with out_path.open('wb') as out_fd:
        file_header = dict(
            version=2,
            hardware_version=1,
            block_size=3200,  # sometimes 1970 sometimes 3200
        )
        out_fd.write(sl_file_header.build(file_header))
        for pkt in ping2_packets:
            sl2_data['frame_offset'] = out_fd.tell()
            if pkt.message_id.value == message_id_schema.PROFILE6:
                sl2_data['last_frame_offset_right'] = out_fd.tell()
                p6 = profile6_schema.parse(pkt.payload.value)
                # Time offsets are relative to the first profile packet seen.
                if first_timestamp_msec is None:
                    first_timestamp_msec = p6.timestamp_msec
                sl2_data[
                    'time_offset'] = p6.timestamp_msec - first_timestamp_msec
                sl2_data['frame_index'] = p6.ping_number
                sl2_data['upper_limit_feet'] = p6.start_mm / MM_PER_FOOT
                sl2_data['lower_limit_feet'] = (p6.start_mm + p6.length_mm) / MM_PER_FOOT
                sl2_data['packet_size'] = SIDESCAN_PACKET_SIZE
                pwr = get_ranges_root_power(p6)
                pwr2 = np.clip(pwr / 200000, 0, 1)  # data is now scaled 0-1
                pwr3 = np.interp(
                    np.linspace(-1, 1, 3200),
                    np.linspace(0, 1, len(pwr2)),
                    pwr2,
                ) * (2**8 - 1)  # resample sounded data to the expected size
                sl2_data['sounded_data'] = np.round(pwr3).astype(
                    np.uint8).tolist()
            elif pkt.message_id.value == message_id_schema.NMEA0183:
                nmea = parse_nmea(pkt.payload.value)
                if nmea.sentence_id == 'RMC':
                    rmc = parse_nmea_rmc(nmea.words)
                    # NOTE(review): an invalid RMC is only logged -- its
                    # fields are still applied below; confirm this is intended.
                    if not rmc.is_status_ok:
                        print(
                            f'ignoring NMEA RMC sentence with status=invalid')
                    sl2_data[
                        'heading_radians'] = rmc.track_made_good_degrees_true * math.pi / 180
                    sl2_data['water_speed_knots'] = rmc.speed_over_ground_knots
                    sl2_data['gps_speed_knots'] = rmc.speed_over_ground_knots
                    northing, easting = lat_long_to_navico_northing_easting(
                        rmc.latitude_n, rmc.longitude_e)
                    sl2_data['northing'] = int(round(northing))
                    sl2_data['easting'] = int(round(easting))
                    sl2_data['flags']['PositionValid'] = 1
                    sl2_data['flags']['HeadingValid'] = 1
                    sl2_data['flags']['GPSSpeedValid'] = 1
                else:
                    pass  # print (f'ignoring NMEA sentence {nmea.sentence_id}')
            else:
                print(
                    f'ignoring packet with message id {pkt.message_id.value}')
            # Flush a frame once sounded data is pending, then reset the
            # per-frame state (flags cleared, sounded data consumed).
            if sl2_data.get('sounded_data'):
                sl2_data['packet_size'] = len(sl2_data['sounded_data'])
                sl2_data['frame_size'] = sl2_data['packet_size'] + 144
                out_fd.write(sl2_frame.build(sl2_data))
                sl2_data['flags'] = dict()
                sl2_data['previous_frame_size'] = sl2_data['frame_size']
                del sl2_data['sounded_data']
class MsgFlashProgram(SBP):
    """SBP class for message MSG_FLASH_PROGRAM (0x00E6).

    You can have MSG_FLASH_PROGRAM inherit its fields directly
    from an inherited SBP object, or construct it inline using a dict
    of its fields.

    The flash program message programs a set of addresses of either
    the STM or M25 flash. The device replies with either a
    MSG_FLASH_DONE message containing the return code FLASH_OK (0)
    on success, or FLASH_INVALID_LEN (2) if the maximum write size
    is exceeded. Note that the sector-containing addresses must be
    erased before addresses can be programmed.

    Parameters
    ----------
    sbp : SBP
      SBP parent object to inherit from.
    target : int
      Target flags
    addr_start : array
      Starting address offset to program
    addr_len : int
      Length of set of addresses to program, counting up from
      starting address
    data : array
      Data to program addresses with, with length N=addr_len
    sender : int
      Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
    """
    # Payload layout: u8 target, 3-byte start address, u8 length, then a
    # greedy run of data bytes consuming the rest of the payload.
    _parser = construct.Struct(
        'target' / construct.Int8ul,
        'addr_start' / construct.Array(3, construct.Int8ul),
        'addr_len' / construct.Int8ul,
        construct.GreedyRange('data' / construct.Int8ul),
    )
    __slots__ = [
        'target',
        'addr_start',
        'addr_len',
        'data',
    ]

    def __init__(self, sbp=None, **kwargs):
        if sbp:
            super(MsgFlashProgram, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
            self.from_binary(sbp.payload)
        else:
            super(MsgFlashProgram, self).__init__()
            self.msg_type = SBP_MSG_FLASH_PROGRAM
            self.sender = kwargs.pop('sender', SENDER_ID)
            self.target = kwargs.pop('target')
            self.addr_start = kwargs.pop('addr_start')
            self.addr_len = kwargs.pop('addr_len')
            self.data = kwargs.pop('data')

    def __repr__(self):
        return fmt_repr(self)

    @staticmethod
    def from_json(s):
        """Given a JSON-encoded string s, build a message object.

        """
        d = json.loads(s)
        return MsgFlashProgram.from_json_dict(d)

    @staticmethod
    def from_json_dict(d):
        sbp = SBP.from_json_dict(d)
        return MsgFlashProgram(sbp, **d)

    def from_binary(self, d):
        """Given a binary payload d, update the appropriate payload fields of
        the message.

        """
        p = MsgFlashProgram._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Produce a framed/packed SBP message.

        """
        c = containerize(exclude_fields(self))
        self.payload = MsgFlashProgram._parser.build(c)
        return self.pack()

    def to_json_dict(self):
        self.to_binary()
        d = super(MsgFlashProgram, self).to_json_dict()
        j = walk_json_dict(exclude_fields(self))
        d.update(j)
        return d
class SystemdJournalParser(interface.FileObjectParser):
  """Parses Systemd Journal files."""

  NAME = 'systemd_journal'
  DESCRIPTION = 'Parser for Systemd Journal files.'

  # Flag set in an object header when the DATA object payload is
  # XZ-compressed (decompressed with lzma in _ParseItem).
  _OBJECT_COMPRESSED_FLAG = 0x00000001

  # Unfortunately this doesn't help us knowing about the "dirtiness" or
  # "corrupted" file state.
  # A file can be in any of these states and still be corrupted, for example,
  # by an unexpected shut down. Once journald detects one of these, it will
  # "rotate" the corrupted journal file, and store it away, and change the
  # status to STATE_OFFLINE.
  # STATE_ONLINE means the file wasn't closed, but the journal can still be in
  # a clean state.
  _JOURNAL_STATE = construct.Enum(
      construct.ULInt8('state'),
      STATE_OFFLINE=0,
      STATE_ONLINE=1,
      STATE_ARCHIVED=2
  )

  # Object type tag present at the start of every object header.
  _OBJECT_HEADER_TYPE = construct.Enum(
      construct.ULInt8('type'),
      UNUSED=0,
      DATA=1,
      FIELD=2,
      ENTRY=3,
      DATA_HASH_TABLE=4,
      FIELD_HASH_TABLE=5,
      ENTRY_ARRAY=6,
      TAG=7
  )

  # Reusable little-endian 64-bit integer parser for stream reads.
  _ULInt64 = construct.ULInt64('int')

  # Common header that precedes every object in the journal file.
  _OBJECT_HEADER = construct.Struct(
      'object_header',
      _OBJECT_HEADER_TYPE,
      construct.ULInt8('flags'),
      construct.Bytes('reserved', 6),
      construct.ULInt64('size')
  )

  _OBJECT_HEADER_SIZE = _OBJECT_HEADER.sizeof()

  # Fixed-size leading fields of a DATA object; the event payload follows.
  _DATA_OBJECT = construct.Struct(
      'data_object',
      construct.ULInt64('hash'),
      construct.ULInt64('next_hash_offset'),
      construct.ULInt64('next_field_offset'),
      construct.ULInt64('entry_offset'),
      construct.ULInt64('entry_array_offset'),
      construct.ULInt64('n_entries')
  )

  _DATA_OBJECT_SIZE = _DATA_OBJECT.sizeof()

  # One (object_offset, hash) pair inside an ENTRY object.
  _ENTRY_ITEM = construct.Struct(
      'entry_item',
      construct.ULInt64('object_offset'),
      construct.ULInt64('hash')
  )

  # ENTRY object body; object_items greedily consumes the remaining payload.
  _ENTRY_OBJECT = construct.Struct(
      'entry_object',
      construct.ULInt64('seqnum'),
      construct.ULInt64('realtime'),
      construct.ULInt64('monotonic'),
      construct.Struct(
          'boot_id',
          construct.Bytes('bytes', 16),
          construct.ULInt64('qword1'),
          construct.ULInt64('qword2')),
      construct.ULInt64('xor_hash'),
      construct.Rename('object_items', construct.GreedyRange(_ENTRY_ITEM))
  )

  # File header; starts with the 8-byte 'LPKSHHRH' signature.
  _JOURNAL_HEADER = construct.Struct(
      'journal_header',
      construct.Const(construct.String('signature', 8), b'LPKSHHRH'),
      construct.ULInt32('compatible_flags'),
      construct.ULInt32('incompatible_flags'),
      _JOURNAL_STATE,
      construct.Bytes('reserved', 7),
      construct.Bytes('file_id', 16),
      construct.Bytes('machine_id', 16),
      construct.Bytes('boot_id', 16),
      construct.Bytes('seqnum_id', 16),
      construct.ULInt64('header_size'),
      construct.ULInt64('arena_size'),
      construct.ULInt64('data_hash_table_offset'),
      construct.ULInt64('data_hash_table_size'),
      construct.ULInt64('field_hash_table_offset'),
      construct.ULInt64('field_hash_table_size'),
      construct.ULInt64('tail_object_offset'),
      construct.ULInt64('n_objects'),
      construct.ULInt64('n_entries'),
      construct.ULInt64('tail_entry_seqnum'),
      construct.ULInt64('head_entry_seqnum'),
      construct.ULInt64('entry_array_offset'),
      construct.ULInt64('head_entry_realtime'),
      construct.ULInt64('tail_entry_realtime'),
      construct.ULInt64('tail_entry_monotonic'),
      # Added in format version 187
      construct.ULInt64('n_data'),
      construct.ULInt64('n_fields'),
      # Added in format version 189
      construct.ULInt64('n_tags'),
      construct.ULInt64('n_entry_arrays')
  )

  def __init__(self):
    """Initializes a parser object."""
    super(SystemdJournalParser, self).__init__()
    # Highest hash-table end offset; objects must live past this point.
    self._max_journal_file_offset = 0

  def _ParseObjectHeader(self, file_object, offset):
    """Parses a Systemd journal object header structure.

    Args:
      file_object (dfvfs.FileIO): a file-like object.
      offset (int): offset to the object header.

    Returns:
      tuple[construct.Struct, int]: parsed object header and size of
          the object payload (data) that follows.
    """
    file_object.seek(offset, os.SEEK_SET)
    object_header_data = file_object.read(self._OBJECT_HEADER_SIZE)
    object_header = self._OBJECT_HEADER.parse(object_header_data)
    # object_header.size covers the header itself, so subtract it to get
    # the payload size.
    payload_size = object_header.size - self._OBJECT_HEADER_SIZE
    return (object_header, payload_size)

  def _ParseItem(self, file_object, offset):
    """Parses a Systemd journal DATA object.

    This method will read, and decompress if needed, the content of a DATA
    object.

    Args:
      file_object (dfvfs.FileIO): a file-like object.
      offset (int): offset to the DATA object.

    Returns:
      tuple[str, str]: key and value of this item.

    Raises:
      ParseError: When an unexpected object type is parsed.
    """
    object_header, payload_size = self._ParseObjectHeader(file_object, offset)

    # Skip over the fixed DATA object fields; only the payload that follows
    # them holds the "KEY=value" string. NOTE(review): this read happens
    # before the type check below, so the stream is advanced even for
    # non-DATA objects.
    file_object.read(self._DATA_OBJECT_SIZE)

    if object_header.type != 'DATA':
      raise errors.ParseError(
          'Expected an object of type DATA, but got {0:s}'.format(
              object_header.type))

    event_data = file_object.read(payload_size - self._DATA_OBJECT_SIZE)
    if object_header.flags & self._OBJECT_COMPRESSED_FLAG:
      event_data = lzma.decompress(event_data)

    event_string = event_data.decode('utf-8')
    # Split only on the first '=' so values containing '=' stay intact.
    event_key, event_value = event_string.split('=', 1)
    return (event_key, event_value)

  def _ParseJournalEntry(self, parser_mediator, file_object, offset):
    """Parses a Systemd journal ENTRY object.

    This method will generate an event per ENTRY object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.
      offset (int): offset of the ENTRY object.

    Raises:
      ParseError: When an unexpected object type is parsed.
    """
    object_header, payload_size = self._ParseObjectHeader(file_object, offset)

    entry_object_data = file_object.read(payload_size)
    entry_object = self._ENTRY_OBJECT.parse(entry_object_data)

    if object_header.type != 'ENTRY':
      raise errors.ParseError(
          'Expected an object of type ENTRY, but got {0:s}'.format(
              object_header.type))

    fields = {}
    for item in entry_object.object_items:
      if item.object_offset < self._max_journal_file_offset:
        # NOTE(review): the message formats the ENTRY offset, not the
        # offending item.object_offset that actually failed the check.
        raise errors.ParseError(
            'object offset should be after hash tables ({0:d} < {1:d})'.format(
                offset, self._max_journal_file_offset))
      key, value = self._ParseItem(file_object, item.object_offset)
      fields[key] = value

    # Kernel messages carry no meaningful PID, so only resolve one for
    # userland reporters.
    reporter = fields.get('SYSLOG_IDENTIFIER', None)
    if reporter and reporter != 'kernel':
      pid = fields.get('_PID', fields.get('SYSLOG_PID', None))
    else:
      pid = None

    event_data = SystemdJournalEventData()
    event_data.body = fields['MESSAGE']
    event_data.hostname = fields['_HOSTNAME']
    event_data.pid = pid
    event_data.reporter = reporter

    # realtime is a POSIX timestamp in microseconds.
    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=entry_object.realtime)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)

  def _ParseEntries(self, file_object, offset):
    """Parses Systemd journal ENTRY_ARRAY objects.

    Args:
      file_object (dfvfs.FileIO): a file-like object.
      offset (int): offset of the ENTRY_ARRAY object.

    Returns:
      list[dict]: every ENTRY objects offsets.

    Raises:
      ParseError: When an unexpected object type is parsed.
    """
    entry_offsets = []
    object_header, payload_size = self._ParseObjectHeader(file_object, offset)

    if object_header.type != 'ENTRY_ARRAY':
      raise errors.ParseError(
          'Expected an object of type ENTRY_ARRAY, but got {0:s}'.format(
              object_header.type))

    # First 8 bytes of the payload chain to the next ENTRY_ARRAY (0 = none);
    # the rest is a packed list of 64-bit ENTRY offsets.
    next_array_offset = self._ULInt64.parse_stream(file_object)
    entry_offests_numbers, _ = divmod((payload_size - 8), 8)
    for entry_offset in range(entry_offests_numbers):
      entry_offset = self._ULInt64.parse_stream(file_object)
      # Offset slots may be zero-padded; skip unused slots.
      if entry_offset != 0:
        entry_offsets.append(entry_offset)

    # Recursively follow the chain of ENTRY_ARRAY objects.
    if next_array_offset != 0:
      next_entry_offsets = self._ParseEntries(file_object, next_array_offset)
      entry_offsets.extend(next_entry_offsets)

    return entry_offsets

  def ParseFileObject(self, parser_mediator, file_object, **kwargs):
    """Parses a Systemd journal file-like object.

    Args:
      parser_mediator (ParserMediator): parser mediator.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the header cannot be parsed.
    """
    try:
      journal_header = self._JOURNAL_HEADER.parse_stream(file_object)
    except construct.ConstructError as exception:
      raise errors.UnableToParseFile(
          'Unable to parse journal header with error: {0!s}'.format(exception))

    # Objects must be located after both hash tables; remember the
    # furthest end so _ParseJournalEntry can sanity-check item offsets.
    max_data_hash_table_offset = (
        journal_header.data_hash_table_offset +
        journal_header.data_hash_table_size)
    max_field_hash_table_offset = (
        journal_header.field_hash_table_offset +
        journal_header.field_hash_table_size)
    self._max_journal_file_offset = max(
        max_data_hash_table_offset, max_field_hash_table_offset)

    entries_offsets = self._ParseEntries(
        file_object, journal_header.entry_array_offset)

    try:
      for entry_offset in entries_offsets:
        try:
          self._ParseJournalEntry(parser_mediator, file_object, entry_offset)
        except errors.ParseError as exception:
          parser_mediator.ProduceExtractionError((
              'Unable to complete parsing journal file: {0:s} at offset '
              '0x{1:08x}').format(exception, entry_offset))
          # Stop at the first parse error; later entries are assumed
          # unreliable once the file is corrupted at this point.
          return
    except construct.ConstructError as exception:
      # NOTE(review): if ConstructError is raised before the loop binds
      # entry_offset, formatting it here would raise NameError — verify.
      raise errors.UnableToParseFile((
          'Unable to parse journal header at offset: 0x{0:08x} with '
          'error: {1:s}').format(entry_offset, exception))
class MsgTrackingStateDepB(SBP): """SBP class for message MSG_TRACKING_STATE_DEP_B (0x0013). You can have MSG_TRACKING_STATE_DEP_B inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. Deprecated. Parameters ---------- sbp : SBP SBP parent object to inherit from. states : array Signal tracking channel state sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py). """ _parser = construct.Struct( construct.GreedyRange( 'states' / construct.Struct(TrackingChannelStateDepB._parser)), ) __slots__ = [ 'states', ] def __init__(self, sbp=None, **kwargs): if sbp: super(MsgTrackingStateDepB, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) self.from_binary(sbp.payload) else: super(MsgTrackingStateDepB, self).__init__() self.msg_type = SBP_MSG_TRACKING_STATE_DEP_B self.sender = kwargs.pop('sender', SENDER_ID) self.states = kwargs.pop('states') def __repr__(self): return fmt_repr(self) @staticmethod def from_json(s): """Given a JSON-encoded string s, build a message object. """ d = json.loads(s) return MsgTrackingStateDepB.from_json_dict(d) @staticmethod def from_json_dict(d): sbp = SBP.from_json_dict(d) return MsgTrackingStateDepB(sbp, **d) def from_binary(self, d): """Given a binary payload d, update the appropriate payload fields of the message. """ p = MsgTrackingStateDepB._parser.parse(d) for n in self.__class__.__slots__: setattr(self, n, getattr(p, n)) def to_binary(self): """Produce a framed/packed SBP message. """ c = containerize(exclude_fields(self)) self.payload = MsgTrackingStateDepB._parser.build(c) return self.pack() def to_json_dict(self): self.to_binary() d = super(MsgTrackingStateDepB, self).to_json_dict() j = walk_json_dict(exclude_fields(self)) d.update(j) return d
class MsgSsrCodeBiases(SBP):
  """SBP class for message MSG_SSR_CODE_BIASES (0x05E1).

  You can have MSG_SSR_CODE_BIASES inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The precise code biases message is to be added to the pseudorange of
  the corresponding signal to get corrected pseudorange. It is
  typically an equivalent to the 1059 and 1065 RTCM message types

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  time : GPSTimeSec
    GNSS reference time of the correction
  sid : GnssSignal
    GNSS signal identifier (16 bit)
  update_interval : int
    Update interval between consecutive corrections
  iod_ssr : int
    IOD of the SSR correction. A change of Issue Of Data SSR is used to
    indicate a change in the SSR generating configuration
  biases : array
    Code biases for the different satellite signals
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
      'time' / construct.Struct(GPSTimeSec._parser),
      'sid' / construct.Struct(GnssSignal._parser),
      'update_interval' / construct.Int8ul,
      'iod_ssr' / construct.Int8ul,
      # The rename must wrap the GreedyRange so the parsed list is stored
      # as 'biases' on the container; a rename nested inside an anonymous
      # GreedyRange is discarded and from_binary's getattr(p, 'biases')
      # raises AttributeError.
      'biases' / construct.GreedyRange(
          construct.Struct(CodeBiasesContent._parser)),
  )
  __slots__ = [
      'time',
      'sid',
      'update_interval',
      'iod_ssr',
      'biases',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super(MsgSsrCodeBiases, self).__init__(
          sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgSsrCodeBiases, self).__init__()
      self.msg_type = SBP_MSG_SSR_CODE_BIASES
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.time = kwargs.pop('time')
      self.sid = kwargs.pop('sid')
      self.update_interval = kwargs.pop('update_interval')
      self.iod_ssr = kwargs.pop('iod_ssr')
      self.biases = kwargs.pop('biases')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.

    """
    d = json.loads(s)
    return MsgSsrCodeBiases.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgSsrCodeBiases(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
    p = MsgSsrCodeBiases._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.

    """
    c = containerize(exclude_fields(self))
    self.payload = MsgSsrCodeBiases._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.

    """
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgSsrCodeBiases._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    self.to_binary()
    d = super(MsgSsrCodeBiases, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
class MsgGroupMeta(SBP):
  """SBP class for message MSG_GROUP_META (0xFF0A).

  You can have MSG_GROUP_META inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  This leading message lists the time metadata of the Solution Group.
  It also lists the atomic contents (i.e. types of messages included)
  of the Solution Group.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  group_id : int
    Id of the Msgs Group, 0 is Unknown, 1 is Bestpos, 2 is Gnss
  flags : int
    Status flags (reserved)
  n_group_msgs : int
    Size of list group_msgs
  group_msgs : array
    An inorder list of message types included in the Solution Group,
    including GROUP_META itself
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
      'group_id' / construct.Int8ul,
      'flags' / construct.Int8ul,
      'n_group_msgs' / construct.Int8ul,
      # The rename must wrap the GreedyRange so the parsed list is stored
      # as 'group_msgs' on the container; a rename nested inside an
      # anonymous GreedyRange is discarded and from_binary's
      # getattr(p, 'group_msgs') raises AttributeError.
      'group_msgs' / construct.GreedyRange(construct.Int16ul),
  )
  __slots__ = [
      'group_id',
      'flags',
      'n_group_msgs',
      'group_msgs',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super(MsgGroupMeta, self).__init__(
          sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgGroupMeta, self).__init__()
      self.msg_type = SBP_MSG_GROUP_META
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.group_id = kwargs.pop('group_id')
      self.flags = kwargs.pop('flags')
      self.n_group_msgs = kwargs.pop('n_group_msgs')
      self.group_msgs = kwargs.pop('group_msgs')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.

    """
    d = json.loads(s)
    return MsgGroupMeta.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgGroupMeta(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
    p = MsgGroupMeta._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.

    """
    c = containerize(exclude_fields(self))
    self.payload = MsgGroupMeta._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.

    """
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgGroupMeta._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    self.to_binary()
    d = super(MsgGroupMeta, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
# current key-value based format: KeyValue = c.Struct( "key" / c.Prefixed(CompactUint, c.Struct( "type" / c.Byte, "data" / c.GreedyBytes, )), "value" / c.Prefixed(CompactUint, c.GreedyBytes), "ofs" / c.Tell, ) Sequence = c.FocusedSeq("content", "content" / c.GreedyRange( c.FocusedSeq( "keyvalue", "terminator" / c.Peek(c.Byte), c.StopIf(c.this.terminator == 0), "keyvalue" / KeyValue, ) ), c.Const(b'\0'), ) PSBT = c.Struct( "magic" / c.Const(b'psbt'), "sep" / c.Const(b'\xff'), "general" / Sequence, "transaction" / c.RestreamData(c.this.general[0].value, Transaction), "inputs" / c.Array(c.len_(c.this.transaction.inputs), Sequence), "outputs" / c.Array(c.len_(c.this.transaction.outputs), Sequence), c.Terminated, )
adc0 = fpga.read_uint('adc_sum_sq0') adc1 = fpga.read_uint('adc_sum_sq1') adc = readBram(fpga, 'adc_snap1_bram', 4 * 1024) snap_stat1 = fpga.read_uint('snap_debug_status') snap_statadc = fpga.read_uint('adc_snap1_status') pps_count = fpga.read_uint('pps_count') clock_freq = fpga.read_uint('clk_frequency') fpga.write_int('adc_snap1_ctrl', 0) fpga.write_int('adc_snap1_ctrl', 1) fpga.write_int('adc_snap1_ctrl', 0) fpga.write_int('snap_debug_ctrl', 1) #bring this high to trigger capture fpga.write_int('snap_debug_ctrl', 0) #and take it low again time.sleep(1.0) adc_0 = struct.unpack('>4096b', adc) repeater = construct.GreedyRange(snap_fengine_xaui) bram_dmp = dict() #dd= fpga.read('snap_debug_bram',8192) #dd0=struct.unpack('>8192b',dd) val = numpy.zeros(512) for i in range(0, 200): rd = [] bram_dmp['data'] = [] bram_dmp['data'].append(fpga.read('snap_debug_bram', 8192)) d = bram_dmp['data'][0] tt = repeater.parse(d) fpga.write_int('snap_debug_ctrl', 1) #bring this high to trigger capture fpga.write_int('snap_debug_ctrl', 0) #and take it low again for a in tt: xiau = []
class MsgStatusReport(SBP):
  """SBP class for message MSG_STATUS_REPORT (0xFFFE).

  You can have MSG_STATUS_REPORT inherit its fields directly
  from an inherited SBP object, or construct it inline using a dict
  of its fields.

  The status report is sent periodically to inform the host or other
  attached devices that the system is running. It is used to monitor
  system malfunctions. It contains status reports that indicate to the
  host the status of each sub-system and whether it is operating
  correctly.

  Interpretation of the subsystem specific status code is product
  dependent, but if the generic status code is initializing, it should
  be ignored. Refer to product documentation for details.

  Parameters
  ----------
  sbp : SBP
    SBP parent object to inherit from.
  reporting_system : int
    Identity of reporting system
  sbp_version : int
    SBP protocol version
  sequence : int
    Increments on each status report sent
  uptime : int
    Number of seconds since system start-up
  status : array
    Reported status of individual subsystems
  sender : int
    Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).

  """
  _parser = construct.Struct(
      'reporting_system' / construct.Int16ul,
      'sbp_version' / construct.Int16ul,
      'sequence' / construct.Int32ul,
      'uptime' / construct.Int32ul,
      # The rename must wrap the GreedyRange so the parsed list is stored
      # as 'status' on the container; a rename nested inside an anonymous
      # GreedyRange is discarded and from_binary's getattr(p, 'status')
      # raises AttributeError.
      'status' / construct.GreedyRange(
          construct.Struct(SubSystemReport._parser)),
  )
  __slots__ = [
      'reporting_system',
      'sbp_version',
      'sequence',
      'uptime',
      'status',
  ]

  def __init__(self, sbp=None, **kwargs):
    if sbp:
      super(MsgStatusReport, self).__init__(
          sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc)
      self.from_binary(sbp.payload)
    else:
      super(MsgStatusReport, self).__init__()
      self.msg_type = SBP_MSG_STATUS_REPORT
      self.sender = kwargs.pop('sender', SENDER_ID)
      self.reporting_system = kwargs.pop('reporting_system')
      self.sbp_version = kwargs.pop('sbp_version')
      self.sequence = kwargs.pop('sequence')
      self.uptime = kwargs.pop('uptime')
      self.status = kwargs.pop('status')

  def __repr__(self):
    return fmt_repr(self)

  @staticmethod
  def from_json(s):
    """Given a JSON-encoded string s, build a message object.

    """
    d = json.loads(s)
    return MsgStatusReport.from_json_dict(d)

  @staticmethod
  def from_json_dict(d):
    sbp = SBP.from_json_dict(d)
    return MsgStatusReport(sbp, **d)

  def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.

    """
    p = MsgStatusReport._parser.parse(d)
    for n in self.__class__.__slots__:
      setattr(self, n, getattr(p, n))

  def to_binary(self):
    """Produce a framed/packed SBP message.

    """
    c = containerize(exclude_fields(self))
    self.payload = MsgStatusReport._parser.build(c)
    return self.pack()

  def into_buffer(self, buf, offset):
    """Produce a framed/packed SBP message into the provided buffer and offset.

    """
    self.payload = containerize(exclude_fields(self))
    self.parser = MsgStatusReport._parser
    self.stream_payload.reset(buf, offset)
    return self.pack_into(buf, offset, self._build_payload)

  def to_json_dict(self):
    self.to_binary()
    d = super(MsgStatusReport, self).to_json_dict()
    j = walk_json_dict(exclude_fields(self))
    d.update(j)
    return d
c.Struct( "type" / CompactUint, "data" / Optional(c.GreedyBytes), )), "value" / c.Prefixed(CompactUint, c.GreedyBytes), ) PsbtProprietaryKey = c.Struct( "prefix" / c.CString("utf-8"), "subtype" / CompactUint, "data" / Optional(c.GreedyBytes), ) PsbtSequence = c.FocusedSeq( "content", "content" / c.GreedyRange(PsbtKeyValue), c.Const(b"\0"), ) PsbtEnvelope = c.FocusedSeq( "sequences", "magic" / c.Const(b"psbt\xff"), "sequences" / c.GreedyRange(PsbtSequence), c.Terminated, ) Bip32Field = c.Struct( "fingerprint" / c.Bytes(4), "address_n" / c.GreedyRange(c.Int32ul), ) # fmt: on