def _scrub_method_name(self, method_name):
    """Return the scrubbed form of *method_name*, memoized per instance.

    Wraps fitparse.utils.scrub_method_name; scrubbing is expensive, so
    each result is cached in ``self._scrubbed_method_names`` and reused
    on subsequent calls with the same name.

    Args:
        method_name: Method name to scrub.

    Returns:
        Scrubbed method name.
    """
    try:
        # Fast path: we have already scrubbed this exact name.
        return self._scrubbed_method_names[method_name]
    except KeyError:
        scrubbed = scrub_method_name(method_name)
        self._scrubbed_method_names[method_name] = scrubbed
        return scrubbed
def do_fitparse_profile():
    """Print every distinct unit string in the fitparse profile.

    Walks all message types, their fields, subfields, and components,
    collects each ``units`` value, and prints one line per unique unit
    together with the scrubbed ``process_units_*`` method name it maps to.

    Fix: the original used Python 2 ``print`` statements, which are a
    SyntaxError under Python 3 and inconsistent with the ``print()``
    calls used elsewhere in this file.
    """
    unit_values = []
    for message_type in MESSAGE_TYPES.values():
        for field in message_type.fields.values():
            unit_values.append(field.units)
            if field.components:
                for component in field.components:
                    unit_values.append(component.units)
            if field.subfields:
                for subfield in field.subfields:
                    unit_values.append(subfield.units)
                    if subfield.components:
                        for component in subfield.components:
                            unit_values.append(component.units)

    # Drop falsy entries (fields with no units).
    unit_values = filter(None, unit_values)

    print('In fitparse/profile.py:')
    for unit_value in sorted(set(unit_values)):
        print(' * %s [%s]' % (
            unit_value,
            scrub_method_name('process_units_%s' % unit_value, convert_units=True),
        ))
def do_fitparse_profile():
    """Print every distinct unit string in the fitparse profile.

    Scans all message types — including each field's components,
    subfields, and subfield components — and prints one line per unique
    non-empty unit value alongside the scrubbed ``process_units_*``
    method name derived from it.
    """
    units = []
    for mesg_type in MESSAGE_TYPES.values():
        for field in mesg_type.fields.values():
            units.append(field.units)
            for component in (field.components or []):
                units.append(component.units)
            for subfield in (field.subfields or []):
                units.append(subfield.units)
                for component in (subfield.components or []):
                    units.append(component.units)

    print('In fitparse/profile.py:')
    # Unique, non-empty unit values in sorted order.
    for unit in sorted({u for u in units if u}):
        scrubbed = scrub_method_name('process_units_%s' % unit, convert_units=True)
        print(' * {} [{}]'.format(unit, scrubbed))
def _parse_data_message(self, header):
    """Parse one data message and run any registered processor hooks.

    Looks up the definition message previously recorded for this
    header's local message number, decodes the raw field values against
    it, expands component and subfield definitions, maintains the
    compressed-timestamp accumulator, and finally dispatches the
    resulting FieldData/DataMessage objects to matching
    ``process_*`` methods on ``self._processor``.

    Args:
        header: Parsed record header; provides ``local_mesg_num`` and,
            for compressed-timestamp headers, a non-None ``time_offset``.

    Returns:
        A DataMessage built from the decoded fields.

    Raises:
        FitParseError: If no definition message was seen for the
            header's local message number.
    """
    def_mesg = self._local_mesgs.get(header.local_mesg_num)
    if not def_mesg:
        raise FitParseError('Got data message with invalid local message type %d' % (
            header.local_mesg_num))

    raw_values = self._parse_raw_values_from_data_message(def_mesg)
    field_datas = []  # TODO: I don't love this name, update on DataMessage too

    # TODO: Maybe refactor this and make it simpler (or at least broken
    # up into sub-functions)
    for field_def, raw_value in zip(def_mesg.field_defs, raw_values):
        field, parent_field = field_def.field, None
        if field:
            # A field may be dynamic: pick the concrete subfield (and keep
            # the parent) based on the other raw values in this message.
            field, parent_field = self._resolve_subfield(field, def_mesg, raw_values)

            # Resolve component fields
            if getattr(field, 'components', None):
                for component in field.components:
                    # Render its raw value
                    cmp_raw_value = component.render(raw_value)

                    # Apply accumulated value
                    if component.accumulate:
                        accumulator = self._accumulators[def_mesg.mesg_num]
                        cmp_raw_value = self._apply_compressed_accumulation(
                            cmp_raw_value, accumulator[component.def_num], component.bits,
                        )
                        accumulator[component.def_num] = cmp_raw_value

                    # Apply scale and offset from component, not from the dynamic
                    # field as they may differ
                    cmp_raw_value = self._apply_scale_offset(component, cmp_raw_value)

                    # Extract the component's dynamic field from def_mesg
                    cmp_field = def_mesg.mesg_type.fields[component.def_num]

                    # Resolve a possible subfield
                    cmp_field, cmp_parent_field = self._resolve_subfield(
                        cmp_field, def_mesg, raw_values)
                    cmp_value = cmp_field.render(cmp_raw_value)

                    # Plop it on field_datas; components carry no field_def of
                    # their own since they were synthesized from the parent.
                    field_datas.append(
                        FieldData(
                            field_def=None,
                            field=cmp_field,
                            parent_field=cmp_parent_field,
                            value=cmp_value,
                            raw_value=cmp_raw_value,
                        ))

            # TODO: Do we care about a base_type and a resolved field mismatch?
            # My hunch is we don't
            value = self._apply_scale_offset(field, field.render(raw_value))
        else:
            # No profile field definition — expose the raw value as-is.
            value = raw_value

        # Update compressed timestamp field
        if (field_def.def_num == FIELD_TYPE_TIMESTAMP.def_num) and (raw_value is not None):
            self._compressed_ts_accumulator = raw_value

        field_datas.append(
            FieldData(
                field_def=field_def,
                field=field,
                parent_field=parent_field,
                value=value,
                raw_value=raw_value,
            ))

    # Apply timestamp field if we got a header (a compressed-timestamp
    # header carries a 5-bit time offset relative to the accumulator).
    if header.time_offset is not None:
        ts_value = self._compressed_ts_accumulator = self._apply_compressed_accumulation(
            header.time_offset, self._compressed_ts_accumulator, 5,
        )
        field_datas.append(
            FieldData(
                field_def=None,
                field=FIELD_TYPE_TIMESTAMP,
                parent_field=None,
                value=FIELD_TYPE_TIMESTAMP.render(ts_value),
                raw_value=ts_value,
            ))

    # Apply data processors. Each hook is optional: we look the scrubbed
    # method name up on the processor and call it only if present.
    for field_data in field_datas:
        # Apply type name processor
        process_method_name = scrub_method_name('process_type_%s' % field_data.type.name)
        type_processor = getattr(self._processor, process_method_name, None)
        if type_processor:
            type_processor(field_data)

        # Apply field name processor
        process_method_name = scrub_method_name('process_field_%s' % field_data.name)
        field_processor = getattr(self._processor, process_method_name, None)
        if field_processor:
            field_processor(field_data)

        # Apply units name processor
        if field_data.units:
            process_method_name = scrub_method_name(
                'process_units_%s' % field_data.units, convert_units=True)
            units_processor = getattr(self._processor, process_method_name, None)
            if units_processor:
                units_processor(field_data)

    data_message = DataMessage(header=header, def_mesg=def_mesg, fields=field_datas)

    # Finally, the whole-message processor hook, keyed on the message name.
    process_method_name = scrub_method_name('process_message_%s' % def_mesg.name)
    mesg_processor = getattr(self._processor, process_method_name, None)
    if mesg_processor:
        mesg_processor(data_message)

    return data_message