def __init__(self, filename):
    """Load an FCS file and wrap its data and analysis segments.

    Parameters
    ----------
    filename : str
        Path of the FCS file passed to fcs.read().
    """
    (data, metadata, analysis, meta_analysis) = fcs.read(filename)

    # Column names from the $PnN metadata keys; data is stored one row
    # per parameter, so data.shape[0] is the parameter count.  Computed
    # once and reused (the original recomputed the identical list).
    columns = [metadata['$P{}N'.format(j)] for j in range(1, data.shape[0] + 1)]
    self._data = LabeledColumns(data, columns)

    # NOTE(review): the analysis segment is labeled with columns derived
    # from the *data* shape — confirm analysis has a matching layout.
    self._analysis = LabeledColumns(analysis, columns)

    # This variable encodes the original length of the data set as
    # imported, for use normalizing kernel density estimates.
    self._original_length = int(metadata['$TOT'])

    # Name that will appear in the legend of plots; $FIL may be absent,
    # so fall back to '' instead of a bare except swallowing everything.
    self.title = metadata.get('$FIL', '')

    self._metadata = metadata
    self._meta_analysis = meta_analysis
def __init__(self, filename):
    """Load an FCS file into this pandas.DataFrame subclass.

    Parameters
    ----------
    filename : str
        Path of the FCS file passed to fcs.read().
    """
    (data, metadata, analysis, meta_analysis) = fcs.read(filename)

    # Column names from the $PnN metadata keys (one per data row).
    columns = [metadata['$P{}N'.format(j)] for j in range(1, data.shape[0] + 1)]

    # There is an endian-ness bug that requires changing the type of
    # data to satisfy pandas, hence the astype('f8').
    super(FlowData, self).__init__(np.transpose(data).astype('f8'),
                                   columns=columns)

    # This variable encodes the original length of the data set as
    # imported, for use normalizing kernel density estimates.
    self._original_length = int(metadata['$TOT'])

    # Name that will appear in the legend of plots; $FIL may be absent,
    # so fall back to '' instead of a bare except swallowing everything.
    self.title = metadata.get('$FIL', '')

    self._metadata = metadata
    self._analysis = analysis
    self._meta_analysis = meta_analysis
    self._data = data
def __init__(self, filename):
    """Load an FCS file into this pandas.DataFrame subclass.

    Parameters
    ----------
    filename : str
        Path of the FCS file passed to fcs.read().
    """
    (data, metadata, analysis, meta_analysis) = fcs.read(filename)

    # Column names from the $PnN metadata keys (one per data row).
    columns = [metadata['$P{}N'.format(j)] for j in range(1, data.shape[0] + 1)]

    # There is an endian-ness bug that requires changing the type of
    # data to satisfy pandas, hence the astype('f8').
    super(FlowData, self).__init__(np.transpose(data).astype('f8'),
                                   columns=columns)

    # This variable encodes the original length of the data set as
    # imported, for use normalizing kernel density estimates.
    self._original_length = int(metadata['$TOT'])

    # Name that will appear in the legend of plots; $FIL may be absent,
    # so fall back to '' instead of a bare except swallowing everything.
    self.title = metadata.get('$FIL', '')

    self._metadata = metadata
    self._analysis = analysis
    self._meta_analysis = meta_analysis
    self._data = data
def tick(io0, io1):
    """Advance the FCS board and AHRS by one tick.

    Writes the two raw input streams, steps the board and AHRS, drains
    the unused output streams, and returns the deserialized estimate
    log together with sensor health, or (None, None) when the log
    cannot be decoded.
    """
    if not fcs._fcs:
        raise RuntimeError("Please call fcs.init()")

    fcs.write(0, io0)
    fcs.write(1, io1)

    fcs._fcs.fcs_board_tick()
    fcs._fcs.fcs_ahrs_tick()

    # Drain streams 1-4; their contents are not used here.
    for stream in (1, 2, 3, 4):
        fcs.read(stream, 1023)

    sensor_health = fcs.get_sensor_health()

    # Best-effort decode: a malformed log yields (None, None) rather
    # than propagating the error to the caller.
    try:
        estimate = plog.ParameterLog.deserialize(fcs.read(0, 1023))
    except Exception:
        return None, None
    return estimate, sensor_health
def __init__(self, path=None):
    """Load an FCS file, or build an empty container when path is None.

    Parameters
    ----------
    path : str or None
        Path of the FCS file to read via fcs.read(); when None, every
        field starts out empty.
    """
    # 'path is not None' is the idiomatic form of 'not path is None'.
    if path is not None:
        (self._data, self._metadata, self._analysis, self._meta_analysis) = \
            fcs.read(path, True)
        self._path = path
        self._filename = os.path.basename(path)
        # Number of events originally imported, for normalization when
        # doing KDEs on a downsampled daughter set.
        self._original_length = self.nevents
    else:
        self._data = []
        self._metadata = {}
        self._analysis = []
        self._meta_analysis = {}
        # Number of variables in original dataset, in case we make a
        # daughter, for normalization when doing KDEs
        self._original_length = 0
def main():
    """Benchmark SPADE density estimation and downsampling on test2.fcs."""
    import time
    import fcs

    (data, metadata, analysis, meta_analysis) = fcs.read('test2.fcs')
    # Parenthesized print of a single value behaves identically under
    # Python 2 and Python 3, unlike the original py2-only statements.
    print("Original data length {}".format(data.shape[1]))

    # Keep only parameter rows 2..36, all events.
    data = data[2:37, :]

    start = time.time()
    s = Spade(data, use_KD_tree=True)
    s.nsamples = 2000
    print(s.estimate_median_dist())
    s.compute_local_density()
    s.downsample()
    stop = time.time()
    print("Elapsed time {}".format(stop - start))
def main():
    """Benchmark SPADE density estimation and downsampling on test.fcs."""
    import time
    import fcs

    (data, metadata, analysis, meta_analysis) = fcs.read('test.fcs')
    # Parenthesized print of a single value behaves identically under
    # Python 2 and Python 3, unlike the original py2-only statements.
    print(data.shape)
    print("Original data length {}".format(data.shape[1]))

    # Keep parameter rows 2..19 and the first 5000 events.
    data = data[2:20, 0:5000]

    start = time.time()
    s = Spade(data, use_KD_tree=True)
    s.nsamples = 2000
    print(s.estimate_median_dist())
    s.compute_local_density()
    s.downsample()
    stop = time.time()
    print("Elapsed time {}".format(stop - start))
def __init__(self, filename=None, panda=None, metadata=None):
    """Load flow cytometry data from an FCS file or a pandas DataFrame.

    Parameters
    ----------
    filename : str or None
        Path of an FCS file to read; takes precedence over panda.
    panda : pandas.DataFrame or None
        Pre-built data frame to wrap when no filename is given.
    metadata : dict or None
        FCS metadata; synthesized from the DataFrame columns if omitted.

    Raises
    ------
    ValueError
        If neither filename nor panda is supplied.
    """
    if filename is not None:
        (data, metadata, analysis, meta_analysis) = fcs.read(filename)
    elif panda is not None:
        self.panda = panda
        data = None
        analysis = None
        meta_analysis = None
        # Fill the metadata if necessary
        if metadata is None:
            metadata = {}
            for j, key in enumerate(self.panda.columns):
                metadata['$P{:d}S'.format(j + 1)] = key
        if '$PAR' not in metadata:
            metadata['$PAR'] = panda.shape[1]
        if '$TOT' not in metadata:
            metadata['$TOT'] = panda.shape[0]
    else:
        # Previously this fell through to a NameError on 'data'; fail
        # early with an explicit error instead.
        raise ValueError('either filename or panda must be provided')

    self._metadata = metadata
    self._analysis = analysis
    self._meta_analysis = meta_analysis
    self._data = data

    # A dictionary that converts column names to index numbers;
    # currently we default to using the long name value $PnS.
    self._alt_names = util.alt_names(self.names, self.short_names)

    # There is an endian-ness bug that requires changing the type of
    # data to satisfy pandas, hence the astype('f8').
    if filename is not None:
        self.panda = pd.DataFrame(np.transpose(data).astype('f8'),
                                  columns=self.names)

    # Name that will appear in the legend of plots; $FIL may be absent,
    # so fall back to '' instead of a bare except swallowing everything.
    self.title = metadata.get('$FIL', '')

    self.spade_mst = {}
    self.spade_means = {}
def __init__(self, filename):
    """Load an FCS file and wrap its data and analysis segments.

    Parameters
    ----------
    filename : str
        Path of the FCS file passed to fcs.read().
    """
    (data, metadata, analysis, meta_analysis) = fcs.read(filename)

    # Column names from the $PnN metadata keys; data is stored one row
    # per parameter, so data.shape[0] is the parameter count.  Computed
    # once and reused (the original recomputed the identical list).
    columns = [metadata['$P{}N'.format(j)] for j in range(1, data.shape[0] + 1)]
    self._data = LabeledColumns(data, columns)

    # NOTE(review): the analysis segment is labeled with columns derived
    # from the *data* shape — confirm analysis has a matching layout.
    self._analysis = LabeledColumns(analysis, columns)

    # This variable encodes the original length of the data set as
    # imported, for use normalizing kernel density estimates.
    self._original_length = int(metadata['$TOT'])

    # Name that will appear in the legend of plots; $FIL may be absent,
    # so fall back to '' instead of a bare except swallowing everything.
    self.title = metadata.get('$FIL', '')

    self._metadata = metadata
    self._meta_analysis = meta_analysis
def tick(lat=None, lon=None, alt=None, velocity=None, attitude=None,
         angular_velocity=None, wind_velocity=None, measurement_input=None):
    """
    Runs the FCS control and comms tasks with the state data provided as
    though it came from the AHRS, and returns the control output.
    """
    if not fcs._fcs:
        raise RuntimeError("Please call fcs.init()")

    # Estimate parameters as (type, precision, scaled values) tuples:
    # lat/lon map +/-pi radians onto signed 32-bit ints, distances and
    # velocities become centi-units, quaternion components map onto
    # signed 16-bit ints.
    param_specs = [
        (plog.ParameterType.FCS_PARAMETER_ESTIMATED_POSITION_LLA, 32,
         [int(lat * (2**31 - 1) / math.pi),
          int(lon * (2**31 - 1) / math.pi),
          int(alt * 1e2)]),
        (plog.ParameterType.FCS_PARAMETER_ESTIMATED_VELOCITY_NED, 16,
         map(lambda x: int(x * 1e2), velocity)),
        (plog.ParameterType.FCS_PARAMETER_ESTIMATED_ATTITUDE_Q, 16,
         map(lambda x: int(x * (2**15 - 1)), attitude)),
        (plog.ParameterType.FCS_PARAMETER_ESTIMATED_ANGULAR_VELOCITY_XYZ, 16,
         map(lambda x: int(x * (2**15 - 1) / math.pi * 0.25),
             angular_velocity)),
        (plog.ParameterType.FCS_PARAMETER_ESTIMATED_WIND_VELOCITY_NED, 16,
         map(lambda x: int(x * 1e2), wind_velocity)),
        (plog.ParameterType.FCS_PARAMETER_AHRS_STATUS, 16, [0, 0]),
    ]

    elog = plog.ParameterLog(log_type=plog.LogType.FCS_LOG_TYPE_ESTIMATE)
    for ptype, precision, values in param_specs:
        elog.append(plog.DataParameter(
            device_id=0,
            parameter_type=ptype,
            value_type=plog.ValueType.FCS_VALUE_SIGNED,
            value_precision=precision,
            values=values))

    fcs.write(3, elog.serialize())
    fcs.write(1, measurement_input)
    #print binascii.b2a_hex(elog.serialize())

    fcs._fcs.fcs_board_tick()
    fcs._fcs.fcs_ahrs_tick()
    fcs._fcs.fcs_control_tick()

    # Read out ignored streams
    for stream in (0, 1, 2):
        fcs.read(stream, 1023)
    sys.stderr.write(fcs.read(4, 1023))

    try:
        control_log = plog.ParameterLog.deserialize(fcs.read(3, 1023))
        setpoint = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_CONTROL_SETPOINT)
        path = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_NAV_PATH_ID
        ).values[0]
        refp = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_KEY_VALUE)
        cycles, obj_val, errors, resets = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_CONTROL_STATUS
        ).values
        # Setpoints are fixed-point 16-bit fractions; scale to [0, 1).
        return (map(lambda x: float(x) / float(2**16), setpoint.values),
                plog.extract_waypoint(refp.value),
                obj_val, cycles, errors, resets, control_log.tick, path)
    except Exception:
        return ([0.0, 0.5, 0.5], {}, 0, 0, 0, 0, 0, 0xFFFF)
#! /opt/local/bin/python
# Round-trip smoke test: read an FCS file and write it straight back out.
import fcs

data, metadata, analysis, meta_analysis = fcs.read('test.fcs')
fcs.write('test_write.fcs', data, metadata)
def tick(lat=None, lon=None, alt=None, velocity=None, attitude=None,
         angular_velocity=None, wind_velocity=None, measurement_input=None):
    """
    Runs the FCS control and comms tasks with the state data provided as
    though it came from the AHRS, and returns the control output.
    """
    if not fcs._fcs:
        raise RuntimeError("Please call fcs.init()")

    estimate_log = plog.ParameterLog(
        log_type=plog.LogType.FCS_LOG_TYPE_ESTIMATE)

    def add_param(ptype, precision, values):
        # Every estimate parameter shares device 0 and signed values.
        estimate_log.append(plog.DataParameter(
            device_id=0,
            parameter_type=ptype,
            value_type=plog.ValueType.FCS_VALUE_SIGNED,
            value_precision=precision,
            values=values))

    # Lat/lon map +/-pi radians onto signed 32-bit ints; distances and
    # velocities become centi-units; quaternion components map onto
    # signed 16-bit ints.
    add_param(plog.ParameterType.FCS_PARAMETER_ESTIMATED_POSITION_LLA, 32,
              [int(lat * (2**31 - 1) / math.pi),
               int(lon * (2**31 - 1) / math.pi),
               int(alt * 1e2)])
    add_param(plog.ParameterType.FCS_PARAMETER_ESTIMATED_VELOCITY_NED, 16,
              map(lambda x: int(x * 1e2), velocity))
    add_param(plog.ParameterType.FCS_PARAMETER_ESTIMATED_ATTITUDE_Q, 16,
              map(lambda x: int(x * (2**15 - 1)), attitude))
    add_param(plog.ParameterType.FCS_PARAMETER_ESTIMATED_ANGULAR_VELOCITY_XYZ,
              16,
              map(lambda x: int(x * (2**15 - 1) / math.pi * 0.25),
                  angular_velocity))
    add_param(plog.ParameterType.FCS_PARAMETER_ESTIMATED_WIND_VELOCITY_NED, 16,
              map(lambda x: int(x * 1e2), wind_velocity))
    add_param(plog.ParameterType.FCS_PARAMETER_AHRS_STATUS, 16, [0, 0])

    fcs.write(3, estimate_log.serialize())
    fcs.write(1, measurement_input)
    #print binascii.b2a_hex(estimate_log.serialize())

    fcs._fcs.fcs_board_tick()
    fcs._fcs.fcs_ahrs_tick()
    fcs._fcs.fcs_control_tick()

    # Read out ignored streams
    fcs.read(0, 1023)
    fcs.read(1, 1023)
    fcs.read(2, 1023)
    sys.stderr.write(fcs.read(4, 1023))

    try:
        control_log = plog.ParameterLog.deserialize(fcs.read(3, 1023))
        setpoint = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_CONTROL_SETPOINT)
        path = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_NAV_PATH_ID
        ).values[0]
        refp = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_KEY_VALUE)
        cycles, obj_val, errors, resets = control_log.find_by(
            device_id=0,
            parameter_type=plog.ParameterType.FCS_PARAMETER_CONTROL_STATUS
        ).values
        # Setpoints are fixed-point 16-bit fractions; scale to [0, 1).
        return (map(lambda x: float(x) / float(2**16), setpoint.values),
                plog.extract_waypoint(refp.value),
                obj_val, cycles, errors, resets, control_log.tick, path)
    except Exception:
        return ([0.0, 0.5, 0.5], {}, 0, 0, 0, 0, 0, 0xFFFF)