def test_basic(self):
    """FCSMinMaxScaler maps each channel into its meta range and updates channel metadata.

    Fix: removed the dead `fit_to_range = False` local, which was immediately
    shadowed by the per-case tuple unpacking in the loop below.
    """
    test_datas = [
        (
            "test_a",
            fcs.FCSData(
                (np.array([[1, 2, 3, 10], [4, 2, 5, 99]]).T, np.ones((4, 2))),
                channels=["A", "B"],
            ),
            fcs.FCSData(
                (np.array([
                    [0.0, 0.111111, 0.222222, 1.0],
                    [0.020619, 0.0, 0.030928, 1.0],
                ]).T, np.ones((4, 2))),
                channels=[
                    Marker.name_to_marker(
                        "A", fcs.ChannelMeta(0.0, 1.0, pne=(0, 0), png=0)),
                    Marker.name_to_marker(
                        "B", fcs.ChannelMeta(0.0, 1.0, pne=(0, 0), png=0)),
                ]),
            False,
        )
    ]
    for name, testdata, expected, fit_to_range in test_datas:
        with self.subTest(name=name, fit_to_range=fit_to_range):
            model = scalers.FCSMinMaxScaler(fit_to_range=fit_to_range)
            result = model.fit_transform(testdata)
            assert_array_almost_equal(result.data, expected.data)
            assert_array_almost_equal(result.ranges_array, expected.ranges_array)
def test_basic(self):
    """FCSStandardScaler standardizes each channel and records the new ranges in metadata."""
    test_datas = [
        (
            "test_a",
            fcs.FCSData(
                (np.array([[1, 2, 3, 10], [4, 2, 5, 99]]).T, np.ones((4, 2))),
                channels=["A", "B"],
            ),
            fcs.FCSData(
                (np.array([
                    [-0.84852815, -0.56568545, -0.28284273, 1.6970563],
                    [-0.56908065, -0.61751306, -0.5448645, 1.7314582],
                ]).T, np.ones((4, 2))),
                channels=[
                    Marker.name_to_marker(
                        "A", fcs.ChannelMeta(-0.84852815, 1.6970563, pne=(0, 0), png=0)),
                    Marker.name_to_marker(
                        "B", fcs.ChannelMeta(-0.61751306, 1.7314582, pne=(0, 0), png=0)),
                ]),
        )
    ]
    for name, testdata, expected in test_datas:
        with self.subTest(name=name):
            # transformed data and the updated channel ranges must both match
            model = scalers.FCSStandardScaler()
            result = model.fit_transform(testdata)
            assert_array_almost_equal(result.data, expected.data)
            assert_array_almost_equal(result.ranges_array, expected.ranges_array)
def read_sel_markers(selected_markers) -> "Dict[(Marker, str), float]":
    """Read selected markers from a file and convert to Marker objects.

    Args:
        selected_markers: Mapping whose first value is an iterable of marker names.

    Returns:
        Dict mapping tube "1" to the list of color-less Marker objects.
    """
    markers = list(selected_markers.values())[0]
    # drop the color component so markers compare by antibody only
    marker_names = [
        Marker(antibody=Marker.name_to_marker(marker).antibody, color=None)
        for marker in markers
    ]
    return {"1": marker_names}
def create_merge_marker(channel_data: "Union[Marker, str]") -> Marker:
    """Return a Marker for the given channel, parsing names into markers.

    A parsed marker carrying both a color and an antibody is unambiguous,
    so it is switched to strict matching. Existing Marker instances are
    returned unchanged.
    """
    if isinstance(channel_data, Marker):
        return channel_data
    marker = Marker.name_to_marker(channel_data)
    if marker.antibody is not None and marker.color is not None:
        marker = marker.set_strict(True)
    return marker
def get_tube_marker(cases: "Iterable[Case]") -> "Dict[(Marker, str), float]":
    """Get markers in the given tube among cases."""
    # collect the distinct antibodies over every sample of every case,
    # discarding the color so duplicates across channels collapse
    unique_markers = set()
    for single_case in cases:
        for sample in single_case.samples:
            for channel in sample.get_data().channels:
                unique_markers.add(Marker(antibody=channel.antibody, color=None))
    return {"1": list(unique_markers)}
def parse_channel_value(channel_value, data_min, data_max) -> "ChannelMeta":
    """Attach range metadata to a channel given as a name or a Marker.

    Strings are parsed into markers with the fallback meta; markers without
    metadata get the fallback attached; markers with metadata pass through.
    """
    fallback_meta = ChannelMeta(data_min, data_max, (0, 0), 0)
    if isinstance(channel_value, str):
        return Marker.name_to_marker(channel_value, meta=fallback_meta)
    if channel_value.meta is None:
        return channel_value.set_meta(fallback_meta)
    # marker with valid metadata, return as-is
    return channel_value
def create_meta_from_fcs(meta: dict, channels: list) -> dict:
    """Get ranges from pnr in metadata."""

    def get_gain(i):
        # gain ($PnG) is optional in FCS metadata; default to neutral 1.0
        try:
            return float(meta[f"$P{i + 1}G"])
        except KeyError:
            LOGGER.debug("No Gain value found for channel %d", i + 1)
            return 1.0

    markers = []
    for index, channel in enumerate(channels):
        pne = tuple(map(float, meta[f"$P{index + 1}E"].split(",")))
        channel_meta = ChannelMeta(
            0, int(meta[f"$P{index + 1}R"]), pne, get_gain(index))
        markers.append(Marker.name_to_marker(channel, meta=channel_meta))
    return markers
def __init__(
        self,
        initdata: Union["URLPath", "FCSData", tuple],
        channels: list = None,):
    """Create a new FCS object.

    Args:
        initdata: Either tuple of meta and data from fcsparser, string filepath
            or another FCSData object.
        channels: Channel names; required only when initdata is a raw tuple.

    Returns:
        FCSData object.
    """
    # copy-construct from an existing FCSData object
    if isinstance(initdata, self.__class__):
        self.data = initdata.data.copy()
        self.mask = initdata.mask.copy()
        self.channels = initdata.channels.copy()
    # parse an FCS file from a path; all events start fully unmasked
    elif isinstance(initdata, (URLPath, str)):
        parser = FCSParser(str(initdata), data_set=DEFAULT_DATASET, encoding=DEFAULT_ENCODING)
        self.data = parser.data
        self.mask = np.ones(self.data.shape)
        # prefer short channel names if present, otherwise fall back to $PnN names
        if parser.channel_names_s:
            self.channels = create_meta_from_fcs(parser.annotation, parser.channel_names_s)
        else:
            self.channels = create_meta_from_fcs(parser.annotation, parser.channel_names_n)
    # raw (data, mask) tuple requires explicit channel names
    elif isinstance(initdata, tuple):
        self.data, self.mask = initdata
        if channels is None:
            raise ValueError("Channels needed when initializing from np data")
        self.channels = create_meta_from_data(self.data, channels)
    else:
        raise RuntimeError(
            "Invalid data for FCS. Either Path, similar object or tuple of data and metadata needed.")
    # normalize dtype and channel representation regardless of input source
    self.data = self.data.astype("float32", copy=False)
    self.channels = [Marker.convert(c) for c in self.channels]
def add_missing_channels(self, channels: List[str]) -> "FCSData":
    """Add missing columns in the given channel list to the dataframe and
    set them to the missing value.

    Args:
        channels: Channel names to append; none may already be present.

    Returns:
        self, with zero-filled data and mask columns appended for the
        new channels.

    Raises:
        ValueError: If any given channel is already contained in the data.

    Fix: the original sliced with ``[:, :-new_len]``; for an empty channel
    list that is ``[:, :0]``, which silently discarded all existing data
    and mask values. Positive slicing and an early return avoid this.
    """
    if any(c in self.channels for c in channels):
        raise ValueError("Given channel already in data.")
    if not channels:
        # nothing to add; also avoids the degenerate [:, :-0] slice
        return self
    new_markers = [
        Marker.convert(c).set_meta(ChannelMeta(0, 0, (0, 0), 0))
        for c in channels]
    rows, cols = self.data.shape
    added = len(new_markers)
    # zero-filled columns represent missing values in both data and mask
    newdata = np.zeros((rows, cols + added))
    newdata[:, :cols] = self.data
    newmask = np.zeros((rows, cols + added))
    newmask[:, :cols] = self.mask
    self.data = newdata
    self.mask = newmask
    self.channels = self.channels + new_markers
    return self
def as_fc(d):
    """JSON object hook resolving serialized project types back into objects.

    Dicts carrying a recognized ``__tag__`` key are deserialized with the
    matching constructor; untagged dicts pass through unchanged.
    """
    if "__enum__" in d:
        enum_name, member_name = d["__enum__"].split(".")
        return getattr(PUBLIC_ENUMS[enum_name], member_name)
    if "__urlpath__" in d:
        return URLPath(d["__urlpath__"])
    if "__casecollection__" in d:
        return case_dataset.json_to_case_collection(d["__casecollection__"])
    if "__case__" in d:
        return case.json_to_case(d["__case__"])
    if "__fcssample__" in d:
        return sample.json_to_fcssample(d["__fcssample__"])
    if "__somsample__" in d:
        return sample.json_to_somsample(d["__somsample__"])
    if "__date__" in d:
        return str_to_date(d["__date__"])
    if "__datetime__" in d:
        return str_to_datetime(d["__datetime__"])
    if "__marker__" in d:
        return Marker.name_to_marker(d["__marker__"])
    # plain dict without any type tag passes through unchanged
    return d
def __post_init__(self):
    """Load array data given as a path and normalize markers to Marker objects."""
    data = self.data
    if isinstance(data, (URLPath, str)):
        # data was given as a file location: load the numpy array from disk
        data = np.load(str(data))
    self.data = data
    self.markers = [Marker.convert(raw_marker) for raw_marker in self.markers]
def __init__(self, channels: list):
    """Intervals are either (x.xx, y.yy) or (None, x.xx) etc, they are always strict smaller"""
    # untrained until fit is called; channel names are parsed into Marker objects
    self.trained = False
    self.channels = [Marker.name_to_marker(channel_name) for channel_name in channels]