Example #1
 @classmethod
 def newByDTO(cls, dto):
     # Build a plain Quantity, then view it as the subclass so the extra
     # _dto attribute travels with the array (the cls/dto signature
     # implies this is a classmethod).
     obj = Quantity(
         [dto.start, dto.stop],
         dto.units,
     ).view(cls)
     obj._dto = dto
     return obj
Example #2
    def getRecipe(self) -> Recipe:
        title = self.lineRecipeName.displayText()
        servings = self.spnServings.value()
        prep = self.spnPrepTime.value()
        cook = self.spnCookTime.value()
        directions = self.txtEDirections.toPlainText()
        ingredients = {}
        for i in range(self.vbox.count()):
            ingredient: IngredientEditor = self.vbox.itemAt(i).widget()
            units = ingredient.comboUnits.currentText()
            amount = ingredient.dspnAmount.value()
            if units == 'count':
                quantity = Quantity.count(amount)
            else:
                quantity = Quantity.of(amount, units)

            name = ingredient.lineIngredientName.displayText()
            ingredients[name] = quantity
        flags = RecipeFlags.NONE
        if self.chkDF.isChecked(): flags |= RecipeFlags.DAIRYFREE
        if self.chkGF.isChecked(): flags |= RecipeFlags.GLUTENFREE
        if self.chkHealthy.isChecked(): flags |= RecipeFlags.HEALTHY
        if self.chkNoAlcohol.isChecked(): flags |= RecipeFlags.NONALCOHOLIC
        if self.chkVeg.isChecked(): flags |= RecipeFlags.VEGETARIAN
        if self.chkVegan.isChecked(): flags |= RecipeFlags.VEGAN

        return Recipe(title=title,
                      servings=servings,
                      prep=prep,
                      cook=cook,
                      ingredients=ingredients,
                      directions=directions,
                      flags=flags)
Example #3
 def info(self):
   signal = self.view(Quantity)
   signal_base = Quantity(
     self._dto.signal_base_amount,
     self._dto.signal_base_units)
   signal_base.setflags(write=False)
   return {
       'signal': signal,
       'signal_base': signal_base}
Example #4
def init_extra_points_state_graph(verbosity=0):
    # Construct tap
    inflow = Quantity("inflow", derivative="+")
    tap = Tap(inflow=inflow)

    # Construct container
    volume = Quantity("volume")
    height = Quantity("height")
    pressure = Quantity("pressure")
    container = Container(volume=volume, height=height, pressure=pressure)

    # Construct drain
    outflow = Quantity("outflow")
    drain = Drain(outflow=outflow)

    # Set up rules
    inter_state = [
        PositiveInfluence(source="tap.inflow", target="container.volume"),
        NegativeInfluence(source="drain.outflow", target="container.volume"),
        PositiveProportion(source="container.volume",
                           target="container.height"),
        PositiveProportion(source="container.height",
                           target="container.pressure"),
        PositiveProportion(source="container.pressure",
                           target="drain.outflow"),
    ]
    intra_state = [
        PositiveConsequence(target="tap.inflow"),
        NegativeConsequence(target="tap.inflow"),
        PositiveConsequence(target="container.volume"),
        NegativeConsequence(target="container.volume"),
        PositiveConsequence(target="container.height"),
        NegativeConsequence(target="container.height"),
        PositiveConsequence(target="container.pressure"),
        NegativeConsequence(target="container.pressure"),
        PositiveConsequence(target="drain.outflow"),
        NegativeConsequence(target="drain.outflow"),
        VCmax(source="container.volume", target="container.height"),
        VCzero(source="container.volume", target="container.height"),
        VCmax(source="container.height", target="container.pressure"),
        VCzero(source="container.height", target="container.pressure"),
        VCmax(source="container.pressure", target="drain.outflow"),
        VCzero(source="container.pressure", target="drain.outflow")
    ]

    # Create initial state
    init_state = State(tap=tap, container=container, drain=drain)

    # Create state graph
    state_graph = StateGraph(initial_state=init_state,
                             inter_state=inter_state,
                             intra_state=intra_state,
                             verbosity=verbosity)
    return state_graph
Example #5
class AgeResolver:
    
    ageEquivalence = {}
    
    ##### Rat
    # Rats become sexually mature at age 6 weeks, but reach social maturity
    # several months later, at about 5 to 6 months of age (Adams and Boice
    # 1983). In adulthood, each rat month is roughly equivalent to 2.5 human
    # years (Ruth 1935).
    # Domestic rats live about 2 to 3.5 years (Pass and Freeth 1993).
    ageEquivalence["NIFORG:birnlex_160"] = {
        #Adult
        "NIFORG:birnlex_681": [Quantity(5, "month"), Quantity(3.5, "years")],
    }
        
    #ontoMng = OntoManager(recomputer=True)

    
    @staticmethod
    def resolve_fromIDs(speciesId, ageCategoryId, unit=None, typeValue=""):
        
        def resolve_age(ageCatDict, ageCategoryId):  
            for ageCategoryId2 in ageCatDict:
                #ontoMng.
                if ageCategoryId2 == ageCategoryId:
                    return ageCatDict[ageCategoryId]
                    
            return None

        def resolve_species_age(speciesId, ageCategoryId):
            for speciesId2 in AgeResolver.ageEquivalence:
                #ontoMng.
                if speciesId2 == speciesId:
                    return resolve_age(AgeResolver.ageEquivalence[speciesId2], ageCategoryId)
                    
                if speciesId in getChildren(speciesId2):
                    return resolve_age(AgeResolver.ageEquivalence[speciesId2], ageCategoryId)
    
            return None    

        age = resolve_species_age(speciesId, ageCategoryId)
        if age is None:
            return None
        
        if typeValue == "min":
            age = age[0]
        elif typeValue == "max":
            age = age[1]
        elif typeValue == "median":
            age = (age[1]+age[0])/2.0
    
        if unit is not None:
            if isinstance(age, list):
                return [a.rescale(unit) for a in age]
            else:
                return age.rescale(unit)
        return age
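A minimal usage sketch against the single table entry defined above (the species ID matches directly, so the getChildren branch is never reached):

# Hypothetical lookup: median adult-rat age from the table above, in days.
age = AgeResolver.resolve_fromIDs("NIFORG:birnlex_160",
                                  "NIFORG:birnlex_681",
                                  unit="day",
                                  typeValue="median")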
Example #6
def _read_signal_file(
        filepaths: List[str],
        signal_unit: Optional[Quantity] = None) -> IrregularlySampledSignal:
    # Things are a bit complicated here, as the signal does not necessarily cover the whole experiment.
    try:
        # get the continuous signal file
        signal_file = [
            file for file in filepaths
            if "continuous" in os.path.basename(file).lower()
        ][0]

        times = []
        signal = []

        # open the file for reading
        with open(signal_file, "r") as file:

            # and create a reader
            reader = csv.reader(file, delimiter=",")
            # this try/except handles the StopIteration raised by "next" once the reader reaches the end of the file
            try:
                while True:
                    # read time and signal rows
                    time_row = np.array([float(val) for val in next(reader)])
                    signal_row = np.array([float(val) for val in next(reader)])
                    assert len(time_row) == len(signal_row)

                    times.append(time_row)
                    signal.append(signal_row)
            except StopIteration:
                pass

        # concatenate our list of arrays
        times = np.concatenate(times) * second
        signal = np.concatenate(signal)
        assert len(times) == len(signal)

        if signal_unit is not None:
            signal = Quantity(signal, signal_unit)
        else:
            signal = Quantity(signal, "dimensionless")

        result = IrregularlySampledSignal(times=times,
                                          signal=signal,
                                          name="Irregularly Sampled Signal",
                                          file_origin=signal_file)
        channel_id = f"{TypeID.RAW_DATA.value}.0"
        result.annotate(id=channel_id, type_id=TypeID.RAW_DATA.value)
        return result

    # something might go wrong as we perform some IO operations here
    except Exception:
        traceback.print_exc()
Example #7
def _read_main_pulse_file(filepaths: List[str]) -> Event:
    try:
        # read pulse file
        pulse_file = [
            file for file in filepaths
            if "pulses" in os.path.basename(file).lower()
        ][0]
        pulses_df = pd.read_csv(filepath_or_buffer=pulse_file,
                                header=None,
                                names=["timestamp", "comment"])
        times = Quantity(pulses_df["timestamp"], "s")

        pulses = Event(times=times,
                       labels=pulses_df["comment"],
                       name="Dapsys Main Pulse",
                       file_origin=pulse_file)
        channel_id = f"{TypeID.ELECTRICAL_STIMULUS.value}.0"
        pulses.annotate(id=channel_id,
                        type_id=TypeID.ELECTRICAL_STIMULUS.value)

        intervals: Quantity = np.diff(times)
        intervals = quantity_concat(intervals,
                                    np.array([float("inf")]) * second)
        pulses.array_annotate(intervals=intervals)

        return pulses
    except Exception:
        traceback.print_exc()
Example #8
def spike_statistics(idx, row):
    from elephant.statistics import mean_firing_rate, cv, isi
    from elephant.conversion import BinnedSpikeTrain
    from elephant.spike_train_correlation import corrcoef

    print(idx)
    results = {}

    # read spike trains from file
    io = get_io(row["output_file"])
    data_block = io.read()[0]
    spiketrains = data_block.segments[0].spiketrains

    # calculate mean firing rate
    results["spike_counts"] = sum(st.size for st in spiketrains)
    rates = [mean_firing_rate(st) for st in spiketrains]
    results["firing_rate"] = Quantity(rates, units=rates[0].units).rescale("1/s").mean()

    # calculate coefficient of variation of the inter-spike interval
    cvs = [cv(isi(st)) for st in spiketrains if st.size > 1]
    if len(cvs) > 0:
        results["cv_isi"] = sum(cvs)/len(cvs)
    else:
        results["cv_isi"] = 0

    # calculate global cross-correlation
    #cc_matrix = corrcoef(BinnedSpikeTrain(spiketrains, binsize=5*ms))
    #results["cc_min"] = cc_matrix.min()
    #results["cc_max"] = cc_matrix.max()
    #results["cc_mean"] = cc_matrix.mean()

    io.close()
    return results
Example #9
 def rescale(self, quantity: pq.Quantity):
     if not isinstance(quantity, pq.Quantity):
         raise TypeError(f"Expected Quantity, got '{type(quantity)}'")
     dimensionality = quantity.dimensionality.simplified
     for reference in self.units_all:
         if reference.dimensionality.simplified == dimensionality:
             return quantity.rescale(reference)
     raise ValueError(f"Unknown units: '{quantity.units}'")
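A quick sketch of the intended behavior, assuming a hypothetical instance whose units_all contains pq.mL:

import quantities as pq
converter.rescale(pq.Quantity(2, "L"))    # -> array(2000.0) * mL
converter.rescale(pq.Quantity(3, "kg"))   # raises ValueError (no mass reference in units_all)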
Example #10
def _imply_sampling_rate_from_irregular_signal(
        signal: IrregularlySampledSignal, sample_at_idx: int = 0) -> Quantity:
    if len(signal.times) < (sample_at_idx + 2):
        raise ValueError(
            "Signal has fewer than two samples, therefore cannot imply sampling rate."
        )
    t_diff = signal.times[sample_at_idx + 1] - signal.times[sample_at_idx]
    return Quantity(1.0 / t_diff, "Hz")
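For instance, consecutive samples at t = 0.000 s and t = 0.001 s imply 1 / (0.001 s) = 1000 Hz; a hedged usage sketch:

# irregular_sig is any IrregularlySampledSignal with at least two samples.
rate = _imply_sampling_rate_from_irregular_signal(irregular_sig)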
Example #11
 def transform_dict_out(self, value):
     if value.get('_datatype', None) == 'quantity':
         if 'uncertainty' in value:
             return datastructures.UncertainQuantity(
                 value['magnitude'], value['units'],
                 self.handle_uncert_load(value['uncertainty']))
         else:
             return Quantity(value['magnitude'], value['units'])
     return None
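A round-trip sketch (the codec instance name is hypothetical):

value = {'_datatype': 'quantity', 'magnitude': 1.5, 'units': 'mV'}
codec.transform_dict_out(value)   # -> Quantity(1.5, 'mV')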
Example #12
def import_dapsys_csv_files(directory: str,
                            sampling_rate: Union[Quantity, str] = "imply",
                            ap_correlation_window_size: Quantity = Quantity(0.003, "s")) \
                            -> Tuple[Block, Dict[TypeID, Dict[str, str]], List[APTrack]]:

    csv_files = _get_files_with_extension(directory, ".csv")

    main_pulses: Event = _read_main_pulse_file(filepaths=csv_files)
    irregular_sig: IrregularlySampledSignal = _read_signal_file(
        filepaths=csv_files, signal_unit="uV")

    if isinstance(sampling_rate, str) and sampling_rate == "imply":
        sampling_rate = _imply_sampling_rate_from_irregular_signal(
            irregular_sig)

    analog_sig: AnalogSignal = convert_irregularly_sampled_signal_to_analog_signal(
        irregular_sig, sampling_rate=sampling_rate)
    analog_sig.annotate(id=f"{TypeID.RAW_DATA.value}.1",
                        type_id=TypeID.RAW_DATA.value)

    ap_tracks: List[APTrack] = _read_track_files(filepaths=csv_files,
                                                 el_stimuli=main_pulses,
                                                 sampling_rate=sampling_rate)
    track_aps: SpikeTrain = _find_action_potentials_on_tracks(
        ap_tracks=ap_tracks,
        el_stimuli=main_pulses,
        signal=irregular_sig,
        window_size=ap_correlation_window_size,
        sampling_rate=sampling_rate)

    # create mapping from names to channel ids
    channel_id_map = {type_id: {} for type_id in TypeID}
    channel_id_map[TypeID.ELECTRICAL_STIMULUS].update(
        {"Main Pulse": main_pulses.annotations["id"]})
    channel_id_map[TypeID.RAW_DATA].update({
        "Analog Signal":
        analog_sig.annotations["id"],
        "Irregular Signal":
        irregular_sig.annotations["id"]
    })
    channel_id_map[TypeID.ACTION_POTENTIAL].update(
        {"Track APs": track_aps.annotations["id"]})

    # produce the corresponding NEO objects
    block: Block = Block(name="Base block of dapsys csv recording")
    segment: Segment = Segment(name="This recording consists of one segment")

    segment.events.append(main_pulses)
    segment.irregularlysampledsignals.append(irregular_sig)
    segment.analogsignals.append(analog_sig)
    segment.spiketrains.append(track_aps)

    block.segments.append(segment)

    return block, channel_id_map, ap_tracks
Example #13
def convert_irregularly_sampled_signal_to_analog_signal(
        irregular_sig: IrregularlySampledSignal,
        sampling_rate: Quantity = Quantity(10000, "Hz")) -> AnalogSignal:
    
    # allocate array for the regular signal
    num_regular_samples = ceil(Quantity(irregular_sig.duration * sampling_rate).magnitude)
    regular_sig = Quantity(np.zeros(num_regular_samples, dtype=np.float64),
                           irregular_sig.dimensionality)

    # calculate the indices of the samples
    idcs: Quantity = (irregular_sig.times - irregular_sig.times[0]) * sampling_rate
    # truncate to integer sample indices (np.vectorize(np.int) relied on the
    # removed np.int alias; astype is the idiomatic equivalent)
    idcs = idcs.magnitude.astype(int)

    # conversion step
    regular_sig[idcs] = irregular_sig[:].ravel()
    result: AnalogSignal = AnalogSignal(regular_sig,
                                        t_start=irregular_sig.times[0],
                                        sampling_rate=sampling_rate,
                                        name="Analog Signal",
                                        file_origin=irregular_sig.file_origin)
    return result
Example #14
File: utils.py  Project: tranmmh/nc_paper
def convert_quantity(x):
    if type(x) is Quantity:
        return [x.item(), str(x.units).split()[1]]

    elif type(x) is list:
        if type(x[0]) is float and type(x[1]) is str:
            return Quantity(*x, dtype=float)
        else:
            raise TypeError('Do not recognise type of %s' % x)
    else:
        return x
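A round-trip sketch of the conversion in both directions:

serialized = convert_quantity(Quantity(1.5, "s"))   # -> [1.5, 's']
restored = convert_quantity(serialized)             # -> Quantity(1.5, 's')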
Example #15
def init_minimum_viable_state_graph(verbosity=0):
    # Construct tap
    inflow = Quantity("inflow", derivative="+")
    tap = Tap(inflow=inflow)

    # Construct container
    volume = Quantity("volume")
    container = Container(volume=volume)

    # Construct drain
    outflow = Quantity("outflow")
    drain = Drain(outflow=outflow)

    # Set up relationships
    inter_state = [
        PositiveInfluence(source="tap.inflow", target="container.volume"),
        NegativeInfluence(source="drain.outflow", target="container.volume"),
        PositiveProportion(source="container.volume", target="drain.outflow")
    ]
    intra_state = [
        PositiveConsequence(target="tap.inflow"),
        NegativeConsequence(target="tap.inflow"),
        PositiveConsequence(target="container.volume"),
        NegativeConsequence(target="container.volume"),
        PositiveConsequence(target="drain.outflow"),
        NegativeConsequence(target="drain.outflow"),
        VCmax(source="container.volume", target="drain.outflow"),
        VCzero(source="container.volume", target="drain.outflow")
    ]

    # Create initial state
    init_state = State(tap=tap, container=container, drain=drain)

    # Create state graph
    state_graph = StateGraph(initial_state=init_state,
                             inter_state=inter_state,
                             intra_state=intra_state,
                             verbosity=verbosity)
    return state_graph
Example #16
def _homogeneous_process(interval_generator, args, mean_rate, t_start, t_stop,
                         as_array):
    """
    Returns a spike train whose spikes are a realization of a random process
    generated by the function `interval_generator` with the given rate,
    starting at time `t_start` and stopping at time `t_stop`.
    """
    def rescale(x):
        return (x / mean_rate.units).rescale(t_stop.units)

    n = int(((t_stop - t_start) * mean_rate).simplified)
    number = np.ceil(n + 3 * np.sqrt(n))
    if number < 100:
        number = min(5 + np.ceil(2 * n), 100)
    assert number > 4  # if positive, number cannot be less than 5
    isi = rescale(interval_generator(*args, size=int(number)))
    spikes = np.cumsum(isi)
    spikes += t_start

    i = spikes.searchsorted(t_stop)
    if i == len(spikes):
        # ISI buffer overrun
        extra_spikes = []
        t_last = spikes[-1] + rescale(interval_generator(*args, size=1))[0]
        while t_last < t_stop:
            extra_spikes.append(t_last)
            t_last = t_last + rescale(interval_generator(*args, size=1))[0]
        # np.concatenate does not conserve units
        spikes = Quantity(np.concatenate((spikes, extra_spikes)).magnitude,
                          units=spikes.units)
    else:
        spikes = spikes[:i]

    if as_array:
        spikes = spikes.magnitude
    else:
        spikes = SpikeTrain(spikes,
                            t_start=t_start,
                            t_stop=t_stop,
                            units=spikes.units)

    return spikes
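A hedged usage sketch, mirroring how elephant's homogeneous_poisson_process drives this helper (exponential inter-spike intervals yield a Poisson train):

import numpy as np
from quantities import Hz, s

rate = 10.0 * Hz
train = _homogeneous_process(np.random.exponential, (1.0 / rate.magnitude,),
                             rate, 0.0 * s, 10.0 * s, as_array=False)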
Example #17
    def test_regular_score_types_2(self):
        BooleanScore(True)
        BooleanScore(False)
        score = BooleanScore.compute(5, 5)
        self.assertEqual(score.norm_score, 1)
        score = BooleanScore.compute(4, 5)
        self.assertEqual(score.norm_score, 0)

        self.assertEqual(1, BooleanScore(True).norm_score)
        self.assertEqual(0, BooleanScore(False).norm_score)

        t = RangeTest([2, 3])
        score.test = t
        score.describe()
        score.description = "Lorem Ipsum"
        score.describe()

        score = FloatScore(3.14)
        self.assertRaises(InvalidScoreError, score.check_score,
                          Quantity([1, 2, 3], "J"))

        obs = np.array([1.0, 2.0, 3.0])
        pred = np.array([1.0, 2.0, 4.0])
        score = FloatScore.compute_ssd(obs, pred)
        self.assertEqual(str(score), "1")
        self.assertEqual(score.score, 1.0)

        score = RatioScore(1.2)
        self.assertEqual(1, RatioScore(1.0).norm_score)
        self.assertEqual(0, RatioScore(1e12).norm_score)
        self.assertEqual(0, RatioScore(1e-12).norm_score)

        self.assertEqual(str(score), "Ratio = 1.20")

        self.assertRaises(InvalidScoreError, RatioScore, -1.0)
        score = RatioScore.compute({"mean": 4.0, "std": 1.0}, {"value": 2.0})

        self.assertEqual(score.score, 0.5)
Example #18
    def rescaleUnit(self, unit, rescaleStereo=True):
        self.__operations.append(["rescaleUnit", unit, rescaleStereo])

        def rescale2DStereo(paramID, thicknessValue, thicknessUnit,
                            desiredUnit):
            density = paramGetter.getParam(paramID)
            thickness = Quantity(thicknessValue, thicknessUnit)
            return (density / thickness).rescale(desiredUnit)

        self.__report += "Rescaling the units to '" + str(unit) + "'.\n"
        if rescaleStereo:
            self.__report += "Rescaling densities from 2D densities to 3D.\n"

        paramGetter = ParameterGetter(pathDB=self.pathDB)
        for param, annot, (index, row) in zip(self.sampleDF["obj_parameter"],
                                              self.sampleDF["obj_annotation"],
                                              self.sampleDF.iterrows()):
            if param.unit == unit:
                continue

            try:
                param = param.rescale(unit)
            except ValueError:

                if rescaleStereo:
                    thicknessInstanceId = [
                        param.instanceId
                        for param in annot.experimentProperties
                        if getParameterTypeNameFromID(param.paramTypeId) ==
                        "slice_thickness"
                    ]

                    if len(thicknessInstanceId) == 1:
                        thicknessParameter = paramGetter.getParam(
                            thicknessInstanceId[0])
                        if len(thicknessParameter.values) == 1:
                            param = rescale2DStereo(
                                param.id,
                                thicknessValue=thicknessParameter.values[0],
                                thicknessUnit=thicknessParameter.unit,
                                desiredUnit=unit)
                            self.sampleDF.loc[index, "obj_parameter"] = param
                            self.sampleDF.loc[index,
                                              "Values"] = param.valuesText()
                            self.sampleDF.loc[index, "Unit"] = param.unit
                            continue

                statusStr = "Cannot be rescaled to unit " + str(unit) + "\n"
                self.sampleDF.loc[index, "isValid"] = False
                self.sampleDF.loc[index, "statusStr"] += statusStr
                continue

            if Quantity(1, param.unit) != Quantity(1, unit):
                statusStr = "Cannot be rescaled to unit " + str(unit) + "\n"
                self.sampleDF.loc[index, "isValid"] = False
                self.sampleDF.loc[index, "statusStr"] += statusStr
                continue

            self.sampleDF.loc[index, "obj_parameter"] = param
            self.sampleDF.loc[index, "Values"] = param.valuesText()
            self.sampleDF.loc[index, "Unit"] = param.unit
Example #19
class RecipeFlags(Flag):
    # Head of this enum reconstructed from the usage in Example #2; the
    # members ahead of NONALCOHOLIC and their order are assumptions.
    NONE = 0
    DAIRYFREE = auto()
    GLUTENFREE = auto()
    VEGETARIAN = auto()
    VEGAN = auto()
    NONALCOHOLIC = auto()
    HEALTHY = auto()


class Recipe(NamedTuple):
    title: str
    servings: int
    prep: int
    cook: int
    ingredients: Dict[str, Quantity]
    directions: str
    flags: RecipeFlags


guac = Recipe(
    title='Guacamole',
    ingredients={
        'avocado': Quantity.count(4),
        'roma tomato': Quantity.count(2),
        'onion': Quantity.count(0.5),
        'lime': Quantity.count(2),
        'jalapeño': Quantity.count(1),
        'cilantro': Quantity.count(0.5),
        'salt': Quantity.of(2, 'tsp')
    },
    directions='Combine ingredients in a bowl. Chill before serving.',
    servings=8,
    prep=20,
    cook=0,
    flags=RecipeFlags.GLUTENFREE | RecipeFlags.DAIRYFREE | RecipeFlags.VEGAN
    | RecipeFlags.NONALCOHOLIC | RecipeFlags.VEGETARIAN)
Example #20
 def getSamplingFrequency(self):
     return Quantity(self._samplerate, 'Hz')
Example #21
    def preprocess_age(self):
        self.__operations.append(["preprocess_age"])

        if not "SpeciesId" in self.sampleDF:
            self.preprocess_species()

        ageCategoryIds = []
        ageCategories = []
        numericalAges = []
        for index, row in self.sampleDF.iterrows():

            # First check if an experimental property with age has been attributed to the record
            ageExpProp = [
                expProp.instanceId
                for expProp in row["obj_annotation"].experimentProperties
                if expProp.paramTypeId == 'BBP-002001'
            ]
            if len(ageExpProp) > 1:
                statusStr = "Age is ambiguous. More than one age experimentation property is associated with the annotation.\n"
                self.sampleDF.loc[index, "isValid"] = False
                self.sampleDF.loc[index, "statusStr"] += statusStr
                continue

            if len(ageExpProp) == 1:
                getter = ParameterGetter(pathDB=self.pathDB)

                ageParam = getter.getParam(ageExpProp[0])

                ageCategoryIds.append(None)
                ageCategories.append(None)
                try:
                    numericalAges.append(
                        Quantity(ageParam.centralTendancy(),
                                 ageParam.unit).rescale(self.ageUnit))
                except ValueError:
                    raise ValueError(
                        "Issue encountered while processing annotation Parameter instance ID: "
                        +
                        str(self.sampleDF.loc[index, "Parameter instance ID"]))

            # No experimental property attributed. Check whether an age category can be used, if one has been attributed.
            else:
                tags = row["AgeCategories"]
                if len(tags) > 1:
                    statusStr = "Age is ambiguous. More than one age category is associated with the annotation.\n"
                    self.sampleDF.loc[index, "isValid"] = False
                    self.sampleDF.loc[index, "statusStr"] += statusStr

                if len(tags) == 0:
                    ageCategoryIds.append(None)
                    ageCategories.append(None)
                    numericalAges.append(None)
                    continue

                ageCategoryIds.append(tags[0].id)
                ageCategories.append(tags[0].name)
                age = AgeResolver.resolve_fromIDs(row["SpeciesId"],
                                                  tags[0].id,
                                                  unit=self.ageUnit,
                                                  typeValue=self.ageTypeValue)
                numericalAges.append(age)

        self.sampleDF["AgeCategoryId"] = ageCategoryIds
        self.sampleDF["AgeCategory"] = ageCategories
        self.sampleDF["age"] = numericalAges

        self.__report += "Preprocessing age information.\n"
Example #22
 def rescale2DStereo(paramID, thicknessValue, thicknessUnit,
                     desiredUnit):
     density = paramGetter.getParam(paramID)
     thickness = Quantity(thicknessValue, thicknessUnit)
     return (density / thickness).rescale(desiredUnit)
Example #23
 def put(self, obj: pq.Quantity):
     assert isinstance(obj, pq.Quantity)
     obj = obj.rescale(pq.mL)
     return obj.item()
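A quick sketch of the normalization (store is a hypothetical instance):

import quantities as pq
store.put(pq.Quantity(0.25, "L"))   # rescaled to 250.0 mL, returned as 250.0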
Example #24
def quantity_concat(a: Quantity, b: Quantity) -> Quantity:
    return np.concatenate([a, b.rescale(a.units)]) * a.units
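For example, the second operand is rescaled to the first operand's units before concatenation:

a = Quantity([1.0, 2.0], "s")
b = Quantity([500.0], "ms")
quantity_concat(a, b)   # -> Quantity([1.0, 2.0, 0.5], "s")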
Example #25
def _find_action_potentials_on_tracks(ap_tracks: Iterable[APTrack],
                                      el_stimuli: Event,
                                      signal: Union[IrregularlySampledSignal,
                                                    AnalogSignal],
                                      window_size: Quantity = Quantity(
                                          0.003, "s"),
                                      sampling_rate=None) -> SpikeTrain:

    # TODO implement this function for analog signals and make it reusable
    if isinstance(signal, IrregularlySampledSignal) and sampling_rate is None:
        raise ValueError(
            "If an irregularly sampled signal is passed, you need to set the sampling rate!"
        )
    elif isinstance(signal, AnalogSignal):
        sampling_rate = signal.sampling_rate

    # initialize our list of action_potentials
    ap_times = []
    ap_waveforms = []

    # iterate over all the tracks
    for track_idx, ap_track in enumerate(ap_tracks):
        # first, get the template of our current AP track
        if ap_track.ap_template is None:
            warnings.warn(
                f"No AP template for AP track no. {track_idx}! "
                "Cannot extract APs for this track.")
            continue
        else:
            ap_template = ap_track.ap_template

        try:
            # then, slide the template over the window, calculating cross correlation for all the datapoints
            for sweep_idx, ap_latency in tqdm(
                    zip(ap_track.sweep_idcs, ap_track.latencies),
                    total=len(ap_track),
                    desc=f"Processing AP Track {track_idx} with "
                         f"{len(ap_track)} latencies."):
                # we need the time of the main pulse and add the latency to define the point around which we want to search
                window_center_time = el_stimuli.times[sweep_idx] + ap_latency

                # now, we define the indices of the first and last data points that we consider for our windowing
                if isinstance(signal, IrregularlySampledSignal):
                    signal: IrregularlySampledSignal
                    # find first signal index
                    first_signal_idx = bisect.bisect_left(
                        signal.times, (window_center_time - window_size -
                                       ap_template.duration / 2))
                    # find last signal index
                    last_signal_idx = bisect.bisect_left(
                        signal.times, (window_center_time + window_size +
                                       ap_template.duration / 2))
                elif isinstance(signal, AnalogSignal):
                    # TODO Check why this function is much slower for analog signals
                    first_signal_idx = floor(
                        (window_center_time - window_size -
                         (ap_template.duration / 2.0)) * signal.sampling_rate)
                    last_signal_idx = floor(
                        (window_center_time + window_size +
                         (ap_template.duration / 2.0)) * signal.sampling_rate)

                # slide the template over the window
                correlations = sliding_window_normalized_cross_correlation(
                    signal[first_signal_idx:last_signal_idx],
                    ap_template.signal_template)
                # then, retrieve the index for which we had the maximum correlation
                max_correlation_idx = np.argmax(
                    correlations) + first_signal_idx
                # finally, append the starting time and
                ap_times.append(signal.times[max_correlation_idx])
                ap_waveforms.append(
                    signal[max_correlation_idx:max_correlation_idx +
                           len(ap_template)])
        except Exception:
            traceback.print_exc()

    # sort the APs and return spiketrain object
    ap_times = sorted(ap_times)

    # build the waveforms array
    num_aps = len(ap_waveforms)
    max_len = max([len(ap) for ap in ap_waveforms])
    waveforms = np.zeros(shape=(num_aps, 1, max_len),
                         dtype=np.float64) * signal.units
    for ap_idx, ap_waveform in enumerate(ap_waveforms):
        waveforms[ap_idx, 0, 0:len(ap_waveform)] = ap_waveform.ravel()

    result = SpikeTrain(times=Quantity(ap_times, "s"),
                        t_start=signal.t_start,
                        t_stop=signal.t_stop,
                        name="APs from tracks",
                        waveforms=waveforms,
                        sampling_rate=sampling_rate)

    result.annotate(id=f"{TypeID.ACTION_POTENTIAL.value}.0",
                    type_id=TypeID.ACTION_POTENTIAL.value)
    return result
Example #26
 def __getitem__(self, key):
   return Quantity.__getitem__(self.view(Quantity), key)
Example #27
 def feature_units(self) -> Quantity:
     return Quantity(1.)