Example 1
def logpmf(X, N, P):
    assert X.shape == N.shape == P.shape
    # Limit to one-dimensional arrays, as indexing an array using a boolean
    # array of >1D (as I do with `result[idxs] = val`) is not supported by
    # Numba.
    assert X.ndim == N.ndim == P.ndim == 1

    P_isclose_0 = util.isclose(0, P)
    P_isclose_1 = util.isclose(1, P)
    X_equals_0 = X == 0
    X_equals_N = X == N
    special = [
        (np.logical_and(P_isclose_0, X_equals_0), 0),
        (np.logical_and(P_isclose_0, np.logical_not(X_equals_0)), -np.inf),
        (np.logical_and(P_isclose_1, X_equals_N), 0),
        (np.logical_and(P_isclose_1, np.logical_not(X_equals_N)), -np.inf),
    ]

    result = np.full_like(P, np.nan)
    for idxs, val in special:
        assert np.all(np.isnan(result[idxs]))
        result[idxs] = val

    unfilled = np.isnan(result)
    Nu = N[unfilled]
    Xu = X[unfilled]
    Pu = P[unfilled]
    result[unfilled] = util.log_N_choose_K(
        Nu, Xu) + Xu * np.log(Pu) + (Nu - Xu) * np.log(1 - Pu)
    assert not np.any(np.isnan(result))

    return result
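
A minimal usage sketch of the function above (assuming `util.isclose` is a
vectorized closeness test and `util.log_N_choose_K` returns log(N choose K)
elementwise; both are this project's helpers, not shown here):

import numpy as np
from scipy.stats import binom

X = np.array([0.0, 2.0, 5.0, 3.0])
N = np.array([5.0, 5.0, 5.0, 5.0])
P = np.array([0.0, 1.0, 1.0, 0.3])

# The special cases return 0 or -inf without ever evaluating np.log(0):
print(logpmf(X, N, P))          # [0., -inf, 0., -2.0226...]
print(binom.logpmf(3, 5, 0.3))  # cross-check of the regular branch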
Example 2
def done_command(label):
    # TODO(alive): Rewrite with the paradigm used in timer_db.py.
    #              Move this logic into Entry.
    if not os.path.isfile(_resource_path(label)):
        util.tlog("No diary entry with label `%s` exists" % label)
        return

    with util.OpenAndLock(_resource_path(label), 'r') as f:
        entry = json.load(f)

    now = time.time()
    span = now - entry['epoch']
    effective = entry['effective']

    # Handle orderings:
    #   1. __enter__, new, done, __exit__.
    #   2. new, done, __enter__, __exit__.
    #   3. __enter__, __exit__, new, done.
    #
    # If we are in any of the above orderings AND effective is 0.0, then we
    # simply set `effective` to `span`. In these cases, there is no interaction
    # between diary and timer.
    #
    # If, however, the first condition is true but the second is false, then
    # we must be in case #1 above. The only way for `effective` to be non-zero
    # here is for the user to have called timer.inc(). This is only possible
    # if a timer is running, and therefore, cases #2 and #3 are ruled out. The
    # else block handles this case.
    if (util.isclose(entry['epoch'], entry['interval_start_time'])
            and util.isclose(effective, 0.0)):
        effective = span
    else:
        # Handle orderings:
        #   1. __enter__, new, done, __exit__ (with call to timer.inc()).
        #   5. new, __enter__, done, __exit__.
        # Capture the amount of time elapsed after __enter__.
        timer = timer_db.running_timer()
        if timer:
            with timer:
                if timer.label == label:
                    effective += time.time() - entry['interval_start_time']

    if util.isclose(span - effective, 0.0, abs_tol=_TIME_EPSILON):
        overhead = 0.0
    else:
        overhead = (span - effective) / span

    click.echo(" Start time:    %s" % _format_timestamp(entry['epoch']))
    click.echo(" End time:      %s" % _format_timestamp(now))
    click.echo(" Span (m):      %.2f" % (span / 60.0))
    click.echo(" Effective (m): %.2f" % (effective / 60.0))
    click.echo(" Overhead (%%):  %.1f%%" % (overhead * 100.0))

    os.remove(_resource_path(label))
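
A worked example of the overhead computation above, with hypothetical numbers:

span = 1800.0       # 30 minutes between `new` and `done`
effective = 1440.0  # 24 minutes of effective work
overhead = (span - effective) / span
print("%.1f%%" % (overhead * 100.0))  # 20.0%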
Example 3
def calc_mut_p(A, Z, psi):
    eta = softmax(psi)  # Kx1
    phi = np.dot(Z, eta)  # Kx1
    mut_phi = np.dot(A, phi)  # Mx1
    # TODO: should this use omega_v?
    mut_p = 0.5 * mut_phi

    # Avoid numerical issues.
    delta = 1e-30
    mut_p[util.isclose(mut_p, 0)] += delta
    mut_p[util.isclose(mut_p, 0.5)] -= delta

    return mut_p
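
The nudging above keeps every probability strictly positive so that a
downstream log cannot return -inf. A small demonstration, with np.isclose
standing in for this project's util.isclose:

import numpy as np

p = np.array([0.0, 0.25, 0.5])
p[np.isclose(p, 0)] += 1e-30
print(np.log(p))  # all finite; np.log(0.0) would have been -inf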
Example 4
    def merge_one_station(self, window, strategy):
        new_window = {}
        for channel_id, channel_win in window.items():
            winnum = self.find_duplicate_channel(window, channel_id)
            if len(winnum) > 1:
                weighting = self.calculate_weighting(channel_id, winnum,
                                                     strategy)
            else:
                weighting = 1.0

            if self._verbose == 2:
                print("%s %s --> weighting: %.2f"
                      % (channel_id, winnum, weighting))

            if isclose(weighting, 0.0):
                continue

            new_window[channel_id] = []
            for win in channel_win:
                newwin = {}
                newwin["initial_weighting"] = weighting
                # Use a fresh name here; rebinding `channel_id` would shadow
                # the key used for `new_window` below.
                obsd_id = win["channel_id"]
                content = obsd_id.split(".")
                newwin["obsd_id"] = obsd_id
                newwin["synt_id"] = "%s.%s.S3.MX%s" % (content[0], content[1],
                                                       content[3][-1])
                newwin["relative_starttime"] = win["relative_starttime"]
                newwin["relative_endtime"] = win["relative_endtime"]
                new_window[channel_id].append(newwin)
        return new_window
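
The synthetic-trace id above is derived from the observed channel id, which
presumably follows the SEED NET.STA.LOC.CHA convention; the component letter
is the last character of the channel code:

channel_id = "IU.ANMO.00.BHZ"  # hypothetical observed id
content = channel_id.split(".")
synt_id = "%s.%s.S3.MX%s" % (content[0], content[1], content[3][-1])
assert synt_id == "IU.ANMO.S3.MXZ"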
Example 5
    def test_compute_significance_two_tails(self):
        cases = [(5, 22, 0.0169), (16, 22, 0.0525), (100, 220, 0.2001),
                 (300, 640, 0.1231)]
        for success, total, expected in cases:
            assert util.isclose(compute_significance_two_tails(success, total),
                                expected,
                                rel_tol=1e-03)
Example 6
def mkcells(gidinfo):
    timeit()
    for gid in gidinfo:
        x, y, z = gidinfo[gid]
        cell = h.Cell()
        gidinfo[gid] = CellInfo(cell)

        # The cell shape is actually an arc, and the area is fastidious with
        # respect to all six sides. But the length is treated as the line
        # distance between org points (interior corners in the circumferential
        # direction). Set diam so that the area is correct, including the end
        # areas.
        cell.soma.pt3dclear()
        cell.soma.pt3dadd(x, y, z, 1.)
        ilayer, icircle, ipt = gid2org(gid)
        x1, y1, z1 = xyz(ilayer, icircle, ipt + 1)
        cell.soma.pt3dadd(x1, y1, z1, 1.)
        length = cell.soma.L
        area = sum(mkgap.cell_side_areas(gid))
        diam = area / pi / length
        cell.soma.diam = diam
        assert isclose(cell.soma(.5).area(), area, abs_tol=area * 1e-5)

        cell.position(x, y, z)
        pc.set_gid2node(gid, rank)
        nc = cell.connect2target(None)
        pc.cell(gid, nc)
    x = pc.allreduce(len(gidinfo), 1)
    pr("Global number of real cells is %d" % x)
    timeit("mkcells")
Example 7
    def update(self):
        '''
        Update the Button image with the reward and direction values.

        :return: None
        '''

        # If statement is to stop the termination squares from being updated
        if len(self.children) > 0:

            max_val = max(self.direction_values)
            max_index = self.direction_values.index(max_val)

            # If two or more direction_values are about equal to the max
            # value, don't display any arrow image.
            max_count = 0
            for dir_val in self.direction_values:

                if util.isclose(dir_val, max_val):
                    max_count += 1

            if max_count == 1:
                self.children[0].opacity = 1
                self.children[0].source = self.background_images[max_index]
            else:
                self.children[0].opacity = 0

            self.children[1].set_strength(max_val * 4)
            self.children[1].color = self.colour

            if self.colour.rgb != [1.0, 1.0, 1.0]:
                self.children[1].draw()
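
The tie-counting logic above in isolation, with a plain absolute-tolerance
test standing in for util.isclose:

vals = [0.5, 0.5, 0.1, 0.2]
max_val = max(vals)
max_count = sum(1 for v in vals if abs(v - max_val) <= 1e-9)
print(max_count == 1)  # False: two directions tie, so no arrow is shown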
Example 8
def _do_gibbs_iter(V,
                   T_prime,
                   phi_alpha0,
                   phi_beta0,
                   logconc,
                   C,
                   Z,
                   check_full_llh=False):
    N = len(Z)
    Z = np.copy(Z)

    for vidx in range(N):
        old_cluster = Z[vidx]
        Z[vidx] = -1
        if not np.any(Z == old_cluster):
            # If `vidx` was the only member, remove this cluster.
            # Do so by moving the last cluster to its index.
            # (These operations are valid even if `highest == old_cluster`.)
            highest = C - 1
            Z[Z == highest] = old_cluster
            C -= 1

        # `cweights`: LLHs of each cluster destination for `vidx`
        # cweights[C] = LLH of adding new cluster
        cweights = np.empty(C + 1)
        # Consider every possible destination.
        for cidx in range(C):
            members = np.flatnonzero(Z == cidx)
            cweights[cidx] = _calc_cweight(V, T_prime, phi_alpha0, phi_beta0,
                                           vidx, members)
        # Consider adding a new cluster.
        cweights[C] = _calc_new_cluster_weight(V, T_prime, phi_alpha0,
                                               phi_beta0, vidx, logconc)
        cweights -= np.log(np.exp(logconc) + N - 1)

        if check_full_llh:
            cweights_full = _compute_cweights_full(V, T_prime, Z, phi_alpha0,
                                                   phi_beta0, vidx, logconc, C)
            assert np.all(util.isclose(cweights, cweights_full))

        cprobs = util.softmax(cweights)
        new_cluster = util.sample_multinom(cprobs)
        Z[vidx] = new_cluster
        if new_cluster == C:
            C += 1

    llh = _calc_llh(V, T_prime, Z, phi_alpha0, phi_beta0, logconc)
    return (C, Z, llh)
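
Minimal sketches of the two sampling helpers used above (assumptions about
util's API, not the project's actual implementations):

import numpy as np

def softmax(logw):
    # Shift by the max for numerical stability before exponentiating.
    w = np.exp(logw - np.max(logw))
    return w / w.sum()

def sample_multinom(probs):
    # Draw a single index distributed according to `probs`.
    return np.random.choice(len(probs), p=probs)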
Example 9
def _integral_separate_clusters(args):
    (phi1, V1_var_reads, V1_ref_reads, V1_omega, V2_var_reads, V2_ref_reads,
     V2_omega, midx, logsub) = args

    V1_total_reads = V1_var_reads + V1_ref_reads
    logP = _binom_logpmf(
        V1_var_reads,
        V1_total_reads,
        V1_omega * phi1,
    )
    upper = _make_upper(phi1, midx)
    lower = _make_lower(phi1, midx)

    A = V2_var_reads + 1
    B = V2_ref_reads + 1
    betainc_upper = betacdf(A, B, V2_omega * upper)
    betainc_lower = betacdf(A, B, V2_omega * lower)
    if util.isclose(betainc_upper, betainc_lower):
        return 0
    logP += np.log(betainc_upper - betainc_lower)
    logP -= logsub

    return np.exp(logP)
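
Here `betacdf(A, B, x)` is presumably the CDF of a Beta(A, B) distribution
(the regularized incomplete beta function); a plausible wrapper:

from scipy.stats import beta

def betacdf(a, b, x):
    return beta.cdf(x, a, b)

The difference betainc_upper - betainc_lower is the probability mass that
Beta(A, B) assigns to the interval, so the isclose guard short-circuits
before np.log can see a zero (or negative round-off) difference.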
Example 10
    def is_running(self):
        return not util.isclose(self.endtime, 0, abs_tol=1e-3)
Example 11
figure_index = 0

# First re-type distance and temperature arrays for pyplot:
distances = np.array(
    [[distances[index] - distances[0] for index in range(0, len(distances))]
     for distances in distance_averages_for_each_num_states])
temperatures = np.array([[Temp[index] for index in range(0, len(Temp))]
                         for Temp in temperatures_for_each_num_states])
distance_uncertainty = np.array(
    [[uncertainty[index] for index in range(0, len(uncertainty))]
     for uncertainty in uncertainty_distances_for_each_num_states])

# Define temporary arrays to store the MBAR-evaluated, re-weighted properties at T = 400 K
states_temp = []
distances_temp = []
uncertainty_temp = []
for state_index in range(0, len(temperatures)):
    # If the temperature distribution for 'num_states' includes a temperature
    # within 'tol' (10.0 K) of 400 K, include it in the analysis; otherwise
    # omit the dataset. For example, if num_states = 4, the sampled
    # temperatures are 300.0, 366.6, 433.3, and 500.0, so that dataset is
    # omitted.
    if any(util.isclose(temperatures[state_index][temperature_index], 400.0,
                        tol=10.0)
           for temperature_index in range(0, len(temperatures[state_index]))):
        for temperature_index in range(0, len(temperatures[state_index])):
            # Match with the same tolerance; an exact `== 400.0` test would
            # miss temperatures that are merely within `tol` of 400 K.
            if util.isclose(temperatures[state_index][temperature_index],
                            400.0, tol=10.0):
                states_temp.append(len(temperatures[state_index]))
                distances_temp.append(
                    distance_averages_for_each_num_states[state_index]
                    [temperature_index])
                uncertainty_temp.append(
                    uncertainty_distances_for_each_num_states[state_index]
                    [temperature_index])
                break
# Define arrays with the datapoints we will be plotting
states_400 = np.array(
    [states_temp[index] for index in range(0, len(states_temp))])
uncertainty_400 = np.array(
    [uncertainty_temp[index] for index in range(0, len(uncertainty_temp))])
distances_400 = np.array(
    [distances_temp[index] for index in range(0, len(distances_temp))])

# Plot <R_Na-Cl> @ 400 K vs. # of thermodynamic states
figure = pyplot.figure(figure_index)
pyplot.plot(states_400, distances_400, figure=figure)
pyplot.xlabel("Number of thermodynamic states")
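
A quick check of the num_states = 4 case described in the comment above: the
four sampled temperatures are evenly spaced between 300 K and 500 K, and none
falls within 10 K of 400 K, so that dataset is omitted:

import numpy as np

temps = np.linspace(300.0, 500.0, 4)  # [300.0, 366.67, 433.33, 500.0]
print(any(abs(t - 400.0) <= 10.0 for t in temps))  # False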