Example #1
 def test_smooth(self):
     cat = cv2.imread('cat.jpg')
     cat = util.bgr2gray(cat)
     cur_img_1 = util.smooth(30, 3, cat)
     cur_img_2 = util.smooth(60, 5, cat)
     cur_img_3 = util.smooth(0, 3, cat)
     self.assertEqual(cur_img_1.dtype, 'uint8')
     self.assertEqual(cur_img_2.dtype, 'uint8')
     self.assertEqual(cur_img_3.dtype, 'uint8')
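Note: util.smooth itself is not shown on this page. A minimal sketch consistent with the calls above, assuming smooth(value, kernel, img) wraps an OpenCV Gaussian blur and returns uint8 (the project's real helper may differ):

# Hypothetical stand-in for util.smooth(value, kernel, img) as exercised by this test.
import cv2
import numpy as np

def smooth(value, kernel, img):
    if value <= 0:
        return img.copy()            # value 0: return the image unchanged
    ksize = max(1, int(kernel)) | 1  # OpenCV needs an odd, positive kernel size
    sigma = value / 10.0             # map the strength value to a blur sigma
    return cv2.GaussianBlur(img, (ksize, ksize), sigma).astype(np.uint8)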
Example #2
def plot_pair(dd, states, daily, title):
    if not title:
        if len(states) == 1:
            title = states[0]
        else:
            title = "Combined"

    fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(10, 5))

    sdd = dd.loc[dd['state'].isin(pops.full_states(states))].copy()
    sdd = sdd.groupby('date').sum()

    def plot(axis, df, column, **kwargs):
        ax = df.plot(ax=axis, y=column, grid=True, **kwargs)
        decorate(ax, states)
        return ax

    if not daily:
        # cumulative deaths & cases
        plot(ax1,
             sdd,
             'deaths',
             title=title + " deaths",
             color=util.death_color)
        plot(ax2, sdd, 'cases', title=title + " cases")
    else:
        # daily deaths
        util.calc_daily(sdd, 'deaths', 'deathIncrease')
        plot(ax1,
             sdd,
             'deathIncrease',
             title=title + " daily deaths",
             color=util.death_color,
             alpha=0.25)

        sdd.loc[:, 'deathIncrease-smoothed'] = util.smooth(
            sdd.deathIncrease.values)
        plot(ax1, sdd, 'deathIncrease-smoothed', color=util.death_color)

        # daily cases
        util.calc_daily(sdd, 'cases', 'positiveIncrease')
        plot(ax2,
             sdd,
             'positiveIncrease',
             title=title + " daily cases",
             color=util.case_color,
             alpha=0.25)

        sdd.loc[:, 'daily-cases-smoothed'] = util.smooth(
            sdd.positiveIncrease.values)
        plot(ax2, sdd, 'daily-cases-smoothed', color=util.case_color)

    fig.tight_layout()
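The single-argument util.smooth(values) used on the daily series above is defined elsewhere in that project; a minimal sketch, assuming it is a centered moving average with some default window (here 7 days):

# Hypothetical moving-average stand-in for util.smooth(values).
import numpy as np

def smooth(values, window=7):
    values = np.asarray(values, dtype=float)
    kernel = np.ones(window) / window
    # mode='same' keeps the smoothed curve aligned with the original dates
    return np.convolve(values, kernel, mode='same')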
Example #3
def bwith(data, fs, smoothie, fk):
    """
    Bandwidth of a signal.

    Computes the bandwidth of the given data which can be windowed or not.
    The bandwidth corresponds to the level where the power of the spectrum is
    half its maximum value. It is determined as the level of 1/sqrt(2) times
    the maximum Fourier amplitude.

    If data are windowed the bandwidth of each window is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to make envelope of.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **bwith[, dbwithd]** - Bandwidth, Time derivative of predominant
        period (windowed only).
    """
    nfft = util.nextpow2(data.shape[1])
    freqaxis = np.linspace(0, fs, nfft + 1)
    bwith = np.zeros(data.shape[0])
    f = fftpack.fft(data, nfft)
    f_sm = util.smooth(abs(f[:, 0:nfft // 2]), 10)
    if np.size(data.shape) > 1:
        i = 0
        for row in f_sm:
            minfc = abs(row - max(abs(row * (1 / np.sqrt(2)))))
            [mdist_ind, _mindist] = min(enumerate(minfc), key=itemgetter(1))
            bwith[i] = freqaxis[mdist_ind]
            i = i + 1
        #bwith_add = \
        #        np.append(np.append([bwith[0]] * (np.size(fk) // 2), bwith),
        #        [bwith[np.size(bwith) - 1]] * (np.size(fk) // 2))
        # faster alternative
        bwith_add = np.hstack(
            ([bwith[0]] * (np.size(fk) // 2), bwith,
             [bwith[np.size(bwith) - 1]] * (np.size(fk) // 2)))
        dbwith = signal.lfilter(fk, 1, bwith_add)
        #dbwith = dbwith[np.size(fk) // 2:(np.size(dbwith) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dbwith = dbwith[np.size(fk) - 1:]
        bwith = util.smooth(bwith, smoothie)
        dbwith = util.smooth(dbwith, smoothie)
        return bwith, dbwith
    else:
        minfc = abs(data - max(abs(data * (1 / np.sqrt(2)))))
        [mdist_ind, _mindist] = min(enumerate(minfc), key=itemgetter(1))
        bwith = freqaxis[mdist_ind]
        return bwith
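A standalone illustration of the bandwidth definition given in the docstring (the frequency at which the spectral amplitude falls to 1/sqrt(2) of its maximum), using plain numpy instead of the util helpers assumed above:

# Self-contained bandwidth check on a synthetic 5 Hz tone plus noise.
import numpy as np

fs = 100.0                                    # sampling rate in Hz
t = np.arange(0, 10, 1 / fs)
trace = np.sin(2 * np.pi * 5 * t) + 0.1 * np.random.randn(t.size)

spec = np.abs(np.fft.rfft(trace))             # one-sided amplitude spectrum
freqs = np.fft.rfftfreq(trace.size, d=1 / fs)
level = spec.max() / np.sqrt(2)               # half-power amplitude level
print("bandwidth level reached at %.2f Hz" % freqs[np.argmin(np.abs(spec - level))])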
Example #4
def bwith(data, fs, smoothie, fk):
    """
    Bandwidth of a signal.

    Computes the bandwidth of the given data which can be windowed or not.
    The bandwidth corresponds to the level where the power of the spectrum is
    half its maximum value. It is determined as the level of 1/sqrt(2) times
    the maximum Fourier amplitude.

    If data are windowed the bandwidth of each window is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to make envelope of.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **bwith[, dbwithd]** - Bandwidth, Time derivative of predominant
        period (windowed only).
    """
    nfft = util.nextpow2(data.shape[1])
    freqaxis = np.linspace(0, fs, nfft + 1)
    bwith = np.zeros(data.shape[0])
    f = fftpack.fft(data, nfft)
    f_sm = util.smooth(abs(f[:, 0:nfft // 2]), 10)
    if np.size(data.shape) > 1:
        i = 0
        for row in f_sm:
            minfc = abs(row - max(abs(row * (1 / np.sqrt(2)))))
            [mdist_ind, _mindist] = min(enumerate(minfc), key=itemgetter(1))
            bwith[i] = freqaxis[mdist_ind]
            i = i + 1
        #bwith_add = \
        #        np.append(np.append([bwith[0]] * (np.size(fk) // 2), bwith),
        #        [bwith[np.size(bwith) - 1]] * (np.size(fk) // 2))
        # faster alternative
        bwith_add = np.hstack(
            ([bwith[0]] * (np.size(fk) // 2), bwith,
             [bwith[np.size(bwith) - 1]] * (np.size(fk) // 2)))
        dbwith = signal.lfilter(fk, 1, bwith_add)
        #dbwith = dbwith[np.size(fk) // 2:(np.size(dbwith) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dbwith = dbwith[np.size(fk) - 1:]
        bwith = util.smooth(bwith, smoothie)
        dbwith = util.smooth(dbwith, smoothie)
        return bwith, dbwith
    else:
        minfc = abs(data - max(abs(data * (1 / np.sqrt(2)))))
        [mdist_ind, _mindist] = min(enumerate(minfc), key=itemgetter(1))
        bwith = freqaxis[mdist_ind]
        return bwith
Example #5
def domperiod(data, fs, smoothie, fk):
    """
    Predominant period of a signal.

    Computes the predominant period of the given data which can be windowed or
    not. The period is determined as the period of the maximum value of the
    Fourier amplitude spectrum.

    If data are windowed the predominant period of each window is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to determine predominant period of.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **dperiod[, ddperiod]** - Predominant period, Time derivative of
        predominant period (windowed only).
    """
    nfft = 1024
    #nfft = util.nextpow2(data.shape[1])
    freqaxis = np.linspace(0, fs, nfft + 1)
    dperiod = np.zeros(data.shape[0])
    f = fftpack.fft(data, nfft)
    #f_sm = util.smooth(abs(f[:,0:nfft // 2]),1)
    f_sm = f[:, 0:nfft // 2]
    if np.size(data.shape) > 1:
        i = 0
        for row in f_sm:
            [mdist_ind, _mindist] = max(enumerate(abs(row)), key=itemgetter(1))
            dperiod[i] = freqaxis[mdist_ind]
            i = i + 1
        #dperiod_add = np.append(np.append([dperiod[0]] * (np.size(fk) // 2), \
        #    dperiod), [dperiod[np.size(dperiod) - 1]] * (np.size(fk) // 2))
        # faster alternative
        dperiod_add = np.hstack(
            ([dperiod[0]] * (np.size(fk) // 2), dperiod,
             [dperiod[np.size(dperiod) - 1]] * (np.size(fk) // 2)))
        ddperiod = signal.lfilter(fk, 1, dperiod_add)
        #ddperiod = ddperiod[np.size(fk) / \
        #    2:(np.size(ddperiod) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        ddperiod = ddperiod[np.size(fk) - 1:]
        dperiod = util.smooth(dperiod, smoothie)
        ddperiod = util.smooth(ddperiod, smoothie)
        return dperiod, ddperiod
    else:
        [mdist_ind, _mindist] = max(enumerate(abs(data)), key=itemgetter(1))
        dperiod = freqaxis[mdist_ind]
        return dperiod
Example #6
def domperiod(data, fs, smoothie, fk):
    """
    Predominant period of a signal.

    Computes the predominant period of the given data which can be windowed or
    not. The period is determined as the period of the maximum value of the
    Fourier amplitude spectrum.

    If data are windowed the predominant period of each window is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to determine predominant period of.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **dperiod[, ddperiod]** - Predominant period, Time derivative of
        predominant period (windowed only).
    """
    nfft = 1024
    #nfft = util.nextpow2(data.shape[1])
    freqaxis = np.linspace(0, fs, nfft + 1)
    dperiod = np.zeros(data.shape[0])
    f = fftpack.fft(data, nfft)
    #f_sm = util.smooth(abs(f[:,0:nfft // 2]),1)
    f_sm = f[:, 0:nfft // 2]
    if np.size(data.shape) > 1:
        i = 0
        for row in f_sm:
            [mdist_ind, _mindist] = max(enumerate(abs(row)), key=itemgetter(1))
            dperiod[i] = freqaxis[mdist_ind]
            i = i + 1
        #dperiod_add = np.append(np.append([dperiod[0]] * (np.size(fk) // 2), \
        #    dperiod), [dperiod[np.size(dperiod) - 1]] * (np.size(fk) // 2))
        # faster alternative
        dperiod_add = np.hstack(
            ([dperiod[0]] * (np.size(fk) // 2), dperiod,
             [dperiod[np.size(dperiod) - 1]] * (np.size(fk) // 2)))
        ddperiod = signal.lfilter(fk, 1, dperiod_add)
        #ddperiod = ddperiod[np.size(fk) / \
        #    2:(np.size(ddperiod) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        ddperiod = ddperiod[np.size(fk) - 1:]
        dperiod = util.smooth(dperiod, smoothie)
        ddperiod = util.smooth(ddperiod, smoothie)
        return dperiod, ddperiod
    else:
        [mdist_ind, _mindist] = max(enumerate(abs(data)), key=itemgetter(1))
        dperiod = freqaxis[mdist_ind]
        return dperiod
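A standalone numpy illustration of the docstring's definition of predominant period (the period at which the Fourier amplitude spectrum peaks), reported here as the reciprocal of the peak frequency:

# Self-contained predominant-period check on a synthetic 0.5 Hz (2 s period) tone.
import numpy as np

fs = 50.0
t = np.arange(0, 20, 1 / fs)
trace = np.sin(2 * np.pi * 0.5 * t) + 0.1 * np.random.randn(t.size)

spec = np.abs(np.fft.rfft(trace))
freqs = np.fft.rfftfreq(trace.size, d=1 / fs)
peak_freq = freqs[np.argmax(spec[1:]) + 1]    # skip the DC bin
print("predominant period ~ %.1f s" % (1.0 / peak_freq))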
Example #7
def get_wave_feature(waves, sample_rate, N):
    #waves = smooth(waves)
    features = []
    cep = wave_ceptrum(26, waves, N, sample_rate)
    # print cep
    cep = cep[:6]
    features = np.concatenate((cep, np.diff(cep, n=1), np.diff(cep, n=2)), axis=0)
    #features = np.array([])
    #waves = smooth(waves)
    #f = np.abs(fft(waves))
    features = np.append(features, np.mean(waves))
    features = np.append(features, np.std(waves))
    features = np.append(features, np.var(waves))
    features = np.append(features, np.amax(waves))
    features = np.append(features, np.amin(waves))
    # features = np.append(features, np.median(waves))

    smooth_waves = smooth(waves)

    features = np.append(features, np.mean(smooth_waves))
    features = np.append(features, np.std(smooth_waves))
    features = np.append(features, np.var(smooth_waves))
    features = np.append(features, np.amax(smooth_waves))
    features = np.append(features, np.amin(smooth_waves))
    # features = np.append(features, np.median(smooth_waves))

    return features
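The smooth(waves) helper imported by this example is not shown; a sketch under the assumption that it is the usual window-convolution smoother for 1-D signals (Hanning window, length assumed):

# Hypothetical 1-D waveform smoother matching the smooth(waves) calls above.
import numpy as np

def smooth(waves, window_len=11):
    waves = np.asarray(waves, dtype=float)
    w = np.hanning(window_len)
    # normalize the window so the smoothed signal keeps the original amplitude scale
    return np.convolve(waves, w / w.sum(), mode='same')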
Example #8
  def add_episode(self, episode, smooth_reward_factor=0.0):
    rewards = [e.reward for e in episode.event]
    if smooth_reward_factor > 0:
      rewards = util.smooth(rewards, smooth_reward_factor)

    self.stats['>add_episode'] += 1
    inserts = []
    for n, event in enumerate(episode.event):
      if n == 0:
        # for first event need to explicitly store state
        state_1_idx = self.state_free_slots.pop(0)
        self.state[state_1_idx] = util.rgb_from_render(event.render)
      if n != len(episode.event)-1:
        # in all but last event we need to peek for next state
        terminal = False
        state_2_idx = self.state_free_slots.pop(0)
        self.state[state_2_idx] = util.rgb_from_render(episode.event[n+1].render)
      else:
        # for last event store zero state for null state_2 (see init for more info)
        terminal = True
        state_2_idx = self.zero_state_idx
      # add element to replay memory recording where it was added
      insert_idx = self._add(state_1_idx, event.action.value, rewards[n], terminal, state_2_idx)
      self.prio_replay.new_experience(insert_idx)
      # roll state_2_idx to become state_1_idx for next step
      state_1_idx = state_2_idx

    self.stats['free_slots'] = len(self.state_free_slots)
    self.stats['size'] = self.size()
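util.smooth(rewards, smooth_reward_factor) is only applied when the factor is positive. One plausible reading, sketched here purely as an assumption, is an exponential moving average along the episode with the factor as the decay:

# Assumed behavior only: exponential smoothing of an episode's reward sequence.
import numpy as np

def smooth(rewards, factor):
    out = np.empty(len(rewards), dtype=float)
    acc = rewards[0]
    for i, r in enumerate(rewards):
        acc = factor * acc + (1.0 - factor) * r  # decay controlled by `factor`
        out[i] = acc
    return out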
Example #9
def model(data,
          ix_to_char,
          char_to_ix,
          vocab_size,
          num_iterations=35000,
          n_a=50):
    """
    Trains the model and generates jokes.

    Arguments:
        data -- text corpus
        ix_to_char -- dictionary that maps the index to a character
        char_to_ix -- dictionary that maps a character to an index
        num_iterations -- number of iterations to train the model for
        n_a -- number of units of the RNN cell
        vocab_size -- number of unique characters in vocabulary

    Returns:
        parameters -- learned parameters
    """
    n_x, n_y = vocab_size, vocab_size
    parameters = initialize_parameters(n_a, n_x, n_y)
    loss = get_initial_loss(vocab_size, 10)

    # Shuffle list of all jokes
    np.random.seed(0)
    np.random.shuffle(data)

    # Initialize the hidden state
    a_prev = np.zeros((n_a, 1))

    # Optimization loop
    for j in range(num_iterations):
        index = j % len(data)
        X = [None] + [char_to_ix[ch] for ch in data[index]]
        Y = X[1:] + [char_to_ix["\n"]]

        curr_loss, gradients, a_prev = optimize(X,
                                                Y,
                                                a_prev,
                                                parameters,
                                                vocab_size,
                                                learning_rate=0.01)

        # Use a running (exponential) average to keep the printed loss smooth.
        loss = smooth(loss, curr_loss)

        # Every 1000 iterations, generate n characters to check if the model is
        # learning properly
        if j % 1000 == 0:
            print('Iteration: %d, Loss: %f' % (j, loss) + '\n')
            for name in range(10):
                sampled_indices = sample(parameters, char_to_ix)
                print_sample(sampled_indices, ix_to_char)
            print('\n')

    return parameters
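The smooth(loss, curr_loss) call above is a running average that keeps the printed loss from jumping with every individual training example; a minimal sketch, with the decay value (0.999) assumed:

def smooth(loss, cur_loss, beta=0.999):
    # Exponential moving average of the training loss.
    return beta * loss + (1.0 - beta) * cur_loss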
Example #10
 def callback(value):
     # use global variable because we can only pass in one parameter
     global cur_img
     global cat
     cur_img = util.smooth(value, 10, grayscale)
     cv2.imshow('image', cur_img)
     if (value == 0):
         cv2.imshow('image',
                    cat)  # display original image when value = 0
Example #11
def plotScoreUnit(legendList, d, name, lineMarker, color, n, idx1, idx2,
                  smooth, smNum):
    x, y = d[:n][idx1], d[:n][idx2]
    if smooth:
        x, y = util.smooth(x, y, smNum)
    line = plt.plot(x, y, lineMarker, color=color)
    if (legendList is not None):
        legendList.append(name)
    return line[0].get_color(), d.shape[1]
Example #12
def pixel_count(guess, truth):
    iou_scores = {}
    net_fp = 0
    net_fn = 0
    net_tp = 0
    # compute for all semantic class ids
    # NOTE: void class doesn't contribute to these metrics
    for class_id in range(len(util.BDD_CLASSES) - 1):
        fp = ((guess == class_id) & (truth != class_id)).sum(axis=(0, 1))
        fn = ((guess != class_id) & (truth == class_id)).sum(axis=(0, 1))
        tp = ((guess == class_id) & (truth == class_id)).sum(axis=(0, 1))
        iou = util.smooth(float(tp), float(tp + fp + fn))
        # record metrics for each class
        iou_scores[class_id] = {"fp": fp, "fn": fn, "tp": tp, "iou": iou}
        net_fp += fp
        net_fn += fn
        net_tp += tp
    for category_name, category_ids in util.CITYSCAPE_IDS.items():
        sum_fp = 0
        sum_fn = 0
        sum_tp = 0
        for cat_id in category_ids:
            sum_fp += iou_scores[cat_id]["fp"]
            sum_fn += iou_scores[cat_id]["fn"]
            sum_tp += iou_scores[cat_id]["tp"]
        cat_iou = util.smooth(float(sum_tp), float(sum_tp + sum_fp + sum_fn))
        iou_scores[category_name] = {
            "fp": sum_fp,
            "fn": sum_fn,
            "iou": cat_iou
        }

    # record "net" metrics across all classes for this image
    net_iou = util.smooth(float(net_tp), float(net_tp + net_fp + net_fn))
    iou_scores["net_iou"] = net_iou
    iou_scores["net_tp"] = net_tp
    iou_scores["net_fp"] = net_fp
    iou_scores["net_fn"] = net_fn
    return iou_scores
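In this example util.smooth(numerator, denominator) acts as a smoothed ratio, so classes absent from both the prediction and the ground truth do not trigger a division by zero; a sketch assuming a small additive epsilon:

def smooth(numerator, denominator, eps=1e-6):
    # Smoothed IoU-style ratio; the epsilon (value assumed) guards the
    # tp + fp + fn == 0 case for classes missing from an image.
    return (numerator + eps) / (denominator + eps)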
Example #13
def cfrequency(data, fs, smoothie, fk):
    """
    Central frequency of a signal.

    Computes the central frequency of the given data which can be windowed or
    not. The central frequency is a measure of the frequency where the
    power is concentrated. It corresponds to the second moment of the power
    spectral density function.

    The central frequency is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to estimate central frequency from.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **cfreq[, dcfreq]** - Central frequency, Time derivative of center
        frequency (windowed only).
    """
    nfft = util.nextpow2(data.shape[1])
    freq = np.linspace(0, fs, nfft + 1)
    freqaxis = freq[0:nfft // 2]
    cfreq = np.zeros(data.shape[0])
    if np.size(data.shape) > 1:
        i = 0
        for row in data:
            Px_wm = welch(row, np.hamming(len(row)), util.nextpow2(len(row)))
            Px = Px_wm[0:len(Px_wm) // 2]
            cfreq[i] = np.sqrt(np.sum(freqaxis ** 2 * Px) / (sum(Px)))
            i = i + 1
        cfreq = util.smooth(cfreq, smoothie)
        #cfreq_add = \
        #        np.append(np.append([cfreq[0]] * (np.size(fk) // 2), cfreq),
        #        [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2))
        # faster alternative
        cfreq_add = np.hstack(
            ([cfreq[0]] * (np.size(fk) // 2), cfreq,
             [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2)))
        dcfreq = signal.lfilter(fk, 1, cfreq_add)
        #dcfreq = dcfreq[np.size(fk) // 2:(np.size(dcfreq) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dcfreq = dcfreq[np.size(fk) - 1:np.size(dcfreq)]
        return cfreq, dcfreq
    else:
        Px_wm = welch(data, np.hamming(len(data)), util.nextpow2(len(data)))
        Px = Px_wm[0:len(Px_wm) // 2]
        cfreq = np.sqrt(np.sum(freqaxis ** 2 * Px) / (sum(Px)))
        return cfreq
Example #14
def cfrequency(data, fs, smoothie, fk):
    """
    Central frequency of a signal.

    Computes the central frequency of the given data which can be windowed or
    not. The central frequency is a measure of the frequency where the
    power is concentrated. It corresponds to the second moment of the power
    spectral density function.

    The central frequency is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to estimate central frequency from.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **cfreq[, dcfreq]** - Central frequency, Time derivative of center
        frequency (windowed only).
    """
    nfft = util.nextpow2(data.shape[1])
    freq = np.linspace(0, fs, nfft + 1)
    freqaxis = freq[0:nfft // 2]
    cfreq = np.zeros(data.shape[0])
    if np.size(data.shape) > 1:
        i = 0
        for row in data:
            Px_wm = welch(row, np.hamming(len(row)), util.nextpow2(len(row)))
            Px = Px_wm[0:len(Px_wm) // 2]
            cfreq[i] = np.sqrt(np.sum(freqaxis**2 * Px) / (sum(Px)))
            i = i + 1
        cfreq = util.smooth(cfreq, smoothie)
        #cfreq_add = \
        #        np.append(np.append([cfreq[0]] * (np.size(fk) // 2), cfreq),
        #        [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2))
        # faster alternative
        cfreq_add = np.hstack(
            ([cfreq[0]] * (np.size(fk) // 2), cfreq,
             [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2)))
        dcfreq = signal.lfilter(fk, 1, cfreq_add)
        #dcfreq = dcfreq[np.size(fk) // 2:(np.size(dcfreq) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dcfreq = dcfreq[np.size(fk) - 1:np.size(dcfreq)]
        return cfreq, dcfreq
    else:
        Px_wm = welch(data, np.hamming(len(data)), util.nextpow2(len(data)))
        Px = Px_wm[0:len(Px_wm) // 2]
        cfreq = np.sqrt(np.sum(freqaxis**2 * Px) / (sum(Px)))
        return cfreq
Example #15
def plot_grid(dd, states, daily, cases):
    n = len(states)
    s = int(math.sqrt(n))
    t = s
    while s * t < n:
        t = t + 1

    column = 'cases' if cases else 'deaths'
    color = util.case_color if cases else util.death_color

    dd.set_index('state', inplace=True)
    fig, ax = plt.subplots(figsize=(24, 24))
    for i in range(n):
        ax = plt.subplot2grid((t, s), (i // s, i % s))

        state = states[i].upper()
        fstate = pops.full_state(state)

        sdd = dd.loc[fstate].copy()
        sdd.set_index('date', inplace=True)
        sdd.sort_index(inplace=True)

        if daily:
            util.calc_daily(sdd, column, 'increase')
            ax = sdd.plot(ax=ax,
                          y='increase',
                          grid=True,
                          color=color,
                          alpha=0.25)
        else:
            ax = sdd.plot(ax=ax, y=column, grid=True, color=color)

        title = ax.set_title(state,
                             loc='left',
                             color='black',
                             verticalalignment='top',
                             fontweight='roman')

        title.set_position([0.05, 0.85])

        if daily:
            sdd['smoothed'] = util.smooth(sdd.increase.values)
            sdd.plot(ax=ax, y='smoothed', grid=True, color=color)

        decorate(ax, [state])

    fig.tight_layout()
Example #16
def make_plot(show = False):
    fig = plot.figure(figsize = (7, 6), dpi = dpi)
    ax = fig.add_subplot(111)

    # Plot
    x = frame_range
    y = np.array(extents)

    kernel = 5
    smooth_y = util.smooth(y, kernel)

    plot.plot(x, y, c = colors[1], linewidth = linewidth, alpha = alpha)
    plot.plot(x, smooth_y, c = colors[1], linewidth = linewidth)

    # Axes
    plot.xlim(x[0], x[-1])

    angles = np.linspace(0, 360, 7)
    plot.yticks(angles)
    plot.ylim(0, 360)

    # Annotate Axes
    plot.xlabel(r"$t \mathrm{\ (planet\ orbits)}$", fontsize = fontsize + 2)
    plot.ylabel(r"Azimuthal Extents $\mathrm{(degrees)}$", fontsize = fontsize + 2)

    #plot.legend(loc = "upper right", bbox_to_anchor = (1.28, 1.0)) # outside of plot
    #plot.legend(loc = "upper left") # outside of plot

    # Title
    title = r"$\mathrm{Azimuthal\ Extents}$"
    plot.title("%s" % (title), y = 1.01, fontsize = fontsize + 3)

    # Save, Show, and Close
    current_directory = os.getcwd().split("/")[-3]
    current_beam = os.getcwd().split("/")[-1]
    if version is None:
        save_fn = "%s/extents-%s-%s.png" % (save_directory, current_directory, current_beam)
    else:
        save_fn = "%s/v%04d_extents-%s-%s.png" % (save_directory, version, current_directory, current_beam)
    plot.savefig(save_fn, bbox_inches = 'tight', dpi = dpi)

    if show:
        plot.show()

    plot.close(fig) # Close Figure (to avoid too many figures)
Example #17
def plotUnit(legendList,
             fn,
             name,
             lineMarker,
             color,
             n,
             idx1,
             idx2,
             smooth=False,
             smNum=100):
    x, y, xr, yr = myio.loadScore(fn, n, idx1=idx1, idx2=idx2)
    if smooth:
        x, y = util.smooth(x, y, smNum)
    line = plt.plot(x, y, lineMarker, color=color)
    if (legendList is not None):
        legendList.append(name)
    return line[0].get_color(), (xr, yr)
Example #18
def make_plot(show=False):
    # Set up figure
    fig = plot.figure(figsize=(7, 6), dpi=dpi)
    ax = fig.add_subplot(111)

    ### Line Plots ###

    for i, beam in enumerate(beam_sizes):
        contrasts = load_data(beam)

        x = frame_range
        y = np.array(contrasts)
        y2 = util.smooth(y, kernel)
        plot.plot(x, y, c=colors[i], linewidth=linewidth, alpha=alpha)
        plot.plot(x,
                  y2,
                  c=colors[i],
                  linewidth=linewidth,
                  label=r"$%d$ $\mathrm{AU}$" % beam)

    # Annotate
    plot.xlabel("Time (planet orbits)", fontsize=fontsize)
    plot.ylabel("Contrasts", fontsize=fontsize)
    #plot.title("")

    plot.legend(loc="upper left")

    # Axes
    plot.xlim(frame_range[0], frame_range[-1])
    if max_y is not None:
        plot.ylim(0, max_y)

    # Save, Show, and Close
    if version is None:
        save_fn = "%s/id%04d_comparing_contrasts_lambda%04d.png" % (
            save_directory, id_number, wavelength)
    else:
        save_fn = "%s/v%04d_id%04d_comparing_contrasts_lambda%04d.png" % (
            save_directory, version, id_number, wavelength)
    plot.savefig(save_fn, bbox_inches='tight', dpi=dpi)

    if show:
        plot.show()

    plot.close(fig)  # Close Figure (to avoid too many figures)
Example #19
def cfrequency(data, fs, smoothie, fk):
    """
    Central frequency of a signal.

    Computes the central frequency of the given data which can be windowed or
    not. The central frequency is a measure of the frequency where the
    power is concentrated. It corresponds to the second moment of the power
    spectral density function.

    The central frequency is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to estimate central frequency from.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **cfreq[, dcfreq]** - Central frequency, Time derivative of center
        frequency (windowed only).
    """
    # for windowed data
    if np.size(data.shape) > 1:
        cfreq = np.zeros(data.shape[0])
        i = 0
        for row in data:
            cfreq[i] = cfrequency_unwindowed(row, fs)
            i = i + 1
        cfreq = util.smooth(cfreq, smoothie)
        #cfreq_add = \
        #        np.append(np.append([cfreq[0]] * (np.size(fk) // 2), cfreq),
        #        [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2))
        # faster alternative
        cfreq_add = np.hstack(
            ([cfreq[0]] * (np.size(fk) // 2), cfreq,
             [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2)))
        dcfreq = signal.lfilter(fk, 1, cfreq_add)
        #dcfreq = dcfreq[np.size(fk) // 2:(np.size(dcfreq) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dcfreq = dcfreq[np.size(fk) - 1:np.size(dcfreq)]
        return cfreq, dcfreq
    # for unwindowed data
    else:
        cfreq = cfrequency_unwindowed(data, fs)
        return cfreq
Example #20
def cfrequency(data, fs, smoothie, fk):
    """
    Central frequency of a signal.

    Computes the central frequency of the given data which can be windowed or
    not. The central frequency is a measure of the frequency where the
    power is concentrated. It corresponds to the second moment of the power
    spectral density function.

    The central frequency is returned.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to estimate central frequency from.
    :param fs: Sampling frequency in Hz.
    :param smoothie: Factor for smoothing the result.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **cfreq[, dcfreq]** - Central frequency, Time derivative of center
        frequency (windowed only).
    """
    # for windowed data
    if np.size(data.shape) > 1:
        cfreq = np.zeros(data.shape[0])
        i = 0
        for row in data:
            cfreq[i] = cfrequency_unwindowed(row, fs)
            i = i + 1
        cfreq = util.smooth(cfreq, smoothie)
        #cfreq_add = \
        #        np.append(np.append([cfreq[0]] * (np.size(fk) // 2), cfreq),
        #        [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2))
        # faster alternative
        cfreq_add = np.hstack(
            ([cfreq[0]] * (np.size(fk) // 2), cfreq,
             [cfreq[np.size(cfreq) - 1]] * (np.size(fk) // 2)))
        dcfreq = signal.lfilter(fk, 1, cfreq_add)
        #dcfreq = dcfreq[np.size(fk) // 2:(np.size(dcfreq) - np.size(fk) // 2)]
        # correct start and end values of time derivative
        dcfreq = dcfreq[np.size(fk) - 1:np.size(dcfreq)]
        return cfreq, dcfreq
    # for unwindowed data
    else:
        cfreq = cfrequency_unwindowed(data, fs)
        return cfreq
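A standalone check of the central-frequency definition in the docstring (the square root of the power-weighted mean of f**2 over the PSD), using scipy.signal.welch rather than the welch helper assumed by these examples:

# Self-contained central-frequency estimate on a synthetic 12 Hz tone plus noise.
import numpy as np
from scipy.signal import welch

fs = 200.0
t = np.arange(0, 20, 1 / fs)
trace = np.sin(2 * np.pi * 12 * t) + 0.05 * np.random.randn(t.size)

freqs, Px = welch(trace, fs=fs, nperseg=1024)
cfreq = np.sqrt(np.sum(freqs ** 2 * Px) / np.sum(Px))
print("central frequency ~ %.1f Hz" % cfreq)  # pulled slightly above 12 Hz by the noise floor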
Example #21
def drawOne(fn,
            n=None,
            ver=0,
            xlbl=None,
            ylbl=None,
            ls='-',
            smooth=False,
            smNum=100):
    x, y, xr, yr = myio.loadScore(fn, n, ver)
    lgd = fn.replace('.txt', '')
    if smooth:
        x, y = util.smooth(x, y, smNum)
    plt.plot(x, y, ls)
    xlbl = xlbl if xlbl is not None else xr
    ylbl = ylbl if ylbl is not None else yr
    plt.xlabel(xlbl)
    plt.ylabel(ylbl)
    plt.legend(lgd)
    plt.show()
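drawOne, plotUnit and plotScoreUnit all call the three-argument form util.smooth(x, y, smNum) and expect a smoothed (x, y) pair back; a sketch under the assumption that it averages both series over smNum roughly equal bins:

# Hypothetical (x, y) smoother consistent with the util.smooth(x, y, smNum) calls.
import numpy as np

def smooth(x, y, num):
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    chunks = np.array_split(np.arange(len(x)), num)   # num roughly equal index bins
    xs = np.array([x[c].mean() for c in chunks if len(c)])
    ys = np.array([y[c].mean() for c in chunks if len(c)])
    return xs, ys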
Example #22
def make_plot(show = False):
    fig = plot.figure(figsize = (7, 6), dpi = dpi)
    ax = fig.add_subplot(111)

    # Plot
    for i, resolution in enumerate(resolutions):
        resolution_label = r"$%d$" % resolution

        x = frame_range
        y = np.array(extents[i])

        kernel = 5
        smooth_y = util.smooth(y, kernel)

        plot.plot(x, y, c = colors[i], linewidth = linewidth, alpha = alpha)
        plot.plot(x, smooth_y, c = colors[i], linewidth = linewidth, label = resolution_label)

    # Axes
    angles = np.linspace(0, 360, 7)
    plot.yticks(angles)
    plot.ylim(0, 360)

    # Annotate Axes
    plot.xlabel(r"$t \mathrm{\ (planet\ orbits)}$", fontsize = fontsize + 2)
    plot.ylabel(r"$\phi_\mathrm{extent}$ $\mathrm{(degrees)}$", fontsize = fontsize + 2)

    plot.legend(loc = "upper right", bbox_to_anchor = (1.28, 1.0)) # outside of plot

    # Title
    title = r"Azimuthal Extents"
    plot.title("%s" % (title), fontsize = fontsize + 3)

    # Save, Show, and Close
    if version is None:
        save_fn = "%s/extentsByResolution.png" % (save_directory)
    else:
        save_fn = "%s/v%04d_extentsByResolution.png" % (save_directory, version)
    plot.savefig(save_fn, bbox_inches = 'tight', dpi = dpi)

    if show:
        plot.show()

    plot.close(fig) # Close Figure (to avoid too many figures)
Example #23
def colorbar_ax(ax):
  from mpl_toolkits.axes_grid1 import make_axes_locatable
  fig = ax.figure
  div = make_axes_locatable(ax)
  cax = div.append_axes('right', size='5%', pad=0.05)
  cax.tick_params(labelsize=10)
  return cax

###truth
filename = workdir+'/truth/{:05d}.bin'.format(t)
psik = util.read_field(filename, nkx, nky, nz)
# var = util.spec2grid(util.spec_bandpass(convertor(psik), krange, s))
var = util.spec2grid(convertor(psik))
var = np.roll(np.roll(var, -40, axis=0), 60, axis=1) #shift domain position for better plotting

out1 = util.smooth(var[:, :, lv], smth)
out1, clevel = set_clevel(out1, varmin, varmax, varint)
c = ax[0].contourf(ii, jj, out1, clevel, cmap='seismic')
ax[0].contour(ii, jj, out1, clevel_highlight, colors='black', linestyles='solid', linewidths=2)
cax = colorbar_ax(ax[0])
plt.colorbar(c, cax=cax)
set_axis(ax[0], varname+' truth')

###ensemble
if ens_type == 1:
  name = 'f_{:05d}'.format(t)
if ens_type == 2:
  name = '{:05d}'.format(t)
if ens_type == 3:
  name = 'fa_{:05d}'.format(t)
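In these gridded-field scripts util.smooth(field, smth) smooths an array by a width of smth grid points; a sketch assuming a simple boxcar (uniform) filter, which smooths along every axis of whatever array it is given:

# Assumed stand-in for the spatial smoothing of model fields shown above.
import numpy as np
from scipy.ndimage import uniform_filter

def smooth(field, smth):
    field = np.asarray(field, dtype=float)
    if smth <= 1:
        return field                               # no smoothing requested
    return uniform_filter(field, size=smth, mode='nearest')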
Example #24
def plot_combined(counties, daily, title):

    dd = pd.read_csv(
        nytimes,
        usecols=['date', 'fips', 'cases', 'deaths'],
        parse_dates=['date'],
    )

    sdd = dd.loc[dd['fips'].isin(counties)].copy()
    sdd = sdd.groupby('date').sum()

    def decorate(axis):
        axis.set_xlabel('')
        sec = axis.secondary_yaxis('right',
                                   functions=pops.county_funcs(counties))
        sec.set_ylabel('per 10k population')
        axis.get_legend().remove()
        date_form = DateFormatter("%m-%d")
        axis.xaxis.set_major_formatter(date_form)

        for xlabel in axis.get_xticklabels():
            xlabel.set_fontsize(8)
            xlabel.set_rotation(20)

    if len(sdd.index) == 0:
        print("no data")
        return

    #place = pops.county_name(fips)
    if title:
        place = title
    else:
        place = "Combined"

    fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(10, 5))

    if daily:
        util.calc_daily(sdd, 'deaths', 'daily_deaths')
        ax = sdd.plot(ax=ax1,
                      y='daily_deaths',
                      logy=False,
                      grid=True,
                      color=util.death_color,
                      alpha=0.25,
                      title="Daily Deaths: " + place)

        sdd.loc[:, 'deaths-smoothed'] = util.smooth(sdd.daily_deaths.values)
        sdd.plot(ax=ax1,
                 y='deaths-smoothed',
                 grid=True,
                 color=util.death_color)

    else:
        ax = sdd.plot(ax=ax1,
                      y='deaths',
                      logy=False,
                      grid=True,
                      color=util.death_color,
                      title="Deaths: " + place)
    decorate(ax)

    if daily:
        util.calc_daily(sdd, 'cases', 'daily_cases')
        ax = sdd.plot(ax=ax2,
                      y='daily_cases',
                      logy=False,
                      grid=True,
                      color=util.case_color,
                      alpha=0.25,
                      title=("Daily Cases: " + place))

        sdd['cases-smoothed'] = util.smooth(sdd.daily_cases.values)
        sdd.plot(ax=ax2, y='cases-smoothed', grid=True, color=util.case_color)

    else:
        ax = sdd.plot(ax=ax2,
                      y='cases',
                      logy=False,
                      grid=True,
                      color=util.case_color,
                      title=("Cases: " + place))
    decorate(ax)
    fig.tight_layout()
Example #25
def normEnvelope(data, fs, smoothie, fk):
    """
    Normalized envelope of a signal.

    Computes the normalized envelope of the given data which can be windowed
    or not. In order to obtain a normalized measure of the signal envelope
    the instantaneous bandwidth of the smoothed envelope is normalized by the
    Nyquist frequency and is integrated afterwards.

    The time derivative of the normalized envelope is returned if input data
    are windowed only.

    :type data: :class:`~numpy.ndarray`
    :param data: Data to make normalized envelope of.
    :param fs: Sampling frequency.
    :param smoothie: Window length for moving average.
    :param fk: Coefficients for calculating time derivatives
        (calculated via central difference).
    :return: **Anorm[, dAnorm]** - Normalized envelope of input data, Time
        derivative of normalized envelope (windowed only).
    """
    x = envelope(data)
    fs = float(fs)
    if (size(x[1].shape) > 1):
        i = 0
        Anorm = np.zeros(x[1].shape[0], dtype='float64')
        for row in x[1]:
            A_win_smooth = util.smooth(row, int(np.floor(len(row) / 3)))
            # Differentiation of original signal, dA/dt
            #A_win_add = append(append([row[0]]*(size(fk)/2),row),
            #                      [row[size(row)-1]]*(size(fk)/2))
            # Better, because faster, calculation of A_win_add
            A_win_add = np.hstack(([A_win_smooth[0]] * (size(fk) // 2),
                                   A_win_smooth,
                                   [A_win_smooth[size(A_win_smooth) - 1]] * \
                                   (size(fk) // 2)))
            t = signal.lfilter(fk, 1, A_win_add)
            #t = t[size(fk) // 2:(size(t) - size(fk) // 2)]
            # correct start and end values of time derivative
            t = t[size(fk) - 1:size(t)]
            A_win_smooth[A_win_smooth < 1] = 1
            # (dA/dt) / 2*PI*smooth(A)*fs/2
            t_ = t / (2. * pi * (A_win_smooth) * (fs / 2.0))
            # Integral within window
            t_ = cumtrapz(t_, dx=(1. / fs))
            t_ = np.concatenate((t_[0:1], t_))
            Anorm[i] = ((np.exp(np.mean(t_))) - 1) * 100
            i = i + 1
        #Anorm = util.smooth(Anorm,smoothie)
        #Anorm_add = np.append(np.append([Anorm[0]] * (size(fk) // 2), Anorm),
        #                      [Anorm[size(Anorm) - 1]] * (size(fk) // 2))
        # faster alternative to calculate Anorm_add
        Anorm_add = np.hstack(([Anorm[0]] * (np.size(fk) // 2), Anorm, \
                  [Anorm[np.size(Anorm) - 1]] * (np.size(fk) // 2)))
        dAnorm = signal.lfilter(fk, 1, Anorm_add)
        # correct start and end values of time derivative
        dAnorm = dAnorm[size(fk) - 1:size(dAnorm)]
        #dAnorm = dAnorm[size(fk) // 2:(size(dAnorm) - size(fk) // 2)]
        return Anorm, dAnorm
    else:
        Anorm = np.zeros(1, dtype='float64')
        A_win_smooth = util.smooth(x[1], smoothie)
        # Differentiation of original signal, dA/dt
        #A_win_add = np.append(np.append([x[1][0]] * (size(fk) // 2), x[1]),
        #                      [x[1][size(x[1]) - 1]] * (size(fk) // 2))
        # Better, because faster, calculation of A_win_add
        A_win_add = np.hstack(([A_win_smooth[0]] * (size(fk) // 2), \
                  A_win_smooth, [A_win_smooth[size(A_win_smooth) - 1]] * \
                  (size(fk) // 2)))
        t = signal.lfilter(fk, 1, A_win_add)
        #t = t[size(fk) // 2:(size(t) - size(fk) // 2)]
        # correct start and end values of time derivative
        t = t[size(fk) - 1:size(t)]
        A_win_smooth[A_win_smooth < 1] = 1
        # (dA/dt) / 2*PI*smooth(A)*fs/2
        t_ = t / (2. * pi * (A_win_smooth) * (fs / 2.0))
        # Integral within window
        t_ = cumtrapz(t_, dx=(1.0 / fs))
        t_ = np.concatenate((t_[0:1], t_))
        Anorm = ((np.exp(np.mean(t_))) - 1) * 100
        return Anorm
Example #26
def make_plot(show=False):
    fig = plot.figure(figsize=(10, 6), dpi=dpi)
    gs = gridspec.GridSpec(nrows=1, ncols=2, width_ratios=[5, 2], figure=fig)
    ax = fig.add_subplot(gs[0])

    # Plot
    x = frame_range
    y = np.array(peak_offsets)

    kernel = 5
    smooth_y = util.smooth(y, kernel)

    plot.scatter(x, y, c="mediumspringgreen", s=size, alpha=alpha)
    #plot.plot(x, y, c = colors[1], linewidth = linewidth)
    #plot.plot(x, smooth_y, c = colors[1], linewidth = linewidth)

    plot.plot([last_frame, last_frame], [-120, 120], linestyle="--", c='k')

    # Axes
    plot.xlim(x[0], x[-1])
    #ax.set_xticklabels([])

    angles = np.linspace(-120, 120, 9)
    plot.yticks(angles)
    plot.ylim(-120, 120)

    # Annotate Axes
    plot.xlabel(r"$t \mathrm{\ (planet\ orbits)}$", fontsize=fontsize + 2)
    plot.ylabel(r"Peak Offsets $\mathrm{(degrees)}$", fontsize=fontsize + 2)

    threshold_text = r"$\frac{I_\mathrm{cut}}{I_\mathrm{max}}=%.2f$" % threshold
    plot.text(0.98 * (x[-1] - x[0]) + x[0],
              0.9 * (plot.ylim()[-1] - plot.ylim()[0]) + plot.ylim()[0],
              threshold_text,
              horizontalalignment='right',
              fontsize=fontsize - 4)

    #plot.legend(loc = "upper right", bbox_to_anchor = (1.28, 1.0)) # outside of plot
    #plot.legend(loc = "upper left") # outside of plot

    # Title
    #title = r"$\mathrm{Azimuthal\ Extents}$"
    title = r'$h = %.2f$   $\Sigma = %.3e$  (2-D)  [$%.3f^{\prime\prime}$]' % (
        scale_height, fargo_par["p"].sigma0, arc_beam)
    plot.title("%s" % (title),
               y=1.20,
               fontsize=fontsize + 3,
               bbox=dict(facecolor='none',
                         edgecolor='black',
                         linewidth=1.5,
                         pad=7.0))

    #### Histograms ####
    ax2 = fig.add_subplot(gs[1])

    truncate = az.my_searchsorted(frame_range, last_frame) - 1
    y_truncated = y[:truncate]
    plot.hist(y_truncated,
              bins=np.linspace(-120 - 10, 120 + 10, 261),
              cumulative=True,
              color='darkgreen',
              align='left',
              orientation='horizontal',
              histtype='stepfilled',
              density=True)

    ref_lines = np.linspace(0, 30, 4)
    for i, ref_i in enumerate(ref_lines):
        if ref_i == 0 or ref_i == ref_lines[-1]:
            linestyle = "-"
            ref_linewidth = 2
        else:
            linestyle = "--"
            ref_linewidth = 1
        plot.plot([0, 1], [ref_i, ref_i],
                  c='k',
                  linestyle=linestyle,
                  linewidth=ref_linewidth)
        plot.plot([0, 1], [-ref_i, -ref_i],
                  c='k',
                  linestyle=linestyle,
                  linewidth=ref_linewidth)

    ax2.set_xlim(0, 1)
    hist_ticks = np.linspace(0, 1, 6)
    hist_ticks_minor = np.linspace(0, 1, 11)
    ax2.set_xticks(hist_ticks)
    ax2.set_xticks(hist_ticks_minor, minor=True)

    ax2.set_ylim(-120, 120)
    ax2.set_yticks(angles)
    #ax2.set_yticklabels([])

    if last_frame < frame_range[-1]:
        plot.title(r"ONLY to $t$ = $%d$" % last_frame, fontsize=fontsize - 1)

    #### Add mass axis ####

    min_mass = args.min_mass
    max_mass = args.max_mass
    delta_mass = args.delta_mass
    if max_mass is None:
        max_mass = total_mass[frame_range[-1] - 1]

    mass_ticks = np.arange(min_mass, max_mass, delta_mass)

    def tick_function(masses):
        # For the secondary x-axis showing the planet mass over time
        tick_locations = np.zeros(len(masses))
        tick_labels = []

        for i, mass in enumerate(masses):
            #total_mass_jupiter = total_mass # in Jupiter masses
            times_i = az.my_searchsorted(total_mass, mass)

            #tick_times = times[times_i]

            print(mass, times_i, len(times))

            tick_locations[i] = times[times_i]
            if delta_mass < 0.1:
                tick_labels.append("%.2f" % mass)
            else:
                tick_labels.append("%.1f" % mass)

        return tick_locations, tick_labels

    tick_locations, tick_labels = tick_function(mass_ticks)

    ax_twin = ax.twiny()
    ax_twin.set_xlim(ax.get_xlim())
    ax_twin.set_xticks(tick_locations)
    ax_twin.set_xticklabels(tick_labels)

    ax_twin.set_xlabel(r"$M_\mathrm{p}$ [$M_\mathrm{J}$]",
                       fontsize=fontsize,
                       labelpad=10)

    if args.minor_delta_mass is not None:
        minor_mass_ticks = np.arange(0.1, max_mass, args.minor_delta_mass)
        minor_tick_locations, _ = tick_function(minor_mass_ticks)
        ax_twin.set_xticks(minor_tick_locations, minor=True)

    # Save, Show, and Close
    current_directory = os.getcwd().split("/")[-3]
    current_beam = os.getcwd().split("/")[-1]
    if version is None:
        save_fn = "%s/peakOffsets-%s-%s.png" % (
            save_directory, current_directory, current_beam)
    else:
        save_fn = "%s/v%04d_peakOffsets-%s-%s.png" % (
            save_directory, version, current_directory, current_beam)
    plot.savefig(save_fn, bbox_inches='tight', dpi=dpi, pad_inches=0.2)

    if show:
        plot.show()

    plot.close(fig)  # Close Figure (to avoid too many figures)
Example #27
def plot_them(fips, daily):
    if fips == 'New York City':
        sdd = by_name.loc[fips].copy()
    else:
        fips = int(fips)
        sdd = by_fips.loc[fips].copy()

    def decorate(axis):
        axis.set_xlabel('')
        sec = axis.secondary_yaxis('right',
                                   functions=pops.county_funcs([fips]))
        sec.set_ylabel('per 10k population')
        axis.get_legend().remove()
        date_form = DateFormatter("%m-%d")
        axis.xaxis.set_major_formatter(date_form)

        for xlabel in axis.get_xticklabels():
            xlabel.set_fontsize(8)
            xlabel.set_rotation(20)

    if len(sdd.index) == 0:
        print("no data")
        return

    place = pops.county_name(fips)
    fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(10, 5))

    if daily:
        util.calc_daily(sdd, 'deaths', 'daily_deaths')
        ax = sdd.plot(ax=ax1,
                      x='date',
                      y='daily_deaths',
                      logy=False,
                      grid=True,
                      color=util.death_color,
                      alpha=0.25,
                      title="Daily Deaths: " + place)

        sdd.loc[:, 'deaths-smoothed'] = util.smooth(sdd.daily_deaths.values)
        sdd.plot(ax=ax1,
                 x='date',
                 y='deaths-smoothed',
                 grid=True,
                 color=util.death_color)

    else:
        ax = sdd.plot(ax=ax1,
                      x='date',
                      y='deaths',
                      logy=False,
                      grid=True,
                      color=util.death_color,
                      title="Deaths: " + place)
    decorate(ax)

    if daily:
        util.calc_daily(sdd, 'cases', 'daily_cases')
        ax = sdd.plot(ax=ax2,
                      x='date',
                      y='daily_cases',
                      logy=False,
                      grid=True,
                      color=util.case_color,
                      alpha=0.25,
                      title=("Daily Cases: " + place))

        sdd['cases-smoothed'] = util.smooth(sdd.daily_cases.values)
        sdd.plot(ax=ax2,
                 x='date',
                 y='cases-smoothed',
                 grid=True,
                 color=util.case_color)

    else:
        ax = sdd.plot(ax=ax2,
                      x='date',
                      y='cases',
                      logy=False,
                      grid=True,
                      color=util.case_color,
                      title=("Cases: " + place))
    decorate(ax)
    fig.tight_layout()
Example #28
def cont_max_dist_clustering(dist_matrix, corr_thr=0.7, top_only=True, arr=None, err_thr=5):
  # Returns None if nothing is to be merged.
  bins = [Bin(i) for i in range(dist_matrix.shape[0])]
  heap = [Domain(bins[i], bins[i + 1], val) 
      for i, val in enumerate(dist_matrix.diagonal(1)) if not np.isnan(val) and not np.ma.is_masked(val)]
  heapq.heapify(heap)
  selected_lengths = []
  domains = []
  inter_domain_variance = []
  prev_elem = None
  while heap:
    elem = heapq.heappop(heap)
    if not elem.valid():
      continue
    elem.chose()
    if arr is not None:
      square = np.ma.filled(
          arr[elem.get_begin():elem.get_end()+1, elem.get_begin():elem.get_end()+1],
          np.nan)
      var = np.nanvar(square)
      inter_domain_variance.append((var, np.nanmean(square)))
    selected_lengths.append(elem.val)
    #if elem.val < corr_thr:
      #break
    domains.append(elem)
    prev_elem = elem
    left_bin_number = elem.get_begin() - 1
    if left_bin_number >= 0:
      left_begin = bins[left_bin_number].get_root_domain().begin
      candidate = Domain(left_begin, elem.end, dist_matrix[left_begin.i, elem.end.i])
      if not np.ma.is_masked(candidate.val):
        heapq.heappush(heap, candidate)
    right_bin_number = elem.end.i + 1
    if right_bin_number < len(bins):
      right_end = bins[right_bin_number].get_root_domain().end
      candidate = Domain(elem.begin, right_end, dist_matrix[elem.begin.i, right_end.i])
      if not np.ma.is_masked(candidate.val):
        heapq.heappush(heap, candidate)
  #return [(bin.get_root_domain().begin.i, bin.get_root_domain().end.i) for bin in bins if bin.valid_begin()] 


  min_corr = 0.2
  lengths = np.array(selected_lengths)
  max_corr = 0.97*lengths[0]
  max_limit = np.searchsorted(-lengths, -max_corr)
  min_limit = np.searchsorted(-lengths, -min_corr)

  if not min_limit:
    min_limit = len(lengths) - 1

  break_step = 5

  def two_linear(thr):
    # return x * a1 + b1 if x < thr else x * a2 + b2
    # But the "if else" construction is not broadcastable
    def inner(x, a1, b1, a2, b2):
      ver1 = x * a1 + b1
      ver2 = x * a2 + b2
      mult1 = 1.0 * (-np.sign(x - thr) + 1) / 2
      mult2 = 1.0 * (np.sign(x - thr) + 1) / 2
      return ver1 * mult1 + ver2 * mult2
    return inner

  def one_linear(x, a, b):
    return 1.0 * a * x + b

  xs = np.array(range(len(selected_lengths)))
  lengths_limited = lengths[max_limit:min_limit]
  xs_limited = xs[max_limit:min_limit]

  tested_breaks = range(max_limit, min_limit, break_step)
  errs = np.zeros_like(tested_breaks, dtype=np.float32)
  for i, fixed_break in enumerate(tested_breaks):
    params, _ = curve_fit(two_linear(fixed_break), xs_limited, lengths_limited)
    estimated = two_linear(fixed_break)(xs_limited, *params)
    err = np.mean((estimated - lengths_limited) ** 2)
    errs[i] = err

  result_limit = tested_breaks[np.argmin(errs)]
  # For visualization purposes
  params, _ = curve_fit(two_linear(result_limit), xs_limited, lengths_limited)
  params_one, _ = curve_fit(one_linear, xs_limited, lengths_limited)
  err_one = np.mean((one_linear(xs_limited, *params_one) - lengths_limited) ** 2)
  debug('Two-linear is %f times better (%f / %f)' % (err_one / np.min(errs), err_one, np.min(errs)))
  if err_one / np.min(errs) < err_thr:
    debug('Merging below error threshold')
    return None
  debug('Two-linear has AIC: %f, one-linear has AIC: %f' % (
    akeike(two_linear(result_limit), xs_limited, lengths_limited, params),
    akeike(one_linear, xs_limited, lengths_limited, params_one)))

  smoothed_lengths = smooth(lengths, 20)
  if SHOW_PLOTS:
    plt.plot(xs, smoothed_lengths)
    plt.axvline(result_limit, c='red')
    plt.axvline(max_limit, c='red')
    plt.axvline(min_limit, c='red')
    plt.plot(xs_limited, two_linear(result_limit)(xs_limited, *params), c='purple')
    plt.plot(xs_limited, one_linear(xs_limited, *params_one), c='purple')
    ax2 = plt.twinx()
    ax2.plot(tested_breaks, errs)

    plt.show()
    plt.figure()
    gradient1 = smooth(np.gradient(smoothed_lengths), 20)
    plt.plot(range(len(selected_lengths)), gradient1)
    plt.axvline(result_limit, c='red')
    if arr is not None:
      variances = [x[0] for x in inter_domain_variance]
      means = [x[1] for x in inter_domain_variance]
      plt.show()
      plt.figure()
      plt.plot(range(len(selected_lengths)), variances)
      #plt.twinx()
      #plt.plot(range(len(selected_lengths)), means, c='green')
      #plt.twinx()
      #plt.plot(range(len(selected_lengths)), [var/mean for (var, mean) in inter_domain_variance], c='red')
      plt.axvline(result_limit, c='red')
    plt.show()

  domains = domains[:result_limit]
  #result = [(domain.begin.i, domain.end.i) for domain in domains]
  result = copy(domains)
  result.extend([Domain(bins[i], bins[i], 0.0) for i in range(dist_matrix.shape[0])])
  result = sort_domains(result)
  if top_only:
    result = topify(result)
  return result
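The sign-mask arithmetic inside two_linear exists only because a plain if/else is not broadcastable; np.where expresses the same piecewise line more directly, as in this sketch:

import numpy as np

def two_linear(thr):
    # Broadcastable piecewise-linear model: x * a1 + b1 below thr, x * a2 + b2 above.
    def inner(x, a1, b1, a2, b2):
        x = np.asarray(x, dtype=float)
        return np.where(x < thr, x * a1 + b1, x * a2 + b2)
    return inner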
Example #29
  noise = util.gaussian_random_field(Pk, n)
  temp = temp0 + temp_err*np.tile(noise, (nz, 1, 1)).T
  noise = util.gaussian_random_field(Pk, n)
  zeta = zeta0 + zeta_err*np.tile(noise, (nz, 1, 1)).T

  #thinning
  xx1 = xx[::obs_thin, ::obs_thin, obs_z]
  yy1 = yy[::obs_thin, ::obs_thin, obs_z]
  zz1 = zz[::obs_thin, ::obs_thin, obs_z]
  psi1 = psi[::obs_thin, ::obs_thin, obs_z]
  u1 = u[::obs_thin, ::obs_thin, obs_z]
  v1 = v[::obs_thin, ::obs_thin, obs_z]
  temp1 = temp[::obs_thin, ::obs_thin, obs_z]
  zeta1 = zeta[::obs_thin, ::obs_thin, obs_z]
  nx1, ny1, nz1 = xx1.shape

  #smoothing
  psi1 = util.smooth(psi1, smth)
  u1 = util.smooth(u1, smth)
  v1 = util.smooth(v1, smth)
  temp1 = util.smooth(temp1, smth)
  zeta1 = util.smooth(zeta1, smth)

  f = open(workdir+'/obs/{:05d}'.format(t+1), 'w')
  for z in range(nz1):
    for y in range(ny1):
      for x in range(nx1):
        f.write('{:7.2f} {:7.2f} {:5.2f} {:12.5f} {:12.5f} {:12.5f} {:12.5f} {:12.5f}\n'.format(xx1[x,y,z], yy1[x,y,z], zz1[x,y,z], u1[x,y,z], v1[x,y,z], psi1[x,y,z], zeta1[x,y,z], temp1[x,y,z]))

  f.close()
Example #30
def make_plot(show=False):
    fig = plot.figure(figsize=(7, 6), dpi=dpi)
    ax = fig.add_subplot(111)

    # Plot
    for i, size in enumerate(sizes):
        size_label = util.get_size_label(size)

        x = frame_range
        y = np.array(extents[i])

        kernel = 5
        smooth_y = util.smooth(y, kernel)

        plot.plot(x, y, c=colors[i], linewidth=linewidth, alpha=alpha)
        plot.plot(x,
                  smooth_y,
                  c=colors[i],
                  linewidth=linewidth,
                  label=size_label)

    # Comparisons
    if compare:
        comparisons = comparison_dictionary[threshold]
        for i, extent_i in enumerate(comparisons[::-1]):
            plot.scatter(x[0],
                         extent_i,
                         c=colors[i],
                         s=100,
                         marker="H",
                         zorder=5)  # Left Marker
            plot.scatter(x[-1],
                         extent_i,
                         c=colors[i],
                         s=100,
                         marker="H",
                         zorder=99,
                         clip_on=False)  # Right Marker
            plot.plot([x[0], x[-1]], [extent_i, extent_i],
                      linestyle="--",
                      c=colors[i],
                      zorder=2)  # Dashed line

    # Axes
    plot.xlim(x[0], x[-1])

    angles = np.linspace(0, 360, 7)
    plot.yticks(angles)
    plot.ylim(0, 360)

    # Annotate Axes
    plot.xlabel(r"$t \mathrm{\ (planet\ orbits)}$", fontsize=fontsize + 2)
    plot.ylabel(r"$\Delta \phi$ $\mathrm{(degrees)}$", fontsize=fontsize + 2)

    #plot.legend(loc = "upper right", bbox_to_anchor = (1.28, 1.0)) # outside of plot
    plot.legend(loc="upper left")  # outside of plot

    # Title
    title = r"$\mathrm{Azimuthal\ Extents}$"
    plot.title("%s" % (title), y=1.01, fontsize=fontsize + 3)

    # Save, Show, and Close
    if version is None:
        save_fn = "%s/extentsBySize.png" % (save_directory)
    else:
        save_fn = "%s/v%04d_extentsBySize.png" % (save_directory, version)
    plot.savefig(save_fn, bbox_inches='tight', dpi=dpi)

    if show:
        plot.show()

    plot.close(fig)  # Close Figure (to avoid too many figures)
Example #31
def make_plot(show=False):
    fig = plot.figure(figsize=(8, 6), dpi=dpi)
    gs = gridspec.GridSpec(nrows=2,
                           ncols=1,
                           height_ratios=[7, 1.5],
                           hspace=0,
                           figure=fig)
    ax = fig.add_subplot(gs[0, :])

    # Plot
    x = frame_range
    y = np.array(extents)

    kernel = 5
    smooth_y = util.smooth(y, kernel)

    plot.plot(x, y, c=colors[1], linewidth=linewidth, alpha=alpha)
    plot.plot(x, smooth_y, c=colors[1], linewidth=linewidth)

    # Axes
    plot.xlim(x[0], x[-1])
    ax.set_xticklabels([])

    angles = np.linspace(0, 360, 7)
    plot.yticks(angles)
    plot.ylim(0, 360)

    # Annotate Axes
    plot.ylabel(r"Azimuthal Extents $\mathrm{(degrees)}$",
                fontsize=fontsize + 2)

    #plot.legend(loc = "upper right", bbox_to_anchor = (1.28, 1.0)) # outside of plot
    #plot.legend(loc = "upper left") # outside of plot

    # Title
    #title = r"$\mathrm{Azimuthal\ Extents}$"
    beam_diameter = 2.0 * fargo_par["Beam"] * fargo_par["Radius"] / fargo_par[
        "Distance"]
    title = r'$h = %.2f$   $\Sigma = %.3e$  (2-D)  [$%.3f^{\prime\prime}$]' % (
        scale_height, fargo_par["p"].sigma0, beam_diameter)
    plot.title("%s" % (title), y=1.01, fontsize=fontsize + 3)

    #### Peaks ####
    ax2 = fig.add_subplot(gs[1, :])

    y2 = np.array(peak_counts)
    plot.bar(x, y2, color=colors[2], edgecolor=colors[2], width=x[1] - x[0])

    # Axes
    plot.xlim(x[0], x[-1])

    counts = [0, 2, 4]
    plot.yticks(counts)
    plot.ylim(0, counts[-1])

    # Labels
    plot.xlabel(r"$t \mathrm{\ (planet\ orbits)}$", fontsize=fontsize + 2)
    plot.ylabel("# Peaks", fontsize=fontsize + 2, rotation=270, labelpad=25)

    ax2.yaxis.set_label_position("right")
    ax2.yaxis.tick_right()

    # Save, Show, and Close
    current_directory = os.getcwd().split("/")[-3]
    current_beam = os.getcwd().split("/")[-1]
    if version is None:
        save_fn = "%s/extentsAndPeakCounts-%s-%s.png" % (
            save_directory, current_directory, current_beam)
    else:
        save_fn = "%s/v%04d_extentsAndPeakCounts-%s-%s.png" % (
            save_directory, version, current_directory, current_beam)
    plot.savefig(save_fn, bbox_inches='tight', dpi=dpi)

    if show:
        plot.show()

    plot.close(fig)  # Close Figure (to avoid too many figures)
Beispiel #32
0
noise = np.zeros((n, n, 1))
noise[:, :, 0] = util.gaussian_random_field(Pk, n) * amplitude
obs = truth + noise
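# synthetic observations: the truth field plus spatially correlated Gaussian noise (random field generated from the spectrum Pk, scaled by amplitude)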

colors = ('red', 'blue', [.3, .7, .3], [.8, .8, .3], [.7, .3, .7])
###demonstrate thinning of obs
# intv = (1, 2, 4, 8)
# for i in range(len(intv)):
#   wn, pwr = util.pwrspec2d(noise[::intv[i], ::intv[i], :])
#   ax.loglog(wn, util.smooth_spec(wn, pwr, specsmth), color=colors[i], linewidth=2)

###demonstrate smoothing of obs
intv = 2
smth = (1, 2, 4, 8)
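# smooth the thinned obs with increasing widths and plot the power spectrum of the obs-minus-truth error for each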
for i in range(len(smth)):
    obs_smth = util.smooth(obs[::intv, ::intv, :], smth[i])
    obs_error = obs_smth - truth[::intv, ::intv, :]
    wn, pwr = util.pwrspec2d(obs_error)
    ax.loglog(wn,
              util.smooth_spec(wn, pwr, specsmth),
              color=colors[i],
              linewidth=2)

###demonstrate downscaling effect
# ns = (n, 2*n, 4*n)
# for i in range(len(ns)):
#   noise_ds = np.zeros((ns[i], ns[i], 1))
#   noise_ds[:, :, 0] = util.regrid(noise[:, :, 0], ns[i], ns[i])
#   wn, pwr = util.pwrspec2d(noise_ds)
#   ax.loglog(wn, util.smooth_spec(wn, pwr, specsmth), color=colors[i], linewidth=2)
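
The demo above only assumes that util.smooth can box-average an (n, n, 1) field over its two spatial axes. A hedged sketch of such a smoother, using scipy.ndimage.uniform_filter as a stand-in (an assumption, not the repository's code):

import numpy as np
from scipy.ndimage import uniform_filter

def smooth_field(field, width):
    """Boxcar-average each 2-D slice of a (ny, nx, nz) field over a width x width window."""
    if width <= 1:
        return np.asarray(field)
    # smooth only the two spatial axes; leave the third axis untouched
    return uniform_filter(field, size=(width, width, 1), mode='wrap')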
Beispiel #33
0
def make_plot(show=False):
    fig = plot.figure(figsize=(9, 6), dpi=dpi)
    gs = gridspec.GridSpec(nrows=2,
                           ncols=2,
                           height_ratios=[7, 1.5],
                           width_ratios=[7, 1],
                           wspace=0,
                           figure=fig)
    ax = fig.add_subplot(gs[0, 0])

    # Plot
    x = frame_range
    y = np.array(extents)

    kernel = 5
    smooth_y = util.smooth(y, kernel)

    plot.plot(x, y, c=colors[1], linewidth=linewidth, alpha=alpha)
    plot.plot(x, smooth_y, c=colors[1], linewidth=linewidth)

    # Axes
    plot.xlim(x[0], x[-1])
    #ax.set_xticklabels([])

    angles = np.linspace(0, 360, 7)
    plot.yticks(angles)
    plot.ylim(0, 360)

    # Annotate Axes
    plot.ylabel(r"Azimuthal Extents $\mathrm{(degrees)}$",
                fontsize=fontsize + 2)

    threshold_text = r"$\frac{I_\mathrm{cut}}{I_\mathrm{max}}=%.2f$" % threshold
    plot.text(0.98 * (x[-1] - x[0]) + x[0],
              0.9 * plot.ylim()[-1],
              threshold_text,
              horizontalalignment='right',
              fontsize=fontsize - 4)

    #plot.legend(loc = "upper right", bbox_to_anchor = (1.28, 1.0)) # outside of plot
    #plot.legend(loc = "upper left") # outside of plot

    # Title
    #title = r"$\mathrm{Azimuthal\ Extents}$"
    title = r'$h = %.2f$   $\Sigma = %.3e$  (2-D)  [$%.3f^{\prime\prime}$]' % (
        scale_height, fargo_par["p"].sigma0, arc_beam)
    plot.title("%s" % (title),
               y=1.25,
               fontsize=fontsize + 3,
               bbox=dict(facecolor='none',
                         edgecolor='black',
                         linewidth=1.5,
                         pad=7.0))

    #### Peaks ####
    ax2 = fig.add_subplot(gs[1, 0])

    y2 = np.array(peak_counts)
    plot.bar(x, y2, color=colors[2], edgecolor=colors[2], width=x[1] - x[0])

    # Axes
    plot.xlim(x[0], x[-1])

    counts = [0, 2, 4]
    plot.yticks(counts)
    plot.ylim(0, counts[-1])

    # Labels
    plot.xlabel(r"$t \mathrm{\ (planet\ orbits)}$", fontsize=fontsize + 2)
    plot.ylabel("# Peaks", fontsize=fontsize + 2)
    #plot.ylabel("# Peaks", fontsize = fontsize + 2, rotation = 270, labelpad = 25)

    #ax2.yaxis.set_label_position("right")
    #ax2.yaxis.tick_right()

    #### Histograms ####
    ax3 = fig.add_subplot(gs[0, 1])
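    # side panel: cumulative, normalized histogram of the azimuthal extents (shares the 0-360 degree range of the main panel)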
    plot.hist(y,
              bins=np.linspace(0, 370, 38),
              cumulative=True,
              color='sienna',
              align='left',
              orientation='horizontal',
              histtype='stepfilled',
              density=True)

    ax3.set_xlim(0, 1)
    hist_ticks = np.linspace(0, 1, 11)
    ax3.set_xticks(hist_ticks)
    ax3.set_xticklabels([])

    ax3.set_ylim(0, 360)
    ax3.set_yticks(angles)
    ax3.set_yticklabels([])

    ax4 = fig.add_subplot(gs[1, 1])
    y2_adjusted = np.copy(y2)  # explicit copy: slicing a NumPy array with [:] returns a view, not a copy
    y2_adjusted[y2 > 4] = 4  # lump frames with more than 4 peaks into the top bin
    plot.hist(y2_adjusted,
              bins=np.linspace(0, 4, 5) + 1e-8,
              cumulative=True,
              color='navy',
              orientation='horizontal',
              histtype='stepfilled',
              density=True)

    ax4.set_xlim(0, 1)
    ax4.set_xticks(hist_ticks)
    ax4.set_xticklabels([])

    ax4.set_ylim(counts[0], counts[-1])
    ax4.set_yticks(counts)
    ax4.set_yticklabels([])

    #### Add mass axis ####
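    # secondary x-axis (top): label the times at which the planet first reaches selected masses (assumes total_mass increases monotonically with frame)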

    min_mass = args.min_mass
    max_mass = args.max_mass
    delta_mass = args.delta_mass
    if max_mass is None:
        max_mass = total_mass[frame_range[-1] - 1]

    mass_ticks = np.arange(min_mass, max_mass, delta_mass)

    def tick_function(masses):
        # For the secondary x-axis showing the planet mass over time
        tick_locations = np.zeros(len(masses))
        tick_labels = []

        for i, mass in enumerate(masses):
            #total_mass_jupiter = total_mass # in Jupiter masses
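            # index of the first frame at which the planet mass reaches this value (assumed behavior of az.my_searchsorted)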
            times_i = az.my_searchsorted(total_mass, mass)

            #tick_times = times[times_i]

            print(mass, times_i, len(times))

            tick_locations[i] = times[times_i]
            if delta_mass < 0.1:
                tick_labels.append("%.2f" % mass)
            else:
                tick_labels.append("%.1f" % mass)

        return tick_locations, tick_labels

    tick_locations, tick_labels = tick_function(mass_ticks)

    ax_twin = ax.twiny()
    ax_twin.set_xlim(ax.get_xlim())
    ax_twin.set_xticks(tick_locations)
    ax_twin.set_xticklabels(tick_labels)

    ax_twin.set_xlabel(r"$M_\mathrm{p}$ [$M_\mathrm{J}$]",
                       fontsize=fontsize,
                       labelpad=10)

    if args.minor_delta_mass is not None:
        minor_mass_ticks = np.arange(0.1, max_mass, args.minor_delta_mass)
        minor_tick_locations, _ = tick_function(minor_mass_ticks)
        ax_twin.set_xticks(minor_tick_locations, minor=True)

    # Print counts
    print(len(peak_counts))
    print((y2 == 1).sum(), (y2 == 2).sum(), (y2 == 3).sum(), (y2 >= 4).sum())

    num_one = 1.0 * (y2 == 1).sum() / len(peak_counts)
    num_two = 1.0 * (y2 == 2).sum() / len(peak_counts)
    num_three = 1.0 * (y2 == 3).sum() / len(peak_counts)
    num_four = 1.0 * (y2 >= 4).sum() / len(peak_counts)
    print "%.2f %.2f %.2f %.2f" % (num_one, num_two, num_three, num_four)

    # Save, Show, and Close
    current_directory = os.getcwd().split("/")[-3]
    current_beam = os.getcwd().split("/")[-1]
    if version is None:
        save_fn = "%s/extentsAndPeakCounts-%s-%s.png" % (
            save_directory, current_directory, current_beam)
    else:
        save_fn = "%s/v%04d_extentsAndPeakCounts-%s-%s.png" % (
            save_directory, version, current_directory, current_beam)
    plot.savefig(save_fn, bbox_inches='tight', dpi=dpi, pad_inches=0.2)

    if show:
        plot.show()

    plot.close(fig)  # Close Figure (to avoid too many figures)
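
If az.my_searchsorted is not available, a rough stand-in for the mass-to-time tick mapping used above (an assumption based on NumPy's searchsorted and a monotonically increasing mass history, not the original helper) might be:

import numpy as np

def mass_tick_locations(total_mass, times, mass_ticks):
    """Return the time at which each requested planet mass is first reached."""
    total_mass = np.asarray(total_mass)
    times = np.asarray(times)
    indices = np.searchsorted(total_mass, mass_ticks)
    indices = np.clip(indices, 0, len(times) - 1)  # guard against masses never reached in the run
    return times[indices]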
Beispiel #34
0
 def applySmoothing(self, smooth_width):
     """ User-directed binning or smoothing of count line
     data is done here """
     self.counts = smooth(self.counts, smooth_width)
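
A hypothetical call, assuming the method lives on a spectrum-like object with a counts attribute (the object name is an illustration, not from the source):

spectrum.applySmoothing(smooth_width=3)  # replace counts with a 3-bin smoothed/binned version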