Example #1
def plot_decision_function(clf, data_range=None,
                           features=None, labels=None, feature_columns=[0, 1],
                           n_gridpoints=201):
    """Plot the decision function of a classifier in 2D.

    Parameters
    ----------
    clf : scikit-learn classifier
        The classifier to be evaluated.
    data_range : tuple of int, optional
        The range of values to be evaluated.
    features : 2D array of float, optional
        The features of the training data.
    labels : 1D array of int, optional
        The labels of the training data.
    feature_columns : tuple of int, optional
        Which feature columns to plot, if there are more than two.
    n_gridpoints : int, optional
        The number of points to place on each dimension of the 2D grid.
    """
    if features is not None:
        features = features[:, feature_columns]
        minfeat, maxfeat = np.min(features), np.max(features)
        featrange = maxfeat - minfeat

    if data_range is None:
        if features is None:
            data_range = (0, 1)
        else:
            data_range = (minfeat - 0.05 * featrange,
                          maxfeat + 0.05 * featrange)

    data_range = np.array(data_range)

    grid = np.linspace(*data_range, num=n_gridpoints, endpoint=True)
    rr, cc = np.meshgrid(grid, grid, sparse=False)
    feature_space = np.hstack((np.reshape(rr, (-1, 1)),
                               np.reshape(cc, (-1, 1))))
    prediction = clf.predict_proba(feature_space)[:, 1]  # Pr(class(X)=1)
    prediction = np.reshape(prediction, (n_gridpoints, n_gridpoints))

    fig, ax = plt.subplots()
    ax.imshow(prediction, cmap='RdBu')
    ax.set_xticks([])
    ax.set_yticks([])

    if features is not None:
        # Normalize features into [0, 1] so they line up with the imshow grid.
        features = (features - data_range[0]) / (data_range[1] - data_range[0])
        if labels is not None:
            label_colors = cm.viridis(labels.astype(float) / np.max(labels))
        else:
            label_colors = cm.viridis(np.zeros(features.shape[0]))
        ax.scatter(*(features.T * n_gridpoints), c=label_colors)
    plt.show()
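A minimal usage sketch for the function above (assumptions: numpy as np, matplotlib.pyplot as plt and matplotlib.cm as cm are already imported for it; scikit-learn supplies the toy data and classifier):
from sklearn.datasets import make_moons
from sklearn.linear_model import LogisticRegression

X, y = make_moons(n_samples=200, noise=0.2, random_state=0)
clf = LogisticRegression().fit(X, y)
# Shows Pr(class=1) over the feature plane with the training points on top.
plot_decision_function(clf, features=X, labels=y)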
Example #2
def bokeh_plot(df):
    tooltip = """
        <div>
            <div>
                <img
                src="@image_files" height="60" alt="image"
                style="float: left; margin: 0px 15px 15px 0px; image-rendering: pixelated;"
                border="2"
                ></img>
            </div>
            <div>
                <span style="font-size: 17px;">@source_filenames</span>
            </div>
        </div>
              """
    filenames = b64_image_files(df['images'])
    df['image_files'] = filenames
    colors_raw = cm.viridis((df['time'] - df['time'].min()) /
                            (df['time'].max() - df['time'].min()), bytes=True)
    colors_str = ['#%02x%02x%02x' % tuple(c[:3]) for c in colors_raw]
    df['color'] = colors_str
    source = ColumnDataSource(df)
    bplot.output_file('plot.html')
    hover0 = HoverTool(tooltips=tooltip)
    hover1 = HoverTool(tooltips=tooltip)
    tools0 = [t() for t in TOOLS] + [hover0]
    tools1 = [t() for t in TOOLS] + [hover1]
    pca = bplot.figure(tools=tools0)
    pca.circle('PC1', 'PC2', color='color', source=source)
    tsne = bplot.figure(tools=tools1)
    tsne.circle('tSNE-0', 'tSNE-1', color='color', source=source)
    p = bplot.gridplot([[pca, tsne]])
    bplot.show(p)
Example #3
def result_length_of_sub_dist(path):
    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        data = np.load(result_data_path)
        beta = data['beta']
        num_of_strings = data['num_of_strings']
        L = data['L']
        frames = data['frames']
        Ls = data['Ls'].astype(float)  # np.float was removed from NumPy
        size_dist = data['size_dist']
        N = np.sum(size_dist, axis=0)
        N_freq = N / np.sum(N)
        X = np.arange(1, len(N)+1)
        ax.semilogy(X, N_freq, '.', label=r'$\beta = %2.2f$' % beta,
                  color=cm.viridis(float(i) / len(path)))
        # ax.semilogy(X[::2], N_freq[::2], '.', label=r'$\beta = %2.2f$' % beta,
        #             color=cm.viridis(float(i) / len(path)))
        # ax.semilogy(X[1::2], N_freq[1::2], '.', label=r'$\beta = %2.2f$' % beta,
        #           color=cm.viridis(float(i) / len(path)))

    ax.legend(loc='best')
    ax.set_title("Appearance frequency of the subcluster of size $N$")
    ax.set_xlabel("$N$")
    ax.set_ylabel("Freq")
    plt.show()
Example #4
 def silhouette_plot(self, X, X_predicted, title, filename):
     plt.clf()
     plt.cla()
     
     cluster_labels = np.unique(X_predicted)
     n_clusters = cluster_labels.shape[0]
     silhouette_vals = silhouette_samples(X, X_predicted, metric='euclidean')
     y_ax_lower, y_ax_upper = 0, 0
     
     color=iter(cm.viridis(np.linspace(0,1,cluster_labels.shape[0])))
        
     yticks = []
     for i, c in enumerate(cluster_labels):
         c_silhouette_vals = silhouette_vals[X_predicted == c]
         c_silhouette_vals.sort()
         y_ax_upper += len(c_silhouette_vals)
         plt.barh(range(y_ax_lower, y_ax_upper), c_silhouette_vals, height=1.0, edgecolor='none', color=next(color))
     
         yticks.append((y_ax_lower + y_ax_upper) / 2.)
         y_ax_lower += len(c_silhouette_vals)
         
     silhouette_avg = np.mean(silhouette_vals)
     plt.axvline(silhouette_avg, color="red", linestyle="--") 
     
     plt.yticks(yticks, cluster_labels + 1)
     plt.ylabel('Cluster')
     plt.xlabel('Silhouette Coefficient')
     
     plt.title(title)
     
     plt.tight_layout()
     plt.savefig(filename)
     plt.close('all')
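A reduced standalone sketch of the same silhouette/viridis idiom (assumption: scikit-learn is available; the method above would instead be called on its plotting-helper instance):
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.metrics import silhouette_samples

X, _ = make_blobs(n_samples=300, centers=4, random_state=0)
pred = KMeans(n_clusters=4, n_init=10, random_state=0).fit_predict(X)
vals = silhouette_samples(X, pred, metric='euclidean')
colors = iter(cm.viridis(np.linspace(0, 1, 4)))
lower = 0
for c in np.unique(pred):
    c_vals = np.sort(vals[pred == c])
    plt.barh(range(lower, lower + len(c_vals)), c_vals,
             height=1.0, edgecolor='none', color=next(colors))
    lower += len(c_vals)
plt.axvline(np.mean(vals), color='red', linestyle='--')
plt.show()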
Example #5
def fermi(path, fixed_a, fixed_loc, save_image=False):
    matplotlib.rcParams['savefig.dpi'] = 300

    def modified_gamma_2(x, scale):
        a = fixed_a
        loc = fixed_loc
        return gamma.pdf(x, a=a, loc=loc, scale=scale)

    betas = []
    scale = []
    L = []
    S = []

    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))
        L.append(Ls)
        S.append(M_ave)
        popt = curve_fit(modified_gamma_2, xdata=Ls, ydata=M_ave, p0=[10.])[0]
        # print beta, popt
        betas.append(beta)
        theta = popt[0]
        scale.append(theta)
        ax.plot(Ls / theta, M_ave * theta, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))

    show_plot1(ax, num_of_strings)
    ax.set_title(r'Collapsed data')
    ax.set_xlabel(r'$L / \theta$')
    ax.set_ylabel(r'$\theta * f(L)$')

    plt.show()
Example #6
def plot_sample_trajectory(data, subj_id, trial_no):
    x_lim = [-1.2, 1.2]
    y_lim = [-0.2, 1.2]
    
    tickLabelFontSize = 20
    axisLabelFontSize = 24

    sns.set_style('white')    
    fig = plt.figure(tight_layout=True)
    ax = fig.add_subplot(111)
    ax.set_xlabel(r'x coordinate', fontsize=axisLabelFontSize)
    ax.set_ylabel(r'y coordinate', fontsize=axisLabelFontSize)
    ax.set_xlim(x_lim)
    ax.set_ylim(y_lim)
    ax.tick_params(axis='both', which='major', labelsize=tickLabelFontSize)
    
    traj_color = cm.viridis(0.1)

    trajectory = data.loc[subj_id, trial_no]    
    ax.plot(trajectory.x, trajectory.y, color=traj_color, ls='none', marker='o', ms=15,
            markerfacecolor='none', markeredgewidth=2, markeredgecolor=traj_color, 
            label='Mouse trajectory')

    # draw screen above the surface and choice options on it
    patches = get_choice_patches()
    for patch in patches:
        ax.add_patch(patch)
        
    ax.set_axis_off()
    plt.savefig('figures/sample_traj.pdf')
Example #7
def plot_variance_coefficients(df, coeff='a', yAxis='center', xAxis='n', zAxis='tDiff', i1='mdi', i2='mdi'):
    """Takes a dataframe containing fit coefficients for variance grids and
    plots them as a function of the chosen axes.

    coeff:      which coefficient is being plotted
    yAxis:      which coefficient (center, middle, outer disk) to plot
    xAxis:      the independent variable to plot against
    zAxis:      secondary variable used to separate data sets
    i1:         reference instrument
    i2:         secondary instrument
    """

    x = df[xAxis][(df['i1'] == i1) & (df['i2'] == i2)].to_numpy()  # .as_matrix() was removed from pandas
    y = df[yAxis][(df['i1'] == i1) & (df['i2'] == i2)].to_numpy()
    z = df[zAxis][(df['i1'] == i1) & (df['i2'] == i2)].to_numpy().astype(np.float32)
    z[(z < 1)] = .25
    norm1 = matplotlib.colors.LogNorm(vmin=np.min(z), vmax=np.max(z))
    norm2 = matplotlib.colors.LogNorm(vmin=np.min(x), vmax=np.max(x))

    f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)

    for segment in sorted(set(z)):
    
        ind = (z == segment)
        ax1.scatter(x[ind], y[ind], c=cm.viridis(norm1(z[ind][0])), 
                edgecolor='face', cmap='viridis')
        ax1.plot(x[ind], y[ind], 
            c=cm.viridis(norm1(z[ind][0])), 
            label='{0} hr'.format(segment))

    for segment in sorted(set(x)):
        ind = (x == segment)
        ax2.scatter(z[ind], y[ind], c=cm.viridis(norm2(x[ind][0])), 
                edgecolor='face', cmap='viridis')
        ax2.plot(z[ind][np.argsort(z[ind])], y[ind][np.argsort(z[ind])], 
            c=cm.viridis(norm2(x[ind][0])),
            label='n = {0}'.format(segment))

    f = plt.gcf()
    fig_title = "{0}/{1}".format(i1.upper(), i2.upper())
    f.suptitle(fig_title, y=.95, fontsize=30, fontweight='bold')
    ax1.set_ylabel('{0} Disk {1} Coefficient'.format(yAxis.title(), coeff))
    ax1.set_xlabel('{0}'.format(xAxis))
    ax2.set_xlabel('{0}'.format(zAxis))
    plt.legend(loc=4)
Example #8
def fit_a_x0_scale(path):
    betas = []
    a = []
    loc = []
    scale = []

    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))

        ax.plot(Ls, M_ave, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))

        popt = curve_fit(gamma.pdf, xdata=Ls, ydata=M_ave, p0=[2.5, -5., 10.])[0]
        print(beta, popt)
        betas.append(beta)

        a.append(popt[0])
        loc.append(popt[1])
        scale.append(popt[2])

        x = np.linspace(0, max(Ls), num=5*max(Ls))
        ax.plot(x, gamma.pdf(x, a=popt[0], loc=popt[1], scale=popt[2]),
                    '-', label=r'fitted $\beta = %2.2f$' % beta,
                    color=cm.viridis(float(i) / len(path)))
    show_plot1(ax, num_of_strings)
    plt.show()

    betas = np.array(betas)
    a = np.array(a)
    loc = np.array(loc)
    scale = np.array(scale)

    fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
    ax1.plot(betas, a, 'o')
    [ax.set_xlabel(r'$\beta$') for ax in [ax1, ax2, ax3]]
    [ax.set_xlim((0, max(betas))) for ax in [ax1, ax2, ax3]]
    ax1.set_ylabel(r'Shape parameter: $a$')
    ax2.plot(betas, loc, 'o')
    ax2.set_ylabel(r'Translation parameter: $x_{0}$')
    # ax3.plot(-betas, -scale)  # (trial)
    ax3.plot(betas, scale, 'o')
    ax3.set_ylabel(r'Scale parameter: $\theta$')
    plt.show()
Example #9
def plot_init_means(x, mus, algs, fname):
    import matplotlib.cm as cm
    fig = plt.figure()
    plt.scatter(x[:,0], x[:,1], c='gray', cmap='viridis', s=20, alpha= 0.4, label='data')
    for mu, alg, clr in zip(mus, algs, cm.viridis(np.linspace(0, 1, len(mus)))):
        plt.scatter(mu[:,0], mu[:, 1], c=clr, s=50, label=alg)
        plt.scatter(mu[:, 0], mu[:, 1], c='black', s=10, alpha=1)
    legend = plt.legend(loc='upper right', fontsize='small')
    plt.title('Initial guesses for centroids')
    fig.savefig(fname)
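A hedged usage sketch (assumptions: numpy/matplotlib/scikit-learn are available and plot_init_means above is in scope; the two initializations are only illustrative):
import numpy as np
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs

x, _ = make_blobs(n_samples=400, centers=3, random_state=0)
rng = np.random.default_rng(0)
random_init = x[rng.choice(len(x), size=3, replace=False)]      # naive random guess
kmeanspp_init = KMeans(n_clusters=3, init='k-means++', n_init=1,
                       max_iter=1, random_state=0).fit(x).cluster_centers_
plot_init_means(x, mus=[random_init, kmeanspp_init],
                algs=['random', 'k-means++'], fname='init_means.png')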
Example #10
def arr2png(arr, path, size=1080):
    # min zero
    arr = arr - np.min(arr)
    # max one
    arr = arr / np.max(arr)

    im = Image.fromarray(np.uint8(cm.viridis(arr) * 255))
    im = im.resize((size, size))
    im.save(path)
    return im
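A minimal usage sketch (assumptions: numpy as np, matplotlib.cm as cm and PIL's Image are imported for the function above; the output path is illustrative):
import numpy as np

xx, yy = np.meshgrid(np.linspace(0, 1, 256), np.linspace(0, 1, 256))
arr = np.sin(10 * xx) + yy          # any 2D float array works
arr2png(arr, 'viridis_demo.png', size=512)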
Example #11
def fit_a_scale(path, fixed_loc):

    def modified_gamma(x, a, scale):
        # loc = c * a + d
        loc = fixed_loc
        return gamma.pdf(x, a=a, loc=loc, scale=scale)

    betas = []
    a = []
    scale = []

    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))
        ax.plot(Ls, M_ave, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))
        popt = curve_fit(modified_gamma, xdata=Ls, ydata=M_ave, p0=[2.5, 10.])[0]
        print(beta, popt)
        betas.append(beta)

        a.append(popt[0])
        scale.append(popt[1])

        x = np.linspace(0, max(Ls), num=5*max(Ls))
        ax.plot(x, modified_gamma(x, a=popt[0], scale=popt[1]),
                    '-', label=r'fitted $\beta = %2.2f$' % beta,
                    color=cm.viridis(float(i) / len(path)))
    show_plot1(ax, num_of_strings)
    plt.show()

    betas = np.array(betas)
    a = np.array(a)
    scale = np.array(scale)

    fig, (ax1, ax2) = plt.subplots(2, 1)
    ax1.set_title(r'Fitting parameter (fixed: $x_{0} = 0$)')
    ax1.plot(betas, a, 'o')
    [ax.set_xlabel(r'$\beta$') for ax in [ax1, ax2]]
    [ax.set_xlim((0, max(betas))) for ax in [ax1, ax2]]
    ax1.set_ylabel(r'Shape parameter: $a$')
    ax2.plot(betas, scale, 'o')
    ax2.set_ylabel(r'Scale parameter: $\theta$')
    plt.show()
Example #12
def sradcmap():
    # This function returns the colormap and bins for the solar radiation (srad) spatial plots
    # it is designed to have a vmin = 0 and vmax = 1400
    # return cmap, bins
    colors1 = cm.viridis(linspace(0, 1, 128))
    colors2 = cm.plasma(linspace(.2, 1, 128))
    colors = vstack((colors1, colors2))
    return mcolors.LinearSegmentedColormap.from_list('sradcmap',
                                                     colors), arange(
                                                         0, 1410., 10)
Example #13
def pm10cmap():
    # This function returns the colormap and bins for the PM10 spatial plots
    # it is designed to have a vmin = 0 and vmax = 150
    # return cmap, bins
    colors1 = cm.viridis(linspace(0, 1, 128))
    colors2 = cm.plasma_r(linspace(.042, .75, 128))
    colors = vstack((colors1, colors2))
    return mcolors.LinearSegmentedColormap.from_list('noxcmap',
                                                     colors), arange(
                                                         0, 150.5, .5)
Example #14
def _biplot(xidx, yidx, data, pc_columns, columns, singular_values, components,
            explained_variance_ratio, alpha=1, ax=None, hue=None, key_col=None):
    if ax is None:
        ax = plt.gca()
    
    xs = data[pc_columns[xidx]] * singular_values[xidx] ** alpha
    ys = data[pc_columns[yidx]] * singular_values[yidx] ** alpha
    
    if key_col is not None and hue is not None:
        groups = data[hue].unique()
        k = len(data[hue].unique())
        colors = cm.viridis(np.arange(k).astype(float) / k)
        for j, color in zip(range(k), colors):
            group_data = data[data[hue] == groups[j]]
            for idx in group_data.index:
                ax.text(xs[idx], ys[idx], data[key_col][idx], color=color, va='center', ha='center')
        ax.legend([Patch(color=colors[i]) for i, _ in enumerate(groups)], groups.tolist())
    elif key_col is not None and hue is None:
        for i in range(data.shape[0]):
            ax.text(xs[i], ys[i], data[key_col][i], color='black', va='center', ha='center')
    elif hue is not None:
        sns.scatterplot(xs, ys, hue=data[hue], data=data, ax=ax)
    else:
        sns.scatterplot(xs, ys, data=data, ax=ax)
        
    ax.set_xlabel('%s (%0.4f)' % (pc_columns[xidx], explained_variance_ratio[xidx]))
    ax.set_ylabel('%s (%0.4f)' % (pc_columns[yidx], explained_variance_ratio[yidx]))
    
    axs = components[xidx] * singular_values[xidx] ** (1 - alpha)
    ays = components[yidx] * singular_values[yidx] ** (1 - alpha)
    
    xmax = np.amax(np.concatenate((xs, axs * 1.5)))
    xmin = np.amin(np.concatenate((xs, axs * 1.5)))
    ymax = np.amax(np.concatenate((ys, ays * 1.5)))
    ymin = np.amin(np.concatenate((ys, ays * 1.5)))
    
    for i, col in enumerate(columns):
        x, y = axs[i], ays[i]
        ax.arrow(0, 0, x, y, color='r', width=0.001, head_width=0.05)
        ax.text(x * 1.3, y * 1.3, col, color='r', ha='center', va='center')
    
    ys, ye = ax.get_ylim()
    xs, xe = ax.get_xlim()

    m = 1.2
    ax.set_xlim(xmin * m, xmax * m)
    ax.set_ylim(ymin * m, ymax * m)
    
    # plt.title('PCA result with two components')
    # plt.show()
    plt_two = plt2MD(plt)
    plt.clf()
    
    return plt_two
Example #15
def segments(mesh, plot):
    vs, _, edges = mesh
    if edges_values_map == "sort":
        for i, (_, edge_idx) in enumerate(sorted(edges, key=lambda x: x[0])):
            edge = vs[edge_idx]
            line = a3.art3d.Line3DCollection([edge], linewidths=.8)
            line.set_color(cm.viridis((i + 1) / len(edges)))
            plot[0].add_collection3d(line)
    else:
        for (edge_c, edge_idx) in edges:
            edge = vs[edge_idx]
            line = a3.art3d.Line3DCollection([edge], linewidths=.8)
            if edges_values_map == "sqrt":
                line.set_color(cm.viridis(np.sqrt(edge_c)))
            elif edges_values_map == "log2":
                line.set_color(cm.viridis(np.log2(1 + edge_c)))
            else:
                line.set_color(cm.viridis(edge_c))
            plot[0].add_collection3d(line)
    return plot
Example #16
def pass_rose(df_passes, palette=None):
    """Based from https://gist.github.com/phobson/41b41bdd157a2bcf6e14"""

    if "pass_angle_deg" in df_passes.columns:
        pass
    else:
        print("Adding Pass Angle Degrees")
        df_passes['pass_angle_deg'] = (
            df_passes['pass_angle'].apply(pass_angle_deg))

    total_count = df_passes.shape[0]
    print('{} total observations'.format(total_count))

    dir_bins = np.arange(-7.5, 370, 15)
    dir_labels = (dir_bins[:-1] + dir_bins[1:]) / 2

    rosedata = (df_passes.assign(PassAngle_bins=lambda df: (pd.cut(
        df['pass_angle_deg'], bins=dir_bins, labels=dir_labels, right=False))))

    rosedata.loc[rosedata["PassAngle_bins"] == 360., "PassAngle_bins"] = 0.
    rosedata["PassAngle_bins"].cat.remove_categories([360], inplace=True)

    rosedata = (
        rosedata.groupby(by=['PassAngle_bins']).agg({"pass_length": "size"})
        # .unstack(level='PassAngle_bins')
        .fillna(0).sort_index(axis=0)
        #.applymap(lambda x: x / total_count * 100)
    )

    pass_dirs = np.arange(0, 360, 15)

    if palette is None:
        palette = sns.color_palette('inferno', n_colors=rosedata.shape[1])

    bar_dir, bar_width = _convert_dir(pass_dirs)

    fig, ax = plt.subplots(figsize=(5, 5), subplot_kw=dict(polar=True))
    ax.set_theta_direction('clockwise')
    ax.set_theta_zero_location('N')

    c1 = "pass_length"
    colors = cm.viridis(rosedata[c1].values / float(max(rosedata[c1].values)))
    print(len(bar_dir))
    print(len(rosedata[c1].values))
    # first column only
    ax.bar(bar_dir,
           rosedata[c1].values,
           width=bar_width,
           color=colors,
           edgecolor='none',
           label=c1,
           linewidth=0)

    leg = ax.legend(loc=(0.75, 0.95), ncol=2)
Example #17
def combined_line_plot(dictionary, timestep,
                       xlabel, ylabel, title,
                       outputname, init_year):
    """Creates a combined line plot of timestep vs dictionary

    Parameters
    ----------
    dictionary: dictionary
        dictionary with "key=description of timestep, and
        value=list of timestep progressions"
    timestep: numpy linspace
        timestep of simulation
    xlabel: str
        xlabel of plot
    ylabel: str
        ylabel of plot
    title: str
        title of plot
    init_year: int
        initial year of simulation

    Returns
    -------
    """
    # set different colors for each bar
    color_index = 0
    plt.figure()
    # for every country, create bar chart with different color
    for key in dictionary:
        # label is the name of the nuclide (converted from ZZAAA0000 format)
        if isinstance(key, str) is True:
            label = key.replace('_government', '')
        else:
            label = str(key)

        plt.plot(timestep_to_years(init_year, timestep),
                 dictionary[key],
                 label=label,
                 color=cm.viridis(float(color_index) / len(dictionary)))
        color_index += 1

    if sum(sum(dictionary[k]) for k in dictionary) > 1000:
        ax = plt.gca()
        ax.get_yaxis().set_major_formatter(
            plt.FuncFormatter(lambda x, loc: "{:,}".format(int(x))))
    plt.ylabel(ylabel)
    plt.title(title)
    plt.xlabel(xlabel)
    plt.legend(loc=(1.0, 0), prop={'size': 10})
    plt.grid(True)
    plt.savefig(label + '_' + outputname + '.png',
                format='png',
                bbox_inches='tight')
    plt.close()
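A hedged usage sketch. The numpy/matplotlib imports for the function above are assumed, and timestep_to_years is defined elsewhere in the original module; the stand-in below (one timestep per month) and the dictionary keys are only assumptions for the demo:
import numpy as np

def timestep_to_years(init_year, timestep):
    return init_year + np.asarray(timestep) / 12.0   # assumed monthly timesteps

timestep = np.linspace(0, 120, 121)
capacity = {'country_a_government': list(50 + 0.5 * timestep),
            'country_b_government': list(30 + 0.2 * timestep)}
combined_line_plot(capacity, timestep,
                   xlabel='Year', ylabel='Net capacity [GWe]',
                   title='Deployed capacity', outputname='capacity',
                   init_year=2020)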
Example #18
def strategy_3d_map(model):
    # target_angle_list = np.arange(-3,3.3,0.3)
    target_angle_list = np.arange(-2, 2.1, 0.1)
    v_list = np.arange(10 / 3.6, 55 / 3.6, 1)
    output = np.zeros((len(v_list), len(target_angle_list)))
    # print(output)
    # print(len(v_list),len(rc_list))
    target_angle_mesh, v_mesh = np.meshgrid(target_angle_list, v_list)
    # print(target_angle_mesh)
    # print(v_mesh[3,8],rc_mesh[3,8])
    # print(v_mesh)

    for i in range(0, (len(v_list))):
        for j in range(0, (len(target_angle_list))):
            # print(i,j)
            output[i][j] = model.choose_action(
                np.array([target_angle_mesh[i, j], v_mesh[i, j]]))
            # print(i,j,output[i][j])
    # print(output)

    from matplotlib import cm
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import axes3d

    # plt.ion()
    norm = plt.Normalize(output.min(), output.max())
    colors = cm.viridis(norm(output))
    rcount, ccount, _ = colors.shape

    fig3 = plt.figure(3)
    fig3.canvas.manager.window.wm_geometry('+3000+500')
    plt.cla()
    ax = fig3.add_subplot(projection='3d')  # gca(projection=...) was removed from Matplotlib
    # surf = ax.plot_surface(v_mesh, target_angle_mesh*180/np.pi, output*180/np.pi, rcount=rcount, ccount=ccount, facecolors=colors, shade=False)
    surf = ax.plot_surface(target_angle_mesh * 180 / np.pi,
                           v_mesh * 3.6,
                           output * 180 / np.pi,
                           rcount=rcount,
                           ccount=ccount,
                           facecolors=colors,
                           shade=False)
    surf.set_facecolor((0, 0, 0, 0))
    # ax.view_init(30, -90)
    # ax.view_init(30, -60)
    # ax.view_init(30, -70)
    ax.view_init(30, -110)
    zplate = ax.plot_surface(target_angle_mesh * 180 / np.pi,
                             v_mesh * 3.6,
                             np.zeros((len(v_list), len(target_angle_list))),
                             alpha=0.5)
    ax.plot(np.zeros_like(v_list), v_list * 3.6, 0, color='k', linewidth=2)
    ax.set_xlabel("target_angle(degree)")
    ax.set_ylabel("vehicle_speed(km/h)")
    ax.set_zlabel("steering_angle_compensation(degree)")
Example #19
def plot_rw_rccar_var001_var016():
    label_params = [
        # ['exp', ('exp_name',)],
        ['policy', ('policy', 'GCGPolicy', 'outputs', 0, 'name')],
        ['H', ('policy', 'H')],
        ['target', ('policy', 'use_target')],
        ['obs_shape', ('alg', 'env', 'params', 'obs_shape')]
    ]

    experiment_groups = [
        ExperimentGroup(os.path.join(DATA_DIR, 'rw_rccar/var{0:03d}'.format(num)),
                        label_params=label_params,
                        plot={
                        })
    for num in [1, 5, 9, 12, 13]]

    mec = MultiExperimentComparison(experiment_groups)

    lengths_list = []
    for exp in mec.list:
        eval_folder = os.path.join(exp.folder, 'eval_itr_0039')
        eval_pkl_fname = os.path.join(eval_folder, 'itr_0039_eval_rollouts.pkl')
        rollouts = mypickle.load(eval_pkl_fname)['rollouts']

        assert (len(rollouts) == 24)

        lengths = [len(r['dones']) for r in rollouts]
        lengths_list.append(lengths)

    f, ax = plt.subplots(1, 1)
    xs = np.vstack((np.r_[0:8.] + 0.,
                    np.r_[0:8.] + 0.1,
                    np.r_[0:8.] + 0.2,)).T.ravel()
    legend_patches = []
    for i, (exp, lengths) in enumerate(zip(mec.list, lengths_list)):
        lengths = np.reshape(lengths, (8, 3))
        width = 0.6 / float(len(lengths_list))
        color = cm.viridis(i / float(len(lengths_list)))
        label = 'median: {0}, {1}'.format(np.median(lengths), exp.plot['label'])

        bp = ax.boxplot(lengths.T, positions=np.arange(len(lengths)) + 1.2 * i * width, widths=width, patch_artist=True)
        for patch in bp['boxes']:
            patch.set_facecolor(color)
        legend_patches.append(mpatches.Patch(color=color, label=label))
        # ax.plot(xs, lengths, label=exp.plot['label'], linestyle='None', marker='o')
    ax.legend(handles=legend_patches)
    ax.xaxis.set_ticks(np.arange(8))
    ax.xaxis.set_ticklabels(np.arange(8))
    ax.set_xlim((-0.5, 8.5))
    ax.set_xlabel('Start Position Number')
    ax.set_ylabel('Timesteps survived')
    plt.show()

    import IPython; IPython.embed()
Example #20
def no_fit(path, fixed_a, fixed_loc, _a, _b, save_image=False):

    matplotlib.rcParams['savefig.dpi'] = 300

    def modified_gamma_3(x, beta):
        a = fixed_a
        loc = fixed_loc
        # scale = _a * beta + _b
        scale = _a * np.log(beta) + _b
        return gamma.pdf(x, a=a, loc=loc, scale=scale)

    betas = []
    scale = []

    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))
        ax.plot(Ls, M_ave, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))
        betas.append(beta)

        x = np.linspace(0, max(Ls), num=5*max(Ls))
        ax.plot(x, modified_gamma_3(x, beta),
                '-',
                # label=r'fitted $\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))

    show_plot1(ax, num_of_strings)

    if save_image:
        result_image_path = "../results/img/diecutting/fitted_gamma_fixed_a_x0"
        result_image_path += "_" + time.strftime("%y%m%d_%H%M%S")
        pdf = PdfPages(result_image_path + ".pdf")
        plt.savefig(result_image_path + ".png")
        pdf.savefig()
        pdf.close()
        plt.close()
        print "[saved] " + result_image_path
    else:
        plt.show()
        plt.close()
Example #21
def plotRegression(simple, turner, slope, intercept, r_value):
    import matplotlib.pyplot as plt
    import matplotlib.pylab as pylab
    import matplotlib.cm as cm

    # make histogram plot
    params = {
        'legend.fontsize': 'x-large',
        'figure.figsize': (7, 5),
        'axes.labelsize': 'x-large',
        'axes.titlesize': 'x-large',
        'xtick.labelsize': 'x-large',
        'ytick.labelsize': 'x-large'
    }
    pylab.rcParams.update(params)
    #fig, axes = plt.subplots(3, 1, sharex=False, sharey=False)
    fig = plt.figure()
    plt.xlabel("Energy of Structure in Simple Model [kcal/mol]")
    plt.ylabel("Energy of Structure in Turner Model [kcal/mol]")
    fig.legend().set_visible(False)
    color_i = 0.0

    for t in slope.keys():
        plt.plot(simple[:, t],
                 turner[:, t],
                 '.',
                 color=cm.viridis(color_i),
                 label=str(t),
                 alpha=0.5)
        plt.plot(simple[:, t],
                 slope[t] * simple[:, t] + intercept[t],
                 '-',
                 color=cm.viridis(color_i))
        fig.text(0.16,
                 0.81 - color_i / 7,
                 "$R^{2}$ = " + "{0:.3f}".format(r_value[t]),
                 color=cm.viridis(color_i))
        # increment color
        color_i += 0.3
    #plt.legend(loc='upper left')
    fig.savefig('regression.svg', dpi=300)
Example #22
def prepare_spec_for_render(spec, score, scale_factor=5):
    spec_excerpt = cv2.resize(
        np.flipud(spec),
        (spec.shape[1] * scale_factor, spec.shape[0] * scale_factor))

    perf_img = np.pad(cm.viridis(spec_excerpt)[:, :, :3],
                      ((score.shape[0] // 2 - spec_excerpt.shape[0] // 2 + 1,
                        score.shape[0] // 2 - spec_excerpt.shape[0] // 2),
                       (20, 20), (0, 0)),
                      mode="constant")

    return perf_img
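A minimal usage sketch (assumptions: cv2, numpy as np and matplotlib.cm as cm are imported for the function above; array shapes are illustrative):
import numpy as np

spec = np.random.rand(20, 100)        # fake spectrogram: 20 bins x 100 frames
score = np.zeros((400, 600, 3))       # fake score image, taller than the resized spec
perf_img = prepare_spec_for_render(spec, score, scale_factor=5)
print(perf_img.shape)                 # colormapped, padded RGB image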
Example #23
def husimi_3d(state,
              xrange,
              yrange,
              N=100,
              fname='fig_husimi_3d.eps',
              cmap='viridis',
              alpha=1.0):
    """
    to visualize a 3d Husimi function

    Parameters:
    ------------
    state: quantum object
        A given quantum state needed to visualize
    xrange, yrange: array-like(2)
        The minimum and maximum values of the coordinates 
    N: integer
        number of steps for xrange, yrange
    fname: string
        File name  
    cmap: str or Colormap
        A colormap instance or colormap name (default: 'viridis') 

    Returns:
    A file with fname
    """

    xarray = np.linspace(xrange[0], xrange[1], N)
    yarray = np.linspace(yrange[0], yrange[1], N)
    zarray = husimi(state, xarray, yarray)
    zarray /= amax(zarray)

    xx, yy = meshgrid(xarray, yarray)

    norm = plt.Normalize(zarray.min(), zarray.max())
    colors = cm.viridis(norm(zarray))

    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')  # gca(projection=...) was removed from Matplotlib
    ax.plot_surface(xx,
                    yy,
                    zarray,
                    cmap=cmap,
                    rstride=1,
                    cstride=1,
                    linewidth=0,
                    facecolors=colors)

    plt.xlabel("x")
    plt.ylabel("y")

    _printout(fname)
    plt.savefig(fname, dpi=25)
Example #24
def plot_test_different_smoothing(args):

    OUTPUT = dict(np.load(args.datafile_input))

    fig_optimum, [[ax, ax1]] = figure(figsize=(.5, .16),
                                      right=0.85,
                                      top=0.9,
                                      bottom=1.2,
                                      left=.6,
                                      wspace=1.8,
                                      axes=(1, 2))

    i0 = np.argmax(np.mean(OUTPUT['CROSS_CORRELS'], axis=-1))

    mean_Output = np.mean(OUTPUT['CROSS_CORRELS'], axis=-1)

    Tsmooth = 1e3 * OUTPUT['T_SMOOTH']
    ax.plot(Tsmooth, mean_Output, color='k', lw=2)
    ax.scatter([1e3 * OUTPUT['T_SMOOTH'][i0]],
               [np.mean(OUTPUT['CROSS_CORRELS'], axis=-1)[i0]],
               marker='o',
               color=Brown,
               facecolor='None')
    ax.annotate('$T_{opt}$', (Tsmooth[i0] + 4, ax.get_ylim()[0]),
                color=Brown,
                fontsize=FONTSIZE)
    ax.plot(np.array([Tsmooth[i0], Tsmooth[i0]]),
            [mean_Output[i0], ax.get_ylim()[0]],
            '--',
            color=Brown,
            lw=1)

    order = np.argsort(np.mean(OUTPUT['CROSS_CORRELS'], axis=0))
    for i in range(len(order)):
        ax1.plot(Tsmooth,
                 OUTPUT['CROSS_CORRELS'][:, order[i]],
                 color=viridis(i / (len(order) - 1)))
    ax1.plot(Tsmooth, mean_Output, '-', color='k', lw=0.5)
    ax1.fill_between(Tsmooth,\
                     mean_Output+np.std(OUTPUT['CROSS_CORRELS'], axis=-1),
                     mean_Output-np.std(OUTPUT['CROSS_CORRELS'], axis=-1),
                     lw=0, color='k', alpha=.2)

    set_plot(ax, xlabel=' $T_{smoothing}$ (ms)', ylabel='cc $V_m$-pLFP')
    set_plot(ax1, xlabel=' $T_{smoothing}$ (ms)', ylabel='cc $V_m$-pLFP')
    acb = plt.axes([.86, .4, .02, .4])
    cb = build_bar_legend(np.arange(len(order)),
                          acb,
                          viridis,
                          no_ticks=True,
                          label='cell index \n (n=' + str(len(order)) +
                          'cells)')
    return [fig_optimum]
Example #25
def gaussian_plot(gmm, X, labels, true_labels=True, fig_num=0, title=''):
    plt.figure(fig_num)
    plt.ylabel('p(X)')
    plt.xlabel('VOT')
    plt.title(title)

    means = gmm.means_.flatten()
    stdevs = [np.sqrt(x) for x in gmm.covariances_.flatten()]
    weights = gmm.weights_.flatten()

    x = np.arange(min(X), max(X), 5)  #range between data min and max by 5
    pdfs = [
        p * ss.norm.pdf(x, mu, sd)
        for mu, sd, p in zip(means, stdevs, weights)
    ]
    density = np.sum(np.array(pdfs), axis=0)

    #get colors
    start = 0.0
    stop = 1.0
    num_lines = len(gmm.means_)  #num_classes.
    # note: for dpgmm only the predicted gaussians will be plotted, but gmm.means_ etc. has the max allowed number of gaussians
    print(np.unique(
        labels))  #(debug) prints idxs of final gaussians relative to init
    print('num classes: %d' % len(np.unique(labels)))
    cm_subsection = np.linspace(start, stop, num_lines)
    colors = [cm.viridis(x) for x in cm_subsection]

    #if true_labels map gaussian order to label order
    if true_labels:
        mean_order = np.argsort(means)
        sorted_colors = [colors[i] for i in mean_order]
        color_labels = [sorted_colors[i] for i in labels]
    else:
        color_labels = [colors[i] for i in labels]

    #plot gmm
    plt.plot(x, density, 'k--')
    plt.scatter(X,
                len(X) * [0],
                c=color_labels,
                s=40,
                cmap='viridis',
                alpha=0.1)

    #plot individual gaussians
    for i, (mu, sd, p) in enumerate(zip(means, stdevs, weights)):
        if not any(x == i for x in labels):
            continue
        plt.plot(x, ss.norm.pdf(x, mu, sd), color=colors[i])

    return
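A hedged usage sketch (assumptions: numpy as np, scipy.stats as ss, matplotlib.pyplot as plt and matplotlib.cm as cm are imported for the function above; the two VOT clusters are synthetic):
import numpy as np
import matplotlib.pyplot as plt
from sklearn.mixture import GaussianMixture

rng = np.random.default_rng(0)
X = np.concatenate([rng.normal(10, 5, 200),     # short-lag cluster
                    rng.normal(60, 10, 200)])   # long-lag cluster
gmm = GaussianMixture(n_components=2, random_state=0).fit(X.reshape(-1, 1))
labels = gmm.predict(X.reshape(-1, 1))
gaussian_plot(gmm, X, labels, true_labels=False, title='Synthetic VOT mixture')
plt.show()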
Example #26
def eval_model(global_step, writer, device, model, checkpoint_dir, ismultispeaker):
    # hard coded
    texts = [
        "Scientists at the CERN laboratory say they have discovered a new particle.",
        "There's a way to measure the acute emotional intelligence that has never gone out of style.",
        "President Trump met with other leaders at the Group of 20 conference.",
        "Generative adversarial network or variational auto-encoder.",
        "Please call Stella.",
        "Some have accepted this as a miracle without any physical explanation.",
    ]
    import synthesis
    synthesis._frontend = _frontend

    eval_output_dir = join(checkpoint_dir, "eval")
    os.makedirs(eval_output_dir, exist_ok=True)

    # Prepare model for evaluation
    model_eval = build_model().to(device)
    model_eval.load_state_dict(model.state_dict())

    # hard coded
    speaker_ids = [0, 1, 10] if ismultispeaker else [None]
    for speaker_id in speaker_ids:
        speaker_str = "multispeaker{}".format(speaker_id) if speaker_id is not None else "single"

        for idx, text in enumerate(texts):
            signal, alignment, _, mel = synthesis.tts(
                model_eval, text, p=0, speaker_id=speaker_id, fast=True)
            signal /= np.max(np.abs(signal))

            # Alignment
            path = join(eval_output_dir, "step{:09d}_text{}_{}_alignment.png".format(
                global_step, idx, speaker_str))
            save_alignment(path, alignment)
            tag = "eval_averaged_alignment_{}_{}".format(idx, speaker_str)
            writer.add_image(tag, np.uint8(cm.viridis(np.flip(alignment, 1).T) * 255), global_step)

            # Mel
            writer.add_image("(Eval) Predicted mel spectrogram text{}_{}".format(idx, speaker_str),
                             prepare_spec_image(mel), global_step)

            # Audio
            path = join(eval_output_dir, "step{:09d}_text{}_{}_predicted.wav".format(
                global_step, idx, speaker_str))
            audio.save_wav(signal, path)

            try:
                writer.add_audio("(Eval) Predicted audio signal {}_{}".format(idx, speaker_str),
                                 signal, global_step, sample_rate=fs)
            except Exception as e:
                warn(str(e))
                pass
Example #27
 def lda_analysis(self, X_train, X_test, y_train, y_test, data_set_name):
     scl = RobustScaler()
     X_train_scl = scl.fit_transform(X_train)
     X_test_scl = scl.transform(X_test)
     
     ##
     ## Plots
     ##
     ph = plot_helper()
     
     scores = []
     train_scores = []
     rng = range(1, X_train_scl.shape[1]+1)
     for i in rng:
         lda = LinearDiscriminantAnalysis(n_components=i)
         cv = KFold(n_splits=3, shuffle=True)  # current scikit-learn KFold API; indices come from cv.split()
         
         # cross validation
         cv_scores = []
         for (train, test) in cv.split(X_train_scl):
             lda.fit(X_train_scl[train], y_train[train])
             score = lda.score(X_train_scl[test], y_train[test])
             cv_scores.append(score)
         
         mean_score = np.mean(cv_scores)
         scores.append(mean_score)
         
         # train score
         lda = LinearDiscriminantAnalysis(n_components=i)
         lda.fit(X_train_scl, y_train)
         train_score = lda.score(X_train_scl, y_train)
         train_scores.append(train_score)
         
         print(i, mean_score)
         
     ##
     ## Score Plot
     ##
     title = 'Score Summary Plot (LDA) for ' + data_set_name
     name = data_set_name.lower() + '_lda_score'
     filename = './' + self.out_dir + '/' + name + '.png'
                 
     ph.plot_series(rng,
                    [scores, train_scores],
                    [None, None],
                    ['cross validation score', 'training score'],
                    cm.viridis(np.linspace(0, 1, 2)),
                    ['o', '*'],
                    title,
                    'n_components',
                    'Score',
                    filename)
Example #28
def viz_duo(x_sample, y_sample, name="test.png", show=True, alpha=0.2):
    x_sample = resize(x_sample,
                      (12, 12, 12))  # resize, otherwise it's super slow
    y_sample = resize(y_sample,
                      (12, 12, 12))  # resize, otherwise it's super slow

    fig = plt.figure()
    ax = fig.add_subplot(121, projection="3d")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_zlabel("z")
    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_zticks([])
    ax.set_title("Real")
    colours = cm.viridis(x_sample)
    colours = explode(colours)
    filled = colours[:, :, :, -1] != 0
    x, y, z = expand_coordinates(np.indices(np.array(filled.shape) + 1))
    ax.voxels(x, y, z, filled, facecolors=colours, alpha=alpha)

    ax = fig.add_subplot(122, projection="3d")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_zlabel("z")
    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_zticks([])
    ax.set_title("Predicted")

    colours = cm.viridis(y_sample)
    colours = explode(colours)
    filled = colours[:, :, :, -1] != 0
    x, y, z = expand_coordinates(np.indices(np.array(filled.shape) + 1))
    ax.voxels(x, y, z, filled, facecolors=colours, alpha=alpha)

    if show:
        plt.show()
    plt.close()
Example #29
def result_n2(path):
    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))
        ax.plot(Ls[1:], n2, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))
    ax.legend(loc='best')
    ax.set_title('Averaged number of the sites on the cutting edges which \
                is connected to two neighbors.' + 
                ' (sample: {})'.format(num_of_strings))
    ax.set_xlabel(r'Cutting size $L$')
    ax.set_ylabel(r'$n_{2}$')
    plt.show()
Example #30
 def result_N_minus_rate(self):
     fig, ax = plt.subplots()
     for i, result_data_path in enumerate(self.data_path_list):
         self.load_data(result_data_path)
         ax.plot(self.Ls[1:], self.N_minus_rate[1:], '.',
                 label=r'$\beta = %2.2f$' % self.beta,
                 color=cm.viridis(float(i) / len(self.data_path_list)))
     ax.legend(loc='best')
     ax.set_title('The rate of not occupied site in all N' +
                 ' (sample: {})'.format(self.num_of_strings))
     ax.set_xlabel(r'Cutting size $L$')
     ax.set_ylabel(r'$N_{-1} / N_{\mathrm{all}}$')
     plt.show()
Example #31
def task_c():
    n = 5
    Betas = 10.**(-1 * np.linspace(1, 4, n))

    for i, b in enumerate(Betas):
        Ngs, Ns = loop(b, d * log(2 / b))
        plt.plot(Ns * (1 / sqrt(b))**3,
                 Ngs / Ns,
                 color=cm.viridis(i / n),
                 label="$\\beta={:.3f}$".format(b))

    plt.legend()
    plt.show()
Example #32
def save_gradcam(gcam, original_image, axarr, i):
    cmap = cm.viridis(np.squeeze(gcam.numpy()))[..., :3] * 255.0
    raw_image = (
        (
            (original_image - original_image.min())
            / (original_image.max() - original_image.min())
        )
        * 255
    ).astype("uint8")
    gcam = (cmap.astype(float) + raw_image.astype(float)) / 2
    axarr[1].imshow(np.uint8(gcam))
    axarr[1].axis("off")
    plt.savefig("CNN_viz1_{}.png".format(i), dpi=200, bbox_inches="tight")
Example #33
 def export_response(self):
     # resp = np.mean(self.norm_stack[..., self.n_baseline:(self.n_baseline+30)], 2)
     if 'resp_map' not in self.file['df']:
         self.resp_mapping()
     resp = self.file['df']['resp_map'][()]
     # resp = np.clip(resp, 0, 2/100)
     # resp = exposure.equalize_hist(resp)
     resp = viridis(gauss_filt(resp, 3))[..., :3]
     resp[self.max_project <= 0, :] = 0
     # resp = 255 * (resp - resp.min()) / (resp.max() - resp.min())
     resp = img_to_uint8(resp)
     # resp = np.uint8(resp)
     imsave(self.path.parent / f'response_{self.path.name}.png', resp)
Example #34
def f():
    color = cm.viridis(0.5)
    f, ax = plt.subplots(1, 1)
    ax.plot(iter, totalreward, color=color)
    ax.legend()
    ax.set_xlabel('Iteration')
    ax.set_ylabel('Return')
    exp_dir = 'Plot/'
    os.makedirs(exp_dir, exist_ok=True)  # both branches did the same thing
    f.savefig(os.path.join('Plot', 'reward' + '.png'), dpi=1000)
Example #35
def plot_cube(cube, name='test.png'):
    cube = normalize_plot(cube)

    facecolors = cm.viridis(cube)
    facecolors[:, :, :, -1] = 0.1 * cube
    facecolors = explode(facecolors)

    filled = facecolors[:, :, :, -1] != 0
    x, y, z = expand_coordinates(np.indices(np.array(filled.shape) + 1))
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')  # gca(projection=...) was removed from Matplotlib
    ax.voxels(x, y, z, filled, facecolors=facecolors, alpha=0.5)
    plt.savefig(name)
Example #36
def scatter(x1, x2, Y):

    if Y is None:
        Y = np.ones(x1.shape[0])

    uY = np.unique(Y)
    colors = cm.viridis(np.linspace(0, 1, len(uY)))

    for i, y in enumerate(uY):

        inds = Y == y

        plt.scatter(x1[inds], x2[inds], s=10, color=colors[i], label=y)
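A minimal usage sketch (assumptions: numpy as np, matplotlib.pyplot as plt and matplotlib.cm as cm are imported for the helper above):
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
x1 = rng.normal(size=300)
x2 = x1 + rng.normal(scale=0.5, size=300)
Y = (x2 > 0).astype(int)              # two classes, colored along viridis
scatter(x1, x2, Y)
plt.legend()
plt.show()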
Example #37
def make_frame(time):
    i = int(time*fps)
    autoim = autocorrImages[i]
    norm = mplcol.Normalize(vmin=minA, vmax=maxA)

    #ts = img.metadata['t_s']
    ts = i/24.
    frame = i
    # img_as_ubyte scales the colormapped frame to 0-255 (how moviepy likes its colors);
    # the alpha channel, which confuses moviepy, is dropped on return below.
    autoim = Image.fromarray(img_as_ubyte(cm.viridis(norm(autoim))))
    draw = ImageDraw.Draw(autoim)
    draw.text((0, 0), "time: " + str(datetime.timedelta(seconds=float(ts))),
              font=font, fill=(255, 255, 255, 255))
    draw.text((0, 400), "frame: " + str(frame), font=font, fill=(255, 255, 255, 255))
    return np.asarray(autoim)[:,:,:3]
Example #38
 def result_N(self):
     fig, ax = plt.subplots()
     for i, result_data_path in enumerate(self.data_path_list):
         self.load_data(result_data_path)
         ax.plot(self.Ls[1:], self.N[1:], '.',
                 label=r'$\beta = %2.2f$' % self.beta,
                 color=cm.viridis(float(i) / len(self.data_path_list)))
     ax.legend(loc='best')
     ax.set_title('Occupied points in the cutting region' +
                 ' (sample: {})'.format(self.num_of_strings))
     ax.set_xlabel(r'Cutting size $L$')
     ax.set_ylabel(r'$N$')
     plt.show()
Example #39
def plot_kde():
    # Load files
    print('Loading DIALS files')
    expt_file = "dials_temp_files/mega_ultra_refined.expt"
    refl_file = "dials_temp_files/mega_ultra_refined.refl"
    elist = ExperimentListFactory.from_json_file(expt_file, check_format=False)
    refls = reflection_table.from_file(refl_file)

    # Remove outliers
    print('Removing outliers')
    idx = refls.get_flags(refls.flags.used_in_refinement).as_numpy_array()
    idy = np.arange(len(elist))[idx].tolist()
    elist = ExperimentList([elist[i] for i in idy])
    refls = refls.select(flex.bool(idx))

    # Generate KDE
    normalized_resolution, lams, kde = gen_kde(elist, refls)

    # Make a mesh
    print('Making a meshgrid')
    N = 50
    x = np.linspace(min(normalized_resolution), max(normalized_resolution), N)
    y = np.linspace(min(lams), max(lams), N)
    z = np.zeros(shape=(len(x), len(y)))
    zeros = np.zeros(len(x))

    # Evaluate PDF on mesh
    for x0, y0 in it.product(x, y):
        i = np.where(x == x0)[0][0]
        j = np.where(y == y0)[0][0]
        z[i, j] = kde.pdf([x0, y0])

    # Plot a wireframe
    print('Plotting')
    norm = plt.Normalize(z.min(), z.max())
    colors = cm.viridis(norm(z))
    rcount, ccount, _ = colors.shape
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')
    surface = ax.plot_surface(x[:, None] + zeros,
                              y[None, :] + zeros,
                              z,
                              rcount=rcount,
                              ccount=ccount,
                              facecolors=colors,
                              shade=False)
    surface.set_facecolor((0, 0, 0, 0))
    plt.xlabel('$1/d^2$ (A$^{-2}$)')
    plt.ylabel(r'$\lambda$ (A)')  # raw string avoids the invalid \l escape
    plt.title('PDF of Reflections')
    plt.show()
Example #40
    def show_gen_results(self,title='Generation evolution'):
        import matplotlib.cm as cm
        final_arc = self.ea_archive
        gens=[]
        div = 100
        lengen  = len(self.storegens)        
        num = max(1, int(lengen / div))  # guard against a zero step when len(self.storegens) < div
#        print num
#        print lengen
        i = 0
#        print self.storegens
        while i < len(self.storegens):
            gens.append(self.storegens[i])
            i = i + num
        
        x = []
        y = []
        for f in final_arc:
            x.append(f.fitness[0])
            y.append(f.fitness[1])
        x = -np.array(x)
        y = np.array(y)

#        fig = plt.figure()
#        ax  = plt.axes()
#        ax = fig.add_subplot(111)
        fig,ax = plt.subplots()
        ax.grid()
        ax.set_axisbelow(True)
        ax.yaxis.grid(color='gray', linestyle='dashed')
        ax.xaxis.grid(color='gray', linestyle='dashed')
        cols = cm.viridis(np.linspace(0,1,div+1))
        alpha = .2
        for i in range(0,len(gens)):
#            print i
            xi = -np.array(gens[i])[:,0]
            yi = np.array(gens[i])[:,1]
            sc=ax.scatter(xi,yi,marker='o',c=cols[i],s=7,alpha=0.5)
#            ax.scatter(xi,yi,marker='*')
            alpha = alpha + 0.8/(lengen/num)
#            print alpha
        ax.scatter(x, y, c=cols[len(gens)-1],marker='o',s=8)
        cax, _ = matplotlib.colorbar.make_axes(ax)
        cmap = matplotlib.cm.get_cmap('viridis')
        normalize = matplotlib.colors.Normalize(vmin=0,vmax=self.generations)
        cbar = matplotlib.colorbar.ColorbarBase(cax,cmap=cmap,norm=normalize)
        cbar.set_label('Generation',rotation=270,fontsize=12)
        ax.set_title(title,fontsize=12)
        ax.set_xlabel('L/D',fontsize=12)
        ax.set_ylabel('Mass (kg)',fontsize=12)
        plt.show()
Example #41
 def result_S(self):
     fig, ax = plt.subplots()
     for i, result_data_path in enumerate(self.data_path_list):
         self.load_data(result_data_path)
         ax.plot(self.Ls[1:], self.S[1:] / np.sum(self.S[1:]), '.',
                 label=r'$\beta = %2.2f$' % self.beta,
                 color=cm.viridis(float(i) / len(self.data_path_list)))
     ax.legend(loc='best')
     ax.set_ylim([0, ax.get_ylim()[1]])
     ax.set_title('Averaged number of the subclusters in the cutted region.'
                  + ' (sample: {})'.format(self.num_of_strings))
     ax.set_xlabel(r'Cutting size $L$')
     ax.set_ylabel(r'$S$')
     plt.show()
Example #42
 def result_n_minus(self):
     fig, ax = plt.subplots()
     for i, result_data_path in enumerate(self.data_path_list):
         self.load_data(result_data_path)
         ax.plot(self.Ls[1:], self.n_minus, '.',
                 label=r'$\beta = %2.2f$' % self.beta,
                 color=cm.viridis(float(i) / len(self.data_path_list)))
     ax.legend(loc='best')
     ax.set_title('Averaged number of the sites which is not occupied on \
                  the cutting edges.' + 
                 ' (sample: {})'.format(self.num_of_strings))
     ax.set_xlabel(r'Cutting size $L$')
     ax.set_ylabel(r'$n_{-1}$')
     plt.show()
Example #43
def task_b():
    n = 3
    Betas = 10.**(-1 * np.linspace(1, 5, n))

    for i, b in enumerate(Betas):
        Ngs, Ns = loop(b, d)

        plt.plot(Ns,
                 Ngs / Ns,
                 color=cm.viridis(i / n),
                 label="$\\beta={:.5f}$".format(b))

    plt.legend()
    plt.show()
Example #44
def power_density_3ds(srs, surface, ax,
                    normal=1, rotations=[0, 0, 0], translation=[0, 0, 0], nparticles=0, gpu=0, nthreads=0,
                    alpha=0.4, transparent=True, max_level=-2):
    """calculate power density for and plot a parametric surface in 3d"""

    points = []


    for u in np.linspace(surface.ustart, surface.ustop, surface.nu):
        for v in np.linspace(surface.vstart, surface.vstop, surface.nv):
            points.append([surface.position(u, v), surface.normal(u, v)])


    power_density = srs.calculate_power_density(points=points, normal=normal, rotations=rotations, translation=translation, nparticles=nparticles, gpu=gpu, nthreads=nthreads, max_level=max_level)
    P = [item[1] for item in power_density]

    X2 = []
    Y2 = []
    Z2 = []
    for i in range(surface.nu):
        tX = []
        tY = []
        tZ = []
        for j in range(surface.nv):
            tX.append(power_density[i * surface.nv + j][0][0])
            tY.append(power_density[i * surface.nv + j][0][1])
            tZ.append(power_density[i * surface.nv + j][0][2])
        X2.append(tX)
        Y2.append(tY)
        Z2.append(tZ)

    colors =[]
    MAXP = max(P)
    PP = []
    for i in range(surface.nu):
        tmpP = []
        tmpPP = []
        for j in range(surface.nv):
            tmpP.append(P[i * surface.nv + j] / MAXP)
            tmpPP.append(P[i * surface.nv + j])

        colors.append(tmpP)
        PP.append(tmpPP)

    # ax.invert_xaxis()
    return ax.plot_surface(X2, Z2, Y2, facecolors=cm.viridis(colors), rstride=1, cstride=1, alpha=alpha)
Example #45
def plot_tortuosity():
    result_data_paths = get_paths()
    fig = plt.figure()
    ax = fig.add_subplot(111)
    betas = [0, 2, 4, 6, 8, 10]
    for beta, result_data_path in zip(betas, result_data_paths):
        x, y = calc_tortuosity_for_each_beta(result_data_path)
        ax.plot(x, y, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(beta) / (2 * (len(betas) - 1))))
    ax.set_xlabel(r'Path length $L$')
    ax.set_ylabel(r'Tortuosity $T$')
    ax.set_ylim(0, ax.get_ylim()[1])
    ax.set_title(r'Tortuosity $T = L / \lambda_{avg}$')
    ax.legend(loc='best')
    plt.show()
Example #46
    def plot_simple_bar(self, x, values, labels, xlab, ylab, title, filename):
        plt.clf()
        plt.cla()
        
        fig, ax = plt.subplots()
        
        ax.xaxis.set_major_locator(ticker.MaxNLocator(integer=True))
        
        ax.bar(np.arange(1, len(values)+1), values, align='center', color=cm.viridis(np.linspace(0, 1, len(values))))
        ax.set_xticks(x)
        ax.set_xticklabels(labels)

        plt.grid()
        plt.title(title)
        plt.xlabel(xlab)
        plt.ylabel(ylab)
        
        plt.savefig(filename)
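A hedged usage sketch (assumption: ph is an instance of the plotting-helper class that defines plot_simple_bar above, with numpy, matplotlib and ticker imported for it):
import numpy as np

values = [12, 7, 19, 4]
ph.plot_simple_bar(x=np.arange(1, len(values) + 1),
                   values=values,
                   labels=['a', 'b', 'c', 'd'],
                   xlab='Category', ylab='Count',
                   title='Counts per category',
                   filename='simple_bar.png')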
Example #47
def result_n0(path):
    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))
        # ax.plot(Ls, S, '.', label=r'$\beta = %2.2f$' % beta,
        #         color=cm.viridis(float(i) / len(path)))
        # ax.plot(Ls, N0, '.', label=r'$\beta = %2.2f$' % beta,
        #         color=cm.viridis(float(i) / len(path)))
        ax.plot(Ls, N1 / (6. * Ls) , '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))

    ax.legend(loc='best')
    ax.set_ylim((0., 20.))
    ax.set_title('Strings in hexagonal region' +
                ' (sample: {})'.format(num_of_strings))
    ax.set_xlabel(r'Cutting size $L$')
    ax.set_ylabel(r'$n_{0}$')

    plt.show()
Example #48
def regression_sample(true_func=np.sin, x_scale=3.):
    """
    regression problem for continuous targets
    :param float x_scale: scale of the data; samples are generated in the range [-x_scale, x_scale].
    :return:
    """
    x, t = generate_continuous_data(true_function=true_func, x_scale=x_scale)

    trained_models = []
    iteration_dist = [5, 10, 20, 40, 100]
    for n_iter in iteration_dist:
        # Define the GradientBoostedDT.
        # Since this is a regression problem on continuous targets:
        #   objective: squared loss (LeastSquare)
        #   activation: identity map (f(x) = x)
        # The loss used to evaluate the current fit is also the squared-loss function.
        rmse_objective = gb.LeastSquare()
        loss_function = gb.functions.least_square
        clf = gb.GradientBoostedDT(
            objective=rmse_objective, loss=loss_function,
            max_depth=4, num_iter=n_iter, gamma=.01, lam=.1, eta=.1)
        clf.fit(x=x, t=t)
        trained_models.append(clf)

    x_test = np.linspace(-x_scale, x_scale, 100).reshape(100, 1)
    fig = plt.figure(figsize=(6, 6))
    ax_i = fig.add_subplot(1, 1, 1)
    ax_i.plot(x_test, true_func(x_test), "--", label='True Function', color="C0")
    ax_i.scatter(x, t, s=50, label='Training Data', linewidth=1., edgecolors="C0", color="white")
    ax_i.set_xlabel("Input")
    ax_i.set_ylabel("Target")

    for i, (n_iter, model) in enumerate(zip(iteration_dist, trained_models)):
        y = model.predict(x_test)
        ax_i.plot(x_test, y, "-", label='n_iter: {}'.format(n_iter), color=cm.viridis(i / len(iteration_dist), 1))
    ax_i.legend(loc=4)
    ax_i.set_title("Transition by Number of Iterations")
    fig.savefig('experiment_figures/regression.png')
Example #49
    # (truncated snippet: the tail of a data helper, e.g. manual_data(), that builds and returns Ds)
    Ds = np.array(Ds)
    return Ds


if __name__ == '__main__':
    frames_list = [200, 400, 600, 800, 1000, 1200, 1400, 1600, 1800, 2000]
    ##             0    1    2    3    4     5     6     7     8     9
    beta_list = [0, 2, 4, 6, 8, 10]
    ##           0  1  2  3  4  5

    Ds = read_from_csv('./results/img/mass_in_r/data_170122.csv').T
    # Ds = manual_data()

    markers = ['o', 'v', '^', 's', 'D', 'h']

    fig, ax = plt.subplots()
    for i, beta in enumerate(beta_list):
        color = cm.viridis(float(i) / (len(beta_list) - 1))
        ax.plot(frames_list, Ds[i], marker=markers[i % len(markers)],
                ls='', color=color, label=r'$\beta = %2.2f$' % beta)
    # ax.legend(loc='best')
    ax.legend(bbox_to_anchor=(1.02, 1), loc='upper left', borderaxespad=0, numpoints=1)
    fig.subplots_adjust(right=0.8)
    ax.set_title(r'Fractal dimension $D$')
    ax.set_xlabel(r'$T$')
    ax.set_ylabel(r'$D$')
    ax.set_xlim(0, 2200)
    ax.set_ylim(1., 2.)
    plt.show()
Example #50
def plotTransitPerCapita(station, trip, lmstats):
    lm_stations = station.groupby(['landmark', 'station_id'])['station_id'].count()
    lm_stations = { lm: list(lm_stations[lm].index.values) for lm in station['landmark'].values}

    # Prep all of the fields we want
    term_st_counts = trip.groupby(['Start Terminal'])['Start Terminal'].count()
    term_st_counts.name = 'Start Terminal Count'
    term_sub_perc_counts = trip.groupby(['Start Terminal', 'Subscription Type'])['Subscription Type'].count()
    term_sub_perc_counts = term_sub_perc_counts.unstack('Subscription Type')
    term_sub_perc_counts = term_sub_perc_counts['Subscriber']
    term_sub_perc_counts.name = 'Subscriber Count'
    term_dur_cum = trip.groupby(['Start Terminal'])['Duration'].sum()
    term_dur_cum.name = 'Cum Duration'
    term_lm = pd.Series([ station[station['station_id'] == i]['landmark'].values[0] for i in term_st_counts.index.values ], index=term_st_counts.index.values)
    term_lm.name = 'landmark'

    # Group by landmark and clean up data
    term_stats = pd.concat([term_lm, term_st_counts, term_sub_perc_counts, term_dur_cum], axis=1)
    term_stats = term_stats.groupby('landmark')[['Start Terminal Count', 'Subscriber Count', 'Cum Duration']].sum()
    term_stats['Subscriber Fraction'] = term_stats['Subscriber Count'] / term_stats['Start Terminal Count']
    term_stats['Avg Duration'] = term_stats['Cum Duration'] / term_stats['Start Terminal Count']

    # get some of that altitude data in here!
    lmelev = station.groupby(['landmark'])['elev'].agg(['mean', 'std'])
    lmelev = lmelev.rename(columns={'mean': 'Elevation Mean', 'std': 'Elevation Std'})

    # Lump in our population and income stats from wikipedia
    term_stats = pd.concat([term_stats, lmstats, lmelev], axis=1);
    term_stats['household_median_income'] = term_stats['household_median_income'] / 1000
    term_stats['per_cap_income'] = term_stats['per_cap_income'] / 1000
    term_stats['Avg Duration'] = term_stats['Avg Duration'] / 60

    print(term_stats)

    fig = plt.figure()
    fig.suptitle('Trends Across BABS Locations')
    lm_colors = [cm.viridis(x) for x in np.arange(0,5)/4]

    ax = fig.add_subplot(221)
    ind = np.arange(0,5)
    bar_width = 0.6
    bars = ax.bar(ind+bar_width*0.5, term_stats['Start Terminal Count']/term_stats['population'], width=bar_width, color=lm_colors)
    ax.set_xlim(0, 5)
    ax.set_ylabel('Trips Per Capita')
    ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useOffset=False))
    ax.tick_params(axis='x', which='both', bottom=False, top=False, right=False, left=True, labelbottom=False, labelleft=True, labelright=False)

    ax = fig.add_subplot(223)
    ax.yaxis.grid(b=True, which='major', color='lightgray', linewidth=1.0, linestyle='-')
    ax.scatter(term_stats['per_cap_income'], term_stats['Avg Duration'], c=lm_colors, s=200)
    ax.set_xlabel('Per Capita Income ($1000s)')
    ax.set_ylabel('Avg Trip Duration (min)')
    ax.set_axisbelow(True)

    ax = fig.add_subplot(222)
    ax.yaxis.grid(b=True, which='major', color='lightgray', linewidth=1.0, linestyle='-')
    ax.scatter(term_stats['household_median_income'], term_stats['Subscriber Fraction'], c=lm_colors, s=200)
    ax.set_xlabel('Household Median Income ($1000s)')
    ax.set_ylabel('Subscription Fraction')
    ax.set_axisbelow(True)

    ax = fig.add_subplot(224)
    ind = np.arange(0,5)
    bar_width = 0.6
    bars = ax.bar(ind+bar_width*0.5, term_stats['Elevation Std'], width=bar_width, color=lm_colors)
    ax.set_xlim(0, 5)
    ax.set_ylabel('Station Elevation Std. (m)')
    ax.xaxis.set_major_formatter(ticker.ScalarFormatter(useOffset=False))
    ax.tick_params(axis='x', which='both', bottom=False, top=False, right=False, left=True, labelbottom=False, labelleft=True, labelright=False)

    fig.legend([mpatches.Patch(color=c) for c in lm_colors], term_stats.index.values, loc='upper right')

    fig.set_size_inches(12, 12)
    fig.savefig(os.path.join(os.path.dirname(__file__),'plots','BikeShareTrends.png'), transparent=True)
    plt.show()
    def plot_wing(self):

        n_names = len(self.names)
        self.ax.cla()
        az = self.ax.azim
        el = self.ax.elev
        dist = self.ax.dist

        for j, name in enumerate(self.names):
            mesh0 = self.mesh[self.curr_pos*n_names+j].copy()

            self.ax.set_axis_off()

            if self.show_wing:
                def_mesh0 = self.def_mesh[self.curr_pos*n_names+j]
                x = mesh0[:, :, 0]
                y = mesh0[:, :, 1]
                z = mesh0[:, :, 2]

                try:  # show deformed mesh option may not be available
                    if self.show_def_mesh.get():
                        x_def = def_mesh0[:, :, 0]
                        y_def = def_mesh0[:, :, 1]
                        z_def = def_mesh0[:, :, 2]

                        self.c2.grid(row=0, column=3, padx=5, sticky=Tk.W)
                        if self.ex_def.get():
                            z_def = (z_def - z) * 10 + z_def
                            def_mesh0 = (def_mesh0 - mesh0) * 30 + def_mesh0
                        else:
                            def_mesh0 = (def_mesh0 - mesh0) * 2 + def_mesh0
                        self.ax.plot_wireframe(x_def, y_def, z_def, rstride=1, cstride=1, color='k')
                        self.ax.plot_wireframe(x, y, z, rstride=1, cstride=1, color='k', alpha=.3)
                    else:
                        self.ax.plot_wireframe(x, y, z, rstride=1, cstride=1, color='k')
                        self.c2.grid_forget()
                except Exception:
                    self.ax.plot_wireframe(x, y, z, rstride=1, cstride=1, color='k')

                cg = self.cg[self.curr_pos]
                # self.ax.scatter(cg[0], cg[1], cg[2], s=100, color='r')

            if self.show_tube:
                # Get the array of radii and thickness values for the FEM system
                r0 = self.radius[self.curr_pos*n_names+j]
                t0 = self.thickness[self.curr_pos*n_names+j]

                # Create a normalized array of values for the colormap
                colors = t0
                colors = colors / np.max(colors)

                # Set the number of rectangular patches on the cylinder
                num_circ = 12
                fem_origin = self.fem_origin_dict[name.split('.')[-1] + '_fem_origin']

                # Get the number of spanwise nodal points
                n = mesh0.shape[1]

                # Create an array of angles around a circle
                p = np.linspace(0, 2*np.pi, num_circ)

                # This is just to show the deformed mesh if selected
                if self.show_wing:
                    if self.show_def_mesh.get():
                        mesh0[:, :, 2] = def_mesh0[:, :, 2]

                # Loop through each element in the FEM system
                for i, thick in enumerate(t0):

                    # Get the radii describing the circles at each nodal point
                    r = np.array((r0[i], r0[i]))
                    R, P = np.meshgrid(r, p)

                    # Get the X and Z coordinates for all points around the circle
                    X, Z = R*np.cos(P), R*np.sin(P)

                    # Get the chord and center location for the FEM system
                    chords = mesh0[-1, :, 0] - mesh0[0, :, 0]
                    comp = fem_origin * chords + mesh0[0, :, 0]

                    # Add the location of the element centers to the circle coordinates
                    X[:, 0] += comp[i]
                    X[:, 1] += comp[i+1]
                    Z[:, 0] += fem_origin * (mesh0[-1, i, 2] - mesh0[0, i, 2]) + mesh0[0, i, 2]
                    Z[:, 1] += fem_origin * (mesh0[-1, i+1, 2] - mesh0[0, i+1, 2]) + mesh0[0, i+1, 2]

                    # Get the spanwise locations of the spar points
                    Y = np.empty(X.shape)
                    Y[:] = np.linspace(mesh0[0, i, 1], mesh0[0, i+1, 1], 2)

                    # Set the colors of the rectangular surfaces
                    col = np.zeros(X.shape)
                    col[:] = colors[i]

                    # Plot the rectangular surfaces for each individual FEM element
                    try:
                        self.ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
                            facecolors=cm.viridis(col), linewidth=0)
                    except Exception:
                        self.ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
                            facecolors=cm.coolwarm(col), linewidth=0)

        lim = 0.
        for j in range(n_names):
            ma = np.max(self.mesh[self.curr_pos*n_names+j], axis=(0,1,2))
            if ma > lim:
                lim = ma
        lim /= float(self.zoom_scale)
        self.ax.auto_scale_xyz([-lim, lim], [-lim, lim], [-lim, lim])
        self.ax.set_title("Iteration: {}".format(self.curr_pos))

        # round_to_n = lambda x, n: round(x, -int(np.floor(np.log10(abs(x)))) + (n - 1))
        if self.opt:
            obj_val = self.obj[self.curr_pos]
            self.ax.text2D(.15, .05, self.obj_key + ': {}'.format(obj_val),
                transform=self.ax.transAxes, color='k')

        self.ax.view_init(elev=el, azim=az)  # Reproduce view
        self.ax.dist = dist
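plot_wing colours each spar patch by pushing a normalized thickness array through cm.viridis and passing the result to plot_surface as facecolors. A minimal sketch of that facecolors pattern on a hypothetical Gaussian bump (the surface and grid size are illustrative only):

import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401, registers the 3d projection

X, Y = np.meshgrid(np.linspace(-2, 2, 40), np.linspace(-2, 2, 40))
Z = np.exp(-(X ** 2 + Y ** 2))
norm = (Z - Z.min()) / (Z.max() - Z.min())  # scale values into [0, 1] for the colormap

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
                facecolors=cm.viridis(norm), linewidth=0)
plt.show()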
def plotAttribute(cur, planners, attribute, typename):
    """Create a plot for a particular attribute. It will include data for
    all planners that have data for this attribute."""
    labels = []
    measurements = []
    nanCounts = []
    if typename == 'ENUM':
        cur.execute('SELECT description FROM enums where name IS "%s"' % attribute)
        descriptions = [t[0] for t in cur.fetchall()]
        numValues = len(descriptions)
    for planner in planners:
        cur.execute('SELECT %s FROM runs WHERE plannerid = %s AND %s IS NOT NULL' \
            % (attribute, planner[0], attribute))
        measurement = [t[0] for t in cur.fetchall() if t[0] is not None]
        if measurement:
            cur.execute('SELECT count(*) FROM runs WHERE plannerid = %s AND %s IS NULL' \
                % (planner[0], attribute))
            nanCounts.append(cur.fetchone()[0])
            labels.append(planner[1])
            if typename == 'ENUM':
                scale = 100. / len(measurement)
                measurements.append([measurement.count(i)*scale for i in range(numValues)])
            else:
                measurements.append(measurement)

    if not measurements:
        print('Skipping "%s": no available measurements' % attribute)
        return

    plt.clf()
    ax = plt.gca()
    if typename == 'ENUM':
        width = .5
        measurements = np.transpose(np.vstack(measurements))
        colsum = np.sum(measurements, axis=1)
        rows = np.where(colsum != 0)[0]
        heights = np.zeros((1, measurements.shape[1]))
        ind = range(measurements.shape[1])
        for i in rows:
            plt.bar(ind, measurements[i], width, bottom=heights[0], \
                color=viridis(int(floor(i * 256 / numValues))), \
                label=descriptions[i])
            heights = heights + measurements[i]
        xtickNames = plt.xticks([x+width/2. for x in ind], labels, rotation=30)
        ax.set_ylabel(attribute.replace('_', ' ') + ' (%)')
        box = ax.get_position()
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        props = matplotlib.font_manager.FontProperties()
        props.set_size('small')
        ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop=props)
    elif typename == 'BOOLEAN':
        width = .5
        measurementsPercentage = [sum(m) * 100. / len(m) for m in measurements]
        ind = range(len(measurements))
        plt.bar(ind, measurementsPercentage, width)
        xtickNames = plt.xticks([x + width / 2. for x in ind], labels, rotation=30)
        ax.set_ylabel(attribute.replace('_', ' ') + ' (%)')
    else:
        plt.boxplot(measurements, notch=0, sym='k+', vert=1, whis=1.5, bootstrap=1000)
        ax.set_ylabel(attribute.replace('_', ' '))
        xtickNames = plt.setp(ax, xticklabels=labels)
        plt.setp(xtickNames, rotation=25)
    ax.set_xlabel('Motion planning algorithm')
    ax.yaxis.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5)
    if max(nanCounts) > 0:
        maxy = max([max(y) for y in measurements])
        for i in range(len(labels)):
            x = i + width / 2 if typename == 'BOOLEAN' else i + 1
            ax.text(x, .95*maxy, str(nanCounts[i]), horizontalalignment='center', size='small')
    plt.show()
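plotAttribute colours its stacked ENUM bars by indexing the 256-entry viridis lookup table with an integer, viridis(int(floor(i * 256 / numValues))); calling the colormap with a float in [0, 1] gives roughly the same colour. A minimal sketch with synthetic counts (numValues, ind and the random data are illustrative only):

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.cm import viridis

numValues = 4
ind = np.arange(3)
data = np.random.rand(numValues, len(ind))
heights = np.zeros(len(ind))
for i in range(numValues):
    color = viridis(int(np.floor(i * 256 / numValues)))  # integer index into the LUT
    # roughly equivalent float form: viridis(i / float(numValues))
    plt.bar(ind, data[i], 0.5, bottom=heights, color=color, label='value %d' % i)
    heights = heights + data[i]
plt.legend()
plt.show()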
def run(fi):
    ret = []

    print('Doing file: ' + fi)

    dic = xr.open_dataset(fi)

    outt = dic['tc_lag0'].values
    outp = dic['p'].values
    outpc = dic['pconv'].values

    outplot = outp.copy()

    outt[np.isnan(outt)] = 150
    outt[outt >= -40] = 150
    grad = np.gradient(outt)
    outt[outt == 150] = np.nan
    outp[np.isnan(outt)] = np.nan
    outpc[np.isnan(outt)] = np.nan


    area = np.sum(outt <= -40)
    try:
        bulk_pmax = np.max(outp[(np.isfinite(outp)) & (np.isfinite(outt))])
    except ValueError:
        return ret
    try:
        bulk_pmin = np.min(outp[(np.isfinite(outp)) & (np.isfinite(outt))])
    except ValueError:
        return ret

    if (area * 25 < 15000) or (area * 25 > 800000)  or (bulk_pmax > 200) or (bulk_pmin < 0):
        print(area*25)
        print('throw out')
        return

    perc = np.percentile(outt[np.isfinite(outt)], 60)  # 60
    print('perc:',perc)
    perc=-60

    clat = np.min(dic.lat.values) + ((np.max(dic.lat.values) - np.min(dic.lat.values)) * 0.5)
    clon = np.min(dic.lon.values) + ((np.max(dic.lon.values) - np.min(dic.lon.values)) * 0.5)

    if (clon > 28) or (clon < -17.2) or (clat < 4.1):
        return

    figure = np.zeros_like(outt)

    o2 = outt.copy()
    o2[np.isnan(o2)] = perc
    nok = np.where(abs(grad[0]) > 80)
    d = 2
    i = nok[0]
    j = nok[1]

    for ii, jj in zip(i, j):
        kern = o2[ii - d:ii + d + 1, jj - d:jj + d + 1]
        o2[ii - d:ii + d + 1, jj - d:jj + d + 1] = ndimage.gaussian_filter(kern, 3, mode='nearest')

    wav = util.waveletT(o2, 5)

    arr = np.array(wav['scales'], dtype=str)
    arrf = np.array(wav['scales'], dtype=float)

    scale_ind = range(arr.size)

    yp, xp = np.where(outp > 30)

    figure = np.zeros_like(outt)

    wll = wav['t']
    maxs = np.zeros_like(wll)


    yyy=[]
    xxx=[]
    scal=[]
    for nb in scale_ind[::-1]:

        orig = float(arr[nb])
        print(np.round(orig))

        wl = wll[nb, :, :]
        maxout = (
            wl == ndimage.maximum_filter(wl, (5,5), mode='constant', cval=np.amax(wl) + 1))  # (np.round(orig / 5))

        try:
            yy, xx = np.where((maxout == 1) & (outt <= -40) &
                              (wl >= np.percentile(wl[wl >= 0.5], 90)) &
                              (wl > orig ** .5))
        except IndexError:
            continue

        for y, x in zip(yy, xx):

            ss = orig
            iscale = (np.ceil(ss / 2. / 5.)).astype(int)
            if ss <= 20:
                iscale = iscale+1

            ycirc, xcirc = ua.draw_cut_circle(x, y, iscale, outp)

            figure[ycirc, xcirc] = np.round(orig)
            xxx.append(x)
            yyy.append(y)
            scal.append(orig)

    figure[np.isnan(outt)]=0

    ##file 130!!! nR
    spos = np.where(np.array(scal, dtype=int) == 15)
    figure[figure == 0] = np.nan


    f = plt.figure(figsize = (7,6), dpi=300)
    ax2 = f.add_subplot(111)
    # ax2.autoscale = False
    ttest = outplot.copy()
    ttest = ttest + 1
    ttest[np.isnan(ttest)] = 0
    ax2.contourf(np.arange(wll.shape[2]) ,  np.arange(wll.shape[1]) , outplot, cmap='Blues', zorder=1)

    ax2.imshow(outt, cmap='Greys', vmax=-40, zorder=2)
    mt = ax2.imshow(figure, cmap='OrRd_r', vmax=180, zorder=3)
    ax2.contour(np.arange(wll.shape[2]), np.arange(wll.shape[1]), ttest, cmap='Blues', levels=[-0.5, 0.5], zorder=4)
    plt.plot(np.array(xxx)[spos], np.array(yyy)[spos], 'wo', markersize=3, label='Wavelet power maximum', zorder=5)

    ax2.invert_yaxis()
    ax2.set_xlim(20, 140)
    ax2.set_ylim(20, 140)
    ax2.set_xticklabels(np.arange(100, 800, 100)-100)
    ax2.set_yticklabels(np.arange(100, 800, 100)-100)
    ax2.plot(xp , yp , 'o', markersize=3, label='Rain > 30mm h$^{-1}$', zorder=10)
    ax2.set_xlabel('Location (km)')
    ax2.set_ylabel('Location (km)')
    plt.colorbar(mt, label = 'Scale (km)')

    plt.tight_layout()
    plt.show()
    spath = '/users/global/cornkle/C_paper/wavelet/figs/paper/'
    plt.savefig(spath + '/method_circles2.png', dpi=300)

    f = plt.figure(figsize = (7,6), dpi=300)
    ax2 = f.add_subplot(111)
    # ax2.autoscale = False
    ttest = outplot.copy()
    ttest = ttest + 1
    ttest[np.isnan(ttest)] = 0
    ax2.imshow(outt, cmap='viridis', vmax=-40, zorder=2)

    ax2.invert_yaxis()
    ax2.set_xlim(20, 140)
    ax2.set_ylim(20, 140)
    ax2.set_xticklabels(np.arange(100, 800, 100)-100)
    ax2.set_yticklabels(np.arange(100, 800, 100)-100)
    ax2.plot(xp , yp , 'o', markersize=3, label='Rain > 30mm h$^{-1}$', zorder=10)
    ax2.set_xlabel('Location (km)')
    ax2.set_ylabel('Location (km)')

    plt.tight_layout()
    plt.show()
    spath = '/users/global/cornkle/C_paper/wavelet/figs/paper/'
    plt.savefig(spath + '/method_temp.png', dpi=300)


    #bla = wcno.file_loop(files[130])
    f = plt.figure(figsize = (6.5,11), dpi=300)

    gridspec.GridSpec(4,1)
    posi = 116
    ax1 = plt.subplot2grid((4,1),(0,0),rowspan=2)
    ax2 = plt.subplot2grid((4,1),(2,0))
    ax3 = plt.subplot2grid((4,1),(3,0))

    lev = np.arange(-90,-39,4)

    ax1.contourf(np.arange(wll.shape[2]) * 5, np.arange(wll.shape[1]) * 5, outplot, cmap='Blues')
    mt = ax1.contourf(np.arange(wll.shape[2])*5,np.arange(wll.shape[1])*5,outt, cmap='Greys', vmax=-40, levels = lev)
    ax1.plot(np.arange(wll.shape[2])*5, [posi*5]*len(np.arange(wll.shape[2])*5), linestyle = '--', linewidth=2, color = 'black')

    ttest = outplot.copy()
    ttest = ttest+1
    ttest[np.isnan(ttest)] = 0
    ax1.contour(np.arange(wll.shape[2]) * 5, np.arange(wll.shape[1]) * 5, ttest, cmap='Blues' , levels=[-0.5,0.5])

    ax1.invert_yaxis()
    ax1.set_xlim(100,700)
    ax1.set_ylim(100, 700)

    ax1.set_xticklabels(np.arange(100, 800, 100) - 100)
    ax1.set_yticklabels(np.arange(100, 800, 100) - 100)

    ax1.plot(xp*5, yp*5, 'o', markersize=3, label='Rain > 30mm h$^{-1}$')
    ax1.legend(loc=4)
    ax1.set_ylabel('Location (km)')
    ax1.set_title(str(dic['time.year'].values)+'-'+str(dic['time.month'].values)+'-'+str(dic['time.day'].values)+' '+str(dic['time.hour'].values)+':'+str(dic['time.minute'].values)+'UTC')

    colors = cm.viridis(np.linspace(0, 1, len([0,1, 2,5,10,20,40,60,80])))

    ax2.plot(np.arange(wll.shape[2])*5, outt[posi,:], color='r')  #118
    ax2.set_xlim(100, 700)
    ax2.set_xticklabels(np.arange(100, 800, 100) - 100)


    ax2.set_ylabel(r'Cloud-top temperature ($^{\circ}$C)')
    ax22 = ax2.twinx()
    ax22.set_xlim(100, 700)
    ax22.plot(np.arange(wll.shape[2])*5, outp[posi,:])
    ax22.set_ylabel('Rain (mm h$^{-1}$)')
    print(np.nanmax(wll[:,posi,:]))

    mp = ax3.contourf(np.arange(wll.shape[2])*5, arr,wll[:,posi,:], levels=[0,1, 2,5,10,20,40,80,100], colors=colors)
    maxs = np.mean(maxs[:,posi-1:posi+2, :], 1) # -1, +2

    ppos = np.where(maxs)

    for p1, p2 in zip(ppos[1], ppos[0]):
        ax3.errorbar((np.arange(wll.shape[2])*5)[p1], arrf[p2], xerr=arrf[p2]/2, fmt='o', ecolor='white', color='white', capthick=3, ms=3, elinewidth=0.7)
    ax3.set_xlim(100,700)
    ax3.set_xticklabels(np.arange(100, 800, 100) - 100)

    ax3.set_ylim(15, 180)
    ax3.set_xlabel('Location (km)')
    ax3.set_ylabel('Length scale (km)')

    plt.tight_layout()

    f.subplots_adjust(right=0.86)

    cax = f.add_axes([0.87, 0.545, 0.025, 0.415])
    cb = plt.colorbar(mt, cax=cax, label=r'Cloud-top temperature ($^{\circ}$C)')
    cb.ax.tick_params(labelsize=12)

    cax = f.add_axes([0.87, 0.065, 0.025, 0.175])
    cb = plt.colorbar(mp, cax=cax, label='Wavelet power')
    cb.ax.tick_params(labelsize=12)

    fsiz = 14
    x = 0.02
    plt.annotate('a)', xy=(x, 0.96), xytext=(0, 4), size=fsiz, xycoords=('figure fraction', 'figure fraction'),
                 textcoords='offset points')
    plt.annotate('b)', xy=(x, 0.51), xytext=(0, 4), size=fsiz, xycoords=('figure fraction', 'figure fraction'),
                 textcoords='offset points')
    plt.annotate('c)', xy=(x, 0.245), xytext=(0, 4), size=fsiz, xycoords=('figure fraction', 'figure fraction'),
                 textcoords='offset points')

    plt.show()
    spath = '/users/global/cornkle/C_paper/wavelet/figs/paper/'
    plt.savefig(spath+'/method2.png', dpi=300)

    dic.close()

    plt.close('all')
Exemple #54
0
#fig_size = (16, 9)
fig_size = (4,3)
fig = plt.figure(num=None, figsize=fig_size, dpi=200, facecolor='w', edgecolor='k')

with open('acc.json') as data_file:    
    data = json.load(data_file)

df_acc = pd.DataFrame(data["Accelerator"])
df_lum = pd.DataFrame(data["Luminosity"])
df_acc['logE'] = df_acc["Energy_MeV"].apply(np.log)
df_lum['logLum'] = df_lum["Lum_per_cm2s"].apply(np.log)
types_acc = np.hstack(np.array(df_acc['Type']))
types_lum = np.hstack(np.array(df_lum['Type']))

uniq_acc = np.unique(types_acc)
values = cm.viridis(np.linspace(0,1,len(uniq_acc)))
col = dict(zip(uniq_acc, values))

for acc in uniq_acc:
    temp = df_acc[df_acc['Type'] == acc]
    year = np.hstack(np.array(temp['Year']))
    logE = np.hstack(np.array(temp['logE']))
    plt.scatter(year, logE, alpha=1, color=col[acc], label=r'%s' % acc)

plt.xlabel('Year')
plt.ylabel(r'Energy $\mathrm{log(MeV)}$')
plt.title('Livingston Plot for Energy')
plt.legend(loc=4)
plt.savefig("acc_logE.png")

fig = plt.figure(num=None, figsize=fig_size, dpi=200, facecolor='w', edgecolor='k')
Exemple #55
0
import pandas as pd
from pandas.plotting import scatter_matrix
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.cm as cm
import numpy as np

#data = pd.read_csv('events/evnt_1.csv', index_col=0)
data = pd.read_csv('truth.csv', index_col=0)
data = data.apply(pd.to_numeric, errors='coerce')

fig = plt.figure(figsize=(16,9))
ax = fig.add_subplot(111, projection='3d')

mothers = np.hstack(np.array(data['mother']))
uniq_mom = np.unique(mothers)
values = cm.viridis(np.linspace(0,1,len(uniq_mom)))
#values = cm.Paired(np.linspace(0,1,len(uniq_mom)))
col = dict(zip(uniq_mom, values))


for mom in uniq_mom:
    temp = data[data['mother'] == mom]
    xs = np.hstack(np.array(temp['X']))
    ys = np.hstack(np.array(temp['Y']))
    zs = np.hstack(np.array(temp['Z']))
    ax.scatter(xs, ys, zs,alpha=1,color=col[mom])

plt.show()
Exemple #56
0
'''
program: discrete_colorbar.py
author: tc
created: 2016-06-13 -- 18 CEST
'''

import numpy
import matplotlib.pyplot as plt
import matplotlib.cm as cm

N = 5
colors = iter([cm.viridis(x) for x in numpy.linspace(0.0, 0.9, N)])
x = numpy.linspace(0, 10)
for i in range(N):
    plt.plot(x, x / 2.0 + i, lw=3, c=next(colors))
plt.show()
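The snippet above builds N discrete viridis colours by sampling the colormap at evenly spaced floats. An alternative sketch, assuming a matplotlib version whose plt.get_cmap accepts a lut size, resamples viridis into an N-entry colormap and indexes it directly:

import numpy
import matplotlib.pyplot as plt

N = 5
cmap = plt.get_cmap('viridis', N)  # viridis resampled to N discrete entries
x = numpy.linspace(0, 10)
for i in range(N):
    plt.plot(x, x / 2.0 + i, lw=3, c=cmap(i))  # integer index picks one of the N colours
plt.show()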
Exemple #57
0
def fit_scale(path, fixed_a, fixed_loc, save_image=False):

    matplotlib.rcParams['savefig.dpi'] = 300

    def modified_gamma_2(x, scale):
        a = fixed_a
        loc = fixed_loc
        return gamma.pdf(x, a=a, loc=loc, scale=scale)

    betas = []
    scale = []

    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))
        ax.plot(Ls, M_ave, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))
        popt = curve_fit(modified_gamma_2, xdata=Ls, ydata=M_ave, p0=[10.])[0]
        # print beta, popt
        betas.append(beta)
        scale.append(popt[0])

        x = np.linspace(0, max(Ls), num=5*max(Ls))
        ax.plot(x, modified_gamma_2(x, scale=popt[0]),
                '-',
                # label=r'fitted $\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))

        ## critical point
        # critcal_point = 2. * popt[0]  # x = (a - 1) * scale
        # ax.plot([critcal_point] * 2, [0., 0.05], '-',
        #         color=cm.viridis(float(i) / len(path)))
    show_plot1(ax, num_of_strings)

    if save_image:
        result_image_path = "../results/img/diecutting/fitted_gamma_fixed_a_x0"
        result_image_path += "_" + time.strftime("%y%m%d_%H%M%S")
        pdf = PdfPages(result_image_path + ".pdf")
        plt.savefig(result_image_path + ".png")
        pdf.savefig()
        pdf.close()
        plt.close()
        print "[saved] " + result_image_path
    else:
        plt.show()
        plt.close()

    betas = np.array(betas)
    scale = np.array(scale)

    # beta_theta = lambda x, a, b: a*x + b
    beta_theta = lambda x, a, b: a*np.log(x) + b

    fig, ax = plt.subplots()
    ax.set_title(r'Fitting parameter')
    ax.plot(betas, scale, 'o')
    popt = curve_fit(beta_theta, xdata=betas, ydata=scale, p0=[15., 0.])[0]
    x = np.linspace(min(betas), max(betas))
    # ax.plot(x, beta_theta(x, popt[0], popt[1]), '-', color='k',
    #         label=r'$\theta = {} \beta + {}$'.format(*popt),
    #         )
    ax.plot(x, beta_theta(x, popt[0], popt[1]), '-', color='k',
            label=r'$\theta = {} \log \beta + {}$'.format(*popt),
            )
    ax.legend(loc='best')
    ax.set_xlim((0, max(betas)))
    ax.set_ylim((0, ax.get_ylim()[1]))
    ax.set_xlabel(r'$\beta$')
    ax.set_ylabel(r'Scale parameter: $\theta$')

    if save_image:
        result_image_path = "../results/img/diecutting/fitted_parameters_fixed_a_x0"
        result_image_path += "_" + time.strftime("%y%m%d_%H%M%S")
        pdf = PdfPages(result_image_path + ".pdf")
        plt.savefig(result_image_path + ".png")
        pdf.savefig()
        pdf.close()
        plt.close()
        print "[saved] " + result_image_path
    else:
        plt.show()
        plt.close()

    plt.show()
Exemple #58
0
def fit_fermi(path, save_image=False):

    matplotlib.rcParams['savefig.dpi'] = 300

    def fitting_func(x, theta):
        return 0.5 * ((x ** 2.) / ((theta ** 3.) * (np.exp(x / theta) - 1.)))

    betas = []
    scale = []

    fig, ax = plt.subplots()
    for i, result_data_path in enumerate(path):
        globals().update(load_data(result_data_path))
        ax.plot(Ls, M_ave, '.', label=r'$\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))
        popt = curve_fit(fitting_func, xdata=Ls, ydata=M_ave, p0=[10.,])[0]
        # print beta, popt
        betas.append(beta)
        scale.append(popt[0])

        x = np.linspace(0, max(Ls), num=5*max(Ls))
        ax.plot(x, fitting_func(x, theta=popt[0]),
                '-',
                # label=r'fitted $\beta = %2.2f$' % beta,
                color=cm.viridis(float(i) / len(path)))

        ## critical point
        # critcal_point = 2. * popt[0]  # x = (a - 1) * scale
        # ax.plot([critcal_point] * 2, [0., 0.05], '-',
        #         color=cm.viridis(float(i) / len(path)))
    show_plot1(ax, num_of_strings)

    if save_image:
        result_image_path = "../results/img/diecutting/fitted_gamma_fixed_a_x0"
        result_image_path += "_" + time.strftime("%y%m%d_%H%M%S")
        pdf = PdfPages(result_image_path + ".pdf")
        plt.savefig(result_image_path + ".png")
        pdf.savefig()
        pdf.close()
        plt.close()
        print "[saved] " + result_image_path
    else:
        plt.show()
        plt.close()

    betas = np.array(betas)
    scale = np.array(scale)

    fig, ax = plt.subplots()
    ax.set_title(r'Fitting parameter')
    ax.plot(betas, scale, 'o')
    ax.set_xlabel(r'$\beta$')
    ax.set_xlim((0, max(betas)))
    ax.set_ylabel(r'$\theta$')

    if save_image:
        result_image_path = "../results/img/diecutting/fitted_parameters_fixed_a_x0"
        result_image_path += "_" + time.strftime("%y%m%d_%H%M%S")
        pdf = PdfPages(result_image_path + ".pdf")
        plt.savefig(result_image_path + ".png")
        pdf.savefig()
        pdf.close()
        plt.close()
        print "[saved] " + result_image_path
    else:
        plt.show()
        plt.close()

    plt.show()
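Both fit_scale and fit_fermi reuse the same cm.viridis(float(i) / len(path)) colour for a dataset's points and its fitted curve so the pairs read together. A minimal sketch of that pairing with synthetic data and a hypothetical linear model (model, the slopes and the noise are illustrative only):

import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from scipy.optimize import curve_fit

def model(x, a):
    return a * x

x = np.linspace(0, 10, 20)
datasets = [x * s + 0.5 * np.random.randn(x.size) for s in (0.5, 1.0, 2.0)]

fig, ax = plt.subplots()
for i, y in enumerate(datasets):
    color = cm.viridis(float(i) / len(datasets))
    popt, _ = curve_fit(model, xdata=x, ydata=y)
    ax.plot(x, y, '.', color=color, label='dataset %d' % i)   # data points
    ax.plot(x, model(x, *popt), '-', color=color)              # fitted curve in the same colour
ax.legend(loc='best')
plt.show()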