Code Example #1
def plot_stats_pctl(x, stat, pctl, xlabel, ylabel, ftitle, fname):
    mean = stat["mean"]
    std  = stat["std"]
    p10 = pctl['p10']
    p90 = pctl['p90']

    plt.switch_backend('agg')
    fig = plt.figure(figsize=(12,9))

    ax1 = fig.add_subplot(111)
    ax1.plot(x, mean, 'g-', alpha=0.75, label='Mean')
    ax1.plot(x, p10, 'b-', alpha=0.25)
    ax1.plot(x, p90, 'b-', alpha=0.25)
    ax1.fill_between(x, p10, p90, alpha=0.25, label='90% prediction interval')
    ax1.set_xlabel(xlabel)
    ax1.set_ylabel(ylabel, color='b')
    ax1.tick_params('y', colors='b')
    ax1.grid()
    ax1.legend()

    ax2 = ax1.twinx()
    ax2.plot(x, std, 'r-', alpha=0.5)
    ax2.set_ylabel('Standard deviation', color='r')
    ax2.tick_params('y', colors='r')
    ax2 = format_exponent(ax2, axis='y')

    plt.title(ftitle)
    fig.savefig(fname)
    plt.close(fig)
Code Example #2
def plot_stats(x, stat, xlabel, ylabel, ftitle, fname):
    mean = np.array(stat["mean"])
    var  = stat["var"]
    std = np.array(stat['std'])

    plt.switch_backend('agg')
    fig = plt.figure(figsize=(12,9))

    ax1 = fig.add_subplot(111)
    ax1.plot(x, mean, 'g-', alpha=0.75, label='Mean')
    ax1.plot(x, mean-std, 'b-', alpha=0.25)
    ax1.plot(x, mean+std, 'b-', alpha=0.25)
    ax1.fill_between(x, mean-std, mean+std, alpha=0.25, label=r'Mean $\pm$ deviation')
    ax1.set_xlabel(xlabel)
    ax1.set_ylabel(ylabel, color='b')
    ax1.tick_params('y', colors='b')
    ax1.grid()
    ax1.legend()

    ax2 = ax1.twinx()
    ax2.plot(x, var, 'r-', alpha=0.5)
    ax2.set_ylabel('Variance', color='r')
    ax2.tick_params('y', colors='r')
    ax2 = format_exponent(ax2, axis='y')

    plt.title(ftitle)
    fig.savefig(fname)
    plt.close(fig)
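Both functions above call an external format_exponent() helper that never appears in this listing, so a minimal stand-in is sketched here together with a synthetic driver for plot_stats (the stand-in, the data, and the file name are all made up for illustration):

import numpy as np
import matplotlib.pylab as plt

def format_exponent(ax, axis='y'):
    # minimal stand-in for the missing helper: render ticks in scientific notation
    ax.ticklabel_format(axis=axis, style='sci', scilimits=(-2, 2))
    return ax

x = np.linspace(0.0, 1.0, 100)
std = np.sqrt(0.01 + 0.1 * x ** 2)
stat = {"mean": np.sin(2 * np.pi * x), "std": std, "var": std ** 2}
plot_stats(x, stat, "x", "y", "synthetic demo", "demo.png")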
Code Example #3
def plot_mesh(mesh, key, plot_title, color, *args, **kwargs):
    import matplotlib as mtpl
    mtpl.use('pdf')
    from matplotlib.backends.backend_pdf import PdfPages
    import matplotlib.pylab as plt
    plt.switch_backend('pdf')
    import seaborn as sns
    import numpy as np
    import os

    print('plotting {0} - {1}'.format(key, plot_title))
    path = myfs.setup_output_directory(kwargs.get('expt_dir', ''),
                                       kwargs.get('dir_path_full', ''), *args)
    plt.figure(figsize=(16, 12))
    sns.set("paper", "whitegrid", "dark", font_scale=1.5)
    fmt = 'f' if mesh.dtype == np.float64 else 'd'
    h = sns.heatmap(mesh, annot=True, fmt=fmt, cmap=color)
    h.set(xlabel="Mesh map X")
    h.set(ylabel="Mesh map Y")
    h.set(title=plot_title + " heat map - " + key)
    h_fig = h.get_figure()
    pp = PdfPages(
        os.path.join(path, plot_title + key + kwargs.get('name', '') + '.pdf'))
    h_fig.savefig(pp, format='pdf')
    plt.close()
    pp.close()
Code Example #4
def gromov_wasserstein_distance_TSNE_test(data_path, num_labels, num_clusters,
                                          result_path):
    import numpy as np
    import scipy as sp
    import scipy.spatial.distance  # ensure sp.spatial.distance is importable
    import matplotlib.pylab as pl
    pl.switch_backend('agg')  # needed when executed on a headless server
    import ot
    d_t = np.load(data_path + config.statistic_name4d_t, allow_pickle=True)
    d_s = np.load(data_path + config.statistic_name4d_s, allow_pickle=True)
    # Compute distance kernels, normalize them and then display
    xs = d_s.item().get('0')
    xt = d_t.item().get('0')
    print(xt.shape)
    print(xs.shape)
    n_samples = min(100, xs.shape[0], xt.shape[0])
    xs = xs[:n_samples]
    xt = xt[:n_samples]
    C1 = sp.spatial.distance.cdist(xs, xs)
    C2 = sp.spatial.distance.cdist(xt, xt)
    C1 /= C1.max()
    C2 /= C2.max()

    p = ot.unif(n_samples)
    q = ot.unif(n_samples)

    gw0, log0 = ot.gromov.gromov_wasserstein(C1,
                                             C2,
                                             p,
                                             q,
                                             'square_loss',
                                             verbose=True,
                                             log=True)

    gw, log = ot.gromov.entropic_gromov_wasserstein(C1,
                                                    C2,
                                                    p,
                                                    q,
                                                    'square_loss',
                                                    epsilon=5e-4,
                                                    log=True,
                                                    verbose=True)

    print('Gromov-Wasserstein distances: ' + str(log0['gw_dist']))
    print('Entropic Gromov-Wasserstein distances: ' + str(log['gw_dist']))
    pl.figure(1, (10, 5))
    pl.subplot(1, 2, 1)
    pl.imshow(gw0, cmap='jet')
    pl.title('Gromov Wasserstein')
    pl.subplot(1, 2, 2)
    pl.imshow(gw, cmap='jet')
    pl.title('Entropic Gromov Wasserstein')
    pl.savefig(result_path + "/WD_TSNE.jpg")
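The same POT calls on a small self-contained input, in case the data files above are unavailable; the sizes and seed below are made up for illustration:

import numpy as np
import ot
from scipy.spatial.distance import cdist

rng = np.random.default_rng(0)
xs = rng.normal(size=(30, 2))          # source samples
xt = rng.normal(size=(30, 3))          # target samples (dimensions may differ)
C1 = cdist(xs, xs)                     # intra-domain distance matrices
C2 = cdist(xt, xt)
C1 /= C1.max()
C2 /= C2.max()
p = ot.unif(30)                        # uniform marginals
q = ot.unif(30)
gw0, log0 = ot.gromov.gromov_wasserstein(C1, C2, p, q, 'square_loss', log=True)
print(log0['gw_dist'])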
Code Example #5
def plot_sobols_all(x, sobols, params, ftitle, fname):
    plt.switch_backend('agg')
    npar = len(params)

    fig = plt.figure(figsize=(12,9))
    ax = fig.add_subplot(111)

    for i in range(npar):
        s = sobols[params[i]]
        ax.plot(x, s, label=params[i])

    ax.set_xlabel(r'$\rho_{tor} ~ [m]$')
    ax.set_ylabel('Sobol index')

    ax.set_title(ftitle)
    plt.legend()
    fig.savefig(fname)
    plt.close(fig)
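A hypothetical driver for plot_sobols_all (the parameter names and index curves are invented); the same dict shape also feeds plot_sobols in the next example:

import numpy as np
import matplotlib.pylab as plt

x = np.linspace(0.0, 1.0, 50)
params = ['D1', 'D2', 'chi']
sobols = {p: np.abs(np.sin((i + 1) * np.pi * x)) / 3.0
          for i, p in enumerate(params)}
plot_sobols_all(x, sobols, params, 'First-order Sobol indices', 'sobols_all.png')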
Code Example #6
def plot_sobols(x, sobols, params, ftitle, fname):
    plt.switch_backend('agg')
    npar = len(params)

    if npar == 2:
        fig = plt.figure(figsize=(12,9))
        ax = fig.add_subplot(111)
        s = sobols[params[0]]
        ax.plot(x, s, label=params[0])
        s = sobols[params[1]]
        ax.plot(x, s, label=params[1])
        ax.legend()
        ax.grid()
        ax.set_title(ftitle)
        fig.savefig(fname)
        plt.close(fig)

    elif npar == 4:
        fig, axs = plt.subplots(nrows=2, ncols=2, sharex=True, sharey=True)
        for i in range(npar):
            ax = axs[i//2, i%2]
            s = sobols[params[i]]
            ax.plot(x, s)
            ax.grid()
            ax.set_title(params[i])
        fig.suptitle(ftitle)
        fig.savefig(fname)
        plt.close(fig)

    elif npar == 6:
        fig, axs = plt.subplots(nrows=2, ncols=3, sharex=True, sharey=True)
        for i in range(npar):
            ax = axs[i//3, i%3]
            s = sobols[params[i]]
            ax.plot(x, s)
            ax.grid()
            ax.set_title(params[i])

        fig.suptitle(ftitle)
        fig.savefig(fname)
        plt.close(fig)
Code Example #7
File: maps.py  Project: dssg/tuscany-tourism-public
    def __init__(self, gdf_map, path_to_centroids=''):
        """
        Initialize a TrajectoryClustermap objects

        Parameters:
            gdf_map: GeoDataFrame to be plotted (typically the municipalities
                     for all Italy)
            path_to_centroids: str, path to a file containing the controids of
                               each municipality
            """

        plt.switch_backend('agg')
        self.fig = plt.figure(figsize=(13, 15))
        self.ax = self.fig.add_subplot(1, 1, 1)
        self.fontsize = 20

        self.city_markersize = 6
        self.city_marker = 'o'
        self.city_markercolor = 'k'

        self.map = gdf_map
        self.df_centroids = pd.read_csv(path_to_centroids)
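Hypothetical instantiation of the class (file names are placeholders; geopandas is assumed for loading the GeoDataFrame, and pandas must be importable as pd for the constructor's read_csv call):

import geopandas as gpd
import pandas as pd

gdf = gpd.read_file('municipalities.geojson')   # placeholder path
tcm = TrajectoryClustermap(gdf, path_to_centroids='centroids.csv')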
Code Example #8
def plot_passage(df, plot_title, path, key, **kwargs):
    import os
    import numpy as np
    import matplotlib as mtpl
    mtpl.use('pdf')
    from matplotlib.backends.backend_pdf import PdfPages
    import matplotlib.pylab as plt
    plt.switch_backend('pdf')
    import seaborn as sns
    print('plotting {}'.format(plot_title))
    phi, t = get_cgs_camera_positions()
    x, y = np.meshgrid(phi, t)
    columns = [
        "longitude", "latitude", "sum_refine", "sum_match", "mean_refine",
        "mean_match", "mean_ratio", "mesh_num"
    ]

    def plot_data(title, data):
        plt.figure(figsize=(16, 12))
        sns.set("paper", "whitegrid", "dark", font_scale=1.5)
        pp = PdfPages(
            os.path.join(path,
                         plot_title + key + kwargs.get('name', '') + '.pdf'))
Code Example #10
    def plot_reputation_networks_fs(network, filename='output/final_state.png', dpi=80, width=15, height=15, alpha=1.,
                                    colors_dict=None, sfdp_C=2, sfdp_p=6, node_shrink_fac=0.25, font_size=28,
                                    binary_decision=None):
        # calc values
        color_converter = ColorConverter()
        agents_pmap = network.vp['agents']
        if colors_dict is None:
            colors_dict = defaultdict(lambda: color_converter.to_rgba('blue'))   # covers cases with #words > 3
            colors_dict[1] = color_converter.to_rgba('green')   # one word
            colors_dict[2] = color_converter.to_rgba('blue')  # two words
            colors_dict[3] = color_converter.to_rgba('red')    # three and more words
        for key, val in colors_dict.items():
            if isinstance(val, str):
                val = color_converter.to_rgba(val)
            colors_dict[key] = val[:3] + tuple([alpha])

        colors_pmap = network.new_vertex_property('vector<double>')
        shape_pmap = network.new_vertex_property('int')
        used_keys = set()
        for v in network.vertices():
            agent_size = len(agents_pmap[v])
            if binary_decision is not None:
                agent_size = 1 if agent_size > 1 else 0
            used_keys.add(agent_size)
            colors_pmap[v] = colors_dict[agent_size]
            shape_pmap[v] = agent_size

        colors_dict = {key: val for key, val in colors_dict.items() if key in used_keys}

        # calc node-size
        res = (dpi * height, dpi * width)
        tmp_output_size = min(res) * 0.7
        num_nodes = network.num_vertices()
        if num_nodes < 10:
            num_nodes = 10
        max_vertex_size = np.sqrt((np.pi * tmp_output_size ** 2) / num_nodes)
        if max_vertex_size < 1:
            max_vertex_size = 1
        min_vertex_size = max(max_vertex_size * node_shrink_fac, 1)
        deg_pmap = network.degree_property_map('total')
        vertex_sizes = prop_to_size(deg_pmap, mi=min_vertex_size, ma=max_vertex_size, power=1)

        # plot
        plt.close('all')
        plt.switch_backend('cairo')
        f, ax = plt.subplots(figsize=(width, height))
        tmp = ["o", "^", "s", "p", "h", "H", "8", "double_circle", "double_triangle", "double_square",
               "double_pentagon", "double_hexagon", "double_heptagon", "double_octagon"]

        if binary_decision:
            gt_shape_to_plt_shape = {idx: val for idx, val in enumerate(tmp)}
        else:
            gt_shape_to_plt_shape = {idx+1: val for idx, val in enumerate(tmp)}

        for key, val in sorted(colors_dict.items(), key=lambda x: x[0]):
            label = str(key) + ' ' + ('word' if key == 1 else 'words')
            if binary_decision is not None:
                label = binary_decision[key]
            ax.plot([], [], label=label, color=val, ms=font_size, marker=gt_shape_to_plt_shape[key], lw=10, alpha=alpha, ls='')

        pos = sfdp_layout(network, C=sfdp_C, p=sfdp_p)
        graph_draw(network, pos=pos, vertex_shape=shape_pmap, edge_color=[0.179, 0.203, 0.210, alpha / 6], vertex_size=vertex_sizes, output_size=res, mplfig=ax, vertex_fill_color=colors_pmap)  # , bg_color=color_converter.to_rgba('white'))
        plt.legend(loc='upper right', prop={'size': font_size})
        plt.axis('off')
        plt.savefig(filename, bbox_inches='tight', dpi=dpi)
        plt.close('all')
        plt.switch_backend('Agg')
Code Example #11
import numpy as np
import matplotlib.pylab as plt

import sys

#import pickle
import uproot

plt.switch_backend("Agg")

import plotting_tools as pt

################################################################################

# RUN OVER FILES IN THE PID_assignment subdirectory

# Variables of interest
pars = pt.get_variable_parameters_for_plotting()

voi = ['ncharged', 'pp', 'ep', 'mup']

values = {}
for v in voi:
    values[v] = []

infilenames = sys.argv[1:]

sptag = pt.get_sptag(infilenames[0])

print(sptag)
Code Example #12
File: server.py  Project: Debesys/LASIF
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import flask
from flask_cache import Cache  # the "flask.ext.*" import style was removed in Flask 1.0

import matplotlib.pylab as plt
from matplotlib.colors import hex2color
plt.switch_backend("agg")

import collections
import copy
import geojson
import json
from obspy.imaging.mopad_wrapper import Beach
import io
import inspect
import numpy as np
import os

import lasif


WEBSERVER_DIRECTORY = os.path.dirname(os.path.abspath(inspect.getfile(
    inspect.currentframe())))
STATIC_DIRECTORY = os.path.join(WEBSERVER_DIRECTORY, "static")

app = flask.Flask("LASIF Webinterface", static_folder=STATIC_DIRECTORY)
cache = Cache()


def make_cache_key(*args, **kwargs):
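The body of make_cache_key is truncated above. A generic shape for such a Flask cache-key helper (a sketch only, not necessarily LASIF's implementation):

def make_cache_key_sketch(*args, **kwargs):
    # key the cached view on the full request URL, query string included
    return flask.request.url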
Code Example #13
def freqtor(yr, mo, dy, hr, mn, sc, duration, ndays, datdir, freq1, freq2,
            thresholdv, deltaf, masktimes, madtimes, time_thres,
            distance_thres):

    #control plot behavior

    import matplotlib.pylab as plt
    plt.switch_backend("nbagg")
    plt.style.use('ggplot')
    plt.rcParams['figure.figsize'] = 18, 12  #width,then height
    plt.rcParams['savefig.dpi'] = 80
    from obspy import UTCDateTime
    import numpy as np
    import matplotlib.dates as mdates
    import matplotlib.tri as tri
    from obspy.signal.trigger import recursive_sta_lta as recSTALTA
    from obspy.signal.trigger import trigger_onset as triggerOnset
    import copy, os, bisect, scipy, datetime, itertools
    import pandas as pd
    #suppress the chained assignment warning
    pd.options.mode.chained_assignment = None
    from mpl_toolkits.basemap import Basemap
    from obspy.taup import TauPyModel as TauP
    model = TauP(model="iasp91")
    from obspy.geodetics import locations2degrees as loc2d
    import Util as Ut
    import geopy.distance as pydist  # note: vincenty() requires geopy < 2.0
    from obspy.core import read
    #############################
    homedir = ''
    wb = 5  #which basin # are we working on for station list import
    maketemplates = 1
    tlength = 7200  #nsamples on either side of detection time for template
    counter = datetime.date(int(yr), int(mo), int(dy)).timetuple().tm_yday
    edgebuffer = 00
    duration = duration + edgebuffer
    #ndays= 2 #however many days you want to generate images for
    dayat = int(dy)
    #set parameter values; k = area threshold for detections:
    #thresholdv= 2.0
    #deltaf = 250.0
    nseconds = 7200
    npts = int(deltaf * (nseconds + edgebuffer))
    fftsize = 256
    overlap = 4
    hop = fftsize // overlap
    w = np.hanning(fftsize + 1)[:-1]  # scipy.hanning was removed from SciPy; np.hanning is equivalent
    #delta=250.0

    if duration == 86400:
        im = 12
    elif duration == 7200:
        im = 1
    else:
        im = 1

    #parse the datetime
    counter_3char = str(counter).zfill(3)
    datest = yr + str('-') + mo + str('-') + str(dayat) + str('T') + hr + str(
        ':') + mn + str('.') + sc
    tt = UTCDateTime(datest)

    #####################################################################
    # Now start making the detections, in 2 hour data chunks, 1 day at a time
    print(os.getcwd())
    for days in range(ndays):
        plt.close('all')
        print(str(tt))
        sacyear = str(tt.date.year)
        sacmonth = str(tt.date.month)
        sacday = str(tt.date.day)
        if len(sacmonth) == 1:
            sacmonth = str(0) + sacmonth
        if len(sacday) == 1:
            sacday = str(0) + sacday
        sacname = str(sacyear) + str(sacmonth) + str(sacday)
        sacdir = datdir + sacyear + sacmonth + sacday + '/' + sacname + '*.sac'
        #############################

        s = homedir + 'basin%s/' % wb + yr + str('_') + counter_3char
        if not os.path.exists(s):
            os.makedirs(s)
        sz = read(sacdir)
        sz.sort()
        sz.detrend()
        sz.trim(starttime=tt,
                endtime=tt + duration,
                pad=True,
                fill_value=0,
                nearest_sample=False)
        sz.filter('highpass', freq=1.0)

        alltimes = Ut.gettvals(sz[0], sz[1], sz[2])
        #############################
        #########################
        #%%
        nptsf = edgebuffer * deltaf
        blockette = 0
        d = {
            'Contributor': 'NA',
            'Latitude': 'NA',
            'Longitude': 'NA',
            'S1': 'NA',
            'S1time': 'NA',
            'Magnitude': -999.00,
            'mag_error': -999.00,
            'cent_er': -999.00,
            'Confidence': 0,
            'S2': 'NA',
            'S3': 'NA',
            'S4': 'NA',
            'S5': 'NA',
            'S2time': 'NA',
            'S3time': 'NA',
            'S4time': 'NA',
            'S5time': 'NA',
            'Type': 'Event'
        }
        index = [0]
        df1 = pd.DataFrame(data=d, index=index)
        stations, latitudes, longitudes, distances = [], [], [], []
        snames, latitudes, longitudes = [], [], []
        for i in range(len(sz)):
            snames.append(str(sz[i].stats.station))
            latitudes.append(sz[i].stats.sac['stla'])
            longitudes.append(sz[i].stats.sac['stlo'])
        latmin = min(latitudes)
        lonmin = max(longitudes)
        newlat = np.empty([len(snames)])
        newlon = np.empty([len(snames)])
        stations = copy.copy(snames)
        for i in range(len(snames)):
            reindex = stations.index(snames[i])
            newlat[i] = latitudes[reindex]
            newlon[i] = longitudes[reindex]
            distances.append(
                pydist.vincenty([newlat[i], newlon[i]],
                                [latmin, lonmin]).meters)
        ##### this is where the maths happens and arrays are created
        for block in range(im):
            print(blockette, tt)
            ll, lo, stalist, vizray, dist = [], [], [], [], []
            shorty = 0
            for z in range(len(snames)):
                szdata = sz[z].data[blockette:blockette + npts]
                # if len(szdata)==npts:
                vizray.append([])
                Bwhite = Ut.w_spec(szdata, deltaf, fftsize, freq1, freq2)
                vizray[shorty].append(np.sum(Bwhite[:, :], axis=0))
                ll.append(newlat[z])
                lo.append(newlon[z])
                dist.append(distances[z])
                stalist.append(snames[z])
                shorty = shorty + 1
            rays = np.vstack(np.array(vizray))
            ix = np.where(np.isnan(rays))
            rays[ix] = 0
            rayz = np.copy(rays)
            latudes = copy.copy(ll)
            longitudes = copy.copy(lo)
            slist = copy.copy(stalist)
            #sort the array orders by distance from lomin,latmin
            for i in range(len(slist)):
                junk = np.where(np.array(dist) == max(dist))
                rayz[i] = rays[junk[0][0]]
                ll[i] = latudes[junk[0][0]]
                lo[i] = longitudes[junk[0][0]]
                slist[i] = stalist[junk[0][0]]
                dist[junk[0][0]] = -9999999999
            timevector = Ut.getfvals(tt, Bwhite, nseconds, edgebuffer)

            #clean up the array
            rayz = Ut.saturateArray(rayz, masktimes)
            ix = np.where(np.isnan(rayz))
            rayz[ix] = 0

            #determine which level to use as detections 4* MAD
            levels = [Ut.get_levels(rayz, madtimes)]

            #get the ANF catalog events and get closest station

            localE, globalE, closesti = Ut.getCatalogData(tt, nseconds, lo, ll)

            #closesti = np.flipud(closesti)
            #unstructured triangular mesh with stations as verticies, mask out the long edges
            triang = tri.Triangulation(lo, ll)
            mask, edgeL = Ut.long_edges(lo, ll, triang.triangles)
            triang.set_mask(mask)
            kval = Ut.get_k(lo, ll, triang.triangles, thresholdv)

            #%%
            #get contour areas by frame
            av,aa,xc,yc,centroids,ctimes,ctimesdate,junkx,junky=[],[],[],[],[],[],[],[],[]
            for each in range(len(rayz[0, :])):
                #                refiner = tri.UniformTriRefiner(triang)
                #                tri_refi, z_refi = refiner.refine_field(rayz[0:,each], subdiv=0)
                cs = plt.tricontour(triang,
                                    rayz[0:, each],
                                    mask=mask,
                                    levels=levels,
                                    colors='c',
                                    linewidths=[1.5])
                contour = cs.collections[0].get_paths()
                for alls in range(len(contour)):
                    vs = contour[alls].vertices
                    a = Ut.PolygonArea(vs)
                    aa.append(a)
                    x = vs[:, 0]
                    y = vs[:, 1]
                    points = np.array([x, y])
                    points = points.transpose()
                    sx = sy = sL = 0
                    for i in range(
                            len(points)):  # counts from 0 to len(points)-1
                        x0, y0 = points[
                            i -
                            1]  # in Python points[-1] is last element of points
                        x1, y1 = points[i]
                        L = ((x1 - x0)**2 + (y1 - y0)**2)**0.5
                        sx += (x0 + x1) / 2 * L
                        sy += (y0 + y1) / 2 * L
                        sL += L
                    xc.append(sx / sL)
                    yc.append(sy / sL)
                if aa != []:
                    idi = np.where(np.array(aa) > kval)
                    filler = np.where(np.array(aa) <= kval)
                    chained = itertools.chain.from_iterable(filler)
                    chain = itertools.chain.from_iterable(idi)
                    idi = list(chain)
                    filler = list(chained)
                    for alls in range(len(aa)):
                        if aa[alls] > kval:
                            centroids.append([xc[idi[0]], yc[idi[0]]])
                            ctimes.append(timevector[each])
                            ctimesdate.append(timevector[each])
                            av.append(aa[idi[0]])
                        else:
                            centroids.append([0, 0])
                            ctimes.append(timevector[each])
                            ctimesdate.append(timevector[each])
                            av.append(0)

                aa, yc, xc = [], [], []
    #%%     Filter peaks in av above threshold by time and distance to remove redundant.
            idxx, idx, regionals, localev = [], [], [], []
            coordinatesz = np.transpose(centroids)
            avz = av
            abovek = np.where(np.array(avz) > 0)
            idxx = abovek[0]
            iii = []
            for i in range(len(abovek[0]) - 1):
                junk = ctimes[idxx[i + 1]] - ctimes[idxx[i]]
                junk1 = centroids[idxx[i]]
                junk2 = centroids[idxx[i + 1]]
                if junk.seconds < time_thres and pydist.vincenty(
                        junk2, junk1).meters < distance_thres:
                    iii.append(idxx[i + 1])

            idxx = set(idxx) - set(iii)
            idxx = list(idxx)
            idxx.sort()
            idx = idxx
            ltxlocal, ltxlocalexist = [], []
            ltxglobal = []
            ltxglobalexist = []
            doubles, localev = [], []
            dit2 = []
            #%%
            #if there are no picks but cataloged events exist, make null arrays
            if len(idx) == 0 and len(globalE) > 0:
                ltxglobalexist = np.ones(len(globalE))
            if len(idx) == 0 and len(localE) > 0:
                ltxlocalexist = np.ones(len(localE))
    #try to match detections with known catalog events based on time and location
            if len(idx) > 0:
                distarray = []
                dmin = np.zeros([5])
                dval = np.zeros([5])
                closestl = np.empty([len(idx), 5])
                dvals = np.empty([len(idx), 5])
                closestl = closestl.astype(np.int64)
                for i in range(len(idx)):
                    #find distance to the 5 nearest stations and save them for plotting templates
                    for each in range(len(ll)):
                        distarray.append(
                            pydist.vincenty([
                                coordinatesz[1][idx[i]],
                                coordinatesz[0][idx[i]]
                            ], [ll[each], lo[each]]).meters)
                    for all5 in range(5):
                        dmin[all5] = np.argmin(distarray)
                        dmin = dmin.astype(np.int64)
                        dval[all5] = distarray[dmin[all5]]
                        distarray[dmin[all5]] = 9e10
                    closestl[i][:] = dmin
                    dvals[i][:] = dval
                    dmin = np.zeros_like(dmin)
                    distarray = []
                    #get timeseries for this pick
                    stg = slist[closestl[i][0]]
                    timeindex = bisect.bisect_left(alltimes, ctimes[idx[i]])
                    sss = sz.select(station=stg)
                    av = sss[0].data[timeindex - tlength:timeindex + tlength]
                    cf = recSTALTA(av, int(40), int(1200))
                    peaks = triggerOnset(cf, 3, .2)
                    #get rid of peaks that are way off LTX times
                    peaksi = []
                    for peak in peaks:
                        peak = peak[0]
                        junk = alltimes[timeindex] - alltimes[timeindex -
                                                              tlength + peak]
                        if abs(junk.seconds) > 45:
                            peaksi.append(i)

                    peaks = np.delete(peaks, peaksi, axis=0)
                    #look for overlap with ANF global

                    for j in range(len(globalE)):
                        #get distance between stations and depth for theoretical ttime calc
                        # the time buffers are somewhat arbitrary
                        dep = globalE.depth[j]
                        dit = loc2d(centroids[idx[i]][1], centroids[idx[i]][0],
                                    globalE.Lat[j], globalE.Lon[j])
                        arrivals = model.get_travel_times(dep,
                                                          dit,
                                                          phase_list=['P'])
                        #if no calculated tt but sta/lta peak
                        if len(arrivals) == 0 and len(peaks) != 0:
                            junk = UTCDateTime(
                                alltimes[timeindex - tlength +
                                         peaks[0][0]]) - UTCDateTime(
                                             globalE.DateString[j])
                            if junk > -40 and junk < 40:
                                doubles.append(idx[i])
                                ltxglobal.append(
                                    UTCDateTime(alltimes[timeindex - tlength +
                                                         peaks[0][0]]))
                                ltxglobalexist.append(0)
                            else:
                                ltxglobalexist.append(1)
                        #if no calculated tt and no sta/lta peak use ltx time
                        elif len(arrivals) == 0 and len(peaks) == 0:
                            junk = UTCDateTime(
                                alltimes[timeindex]) - UTCDateTime(
                                    globalE.DateString[j])
                            if junk > -40 and junk < 40:
                                doubles.append(idx[i])
                                ltxglobal.append(
                                    UTCDateTime(alltimes[timeindex]))
                                ltxglobalexist.append(0)
                            else:
                                ltxglobalexist.append(1)
                        #if there are calculated arrivals and sta/lta peak
                        elif len(peaks) != 0:
                            junk = UTCDateTime(
                                alltimes[timeindex - tlength + peaks[0][0]]
                            ) - (UTCDateTime(globalE.DateString[j]) +
                                 datetime.timedelta(seconds=arrivals[0].time))
                            if junk > -30 and junk < 30:
                                doubles.append(idx[i])
                                ltxglobal.append(
                                    UTCDateTime(alltimes[timeindex - tlength +
                                                         peaks[0][0]]))
                                ltxglobalexist.append(0)
                            else:
                                ltxglobalexist.append(1)
                        #if there are calculated arrivals and no sta/lta peaks
                        else:

                            junk = UTCDateTime(alltimes[timeindex]) - (
                                UTCDateTime(globalE.DateString[j]) +
                                datetime.timedelta(seconds=arrivals[0].time))
                            if junk > -60 and junk < 60:
                                doubles.append(idx[i])
                                ltxglobalexist.append(0)
                            else:
                                ltxglobalexist.append(1)
                    #look for overlap with ANF local
                    if len(localE) > 0 and len(peaks) != 0:
                        for eachlocal in range(len(localE)):
                            #junk= UTCDateTime(alltimes[timeindex-tlength+peaks[0][0]]) - UTCDateTime(localE.DateString[eachlocal])
                            #took this out because faulty picks disassociated too many events
                            #calculate with LTX pick time instead
                            dep = localE.depth[eachlocal]
                            dit = pydist.vincenty(
                                [centroids[idx[i]][1], centroids[idx[i]][0]],
                                [localE.Lat[eachlocal], localE.Lon[eachlocal]
                                 ]).meters
                            junk = UTCDateTime(
                                alltimes[timeindex]) - UTCDateTime(
                                    localE.DateString[eachlocal])
                            if junk > -60 and junk < 60 and dit < 2.0 * edgeL:
                                localev.append(idx[i])
                                ltxlocal.append(
                                    UTCDateTime(alltimes[timeindex - tlength +
                                                         peaks[0][0]]))
                                ltxlocalexist.append(0)
                            else:
                                ltxlocalexist.append(1)
                    if len(localE) > 0 and len(peaks) == 0:
                        for eachlocal in range(len(localE)):
                            dep = localE.depth[eachlocal]
                            dit = pydist.vincenty(
                                [centroids[idx[i]][1], centroids[idx[i]][0]],
                                [localE.Lat[eachlocal], localE.Lon[eachlocal]
                                 ]).meters
                            junk = UTCDateTime(
                                alltimes[timeindex]) - UTCDateTime(
                                    localE.DateString[eachlocal])
                            if junk > -60 and junk < 60 and dit < 2.0 * edgeL:
                                localev.append(idx[i])
                                ltxlocal.append(
                                    UTCDateTime(alltimes[timeindex]))
                                ltxlocalexist.append(0)
                            else:
                                ltxlocalexist.append(1)
                #if it goes with a local- don't let it go with a double too
                dupe = []
                for dl in range(len(doubles)):
                    if localev.count(doubles[dl]) > 0:
                        dupe.append(doubles[dl])
                for repeats in range(len(dupe)):
                    doubles.remove(dupe[repeats])
    #
                detections = []
                detections = set(idx)  #-set(doubles)
                detections = list(detections)
                #or if there are more locals LTX detections than ANF locals, fix it
                #by assuming double pick on closest pair
                pdist = []
                if len(localev) > len(localE):
                    for i in range(len(localev) - 1):
                        pdist.append(localev[i + 1] - localev[i])
                        junk = np.where(pdist == min(pdist))
                    localev.pop(junk[0][0] + 1)
                    #detections.remove(localev[junk[0][0]+1])
                detections.sort()
                idx = detections
                dtype, cents = Ut.markType(detections, centroids, localev,
                                           localE, ctimes, doubles)
                #get the nearest station also for cataloged events
                closestd = np.zeros([len(doubles)])
                distarray = np.zeros([len(ll)])
                for event in range(len(doubles)):
                    for each in range(len(ll)):
                        distarray[each] = pydist.vincenty([
                            coordinatesz[1][doubles[event]],
                            coordinatesz[0][doubles[event]]
                        ], [ll[each], lo[each]]).meters

                    finder = np.argmin(distarray)
                    closestd[event] = finder
                    distarray[finder] = 9e10
                    closestd = closestd.astype(np.int64)

                closestp = []
                distarray = np.zeros([len(ll)])
                for event in range(len(localev)):
                    for each in range(len(ll)):
                        distarray[each] = pydist.vincenty([
                            coordinatesz[1][localev[event]],
                            coordinatesz[0][localev[event]]
                        ], [ll[each], lo[each]]).meters

                    finder = np.argmin(distarray)
                    closestp.append(finder)
                    distarray[finder] = 9e10

    #%%#save templates from this round of picks to verify on closest station
            ss = str(tt)
            ss = ss[0:13]
            if 'detections' in locals():
                index = range(len(detections))
            else:
                index = [0]
                detections = []
            df = pd.DataFrame(data=d, index=index)
            if maketemplates == 1 and len(detections) > 0:
                ptimes, confidence = [], []
                magi = np.zeros_like(dvals)
                dum = 0
                for fi in range(len(detections)):
                    if localev.count(detections[fi]) == 0:
                        df.Contributor[fi] = 'LTX'
                    else:
                        df.Contributor[fi] = 'ANF,LTX'
                        allmags = [
                            localE.ms[dum], localE.mb[dum], localE.ml[dum]
                        ]
                        df.Magnitude[fi] = np.max(allmags)
                        dum = dum + 1
                    #df.Latitude[fi] = coordinatesz[1][detections[fi]]
                    #df.Longitude[fi]=coordinatesz[0][detections[fi]]
                    df.Latitude[fi] = cents[fi][0]
                    df.Longitude[fi] = cents[fi][1]
                    df.Type[fi] = dtype[fi]
                    plt.cla()
                    ax = plt.gca()
                    timeindex = bisect.bisect_left(alltimes,
                                                   (ctimes[detections[fi]]))
                    sss = np.zeros([5, tlength * 2])
                    for stas in range(5):
                        stg = slist[closestl[fi][stas]]
                        tr = sz.select(station=stg)
                        if ctimes[detections[fi]] - datetime.timedelta(
                                seconds=80) < tt.datetime:
                            sss[stas][tlength:] = tr[0].data[
                                timeindex:timeindex + tlength]
                        elif ctimes[detections[fi]] + datetime.timedelta(
                                seconds=80) > tt.datetime + datetime.timedelta(
                                    seconds=nseconds + edgebuffer):
                            sss[stas][0:tlength] = tr[0].data[
                                timeindex - tlength:timeindex]
                        else:
                            sss[stas][:] = tr[0].data[timeindex -
                                                      tlength:timeindex +
                                                      tlength]
                    sss = np.nan_to_num(sss)
                    stg = slist[closestl[0][0]]
                    #plt.figure(fi)
                    peak = None
                    plt.suptitle('nearest station:' + stg + ' ' +
                                 str(ctimes[detections[fi]]) + 'TYPE = ' +
                                 dtype[fi])
                    for plots in range(5):
                        plt.subplot(5, 1, plots + 1)
                        cf = recSTALTA(sss[plots][:], int(80), int(500))
                        peaks = triggerOnset(cf, 3, .1)
                        peaksi = []
                        dummy = 0
                        for pk in peaks:
                            endi = pk[1]
                            peak = pk[0]
                            mcdur = alltimes[timeindex - tlength +
                                             endi] - alltimes[timeindex -
                                                              tlength + peak]
                            mdur = mcdur.total_seconds()
                            if alltimes[timeindex] > alltimes[timeindex -
                                                              tlength + peak]:
                                junk = alltimes[timeindex] - alltimes[
                                    timeindex - tlength + peak]
                            else:
                                junk = alltimes[timeindex - tlength +
                                                peak] - alltimes[timeindex]
                            if (junk.seconds) > 40:
                                peaksi.append(dummy)
                            dummy = dummy + 1
                        peaks = np.delete(peaks, peaksi, axis=0)

                        sss[plots] = np.nan_to_num(sss[plots])
                        #if the channel is blank underflow problems occur plotting station name
                        sss = np.round(sss, decimals=10)
                        plt.plot(
                            Ut.templatetimes(alltimes[timeindex], tlength,
                                             deltaf), sss[plots][:], 'black')
                        plt.axvline(x=alltimes[timeindex])
                        plt.text(alltimes[timeindex],
                                 0,
                                 slist[closestl[fi][plots]],
                                 color='red',
                                 fontsize=20)
                        plt.axis('tight')
                        for arc in range(len(peaks)):
                            plt.axvline(x=alltimes[timeindex - tlength - 10 +
                                                   peaks[arc][0]],
                                        color='orange')
                            plt.axvline(x=alltimes[timeindex - tlength - 10 +
                                                   peaks[arc][1]],
                                        color='purple')

                        if len(peaks) > 0:
                            ptimes.append(
                                UTCDateTime(alltimes[timeindex - tlength - 10 +
                                                     peaks[0][0]]))
                            confidence.append(len(peaks))
                            magi[fi][plots] = (
                                -2.25 + 2.32 * np.log10(mdur) +
                                0.0023 * dvals[fi][plots] / 1000)
                            #magi[fi][plots]=(1.86*np.log10(mdur)-0.85)
                        else:
                            ptimes.append(UTCDateTime(alltimes[timeindex]))
                            confidence.append(2)

                    magi = np.round(magi, decimals=2)
                    magii = pd.DataFrame(magi)
                    magu = magii[magii != 0]
                    if df.Contributor[fi] == 'ANF,LTX':
                        df.mag_error[fi] = np.round(np.max(allmags) -
                                                    np.mean(magu, axis=1)[fi],
                                                    decimals=2)
                        df.Magnitude[fi] = str(
                            str(df.Magnitude[fi]) + ',' +
                            str(np.round(np.mean(magu, axis=1)[fi],
                                         decimals=2)))
                        df.cent_er[fi] = np.round(pydist.vincenty([
                            coordinatesz[1][detections[fi]],
                            coordinatesz[0][detections[fi]]
                        ], [cents[fi][0], cents[fi][1]]).meters / 1000.00,
                                                  decimals=2)
                    else:
                        df.Magnitude[fi] = np.round(np.mean(magu, axis=1)[fi],
                                                    decimals=2)
                    #ptimes = np.reshape(ptimes,[len(ptimes)/5,5])
                    df.S1[fi] = slist[closestl[fi][0]]
                    df.S1time[fi] = ptimes[0]
                    df.S2[fi] = slist[closestl[fi][1]]
                    df.S2time[fi] = (ptimes[1])
                    df.S3[fi] = slist[closestl[fi][2]]
                    df.S3time[fi] = (ptimes[2])
                    df.S4[fi] = slist[closestl[fi][3]]
                    df.S4time[fi] = (ptimes[3])
                    df.S5[fi] = slist[closestl[fi][4]]
                    df.S5time[fi] = (ptimes[4])
                    #df.Confidence[fi]= confidence[0]
                    ptimes = []
                    if dtype[fi] == 'earthquake':
                        svname = homedir + str(s) + "/image" + ss[
                            11:13] + "_pick_" + str(fi + 1) + ".png"
                        plt.savefig(svname, format='png')
                    plt.clf()

    #%%
            df1 = [df1, df]
            df1 = pd.concat(df1)

            ################################################
            #%%

            fig = plt.figure()
            plt.cla()
            ax = plt.gca()
            #plot it all
            for i in range(len(detections)):

                if localev.count(detections[i]) == 1:
                    color = 'c'
                elif doubles.count(detections[i]) == 1:
                    color = 'blue'
                else:
                    color = 'white'
                if dtype[i] == 'blast':
                    facecolor = 'none'
                else:
                    facecolor = color
                plt.scatter(mdates.date2num(ctimes[detections[i]]),
                            closestl[i][0],
                            s=200,
                            color=color,
                            facecolor=facecolor)

    #
            for i in range(len(globalE)):
                plt.scatter(mdates.date2num(UTCDateTime(globalE.time[i])),
                            1,
                            s=100,
                            color='b',
                            alpha=.8)
            for i in range(len(localE)):
                plt.scatter(mdates.date2num(UTCDateTime(localE.time[i])),
                            closesti[i],
                            s=100,
                            facecolor='c',
                            edgecolor='grey')
            plt.imshow(np.flipud(rayz),
                       extent=[
                           mdates.date2num(tt.datetime),
                           mdates.date2num(
                               (tt + nseconds + edgebuffer).datetime), 0,
                           len(slist)
                       ],
                       aspect='auto',
                       interpolation='nearest',
                       cmap='bone',
                       vmin=np.min(rayz) / 2,
                       vmax=np.max(rayz) * 2)

            ax.set_adjustable('box')  # 'box-forced' was removed in newer matplotlib
            ax.xaxis_date()
            plt.yticks(np.arange(len(ll)))
            ax.set_yticklabels(slist)
            tdate = yr + '-' + mo + '-' + str(dayat).zfill(2)
            plt.title(tdate)
            ax.grid(color='black')
            ss = str(tt)
            ss = ss[0:13]
            kurs = "%s/" % s + "%s.png" % ss
            svpath = homedir + kurs
            plt.savefig(svpath, format='png')
            plt.close()

            #%%
            blockette = blockette + (npts - nptsf)
            tt = tt + nseconds
            detections = []
            localev = []
            doubles = []
        #############################

        svpath = homedir + '%s' % s + "/picktable.html"
        df1.to_html(open(svpath, 'w'), index=False)
        svpath = homedir + '%s' % s + "/picktable.pkl"
        df1.to_pickle(svpath)
        dayat = dayat + 1
        counter = counter + 1
        counter_3char = str(counter).zfill(3)

        #############################

if __name__ == '__main__':
    detection_function()
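A hypothetical invocation of freqtor (every value below is made up; the date fields are passed as strings because they are concatenated into a UTCDateTime inside the function, and distance_thres is in meters as used with vincenty):

freqtor('2014', '03', '01', '00', '00', '00',
        duration=86400, ndays=1, datdir='/data/sac/',
        freq1=2.0, freq2=12.0, thresholdv=2.0, deltaf=250.0,
        masktimes=4, madtimes=4.0, time_thres=60, distance_thres=25000)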
Code Example #14
File: FINDBH.py  Project: SarraHayoune/Summer2018
# assignment: Find Black Hole

import pynbody
import matplotlib.pylab as plt
plt.switch_backend("agg")

# load the snapshot
s = pynbody.load('/mnt/cptmarvel/cptmarvel.cosmo25cmb.4096g5HbwK1BH.004096/cptmarvel.cosmo25cmb.4096g5HbwK1BH.004096')

# convert the units
s.physical_units()

# the halo that I need is h[5]
h = s.halos()
h5 = h[5]

# put the galaxy you care about at the center of the simulation
pynbody.analysis.angmom.faceon(h5)


with pynbody.analysis.halo.center(h[5], mode='hyb'):
    print(h[5]['pos'][0])
    print(h[5]['pos'][1])
    print(h[5]['pos'][2])

print('pos')

def findBH(s):
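The definition of findBH is cut off above. A minimal sketch of one conventional approach with pynbody (an assumption, not the author's code): in ChaNGa-style snapshots, black holes are stored among the star particles with negative formation time.

def findBH_sketch(s):
    # black holes masquerade as stars with tform < 0 in these snapshots
    return s.stars[s.stars['tform'] < 0]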
Code Example #15
import os
import torch
import random
import itertools
import numpy as np
import matplotlib.pylab as plt
plt.switch_backend("qt5agg")
from sklearn.metrics import confusion_matrix

def accuracy(output, label, topk=(0,)):

    # compute accuracy for precision@k for the specified k and each class
    # output : BatchSize x n_classes

    maxk = max(topk)
    _, pred_index = torch.topk(output, maxk + 1, dim=1, largest=True, sorted=True)  # descending order
    correct = pred_index.eq(label.view(len(label), -1).expand_as(pred_index))
    for i in range(len(label)):
        for j in range(3):
            if correct[i,j] == 1 :
                for k in range(j+1,3):
                    correct[i,k] = 1
                break

    res=[]
    for k in topk:
        correct_k = float(correct[:,k].sum()) / float(len(label))
        res.append(correct_k)

    cls1, cls3 = list(), list()
    for i in range(len(label)):
Code Example #16
import numpy as np
import chainer
import os
from chainer import serializers, optimizers, cuda, training
from chainer.training import extension, extensions, updaters
import argparse
import glob
from matplotlib import pylab as plt
plt.switch_backend('agg')

from chainer.datasets import TransformDataset

from user_dataset_3class import UserDataset3Class
from guinness_net_yolov2 import GUINNESS_YOLOv2
from yolo_predictor import YOLOv2Predictor
from transform_sg import convert_sg, Transform

parser = argparse.ArgumentParser(description='YOLOv2 trainer')
parser.add_argument('--batch_size',
                    '-b',
                    type=int,
                    default=6,
                    help='Mini batch size')
parser.add_argument('--img_size',
                    '-s',
                    type=int,
                    default=213,
                    help='test image size')
parser.add_argument('--gpu',
                    '-g',
                    type=int,
Code Example #17
import numpy as np
from numpy import exp
from numpy import pi
from numpy import sqrt
from numpy import log
import numpy.testing as nut
import scipy.integrate as si
import mpmath as mp
import matplotlib.pylab as plt

# switch to the Qt5Agg backend, needed when working inside specific software
try:
    plt.switch_backend("Qt5Agg")
except Exception as exc:
    raise RuntimeError("Failed to set matplotlib backend to Qt5Agg") from exc


#DEFINITION OF MATHEMATICAL OBJECTS
#Definition of the transformation matrices

def matrixFlight(L):                                                       # For a flight path of length
    return np.array([[1,-L,0],[0,1,0],[0,0,1]])

def matrixMonoPlane(b, ThetaB):                                         # For a perfect flat crystal monochromator
    return np.array([[b,0,0],[0,1/b,(1-1/b)*np.tan(ThetaB)],[0,0,1]])

def matrixMonoBent(b, Fc, ThetaB):   # For a perfect curved crystal monochromator (meridionally and sagittally focusing)
    return np.array([[b,0,0],[1/Fc,1/b,(1-1/b)*np.tan(ThetaB)],[0,0,1]])

def matrixMonoMosaic(ThetaB):                                             # For a mosaic monochromator
    return np.array([[1,0,0],[0,-1,2*np.tan(ThetaB)],[0,0,1]])
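An illustrative composition of these matrices: propagate a phase-space vector through a flight path, a flat monochromator, then a second flight path (the leftmost matrix is applied last). The Bragg angle, asymmetry factor, and input vector below are invented numbers:

ThetaB = np.deg2rad(12.0)                  # assumed Bragg angle
b = 0.8                                    # assumed asymmetry factor
beamline = matrixFlight(5.0) @ matrixMonoPlane(b, ThetaB) @ matrixFlight(10.0)
vec = np.array([1e-6, 1e-6, 1e-4])         # assumed input vector
print(beamline @ vec)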
Code Example #18
def draw_graph(network, color, min_color=None, max_color=None, groups=None, sizep=None, colormap_name='bwr', min_vertex_size_shrinking_factor=4, output='graph.png', output_size=(15, 15), dpi=80, standardize=False, color_bar=True, crop=True, **kwargs):
    output_splitted = output.rsplit('/', 1)[-1].split('_graph_')
    net_name, prop_key = output_splitted[0], output_splitted[-1]
    print_prefix = utils.color_string('[' + net_name + '] ') + '[' + prop_key + '] [' + str(
        datetime.datetime.now().replace(microsecond=0)) + '] draw graph'
    print(print_prefix)
    print_prefix += ': '
    num_nodes = network.num_vertices()
    if num_nodes < 10:
        num_nodes = 10
    max_vertex_size = np.sqrt((np.pi * (min(output_size) * dpi / 2) ** 2) / num_nodes)
    if max_vertex_size < min_vertex_size_shrinking_factor:
        max_vertex_size = min_vertex_size_shrinking_factor
    min_vertex_size = max_vertex_size / min_vertex_size_shrinking_factor
    if sizep is None:
        sizep = max_vertex_size + min_vertex_size
        sizep /= 3
    else:
        sizep = prop_to_size(sizep, mi=min_vertex_size / 3 * 2, ma=max_vertex_size / 3 * 2, power=2)
    v_shape = 'circle'
    if isinstance(groups, str):
        try:
            v_shape = network.vp[groups].copy()
            #groups = network.vp[groups]
            #unique_groups = set(np.array(groups.a))
            #num_groups = len(unique_groups)
            #groups_c_map = colormap.get_cmap('gist_rainbow')
            #groups_c_map = {i: groups_c_map(idx / (num_groups - 1)) for idx, i in enumerate(unique_groups)}
            #v_pen_color = network.new_vertex_property('vector<float>')
            #for v in network.vertices():
            #    v_pen_color = groups_c_map[groups[v]]

            v_shape.a %= 14
        except KeyError:
            # print print_prefix + 'cannot find groups property:', groups
            v_shape = 'circle'

    cmap = colormap.get_cmap(colormap_name)
    color = color.copy()
    v_shape = network.new_vertex_property('int')
    v_shape.a = np.array(
        [0 if np.isclose(color[int(v)], 1.) else (1 if color[int(v)] > 1. else 4) for v in network.vertices()],
        dtype='int')

    try:
        _ = color.a
    except AttributeError:
        c = network.new_vertex_property('float')
        c.a = color
        color = c
    min_color = color.a.min() if min_color is None else min_color
    max_color = color.a.max() if max_color is None else max_color
    if np.isclose(min_color, max_color):
        min_color = 0
        max_color = 2

    #orig_color = np.array(color.a)
    if standardize:
        color.a -= color.a.mean()
        color.a /= color.a.var()
        color.a += 1
        color.a /= 2
    else:
        #color.a -= min_color
        #color.a /= max_color
        tmp = np.array(color.a)
        tmp[tmp > 1] = 1 + (tmp[tmp > 1] / (max_color/1))
        color.a = tmp
        color.a /= 2
    if not output.endswith('.png'):
        output += '.png'
    color_pmap = network.new_vertex_property('vector<float>')
    tmp = np.array([np.array(cmap(i)) for i in color.a])
    color_pmap.set_2d_array(tmp.T)
    plt.switch_backend('cairo')
    f, ax = plt.subplots(figsize=(15, 15))
    output_size = (output_size[0], output_size[1]*.3)  # make space for colorbar
    edge_alpha = 0.3 if network.num_vertices() < 1000 else 0.01
    pen_width = 0.8 if network.num_vertices() < 1000 else 0.1
    v_pen_color = [0., 0., 0., 1] if network.num_vertices() < 1000 else [0.0, 0.0, 0.0, edge_alpha]
    graph_draw(network, vertex_fill_color=color_pmap, mplfig=ax, vertex_pen_width=pen_width, vertex_shape=v_shape,
               vertex_color=v_pen_color, edge_color=[0.179, 0.203, 0.210, edge_alpha], vertex_size=sizep,
               output_size=output_size, output=output, **kwargs)
    if color_bar:
        cmap = plt.cm.ScalarMappable(cmap=cmap)
        cmap.set_array([0., 2.])
        cbar = f.colorbar(cmap, drawedges=False)
        ticks = [0, 1.0, max_color / 1]
        cbar.set_ticks([0., 1., 2.])
        tick_labels = None
        non_zero_dig = 1
        for digi in range(10):
            tick_labels = [str("{:2." + str(digi) + "f}").format(i) for i in ticks]
            if any([len(i.replace('.', '').replace('0', '').replace(' ', '').replace('-', '').replace('+', '')) > 0 for
                    i in tick_labels]):
                non_zero_dig -= 1
                if non_zero_dig == 0:
                    break
        cbar.ax.set_yticklabels(tick_labels)
        cbar.ax.tick_params(labelsize=40)
        #var = stats.tvar(orig_color)
        cbar.set_label('Bias Factor', labelpad=+30)
    matplotlib.rcParams.update({'font.size': 40})
    plt.axis('off')
    plt.savefig(output, bbox_inches='tight', dpi=dpi)
    plt.close('all')
    plt.switch_backend('Agg')
    if crop:
        if output.endswith('.pdf'):
            os.system('pdfcrop ' + output + ' ' + output)
Code Example #19
#
# interpolate using Lagrangian interpolation
#

import numpy as np
from scipy import interpolate as interp  # avoid shadowing the built-in int


if '__main__' in __name__:
    import matplotlib as mpl
    import matplotlib.pylab as plt
    
    plt.switch_backend('TkAgg')
    plt.ion()
    x = lambda n: np.linspace(-1,1,n)
    f = lambda x: np.cos(np.sin(np.pi*x))
    xd = x(300); fd=f(xd)
    plt.plot(xd,fd,'k')

    spf = interp.interp1d(x(300), fd, kind='cubic')
    
    plt.plot(xd,spf(xd),'r')
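As written, the spline is built on the same 300 nodes it is evaluated on, so the red curve sits exactly on the data. A short illustrative continuation (assumed, not part of the original file) that rebuilds the spline on only 8 nodes makes the interpolation error visible:

    # rebuild on a coarse node set and evaluate on the dense grid (sketch)
    spf8 = interp.interp1d(x(8), f(x(8)), kind='cubic')
    plt.plot(xd, spf8(xd), 'b--')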
Code Example #20
File: __init__.py  Project: boutproject/BOUT-dev
#!/usr/bin/env python

"""Init file for pre and post processing"""

import os
import matplotlib.pylab as plt

# Set proper backend for the display
try:
    os.environ["DISPLAY"]
except KeyError:
    plt.switch_backend("Agg")
Code Example #21
File: lr_scheduler.py  Project: MLsmaller/cpp_piano
        cos_annealing = (1 + math.cos(math.pi * T / self.phase2_iters)) / 2

        for i in range(len(self.optimizer.param_groups)):
            self.optimizer.param_groups[i][
                'momentum'] = self.momentums[1] - self.mom_diff * cos_annealing
        return [
            final_lr + (base_lr - final_lr) * cos_annealing
            for base_lr, final_lr in zip(self.base_lrs, self.final_lrs)
        ]


if __name__ == "__main__":
    import torchvision
    import torch
    import matplotlib.pylab as plt
    plt.switch_backend('Agg')

    resnet = torchvision.models.resnet34()
    params = {"lr": 0.01, "weight_decay": 0.001, "momentum": 0.9}
    optimizer = torch.optim.SGD(params=resnet.parameters(), **params)

    epochs = 2
    iters_per_epoch = 100
    lrs = []
    momentums = []
    # lr_scheduler = OneCycle(optimizer, epochs, iters_per_epoch)
    lr_scheduler = Poly(optimizer, epochs, iters_per_epoch)

    for epoch in range(epochs):
        for i in range(iters_per_epoch):
            lr_scheduler.step(epoch=epoch)
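The demo loop is cut off above. A sketch of how it might finish (assumed continuation; the file name is invented): record the learning rate at every step and save the curve, which is why the Agg backend was selected at the top.

    lrs = []
    for epoch in range(epochs):
        for i in range(iters_per_epoch):
            lr_scheduler.step(epoch=epoch)
            lrs.append(optimizer.param_groups[0]['lr'])  # lr after this step
    plt.plot(lrs)
    plt.savefig('lr_schedule.png')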
Code Example #22
File: finite_diff.py  Project: whu-pzhang/seismic
import numpy as np
import matplotlib.pylab as plt

# Show the plots in the Notebook.
plt.switch_backend("nbagg")

# ---------------------------------------------------------
# Simple finite difference solver
#
# Acoustic wave equation  p_tt = c^2 p_xx + src
# 2-D regular grid
# ---------------------------------------------------------

nx = 200      # grid points in x
nz = 200      # grid points in z
nt = 1000      # number of time steps
dx = 10.0     # grid increment in x
dt = 0.001    # Time step
c0 = 3000.0   # velocity (can be an array)
isx = nx // 2  # source index x (integer for array indexing)
isz = nz // 2  # source index z
ist = 100     # shifting of source time function
f0 = 100.0    # dominant frequency of source (Hz)
isnap = 10    # snapshot frequency
T = 1.0 / f0  # dominant period
nop = 3       # length of operator

# Model type, available are "homogeneous", "fault_zone",
# "surface_low_velocity_zone", "random", "topography",
# "slab"
model_type = "fault_zone"
Code Example #23
items = d.get_items_df()
skills = d.get_skills_df()
items = items.join(skills, on="skill_lvl_1")

concepts = items["name"].unique()
sts = {}

for concept in concepts:
    print(concept)
    its = list(items[items["name"] == concept].index)
    students = answers[answers["item"].isin(its)].groupby("student").size()
    students = students[students >= TRASHOLD]
    print(len(students))
    sts[concept] = students

data = pd.DataFrame(index=concepts, columns=concepts, dtype=float)

for concept1 in concepts:
    for concept2 in concepts:
        count = len(set(sts[concept1]) & set(sts[concept2]))
        print(concept1, concept2, count)
        data.loc[concept2, concept1] = count  # avoid chained assignment

print(data)
plt.switch_backend('agg')
sns.heatmap(data, annot=True)
# plt.show() does nothing under the agg backend; save the figure instead
plt.title("Student with {} answer in concept".format(TRASHOLD))
plt.savefig("concepts crossolving - {}.png".format(TRASHOLD))
Code Example #24
#
#  calculate 2nd moments of the power spectrum for the TH and Gaussian filters 
#

import math
import numpy as np
from scipy import interpolate as intp
from scipy import integrate as integrate
from matplotlib import pylab as plt
from socket import gethostname

if ( gethostname()[0:6] == 'midway' ):
    plt.switch_backend('TkAgg')

k,Pk = np.loadtxt('matter_power_kmax10000.dat',usecols=(0,1), unpack=True)
lnk = np.log(k); lk = np.log10(k)

#
# set relevant cosmological parameters
#
h = 0.7; Omega_m = 0.276; rho_mean = 2.77e11*h*h*Omega_m # in Msun/Mpc^3

lr = np.arange(-1.0,2.5,0.01)
r = 10.0**lr

#
# set a desired grid of masses and corresponding radii
#
M = 4.0*math.pi*r**3*rho_mean/3.0; lM = np.log10(M)

# initial P(k) cutoff scale
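The script is cut off at this point. The "2nd moment" it builds toward is the variance of the density field smoothed on scale R, sigma^2(R) = (1/(2 pi^2)) * Integral of P(k) W^2(kR) k^2 dk. A sketch with the top-hat window, reusing the k, Pk, and integrate names defined above (assumed continuation, not the original code):

def W_th(x):
    # Fourier-space top-hat window
    return 3.0 * (np.sin(x) - x * np.cos(x)) / x ** 3

def sigma2_R(R):
    # sigma^2(R) via Simpson integration over the tabulated P(k)
    integrand = Pk * W_th(k * R) ** 2 * k ** 2 / (2.0 * np.pi ** 2)
    return integrate.simps(integrand, k)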
Code Example #25
File: plot_kldivs.py  Project: AmyBryce/audionet
import collections
import json
import sys

import os.path

import matplotlib
import matplotlib.pylab as pylab

import numpy as np

pylab.switch_backend('agg')

stats_file = sys.argv[1]
beg_range = int(sys.argv[2])
end_range = int(sys.argv[3])

with open(stats_file) as jsondata:
    statistics = json.load(jsondata)

video_labels = {
    "67GZuUxV27w_30.000.mkv.gz": "Rooster (C**k)",
    "9PmzQI8ZYpg_30.000.mkv.gz": "Sewing Machine",
    "_A30xsFBMXA_40.000.mkv.gz": "Fire Truck",
    "BUGx2e7OgFE_30.000.mkv.gz": "Harmonica",
    "eHIlPlNWISg_90.000.mkv.gz": "Polaroid Camera",
    "eV5JX81GzqA_150.000.mkv.gz": "Race Car",
    "-OAyRsvFGgc_30.000.mkv.gz": "Electric Guitar",
    "rctt0dhCHxs_16.000.mkv.gz": "Tree Frog",
    "rTh92nlG9io_30.000.mkv.gz": "Keyboard",
    "-XilaFMUwng_50.000.mkv.gz": "Magpie"