Example #1
def comparing_humi_temps_from_dataframe_by_day(dataframe_thermo, dataframe_SHT1x, string_day, temp_param_MAX, temp_param_MIN, temp_MAX, temp_MIN, temp_limit_SUP, temp_limit_INF, humi_limit_SUP, humi_limit_INF, humi_param_MAX, humi_param_MIN, humi_MAX, humi_MIN):
	# humi_param_* and humi_* were undefined in the original snippet; they are
	# assumed here to be humidity thresholds analogous to the temperature ones.
	f, axarr = plt.subplots(2, sharex=True)
	# First subplot
	plt.subplot(2, 1, 1)
	today_plot_thermo = dataframe_thermo.TEMP_LOG[string_day]
	plt.ylim(temp_limit_INF, temp_limit_SUP)
	y = np.arange(temp_limit_INF, temp_limit_SUP, 0.2)
	plt.grid()
	plt.yticks(y)
	today_plot_thermo.plot()
	plt.axhspan(temp_param_MIN, temp_param_MAX, facecolor='g', alpha=0.2)
	plt.axhspan(temp_MIN, temp_MAX, facecolor='g', alpha=0.2)
	plt.ylabel('Temperature (C)')
	plt.xlabel('')
	# Second subplot
	plt.subplot(2, 1, 2)
	today_plot_SHT1x_humi = dataframe_SHT1x.humi[string_day]
	today_plot_SHT1x_temp = dataframe_SHT1x.temp[string_day]
	plt.ylim(humi_limit_INF, humi_limit_SUP)
	plt.grid()
	today_plot_SHT1x_humi.plot()
	plt.axhspan(humi_param_MIN, humi_param_MAX, facecolor='g', alpha=0.2)
	plt.axhspan(humi_MIN, humi_MAX, facecolor='g', alpha=0.2)
	plt.ylabel('Humidity (%)')
	plt.xlabel('Hours')
	# Pearson correlation: http://en.wikipedia.org/wiki/Pearson_product-moment_correlation_coefficient
	# 0 implies that there is no linear correlation between the variables.
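	# r = cov(X, Y) / (std(X) * std(Y)); values near +1 or -1 indicate a strong
	# linear relationship.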
	pearson_corr_index = today_plot_SHT1x_temp.corr(today_plot_SHT1x_humi)
	# Note: a second suptitle call replaces the first, so both lines are set in one call.
	plt.suptitle('Correlation between temperature and humidity\n'
	             'Calculated Pearson correlation index: %.2f' % pearson_corr_index, color='b')
	plt.savefig('static/data/comparing_humi_temps_%s.png' % string_day, orientation='landscape')
	plt.close()
Example #2
def plot_scatter_matrix(df, plotdir):
    "Plot scatter matrix."
    print('plotting scatter matrix, this may take a while')
    plt.clf()
    pd_scatter_matrix(df, figsize=(16,16))
    plt.suptitle("Scatter Matrix", fontsize=14)
    plt.savefig(plotdir + 'scatter_matrix.png')
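A minimal usage sketch (hypothetical data; it assumes pd_scatter_matrix is pandas' scatter_matrix imported under that alias):

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from pandas.plotting import scatter_matrix as pd_scatter_matrix

df = pd.DataFrame(np.random.randn(200, 3), columns=['a', 'b', 'c'])
plot_scatter_matrix(df, './')  # writes ./scatter_matrix.png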
Example #3
def display_event(event):
    """an extremely inefficient display. It creates new instances of
    CameraDisplay for every event and every camera, and also new axes
    for each event. It's hacked, but it works
    """
    print("Displaying... please wait (this is an inefficient implementation)")
    global fig
    ntels = len(event.dl0.tels_with_data)
    fig.clear()

    plt.suptitle("EVENT {}".format(event.dl0.event_id))

    disps = []

    for ii, tel_id in enumerate(event.dl0.tels_with_data):
        print("\t draw cam {}...".format(tel_id))
        nn = int(ceil(sqrt(ntels)))
        ax = plt.subplot(nn, nn, ii + 1)

        x, y = event.meta.pixel_pos[tel_id]
        geom = io.CameraGeometry.guess(x, y)
        disp = visualization.CameraDisplay(geom, ax=ax,
                                           title="CT{0}".format(tel_id))
        disp.pixels.set_antialiaseds(False)
        disp.autoupdate = False
        disp.cmap = random.choice(cmaps)
        chan = 0
        signals = event.dl0.tel[tel_id].adc_sums[chan].astype(float)
        signals -= signals.mean()
        disp.image = signals
        disp.set_limits_percent(95)
        disp.add_colorbar()
        disps.append(disp)

    return disps
Example #4
    def plot_single_output_multiple_days(self, single_output, days_list=SOLSTICES, title_str=''):
        # Output: graph with subplots for each day
        # Inputs: single_output: keyword name of the data to plot
        #         days_list: list of dates as strings in format %Y-%m-%d; used for the subplots' titles
        #         title_str: extra text appended to the figure title
        single_output_df = self.get_single_output_df(single_output)
        legend_list = list(single_output_df.columns)
        y_info = get_y_axis_info(single_output)
        plt.figure(figsize=(8, 15))
        plt.suptitle(self.location + ': ' + single_output + '\n' + title_str, fontsize=16)

        ax = []
        for d in range(1, len(days_list) + 1):
            if d == 1:
                ax.append(plt.subplot(len(days_list), 1, d))
                plt.ylabel(y_info[UNITS], fontsize=14)
            else:
                ax.append(plt.subplot(len(days_list), 1, d, sharex=ax[0], sharey=ax[0]))
            plt.grid(True, which='major', axis='x')
            ax[d - 1].plot(single_output_df[days_list[d - 1]].index.hour, single_output_df[days_list[d - 1]]/y_info[SCALE],
                           marker='.')
            # if show_flat == True:
            #     ax[d - 1].plot(flat_df[days_list[d - 1]].index.hour, flat_df[days_list[d - 1]], 'k--')
            ax[d - 1].legend(legend_list, loc='lower right', fontsize=9)
            ax[d - 1].set_title(days_list[d - 1], fontsize=12)

        plt.xlabel('Hour of day', fontsize=14)
        ax[0].set_xlim([0, 24])
        ax[0].set_xticks([0, 6, 12, 18, 24])
        ax[0].set_ylim(y_info[LIM])
Example #5
def scatter_time_vs_s(time, norm, point_labels, title):
    plt.figure()
    size = 100
    for i, l in enumerate(sorted(norm.keys())):
        if l is not "fbpca":
            plt.scatter(time[l], norm[l], label=l, marker='o', c='b', s=size)
            for label, x, y in zip(point_labels, list(time[l]), list(norm[l])):
                plt.annotate(label, xy=(x, y), xytext=(0, -80),
                             textcoords='offset points', ha='right',
                             arrowprops=dict(arrowstyle="->",
                                             connectionstyle="arc3"),
                             va='bottom', size=11, rotation=90)
        else:
            plt.scatter(time[l], norm[l], label=l, marker='^', c='red', s=size)
            for label, x, y in zip(point_labels, list(time[l]), list(norm[l])):
                plt.annotate(label, xy=(x, y), xytext=(0, 30),
                             textcoords='offset points', ha='right',
                             arrowprops=dict(arrowstyle="->",
                                             connectionstyle="arc3"),
                             va='bottom', size=11, rotation=90)

    plt.legend(loc="best")
    plt.suptitle(title)
    plt.ylabel("norm discrepancy")
    plt.xlabel("running time [s]")
Example #6
    def graph_by_sender(self):

        # Count packets per transmitting MAC address (Dot11 addr2).
        mac_addresses = {}
        for pkt in self.pcap_file:
            sender = pkt[Dot11].addr2
            mac_addresses[sender] = mac_addresses.get(sender, 0) + 1

        # Sort addresses and counts together: the original sorted the counts and the
        # labels independently, so bars and tick labels did not match.
        senders, counts = zip(*sorted(mac_addresses.items(), key=lambda kv: kv[1]))

        plt.clf()
        plt.suptitle('Number of packets of every sender', fontsize=14, fontweight='bold')
        plt.bar(range(len(senders)), counts, align='center', color=MY_COLORS)

        plt.xticks(range(len(senders)), senders)

        plt.rcParams.update({'font.size': 10})

        plt.xlabel('Sender MAC addresses')
        plt.ylabel('Number of packets')

        # Set tick colors:
        ax = plt.gca()
        ax.tick_params(axis='x', colors='k')
        ax.tick_params(axis='y', colors='r')
        ax.set_xticklabels(ax.xaxis.get_majorticklabels(), rotation=45)

        plt.show()
Example #7
	def plotPSD(self, chan, time_interval):
		Npackets = int(time_interval * self.accum_freq)  # np.int was removed from NumPy; use the builtin
		plot_range = Npackets // 2 + 1  # integer length of the one-sided rfft output
		figure = plt.figure(num= None, figsize=(12,12), dpi=80, facecolor='w', edgecolor='w')
		# I 
		plt.suptitle('Channel ' + str(chan) + ' , Freq = ' + str((self.freqs[chan] + self.LO_freq)/1.0e6) + ' MHz') 
		plot1 = figure.add_subplot(311)
		plot1.set_xscale('log')
		plot1.set_autoscale_on(True)
		plt.ylim((-160,-80))
		plt.title('I')
		line1, = plot1.plot(np.linspace(0, self.accum_freq/2., plot_range), np.zeros(plot_range), label = 'I', color = 'green', linewidth = 1)
		plt.grid()
		# Q
		plot2 = figure.add_subplot(312)
		plot2.set_xscale('log')
		plot2.set_autoscale_on(True)
		plt.ylim((-160,-80))
		plt.title('Q')
		line2, = plot2.plot(np.linspace(0, self.accum_freq/2., plot_range), np.zeros(plot_range), label = 'Q', color = 'red', linewidth = 1)
		plt.grid()
		# Phase
		plot3 = figure.add_subplot(313)
		plot3.set_xscale('log')
		plot3.set_autoscale_on(True)
		plt.ylim((-120,-70))
		#plt.xlim((0.0001, self.accum_freq/2.))
		plt.title('Phase')
		plt.ylabel('dBc rad^2/Hz')
		plt.xlabel('log Hz')
		line3, = plot3.plot(np.linspace(0, self.accum_freq/2., plot_range), np.zeros(plot_range), label = 'Phase', color = 'black', linewidth = 1)
		plt.grid()
		plt.show(block = False)
		count = 0
		stop = 1.0e10
		while count < stop:
			Is, Qs, phases = self.get_stream(chan, time_interval)
			I_mags = np.fft.rfft(Is, Npackets)
			Q_mags = np.fft.rfft(Qs, Npackets)  # bug fix: the original transformed Is twice
			phase_mags = np.fft.rfft(phases, Npackets)
			I_vals = (np.abs(I_mags)**2 * ((1./self.accum_freq)**2 / (1.0*time_interval)))
			Q_vals = (np.abs(Q_mags)**2 * ((1./self.accum_freq)**2 / (1.0*time_interval)))
			phase_vals = (np.abs(phase_mags)**2 * ((1./self.accum_freq)**2 / (1.0*time_interval)))
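			# |rfft|^2 scaled by dt^2 / T (dt = 1/accum_freq, T = time_interval) gives a
			# periodogram-style PSD estimate; it is converted to dB below via 10*log10.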
			phase_vals = 10*np.log10(phase_vals)
			phase_vals -= phase_vals[0]
			#line1.set_ydata(Is)
			#line2.set_ydata(Qs)
			#line3.set_ydata(phases)
			line1.set_ydata(10*np.log10(I_vals))
			line2.set_ydata(10*np.log10(Q_vals))
			line3.set_ydata(phase_vals)
			plot1.relim()
			plot1.autoscale_view(True,True,False)
			plot2.relim()
			plot2.autoscale_view(True,True,False)
			#plot3.relim()
			plot3.autoscale_view(True,True,False)
			plt.draw()
			count +=1
		return
Example #8
    def display_PER(self):

        number_of_pkts = len(self.pcap_file)
        retransmission_pkts = 0

        for pkt in self.pcap_file:

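            # Frame Control bit 0x8 is the IEEE 802.11 Retry flag, set on frames
            # that are retransmissions of earlier frames.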
            if (pkt[Dot11].FCfield & 0x8) != 0:
                retransmission_pkts += 1

        ans = (retransmission_pkts / number_of_pkts)*100
        ans = float("%.2f" % ans)
        labels = ['Standard packets', 'Retransmitted packets']
        sizes = [100.0 - ans,ans]


        colors = ['g', 'firebrick']

        # Make a pie graph
        plt.clf()
        plt.figure(num=1, figsize=(8, 6))
        plt.axes(aspect=1)
        plt.suptitle('Retransmitted packets', fontsize=14, fontweight='bold')
        plt.rcParams.update({'font.size': 13})
        plt.pie(sizes, labels=labels, autopct='%.2f%%', startangle=60, colors=colors, pctdistance=0.7, labeldistance=1.2)

        plt.show()
Example #9
    def display_graph_by_specific_mac(self, mac_address):

        G = nx.Graph()

        nodes = set()  # the set holds endpoints (users), not edges
        edges_list = []


        for pkt in self.pcap_file:

            src = pkt[Dot11].addr1
            dst = pkt[Dot11].addr2

            if mac_address in [src, dst]:
                edges_list.append((src, dst))
                nodes.add(src)
                nodes.add(dst)

        plt.clf()
        plt.suptitle('Communicating with ' + str(mac_address), fontsize=14, fontweight='bold')
        plt.title("\n Number of Communicating Users: " + str(len(nodes)))
        plt.rcParams.update({'font.size': 10})
        G.add_edges_from(edges_list)
        nx.draw(G, with_labels=True, node_color=MY_COLORS)
        plt.show()
Example #10
    def display_graph(self):

        G = nx.Graph()

        nodes = set()  # endpoints (users), not edges
        edges_list = []

        for pkt in self.pcap_file:
            if pkt.haslayer(Dot11Elt):
                src = pkt[Dot11].addr1
                dst = pkt[Dot11].addr2

                edges_list.append((src, dst))
                nodes.add(src)
                nodes.add(dst)


        plt.clf()
        filepath = os.path.splitext(self.path)[0]
        filename = basename(filepath)
        plt.suptitle('Connection Map of: '+ str(filename), fontsize=14, fontweight='bold')
        plt.title("\n Number of Users: " + str(len(nodes)))
        plt.rcParams.update({'font.size': 10})
        G.add_edges_from(edges_list)
        nx.draw(G, with_labels=True, node_color=MY_COLORS)
        plt.show()
Example #11
    def display_channel_efficiency(self):

        start_time = self.pcap_file[0].time
        end_time = self.pcap_file[len(self.pcap_file) - 1].time

        duration = (end_time - start_time)/1000

        # Sum the size of every captured frame (the original loop skipped the last one).
        size = 0
        for pkt in self.pcap_file:
            size += len(pkt)

        # Utilisation: achieved bit rate as a percentage of the channel bandwidth.
        ans = (((size * 8) / duration) / BW_STANDARD_WIFI) * 100
        ans = float("%.2f" % ans)
        labels = ['utilized', 'unutilized']
        sizes = [ans, 100.0 - ans]
        colors = ['g', 'r']

        # Make a pie graph
        plt.clf()
        plt.figure(num=1, figsize=(8, 6))
        plt.axes(aspect=1)
        plt.suptitle('Channel efficiency', fontsize=14, fontweight='bold')
        plt.title("Bits/s: " + str(float("%.2f" % ((size*8)/duration))),fontsize = 12)
        plt.rcParams.update({'font.size': 17})
        plt.pie(sizes, labels=labels, autopct='%.2f%%', startangle=60, colors=colors, pctdistance=0.7, labeldistance=1.2)

        plt.show()
Example #12
def signmag_plot(a, b, z, ref):
    import copy  # local import: copy the colormaps instead of mutating the global registry
    imdata1 = np.sign(ref)
    cmap1 = copy.copy(plt.cm.RdBu)
    cmap1.set_bad('k', 1)

    imdata2 = np.log10(np.abs(ref))
    cmap2 = copy.copy(plt.cm.YlOrRd)
    cmap2.set_bad('k', 1)

    fig, axarr = plt.subplots(ncols=2, figsize=(12, 6))
    axarr[0].pcolormesh(a, b, imdata1, cmap=cmap1, vmin=-1, vmax=1)
    im = axarr[1].pcolormesh(a, b, imdata2, cmap=cmap2,
                             vmin=np.percentile(imdata2,  5),
                             vmax=np.percentile(imdata2, 95))

    for ax in axarr:
        ax.set_xlim((np.min(a), np.max(a)))
        ax.set_ylim((np.min(b), np.max(b)))
        ax.set_xlabel("a")
        ax.set_ylabel("b")
        ax.set(adjustable='box', aspect='equal')  # 'box-forced' was removed in Matplotlib 2.2

    fig.subplots_adjust(right=0.8)
    cbar_ax = fig.add_axes([0.85, 0.15, 0.03, 0.7])
    fig.colorbar(im, cax=cbar_ax)

    axarr[0].set_title("Sign of hyp1f1")
    axarr[1].set_title("Magnitude of hyp1f1")
    plt.suptitle("z = {:.2e}".format(np.float64(z)))

    return fig
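A hypothetical usage sketch (toy field; it assumes numpy as np and matplotlib.pyplot as plt):

a = np.linspace(0.1, 5.0, 64)
b = np.linspace(0.1, 5.0, 64)
A, B = np.meshgrid(a, b)
fig = signmag_plot(a, b, 1e3, np.sin(A) * B)  # z is only used in the suptitle
plt.show()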
Example #13
def template_matching():
    img = cv2.imread('messi.jpg',0)
    img2 = img.copy()
    template = cv2.imread('face.png',0)
    w, h = template.shape[::-1]

    # All 6 comparison methods, resolved via getattr below instead of eval
    methods = ['TM_CCOEFF', 'TM_CCOEFF_NORMED', 'TM_CCORR',
            'TM_CCORR_NORMED', 'TM_SQDIFF', 'TM_SQDIFF_NORMED']

    for meth in methods:
        img = img2.copy()
        method = getattr(cv2, meth)  # safer than eval on a string

        # Apply template Matching
        res = cv2.matchTemplate(img,template,method)
        min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)

        # If the method is TM_SQDIFF or TM_SQDIFF_NORMED, take minimum
        if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
            top_left = min_loc
        else:
            top_left = max_loc
        bottom_right = (top_left[0] + w, top_left[1] + h)

        cv2.rectangle(img,top_left, bottom_right, 255, 2)

        plt.subplot(121),plt.imshow(res,cmap = 'gray')
        plt.title('Matching Result'), plt.xticks([]), plt.yticks([])
        plt.subplot(122),plt.imshow(img,cmap = 'gray')
        plt.title('Detected Point'), plt.xticks([]), plt.yticks([])
        plt.suptitle(meth)

        plt.show()
Example #14
def plotInLen(r,freqs,fits,title):
    nc = 3
    minLength=8
    maxLength=25
    fig, axes = plt.subplots(
        nrows=int((maxLength-minLength+1)/2/nc), ncols=nc, figsize=(12, 12)
        )
    index = 0
    for (length, length_freqs) in freqs.items():  # renamed: the original shadowed `freqs` itself
        if minLength <= length <= maxLength and length % 2 == 1:
            sortedFreqs = OrderedDict(sorted(length_freqs.items(), key=lambda t: t[0]))
            ax = axes[index // nc, index % nc]
            ax.plot(list(sortedFreqs.keys()), list(sortedFreqs.values()), 'o')
            m, c = fits[length-1]
            # fits holds (slope, intercept) of a log-log linear fit, i.e. y = 10**c * x**m
            ax.plot(
                list(sortedFreqs.keys()),
                [(10**c)*xi**m for xi in sortedFreqs.keys()],
                linewidth=3,
                label='y = '+str("%.2f" % (10**c))+'x^'+str("%.2f" % m)
                )
            ax.set_yscale('log')
            ax.set_xscale('log')
            #ax.legend()
            ax.set_title(str(length)+'-mers')
            index += 1
    
    plt.suptitle(title,fontsize=25)
    plt.savefig(r.outputDir+'inlen.png')
Example #15
def run_div_test(fld, exact, title='', show=False, ignore_inexact=False):
    t0 = time()
    result_numexpr = viscid.div(fld, preferred="numexpr", only=False)
    t1 = time()
    logger.info("numexpr magnitude runtime: %g", t1 - t0)

    result_diff = viscid.diff(result_numexpr, exact)['x=1:-1, y=1:-1, z=1:-1']
    if not ignore_inexact and not (result_diff.data < 5e-5).all():
        logger.warning("numexpr result is far from the exact result")
    logger.info("min/max(abs(numexpr - exact)): %g / %g",
                np.min(result_diff.data), np.max(result_diff.data))

    planes = ["y=0j", "z=0j"]
    nrows = 2
    ncols = len(planes)
    _, axes = plt.subplots(nrows, ncols, squeeze=False)

    for i, p in enumerate(planes):
        vlt.plot(result_numexpr, p, ax=axes[0, i], show=False)
        vlt.plot(result_diff, p, ax=axes[1, i], show=False)

    plt.suptitle(title)
    vlt.auto_adjust_subplots(subplot_params=dict(top=0.9))

    plt.savefig(next_plot_fname(__file__))
    if show:
        vlt.mplshow()
Example #16
    def plot_main_seeds(self, qname, radio=False, checkbox=False, 
                        numerical=False, array=False):
        """ Plot the responses separately for each seed group in main_seeds. """
        
        assert sum([radio, checkbox, numerical, array]) == 1

        for seed in self.main_seeds:
            responses_seed = self.filter_rows_by_seed(seed, self.responses)
            responses_seed_question = self.filter_columns_by_name(qname, responses_seed)

            plt.subplot(int("22" + str(self.main_seeds.index(seed))))
            plt.title("Seed " + seed)

            if radio:
                self.plot_convergence_radio(qname, responses_seed_question)
            elif checkbox:
                self.plot_convergence_checkbox(responses_seed_question)
            elif numerical:
                self.plot_convergence_numerical(responses_seed_question)
            elif array:
                self.plot_array_boxes(qname, responses_seed_question)

        qtext = self.get_qtext_from_qname(qname)
        plt.suptitle(qtext)
        plt.tight_layout()
        plt.show()
Example #17
def boxplotThem(dataToPlot,title):
    means = [np.mean(item) for item in dataToPlot]
    fig = plt.figure(1, figsize=(9,6))
    ax = fig.add_subplot(111)

    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')
    for i in range(25):
        print('length ',str(i+1))
        for item in dataToPlot[i]:
            ax.scatter(i+1,item)

    ax.plot(list(range(1,26)),means,linewidth=4,color='r')

    ax.set_xticks([1,5,10,15,20,25])
    ax.set_xticklabels([r"$1",r"$5$",r"$10$",r"$15$",r"$20$",r"$25$"],fontsize = 25)

    ax.set_yscale('log')
    ax.set_yticks([0.00000001,0.000001,0.0001,0.01,1])
    ax.xaxis.set_tick_params(width=1.5)
    ax.yaxis.set_tick_params(width=1.5)
    ax.set_yticklabels([r"$10^{-8}$",r"$10^{-6}$",r"$10^{-4}$",r"$10^{-2}$",r"$1$"],fontsize = 25)
    ax.get_yaxis().get_major_formatter().labelOnlyBase = False
    ax.set_ylabel('relative population',fontsize = 30)
    ax.set_xlabel(r'length',fontsize = 30)
    ax.set_xlim(0,26)
    ax.set_ylim(0.00000005)
    plt.suptitle(title,fontsize=25)
    # `r` is not defined in this snippet; it is assumed to be a run/config object
    # with an outputDir attribute, as in plotInLen above.
    plt.savefig(r.outputDir+'distr.png')
Example #18
def make_intergenerational_figure(data, lowerbound, upperbound, rows, title):
    plt.figure(figsize=(10,10))
    plt.suptitle(title,fontsize=20)
    for index in range(4):
        plt.subplot(2,2,index+1)    
        # simulation distribution; `accepted`, `col`, `red_marker_location` and `ylimit`
        # are assumed to be module-level globals in the original script
        plt.hist(accepted[:,rows[index]], density=True, bins = range(0,100,5), color = col)
        #simulation values
        value = np.mean(accepted[:,rows[index]])
        std = 2*np.std(accepted[:,rows[index]])
        plt.errorbar((value,), (red_marker_location-0.02), xerr=((std,),(std,)),
                     color=col, fmt='o', linewidth=2, capsize=5, mec = col)
        #survey values
        value = data[index]
        lb = lowerbound[index]
        ub = upperbound[index]
        plt.errorbar((value,), (red_marker_location,), xerr=((value-lb,),(ub-value,)),
                     color='r', fmt='o', linewidth=2, capsize=5, mec = 'r')
        #labeling    
        plt.ylim(0,ylimit)
        plt.xlim(0,100)
    #make subplots pretty
    plt.subplot(2,2,1)
    plt.title("Males")
    plt.ylabel("'05\nFrequency")
    plt.subplot(2,2,2)
    plt.title("Females")
    plt.subplot(2,2,3)
    plt.ylabel("'08\nFrequency")
    plt.xlabel("Percent Responding Affirmatively")
    plt.subplot(2,2,4)
    plt.xlabel("Percent Responding Affirmatively")
Example #19
def plot_scatter_with_histograms(xvals, yvals, colour='k', oneToOneLine=True, xlabel=None, ylabel=None, title=None):
    gs = gridspec.GridSpec(5, 5)
    xmin = np.floor(min(xvals))
    xmax = np.ceil(max(xvals))
    ymin = np.floor(min(yvals))
    ymax = np.ceil(max(yvals))
    plt.subplot(gs[1:, 0:4])
    plt.plot(xvals, yvals, 'o', color=colour)
    if xlabel is not None:
        plt.xlabel(xlabel)
    if ylabel is not None:
        plt.ylabel(ylabel)
    if oneToOneLine:
        oneToOneMax = max([max(xvals),max(yvals)])
        plt.plot([0,oneToOneMax],[0,oneToOneMax],'b--')
    plt.xlim(xmin,xmax)
    plt.ylim(ymin,ymax)
    plt.subplot(gs[0, 0:4])
    plt.hist(xvals, np.linspace(xmin,xmax,50))
    plt.axis('off')
    plt.subplot(gs[1:,4])
    plt.hist(yvals, np.linspace(ymin,ymax,50), orientation='horizontal')
    plt.axis('off')
    if title is not None:
        plt.suptitle(title)
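A hypothetical usage sketch (synthetic data; it assumes numpy as np, matplotlib.pyplot as plt and matplotlib.gridspec as gridspec):

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec

xv = 10 * np.random.rand(300)
yv = xv + np.random.randn(300)
plot_scatter_with_histograms(xv, yv, xlabel='observed', ylabel='modelled', title='Observed vs modelled')
plt.show()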
Example #20
def plot_forces_violinplots(experiment):
    ensemble = experiment.observations.kinematics
    ensembleF = ensemble.loc[
        (ensemble['position_x'] > 0.25) & (ensemble['position_x'] < 0.95),
        ['totalF_x', 'totalF_y', 'totalF_z',
         'randomF_x', 'randomF_y', 'randomF_z',
         'upwindF_x',
         'wallRepulsiveF_x', 'wallRepulsiveF_y', 'wallRepulsiveF_z',
         'stimF_x', 'stimF_y', 'stimF_z']] #== Nans
    # plot Forces
    #    f, axes = plt.subplots(2, 2, figsize=(9, 9), sharex=True, sharey=True)
    ##    forcefig = plt.figure(5, figsize=(9, 8))
    ##    gs2 = gridspec.GridSpec(2, 2)
    ##    Faxs = [fig.add_subplot(ss) for ss in gs2]
    forcefig = plt.figure()
    #    Faxs1 = forcefig.add_subplot(211)
    #    Faxs2 = forcefig.add_subplot(212)
    sns.violinplot(ensembleF, lw=3, alpha=0.7, palette="Set2")
    #    tF = sns.jointplot('totalF_x', 'totalF_y', ensemble, kind="hex", size=10)
    plt.suptitle("Force distributions")
    #    plt.xticks(range(4,((len(alignments.keys())+1)*4),4), [i[1] for i in medians_sgc], rotation=90, fontsize = 4)
    plt.tick_params(axis='x', pad=4)
    plt.xticks(rotation=40)
    #    remove_border()
    plt.tight_layout(pad=1.8)
    plt.ylabel("Force magnitude distribution (newtons)")

    fileappend, path, agent = get_agent_info(experiment.agent)

    plt.savefig(os.path.join(path, "Force Distributions" + fileappend + FIG_FORMAT))
    plt.show()
Example #21
def plot_integral(qa_dict,outfile,plotconf=None,hardplots=False):
    """
    Plot integral.

    Args:
        qa_dict: qa dictionary
        outfile : output plot file
    """
    # the docstring must be the first statement in the function to act as one
    import matplotlib.ticker as ticker
    expid=qa_dict["EXPID"]
    camera=qa_dict["CAMERA"]
    paname=qa_dict["PANAME"]

    fig=plt.figure()

    if plotconf:
        hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile)

    if hardplots:
        ax1=fig.add_subplot(111)
        integral=np.array(qa_dict["METRICS"]["SPEC_MAGS"])
        # the original format string had two placeholders for three arguments
        plt.suptitle("Integrated Spectral Magnitudes after {}, Camera: {}, ExpID: {}".format(paname,camera,expid),fontsize=10,y=0.99)
        index=np.arange(len(integral))
        hist_med=ax1.bar(index,integral,color='b',align='center')
        ax1.set_xlabel('Fibers',fontsize=10)
        ax1.set_ylabel('Integral (photon counts)',fontsize=10)
        ax1.tick_params(axis='x',labelsize=10)
        ax1.tick_params(axis='y',labelsize=10)
        ax1.xaxis.set_major_locator(ticker.AutoLocator())
        #ax1.set_xticklabels(std_fiberid)
        
        plt.tight_layout()
        fig.savefig(outfile)
Example #22
def plot_TP():
    with open ('p_files/ROC_table_' + str(seg) + '_pc' + str(p) + '_k' + str(k) + '.p', 'rb') as f:
        ROC_table = pickle.load(f)
    with open ('p_files/species_stats.p', 'rb') as f:
        species_table = pickle.load(f)

    with open ('p_files/species.p', 'rb') as f:
        species_name = pickle.load(f)

    xes = []#[item['number'] for item in ROC_table.values()]
    yes = []#[item['fp'] for item in ROC_table.values()]
    label = []
    low_TP = []
    for specie in ROC_table:
        xes.append(species_table[specie])
        yes.append(ROC_table[specie]['tp_rate'])
        label.append(specie)
        if float(ROC_table[specie]['tp_rate']) < 0.3 and species_table[specie] > 100:
            #print(ROC_table[specie]['tp_rate'])
            low_TP.append((specie, ROC_table[specie]['tp']))

    fig, ax = plt.subplots()
    plt.subplots_adjust(bottom=0.1)
    ax.plot([0,max(xes)],[0.3,0.3], ls="--")
    ax.scatter(xes, yes, marker = '.')
    for i, txt in enumerate(label):
        if txt in interesting:
        #if float(yes[i]) < 0.3 and xes[i] > 100 :
            #print(txt)
            #ax.annotate(parser.get_specie_name('../data/train/', str(species_name[txt][0]) + '.xml'), (xes[i],yes[i]))
            ax.annotate(txt, (xes[i],yes[i]))
    plt.suptitle('True Positive Rate', fontsize = 14)
#    ax.set_xlabel('Principal Components')
#    ax.set_ylabel('Percentage of Variance')
    plt.show()
Example #23
    def generate_line_chart(self, data, node_type, field, append_table=True):
        output = []
        common.bash("mkdir -p ../visualizer/include/pic")
        common.bash("mkdir -p ../visualizer/include/csv")
        common.printout("LOG","generate %s line chart" % node_type)
        for field_column, field_data in data.items():
            pyplot.figure(figsize=(9, 4))
            for node, node_data in field_data.items():
                pyplot.plot(node_data, label=node)
            pyplot.xlabel("time(sec)")
            pyplot.ylabel("%s" % field_column)
            # Shrink current axis's height by 10% on the bottom
            pyplot.legend(loc = 'center left', bbox_to_anchor = (1, 0.5), prop={'size':6})
            pyplot.grid(True)
            pyplot.suptitle("%s" % field_column)
            pic_name = '%s_%s_%s.png' % (node_type, field, re.sub('[/%]','',field_column))
            pyplot.savefig('../visualizer/include/pic/%s' % pic_name) 
            pyplot.close()
            line_table = []
            csv = self.generate_csv_from_json(field_data,'line_table',field_column)
            csv_name = '%s_%s_%s.csv' % (node_type, field, re.sub('[/%]','',field_column))
            with open( '../visualizer/include/csv/%s' % csv_name, 'w' ) as f:
                f.write( csv )
#output.append("<div class='cetune_pic' id='%s_%s_pic'><button><a href='./include/csv/%s'>Download detail csv table</a></button><img src='./include/pic/%s' alt='%s' style='height:400px; width:1000px'></div>" % (field, re.sub('[/%]','',field_column), csv_name, pic_name, field_column))
            output.append("<div class='cetune_pic' id='%s_%s_pic'><img src='./include/pic/%s' alt='%s' style='height:400px; width:1000px'><button><a href='./include/csv/%s'>Download detail csv table</a></button></div>" % (field, re.sub('[/%]','',field_column), pic_name, field_column, csv_name))
        return output
Example #24
def localisationFault():
    # read file
    faults = []
    # `f1` is assumed to be an already-open, tab-separated fault-log file object.
    for line in f1:
        line = line.split('\t')
        lst = [float(x) for x in line]
        faults.append(lst)
    
    # extract
    xFault = list(map(lambda l : l[0], faults))
    yFault = list(map(lambda l : l[1], faults))
    thetaFault = list(map(lambda l : l[2], faults))
    
    # number of items
    num = len(xFault) + 1
    time = list(range(1, num))
    
    plt.suptitle('Robot localisation fault', fontsize=20)
    # plot
    plt.plot(time, xFault, 'r', label='x fault')
    plt.plot(time, yFault, 'b', label='y fault')
    plt.plot(time, thetaFault, 'y', label='theta fault')
    plt.legend(numpoints=3)
    # xMin - xMax and yMin - yMax
    plt.axis([0, num, 0, 1.2])
    plt.show()
Example #25
def boxPositions():
    positions = []
    f2 = open(Stats.BOX_POSITIONS_FILE)
    for line in f2:
        line = line.split('\t')
        lst = [float(x) for x in line]
        positions.append(lst)

    approX = list(map(lambda l : l[0], positions))
    approY = list(map(lambda l : l[1], positions))
    trueX = list(map(lambda l : l[2], positions))
    trueY = list(map(lambda l : l[3], positions))
    
    # number of items
    num = len(approX) + 1
    time = list(range(1, num))
    
    plt.suptitle('Box positions', fontsize=20)
    # plot
    plt.subplot(211)
    plt.ylabel('x', fontsize=18)
    plt.plot(time, approX, 'r', label='Approximate Position')
    plt.plot(time, trueX, 'g', label='True Position')
    plt.legend(numpoints=1)
    plt.subplot(212)
    plt.ylabel('y', fontsize=18)
    plt.plot(time, approY, 'r')
    plt.plot(time, trueY, 'g')

    plt.show()
Example #26
def plot_marginals(state_space,p,D,name,rank,t,to_file = False):
	import matplotlib
	#matplotlib.use("PDF")
	#matplotlib.rcParams['figure.figsize'] = 5,10
	import matplotlib.pyplot as pl
	pl.suptitle("time: "+ str(t)+" units")
	print("time : "+ str(t))
	for i in range(D):
		marg_X = np.unique(state_space[:,i])
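		# A is an indicator matrix: A[j, k] == 1 when state k takes the j-th unique
		# value in dimension i, so marg_p = A @ p sums probabilities over the other dimensions.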
		A = np.where(marg_X[:,np.newaxis] == state_space[:,i].T[np.newaxis,:],1,0)
		marg_p = np.dot(A,p)
		pl.subplot(int(D/2)+1,2,i+1)
		pl.plot(marg_X,marg_p)
		if to_file:
			string_state = str(t) 
			string_prob = str(t) 
			f = open("Visuals/cummulative_state_"+ str(i)+".txt",'a')
			g = open("Visuals/cummulative_prob_"+ str(i)+".txt",'a')
			for j in range(len(marg_X)):
				string_state +=','+ str(marg_X[j])
				string_prob += ','+ str(marg_p[j])
			string_state += '\n'
			string_prob += '\n'
			f.write(string_state)
			g.write(string_prob)
			f.close()
			g.close()	
	 

	pl.savefig("Visuals/marginal_"+name+".pdf",format='pdf')
	pl.show()
	pl.clf()
Example #27
    def _drawTitle(self,isContour=False):
        # Add a title
        hlon = self.shakemap.getEventDict()['lon']
        hlat = self.shakemap.getEventDict()['lat']
        edict = self.shakemap.getEventDict()
        eloc = edict['event_description']
        timestr = edict['event_timestamp'].strftime('%b %d, %Y %H:%M:%S')
        mag = edict['magnitude']
        if hlon < 0:
            lonstr = 'W%.2f' % np.abs(hlon)
        else:
            lonstr = 'E%.2f' % hlon
        if hlat < 0:
            latstr = 'S%.2f' % np.abs(hlat)
        else:
            latstr = 'N%.2f' % hlat
        dep = edict['depth']
        eid = edict['event_id']
        net = edict['event_network']
        if not eid.startswith(net):
            eid = net + eid
        tpl = (timestr,mag,latstr,lonstr,dep,eid)
        layername = 'MMI'
        if isContour:
            layername = self.contour_layer.upper()
        plt.suptitle('USGS ShakeMap (%s): %s' % (layername,eloc),fontsize=14,verticalalignment='top',y=0.95)
        plt.title('%s UTC M%.1f %s %s Depth: %.1fkm ID:%s' % tpl,fontsize=10,verticalalignment='bottom')
        return eid
Example #28
def run_mag_test(fld, title="", show=False):
    vx, vy, vz = fld.component_views()  # pylint: disable=W0612
    vx, vy, vz = fld.component_fields()

    try:
        t0 = time()
        mag_ne = viscid.magnitude(fld, preferred="numexpr", only=False)
        t1 = time()
        logger.info("numexpr mag runtime: %g", t1 - t0)
    except viscid.verror.BackendNotFound:
        xfail("Numexpr is not installed")

    planes = ["z=0", "y=0"]
    nrows = 4
    ncols = len(planes)

    _, axes = plt.subplots(nrows, ncols, sharex=True, sharey=True, squeeze=False)

    for ind, p in enumerate(planes):
        vlt.plot(vx, p, ax=axes[0, ind], show=False)
        vlt.plot(vy, p, ax=axes[1, ind], show=False)
        vlt.plot(vz, p, ax=axes[2, ind], show=False)
        vlt.plot(mag_ne, p, ax=axes[3, ind], show=False)

    plt.suptitle(title)
    vlt.auto_adjust_subplots(subplot_params=dict(top=0.9, right=0.9))
    plt.gcf().set_size_inches(6, 7)

    plt.savefig(next_plot_fname(__file__))
    if show:
        vlt.mplshow()
Example #29
def compare_single_output_across_batches(batches_list, variants_list, params_list, single_output, days_list=SOLSTICES):
    # ORDER MATTERS for batches_list, variants_list, and params_list!!
    # e.g. this method will compare the param[i] of variant[i] of the batch[i] with
    # the param[i+1] of variant[i+1] of batch[i+1], etc.
    results_df_list = []
    legend_list = []
    for b in range(len(batches_list)):
        results_df_list.append(batches_list[b].results_dict.get(variants_list[b]))
        # legend_list.append(batches_list[b].location[:11] +
        #                    str(batches_list[b].variant_output_map[variants_list[b]]) +
        #                    batches_list[b].output_name)

    plt.figure(figsize=(8, 15))
    plt.suptitle(single_output, fontsize=16)
    y_info = get_y_axis_info(single_output)

    ax = []
    for d in range(1, len(days_list) + 1):
        if d == 1:
            ax.append(plt.subplot(len(days_list), 1, d))
            plt.ylabel(y_info[UNITS], fontsize=14)
        else:
            ax.append(plt.subplot(len(days_list), 1, d, sharex=ax[0], sharey=ax[0]))
        plt.grid(True, which='major', axis='x')
        [ax[d - 1].plot(r[days_list[d - 1]].index.hour, r.loc[days_list[d - 1], single_output]/y_info[SCALE])
         for r in results_df_list]
        ax[d - 1].set_title(days_list[d - 1], fontsize=12)

    plt.xlabel('Hour of day', fontsize=14)
    ax[0].set_xlim([0, 24])
    ax[0].set_xticks([0, 6, 12, 18, 24])
    ax[0].set_ylim(y_info[LIM])
    ax[0].legend(params_list, loc='lower right', fontsize=9)
Example #30
def trading_iteration_figure2():
    #file_handler = open("simulation_result/FBSDE_expXdrift_linarYdrift/" + 
    #                "model_type_expXdrift_linearYdrift_A0.1_M_space10.0_M_time3.0_beta0.1_delta_space0.005_delta_time0.01_iter_number6_kappa3.0_m0.1_num_MC_path100000_sigma2.0_x_02.0",
    #                'rb')
    file_handler = open("simulation_result/FBSDE_expXdrift_linarYdrift/" +                  
                     "model_type_expXdrift_linearYdrift_A0.05_M_space15.0_M_time3.0_beta0.1_delta_space0.005_delta_time0.01_iter_number6_kappa3.0_m0.1_num_MC_path100000_sigma2.0_x_02.0",
                     'rb')
    tmp = pickle.load(file_handler)
    _, (ax1, ax2) = plt.subplots(2, 1, figsize=(9, 10))
    plt.suptitle("convergence of outputs from each iteration",
            y=0.95, size=18)
    iteration_figure(tmp, 
                 [0,1,2,3,-2,-1],
                 figure_ax=ax1,
                 x_array_limit=3,
                 xlabel="time",
                 ylabel="function value",
                 other=ax1.plot(np.arange(0,3,0.01),
                        trading_linear_theo(x_0=2,
                        m=0.1,
                        A=0.1,
                        kappa=3,
                        beta=0.1,
                        t_space=np.arange(0,3,0.01)), "--",
                        label="linearization result"))
    iteration_difference_figure(tmp, 
                            [0,1,2,3,-2],
                            figure_ax=ax2,
                            x_array_limit=3,
                            xlabel="time",
                            ylabel="difference from final output",
                            other=ax2.plot(np.arange(0,3,0.01),
                        np.zeros(len(np.arange(0,3,0.01))), "black"))
    
    plt.savefig("trading_iteration_figure.png")
Example #31
def clusterS(numRows, numCols, startCluster, numClusters, dsiNums):
    
    from sklearn.decomposition import PCA
    
    pca = PCA(n_components=2).fit(doc_term_matrix_tfidf_l2)
    data2D = pca.transform(doc_term_matrix_tfidf_l2)
    # reduce to two dimensions
    
    plt.rcParams['axes.facecolor']='white'
    plt.rcParams['savefig.facecolor']='white'
    plt.figure(facecolor="white")
    #f, axarr = plt.subplots(numRows, 2, figsize=(8,20))
    #f.set_size_inches(numRows*3,numCols*6, forward=True)
    #f.set_size_inches(8,20)
    
    clusterSilhouetteScores = []
    
    n_clusters = startCluster
    
    for indxR in range(numRows):
        for indxC in range(numCols):
            f, axarr = plt.subplots(nrows=1,ncols=2, figsize=(8,6))
                
            f.patch.set_facecolor('white')
            f.patch.set_alpha(1.0)
            indxRStatic = 0
            firstChart = 0
            secondChart = 1
            axarr[firstChart].set_xlim([-0.1, 1])
            axarr[firstChart].set_ylim([0, len(data2D) + (n_clusters + 1) * 10])
        
            clusterer = KMeans(n_clusters=n_clusters, random_state=3425)
            cluster_labels = clusterer.fit_predict(data2D)
            silhouette_avg = silhouette_score(data2D, cluster_labels)
            clusterSilhouetteScores.append(silhouette_avg)
            print "For number of clusters = " + str(n_clusters)
            print "The average Silhouette score is :" + str(silhouette_avg)
            for indx in range(n_clusters):
                print "Cluster: ", indx, "  DSIs: " , 
                for indx2 in range(len(cluster_labels)):
                    if cluster_labels[indx2] == indx:
                        print dsiNums[indx2] ,
                print
            print
            
            # Compute the silhouette scores for each sample
            sample_silhouette_values = silhouette_samples(data2D, cluster_labels)
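            # Silhouette values lie in [-1, 1]: near +1 the sample sits well inside its
            # cluster, near 0 it lies on a boundary, negative values suggest misassignment.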
        
            y_lower = 10
            for i in range(n_clusters):
                # Aggregate the silhouette scores for samples belonging to
                # cluster i, and sort them
                ith_cluster_silhouette_values = \
                    sample_silhouette_values[cluster_labels == i]
        
                ith_cluster_silhouette_values.sort()
        
                size_cluster_i = ith_cluster_silhouette_values.shape[0]
                y_upper = y_lower + size_cluster_i
        
                color = cm.nipy_spectral(float(i) / n_clusters)  # cm.spectral was removed from newer Matplotlib
                axarr[firstChart].fill_betweenx(np.arange(y_lower, y_upper),
                                0, ith_cluster_silhouette_values,
                                facecolor=color, edgecolor=color, alpha=0.7)
                axarr[firstChart].set_axis_bgcolor("white")
                axarr[firstChart].patch.set_facecolor('white')
            
                # Label the silhouette plots with their cluster numbers at the middle
                axarr[firstChart].text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
                
                # Compute the new y_lower for next plot
                y_lower = y_upper + 10  # 10 for the 0 samples
        
            axarr[firstChart].set_title("Silhouette Plot", fontsize='10')
            axarr[firstChart].set_xlabel("Silhouette Coefficients", fontsize='10')
            axarr[firstChart].set_ylabel("Cluster", fontsize='10')
        
            # The vertical line for average silhouette score of all the values
            axarr[firstChart].axvline(x=silhouette_avg, color="red", linestyle="--")
        
            axarr[firstChart].set_yticks([])  # Clear the yaxis labels / ticks
            axarr[firstChart].set_xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
            plt.setp(axarr[firstChart].get_xticklabels(), fontsize=8)
    
            # 2nd Plot showing the actual clusters formed
            colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
            
            #print doc_term_matrix_tfidf_l2
            axarr[secondChart].scatter(data2D[:, 0], data2D[:, 1], marker='.', s=80, lw=0, 
            c=colors, alpha=0.7)
            plt.setp(axarr[secondChart].get_xticklabels(), fontsize=8)
            plt.setp(axarr[secondChart].get_yticklabels(), fontsize=8)
        
            # Labeling the clusters
            centers = clusterer.cluster_centers_
            # Draw white circles at cluster centers
            axarr[secondChart].scatter(centers[:,0], centers[:, 1],
                    marker='o', c="white", alpha=0.25, s=200)
            axarr[secondChart].set_axis_bgcolor("white")
            axarr[secondChart].patch.set_facecolor('white')
            for i, c in enumerate(centers):
                axarr[secondChart].scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=80)
        
            axarr[secondChart].set_title("PCA Reduction", fontsize='8')
            axarr[secondChart].set_xlabel("", fontsize='8')
            axarr[secondChart].set_ylabel("", fontsize='8')
        
            plt.suptitle(("Silhouette analysis for KMeans/PCA clustering "
                        "with Number of Clusters = {:4d}".format( n_clusters)),
                        fontsize=10, fontweight='bold')
            plt.show()
            
            
            plt.savefig('/Users/Robert/Documents/Predict453/Plot_' + str(n_clusters) + '_Clusters.png',  
                edgecolor='white',facecolor=f.get_facecolor(), transparent=False,
                orientation='portrait',
                pad_inches=0.1,
                frameon=False)
            pdfdoc.savefig()
    
            n_clusters += 1
            
    x = range(2,numRows+2)
    print("Silhouette Scores")
    print(clusterSilhouetteScores)
    plt.figure(figsize=(8,6))
    plt.plot(x,clusterSilhouetteScores, 'ro-')
    plt.grid()
    plt.title('Silhouette Scores by Number of Clusters - First 2 PCA')
    plt.savefig('/Users/Robert/Documents/Predict453/plotSilhouetteSummaryLinePlot.png',
                edgecolor='white',facecolor=f.get_facecolor(), transparent=False,
                orientation='portrait',
                pad_inches=0.1)
    pdfdoc.savefig()
    plt.show()
Example #32
def plot_nine_images(images,
                     classes,
                     cls_true,
                     plt_show,
                     img_shape,
                     channels,
                     cls_pred=None,
                     name=""):
    '''
	Plots nine images in one view
	:param images: the images to plot
	:param classes: list mapping class indices to class names
	:param cls_true: the true classes
	:param plt_show: debug parameter, if false it won't plot the image
	:param img_shape: (height, width) of each image
	:param channels: number of colour channels (1 or 3)
	:param cls_pred: the predicted classes, default: None
	:param name: title of plot, default: ""
	'''

    # Require exactly nine images with matching labels; silently skip otherwise.
    if not (len(images) == len(cls_true) == 9):
        return

    # Create figure with 3x3 sub-plots.
    fig, axes = plt.subplots(3, 3)
    fig.subplots_adjust(hspace=0.3, wspace=0.3)

    for i, ax in enumerate(axes.flat):
        # Plot image.

        img = images[i]
        cpy = np.copy(img)

        if channels == 3:
            cpy = cpy.reshape((img_shape[0], img_shape[1], 3))
            cpy = cv2.resize(cpy, (100, 100))
            cpy = cv2.cvtColor(cpy, cv2.COLOR_BGR2RGB)
            cv2.imwrite("tmp_img/cpy.png", cpy)
            cpy = cv2.imread(
                "tmp_img/cpy.png"
            )  #not sure why it's not working without but this fixes some color isssues
            # exit()
        else:
            cpy = cpy.reshape(img_shape)
            cpy = np.resize(cpy, (100, 100))
            cv2.imwrite("tmp_img/cpy.png", cpy)
            cpy = cv2.imread(
                "tmp_img/cpy.png"
            )  #not sure why it's not working without but this fixes some color isssues
        ax.imshow(cpy)

        # Show true and predicted classes.
        if cls_pred is None:

            label_name = classes[cls_true[i]]
            xlabel = "True: {0}".format(label_name)
        else:
            label_name = classes[cls_true[i]]
            label_name_pred = classes[cls_pred[i]]
            xlabel = "True: {0}, Predict: {1}".format(label_name,
                                                      label_name_pred)

        # Show the classes as the label on the x-axis.
        ax.set_xlabel(xlabel)

        # Remove ticks from the plot.
        ax.set_xticks([])
        ax.set_yticks([])

    # Sets the title of the figure window
    plt.suptitle(name)
    # Ensure the plot is shown correctly with multiple plots
    # in a single Notebook cell.
    if plt_show:
        plt.show()
Example #33
def plot_conv_weights(weights,
                      session,
                      plt_show,
                      convolutional_layer=0,
                      name=""):
    """
	@author = Magnus Erik Hvass Pedersen
	plots the kernels in the convolutional layers. Strong blue means the filter reacts
	negatively to black pixels, and strong white means it reacts positively to white pixels
	:param weights: weights to print
	:param session: the TF session
	:param plt_show: debug parameter, if false it wont plot the image
	:param convolutional_layer: the convolutional layer to print the weights from. default: 0
	:param name: title of plot, default: ""
	"""
    # Assume weights are TensorFlow ops for 4-dim variables
    # e.g. weights_conv1 or weights_conv2.

    # Retrieve the values of the weight-variables from TensorFlow.
    # A feed-dict is not necessary because nothing is calculated.
    w = session.run(weights)

    # Get the lowest and highest values for the weights.
    # This is used to correct the colour intensity across
    # the images so they can be compared with each other.
    w_min = np.min(w)
    w_max = np.max(w)

    # Number of filters used in the conv. layer.
    num_filters = w.shape[3]

    # Number of grids to plot.
    # Rounded-up, square-root of the number of filters.
    num_grids = math.ceil(math.sqrt(num_filters))

    # Create figure with a grid of sub-plots.
    fig, axes = plt.subplots(num_grids, num_grids)

    # Plot all the filter-weights.
    for i, ax in enumerate(axes.flat):
        # Only plot the valid filter-weights.
        if i < num_filters:
            # Get the weights for the i'th filter of the input channel.
            # See new_conv_layer() for details on the format
            # of this 4-dim tensor.
            img = w[:, :, convolutional_layer, i]

            # Plot image.
            ax.imshow(img,
                      vmin=w_min,
                      vmax=w_max,
                      interpolation='nearest',
                      cmap='seismic')

        # Remove ticks from the plot.
        ax.set_xticks([])
        ax.set_yticks([])

    # Sets the title of the figure window
    fig.canvas.manager.set_window_title(name)  # set_window_title moved to canvas.manager in Matplotlib 3.4+
    plt.suptitle(name)

    # Ensure the plot is shown correctly with multiple plots
    # in a single Notebook cell.
    if plt_show:
        plt.show()
Example #34
def main():
    pdf = PdfPages('3x3 result.pdf')
    f = open("SEQUENTIAL.txt")
    d = f.read()

    a = d.split('\n')
    while '' in a:
        a.remove('')

    fig, ax = plt.subplots()
    plt.suptitle("Test results 3x3 filter")
    plt.ylabel('time(ms)')
    plt.xlabel('threads')
    tmp = float(a[1][0:-2])
    plt.hlines(float(a[0][0:-2]), 0, 8, 'r')
    red_patch = mpatches.Patch(color='red', label='sequential')

    # plt.hlines(float(a[1][0:-2]), 0, 8, 'b')
    f.close()
    f = open("SHARDED_ROWS.txt")
    d = f.read()
    b = []
    a = d.split('\n')
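    # Each line is assumed to look like "<time>ms <threads>", so i[0][0:-2] strips
    # the trailing "ms" and i[1] is the thread count (WORK_QUEUE lines add a chunk size).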
    for i in a:
        b.append(i.split(' '))

    row3 = b[0:8]
    row5 = b[9:17]
    row3_thread = []
    row3_time = []
    row5_thread = []
    row5_time = []
    for i in row3:
        row3_thread.append(float(i[1]))
        row3_time.append(float(i[0][0:-2]))
    for i in row5:
        row5_thread.append(float(i[1]))
        row5_time.append(float(i[0][0:-2]))
    plt.plot(row3_thread, row3_time, 'g-')
    green_patch = mpatches.Patch(color='green', label='sharded rows')

    f.close()
    f = open("SHARDED_COLUMNS_COLUMN_MAJOR.txt")
    d = f.read()
    b = []
    a = d.split('\n')
    for i in a:
        b.append(i.split(' '))
    cc3 = b[0:8]
    cc5 = b[9:17]
    cc3_thread = []
    cc3_time = []
    cc5_thread = []
    cc5_time = []
    for i in cc3:
        cc3_thread.append(float(i[1]))
        cc3_time.append(float(i[0][0:-2]))
    for i in cc5:
        cc5_thread.append(float(i[1]))
        cc5_time.append(float(i[0][0:-2]))
    plt.plot(cc3_thread, cc3_time, 'b-')
    blue_patch = mpatches.Patch(color='blue',
                                label='sharded columns column major')

    f.close()

    f = open("SHARDED_COLUMNS_ROW_MAJOR.txt")
    d = f.read()
    b = []
    a = d.split('\n')
    for i in a:
        b.append(i.split(' '))
    cr3 = b[0:8]
    cr5 = b[9:17]
    cr3_thread = []
    cr3_time = []
    cr5_thread = []
    cr5_time = []
    for i in cr3:
        cr3_thread.append(float(i[1]))
        cr3_time.append(float(i[0][0:-2]))
    for i in cr5:
        cr5_thread.append(float(i[1]))
        cr5_time.append(float(i[0][0:-2]))
    plt.plot(cr3_thread, cr3_time, 'k-')
    black_patch = mpatches.Patch(color='black',
                                 label='sharded columns row major')
    plt.legend(handles=[red_patch, blue_patch, green_patch, black_patch])
    f.close()

    plt.savefig(pdf, format='pdf')
    pdf.close()

    plt.figure()
    pdf = PdfPages('5x5 result.pdf')
    plt.suptitle("Test results 5x5 filter")
    plt.ylabel('time(ms)')
    plt.xlabel('threads')
    plt.legend(handles=[red_patch, blue_patch, green_patch, black_patch])
    plt.hlines(tmp, 0, 8, 'r')
    # colours swapped to match the reused legend patches (green = sharded rows, blue = column major)
    plt.plot(row5_thread, row5_time, 'g-')
    plt.plot(cc5_thread, cc5_time, 'b-')
    plt.plot(cr5_thread, cr5_time, 'k-')
    plt.savefig(pdf, format='pdf')
    pdf.close()

    plt.figure()
    pdf = PdfPages('Work Queue test result.pdf')
    plt.suptitle("Work Queue Test results 3x3 filter")
    plt.ylabel('time(ms)')
    plt.xlabel('threads')
    f = open('WORK_QUEUE.txt')
    d = f.read()
    a = d.split('\n')
    b = []
    for i in a:
        b.append(i.split(' '))

    d1_thread, d2_thread, d3_thread, d4_thread, d5_thread = [], [], [], [], []
    d1_time, d2_time, d3_time, d4_time, d5_time = [], [], [], [], []
    d1 = b[0:8]
    d2 = b[9:17]
    d3 = b[18:26]
    d4 = b[27:35]
    d5 = b[36:44]

    for i in range(8):

        d1_thread.append(float(d1[i][1]))
        d1_time.append(float(d1[i][0][0:-2]))
        d2_thread.append(float(d2[i][1]))
        d2_time.append(float(d2[i][0][0:-2]))
        d3_thread.append(float(d3[i][1]))
        d3_time.append(float(d3[i][0][0:-2]))
        d4_thread.append(float(d4[i][1]))
        d4_time.append(float(d4[i][0][0:-2]))
        d5_thread.append(float(d5[i][1]))
        d5_time.append(float(d5[i][0][0:-2]))

    plt.plot(d1_thread, d1_time, 'r-')
    plt.plot(d2_thread, d2_time, 'g-')
    plt.plot(d3_thread, d3_time, 'b-')
    plt.plot(d4_thread, d4_time, 'k-')
    plt.plot(d5_thread, d5_time, 'c-')
    red_patch = mpatches.Patch(color='red', label='work chunk = ' + d1[0][2])
    green_patch = mpatches.Patch(color='green',
                                 label='work chunk = ' + d2[0][2])
    blue_patch = mpatches.Patch(color='blue', label='work chunk = ' + d3[0][2])
    black_patch = mpatches.Patch(color='black',
                                 label='work chunk = ' + d4[0][2])
    cyan_patch = mpatches.Patch(color='cyan', label='work chunk = ' + d5[0][2])
    plt.legend(
        handles=[red_patch, green_patch, blue_patch, black_patch, cyan_patch])
    plt.savefig(pdf, format='pdf')

    plt.figure()
    f.close()

    plt.suptitle("Work Queue Test results 5x5 filter")
    plt.ylabel('time(ms)')
    plt.xlabel('threads')
    d1_thread, d2_thread, d3_thread, d4_thread, d5_thread = [], [], [], [], []
    d1_time, d2_time, d3_time, d4_time, d5_time = [], [], [], [], []

    d1 = b[45:53]
    d2 = b[54:62]
    d3 = b[63:71]
    d4 = b[72:80]  # the original slices b[72:90] and b[81:99] overlapped each other
    d5 = b[81:89]

    for i in range(8):
        d1_thread.append(float(d1[i][1]))
        d1_time.append(float(d1[i][0][0:-2]))
        d2_thread.append(float(d2[i][1]))
        d2_time.append(float(d2[i][0][0:-2]))
        d3_thread.append(float(d3[i][1]))
        d3_time.append(float(d3[i][0][0:-2]))
        d4_thread.append(float(d4[i][1]))
        d4_time.append(float(d4[i][0][0:-2]))
        d5_thread.append(float(d5[i][1]))
        d5_time.append(float(d5[i][0][0:-2]))

    plt.plot(d1_thread, d1_time, 'r-')
    plt.plot(d2_thread, d2_time, 'g-')
    plt.plot(d3_thread, d3_time, 'b-')
    plt.plot(d4_thread, d4_time, 'k-')
    plt.plot(d5_thread, d5_time, 'c-')
    red_patch = mpatches.Patch(color='red', label='work chunk = ' + d1[0][2])
    green_patch = mpatches.Patch(color='green',
                                 label='work chunk = ' + d2[0][2])
    blue_patch = mpatches.Patch(color='blue', label='work chunk = ' + d3[0][2])
    black_patch = mpatches.Patch(color='black',
                                 label='work chunk = ' + d4[0][2])
    cyan_patch = mpatches.Patch(color='cyan', label='work chunk = ' + d5[0][2])
    plt.legend(
        handles=[red_patch, green_patch, blue_patch, black_patch, cyan_patch])
    plt.savefig(pdf, format='pdf')
    pdf.close()
Example #35
    # Plot data
    # (xplot, yplot, zplot and nModes are assumed to be modal-mass arrays and a
    # mode count computed earlier in the original script.)

    plt.figure(figsize=(10, 6))
    plt.plot(xplot[:, 0], xplot[:, 1], label='Acc. x-Mass')
    plt.plot(yplot[:, 0], yplot[:, 1], label='Acc. y-Mass')
    plt.plot(zplot[:, 0], zplot[:, 1], label='Acc. z-Mass')

    plt.ylim(0, 100)

    plt.minorticks_on()
    plt.grid(True, which='minor', color='#999999', linestyle='-', alpha=0.2)
    plt.grid(True, which='major', color='#999999', linestyle='-', alpha=0.5)
    plt.legend(loc='best')

    plt.suptitle('Accumulated ratio modal mass to total mass',
                 fontsize=16,
                 y=0.97)
    plt.xlabel('Frequency $[Hz]$')
    plt.ylabel('Acc. ratio mass to total mass [%]')

    MaxX = xplot[len(xplot) - 1, 1]
    MaxY = yplot[len(yplot) - 1, 1]
    MaxZ = zplot[len(zplot) - 1, 1]

    plt.title(
        'Max. ratio in (x,y,z) = ({:2.1f},{:2.1f},{:2.1f})[%], Number of modes = {}'
        .format(MaxX, MaxY, MaxZ, nModes),
        fontsize=10,
        y=1.00)

    del MaxX, MaxY, MaxZ
Example #36
#    annotate_target(member[1].ra, member[1].dec, "", size=5, color='#c0392b')

text = pl.text(203.5,
               -8.2,
               'Galaxies',
               style='italic',
               color='#27ae60',
               zorder=999,
               fontsize=30,
               va='center',
               ha='center')
text.set_path_effects([
    path_effects.Stroke(linewidth=4, foreground='white'),
    path_effects.Normal()
])

annotate_target(201.29824736, -11.16131949, "Spica", padding=0.5, zorder=9e4)
annotate_target(208.77375, -5.44247222, "K2-99")
annotate_target(207.34954167, -12.2845, "K2-110")
#annotate_target(207.655875, -6.80402778, "Qatar-2")

pl.suptitle('K2 Campaign {}'.format(CAMPAIGN), fontsize=44)
pl.xlim([212.9, 195.5])
pl.ylim([-16., 1.2])
p.ax.xaxis.set_major_locator(MultipleLocator(2))
p.ax.yaxis.set_major_locator(MultipleLocator(2))
pl.tight_layout()
for extension in ['png', 'eps']:
    pl.savefig('k2-c{}-field.{}'.format(CAMPAIGN, extension), dpi=100)
pl.close()
Example #37
    #--------------------------------------------------First dataset---------------------------------------------------
    print('Dataset {}\n\n'.format('kc1.arff'))
    knn = KNN('kc1.arff')
    print('{} Unweighted'.format(euclidian_distance.__name__))
    prev = time.time()
    cv = cross_validation(knn,
                          hm_neighbours=[1, 2, 3, 5, 7, 9, 11, 13, 15],
                          weight=False,
                          distance=euclidian_distance)
    ts = int(time.time() - prev)
    plt.xticks(np.array([x for x in range(15)]),
               [1, 2, 3, 5, 7, 9, 11, 13, 15])
    plt.plot(cv)
    plt.ylabel('Accuracy')
    plt.xlabel('Neighbours')
    plt.suptitle('Unweighted', fontsize=14, fontweight='bold')
    plt.title('Timestamp: {} seconds'.format(ts))
    plt.savefig('KNN_{}_Unweighted_{}.png'.format(
        euclidian_distance.__name__, 'kc1.arff'.replace('.arff', '')))

    print('Weighted')
    prev = time.time()
    cv = cross_validation(knn,
                          hm_neighbours=[1, 2, 3, 5, 7, 9, 11, 13, 15],
                          weight=True,
                          distance=euclidian_distance)
    ts = int(time.time() - prev)
    plt.xticks(np.array([x for x in range(15)]),
               [1, 2, 3, 5, 7, 9, 11, 13, 15])
    plt.plot(cv)
    plt.ylabel('Accuracy')
Example #38
mask_sets = np.array(masks1['masks1'])

#Free the memory, we just need the two sets for training,validating and testing
images = 0
masks1 = 0
image_sets[image_sets == 255] = 0
#Demonstrate a random pair of image and corresponding masks
[m, n, l] = np.shape(image_sets)
demo_num = random.randrange(0, l, 1)
demo_image = image_sets[:, :, demo_num]
demo_mask = mask_sets[:, :, demo_num]
plt.subplot(121)
plt.imshow(demo_image)
plt.subplot(122)
plt.imshow(demo_mask)
plt.suptitle('OCT images #' + str(demo_num) + ' Segmentation mask #' +
             str(demo_num))
plt.show()
#Saving them to the input folder and mask folder
from skimage.io import imsave, imread
import os
data_dir = os.path.join(".", "data_2")
input_dir = os.path.join(data_dir, "input")
mask_dir = os.path.join(data_dir, "mask")
if not os.path.exists(data_dir):
    os.mkdir(data_dir)
    os.mkdir(input_dir)
    os.mkdir(mask_dir)
else:
    print("The data folder has already been built!")

Beispiel #39
0
        for x in line:
            if x == '':
                continue
            else:
                tmpline.append(float(x))
        if tmpline == []:
            continue
        tmp.append(tmpline)
    data = tmp
    return data


data = read_tsp('data/st70.tsp')

data = np.array(data)
plt.suptitle('GA in st70.tsp')
data = data[:, 1:]
plt.subplot(2, 2, 1)
plt.title('raw data')
show_data = np.vstack([data, data[0]])
plt.plot(data[:, 0], data[:, 1])
Best, Best_path = math.inf, None

foa = GA(num_city=data.shape[0], num_total=50, iteration=400, data=data.copy())
path, path_len = foa.run()
if path_len < Best:
    Best = path_len
    Best_path = path
plt.subplot(2, 2, 3)
# append the first city again because the tour returns to the starting point
Best_path = np.vstack([Best_path, Best_path[0]])
Beispiel #40
0
def hh0_sim(setup="not_so_simple", fix_redshifts=True, \
            model_outliers=None, inc_met_dep=True, inc_zp_off=True, \
            constrain=True, round_data=False):
    
    """
    model_outliers = None / "gmm" / "ht"
    """

    # settings and cuts to match previous analyses
    p_c_min = 5.0 # 2.5 # lower limit ~ 2.5 but for local hosts
    p_c_max = 60.0 # tail beyond 60 but not many
    if setup == "d17":
        z_s_min = 0.011631
        z_s_max = 0.0762
    else:
        z_s_min = 0.023
        z_s_max = 0.1
    # @TODO: update metallicity stats to R16
    z_c_mean = 8.86101933216 # derived from R11. true dist bimodal!
    z_c_sigma = 0.15312221469
    ff_s_mean = np.array([ -0.2, 0.0 ])  # derived from R16
    ff_s_sigma = np.array([ 1.2, 0.89 ])
    if setup == "simple":
        n_ch_d = 1
        n_ch_p = 0
        n_ch_c = 0
        n_ch_s = 1
        n_c_ch = np.array([40, 40])
        n_c_ch = np.concatenate((40 * np.ones(n_ch_d, dtype=int), \
                                 np.ones(n_ch_p, dtype=int), \
                                 40 * np.ones(n_ch_c, dtype=int), \
                                 40 * np.ones(n_ch_s, dtype=int)))
        zp_off_mask = np.zeros(len(n_c_ch))
        n_s = 80
    elif setup == "not_so_simple":
        n_ch_d = 2
        n_ch_p = 2
        n_ch_c = 1
        n_ch_s = 1
        n_c_ch = np.concatenate((20 * np.ones(n_ch_d, dtype=int), \
                                 np.ones(n_ch_p, dtype=int), \
                                 20 * np.ones(n_ch_c, dtype=int), \
                                 20 * np.ones(n_ch_s, dtype=int)))
        zp_off_mask = np.zeros(len(n_c_ch))
        n_s = 80
    elif setup == "r11":
        n_ch_d = 1
        n_ch_p = 0
        n_ch_c = 0
        n_ch_s = 8
        n_c_ch = np.array([69, 32, 79, 29, 26, 36, 95, 39, 164])
        zp_off_mask = np.zeros(len(n_c_ch))
        n_s = 253
    elif setup == "r16_one_anc":
        # R16 single-anchor fit
        # anchor: NGC4258; cal: M31; 19 C/SN hosts
        n_ch_d = 1
        n_ch_p = 0
        n_ch_c = 1
        n_ch_s = 19
        n_c_ch = np.array([139, 372, 251, 14, 44, 32, 54, 141, 18, 63, \
                           80, 42, 16, 13, 3, 33, 25, 83, 13, 22, 28])
        zp_off_mask = np.zeros(len(n_c_ch))
        n_s = 217
        z_s_max = 0.15
    elif setup == "d17":
        # D17/R16 hybrid fit
        # anchors: NGC4258, LMC, MW C
        # cal: M31, N3021, N3370, N3982, N4639, N4038, N4536, N1015, 
        #      N1365, N3447, N7250; 
        # C/SN hosts: N1448, N1309, U9391, N5917, N5584, N3972, M101,
        #             N4424, N2442
        n_ch_d = 2
        n_ch_p = 15
        n_ch_c = 11
        n_ch_s = 9
        n_c_ch = np.concatenate((np.array([139, 775]), \
                                 np.ones(n_ch_p, dtype=int), \
                                 np.array([372, 18, 63, 16, 25, 13, \
                                           33, 14, 32, 80, 22, 54, \
                                           44, 28, 13, 83, 42, 251, \
                                           3, 141])))
        zp_off_mask = np.zeros(len(n_c_ch))
        zp_off_mask[1: n_ch_p + 2] = 1.0
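        # assumed interpretation: the mask flags the LMC and the MW-parallax
        # hosts, whose ground-based photometry shares a common zero-point offset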
        n_s = 27
    else:
        # R16 preferred fit
        # anchors: NGC4258, LMC, MW C; cal: M31; 19 C/SN hosts
        n_ch_d = 2
        n_ch_p = 15
        n_ch_c = 1
        n_ch_s = 19
        n_c_ch = np.concatenate((np.array([139, 775]), \
                                 np.ones(n_ch_p, dtype=int), \
                                 np.array([372, 251, 14, 44, 32, 54, \
                                           141, 18, 63, 80, 42, 16, \
                                           13, 3, 33, 25, 83, 13, 22, \
                                           28])))
        zp_off_mask = np.zeros(len(n_c_ch))
        zp_off_mask[1: n_ch_p + 2] = 1.0
        n_s = 217  # 281
        z_s_max = 0.15
    n_ch = n_ch_d + n_ch_p + n_ch_c + n_ch_s
    n_ch_g = n_ch_d + n_ch_p

    # read in Riess Cepheid data to estimate magnitude error
    # distribution (and eyeball metallicities if desired)
    riess_app_mag_err = np.zeros(569)
    riess_metals = np.zeros(569)
    pardir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(pardir, 'data/Riess2.txt')) as f:
        for i, l in enumerate(f):
            if (i > 2):
                vals = [val for val in l.split()]
                riess_app_mag_err[i-3] = float(vals[7])
                riess_metals[i-3] = float(vals[11])
    sig_app_mag_c_shape = np.mean(riess_app_mag_err) ** 2 / \
                          np.var(riess_app_mag_err)
    sig_app_mag_c_scale = np.var(riess_app_mag_err) / \
                          np.mean(riess_app_mag_err)
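    # method-of-moments Gamma fit: shape = mean^2 / var and scale = var / mean,
    # so draws reproduce the observed mean and variance of the magnitude errors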
    '''
    z_grid = np.linspace(8.0, 9.5, 1000)
    kde_z = sps.gaussian_kde(riess_metals)
    mp.plot(z_grid, kde_z.evaluate(z_grid), 'b')
    mp.xlabel(r'$\Delta\log_{10}[O/H]$')
    mp.ylabel(r'$P(\Delta\log_{10}[O/H])$')
    mp.show()
    '''

    # constants
    c = 299792.458 # km s^-1

    # dimension observable arrays:
    #  - apparent magnitudes of Cepheids in each SH0ES host
    #  - measured periods of Cepheids in each SH0ES host
    #  - apparent magnitudes of supernovae
    #  - measured redshifts of supernovae
    est_app_mag_c = np.zeros((n_ch, np.max(n_c_ch)))
    est_p_c = np.zeros((n_ch, np.max(n_c_ch)))
    est_app_mag_s_ch = np.zeros(n_ch_s)
    est_app_mag_s = np.zeros(n_s)
    est_z_s = np.zeros(n_s)

    # dimension true underlying arrays:
    #  - true absolute magnitudes of Cepheids in each SH0ES host
    #  - true periods of Cepheids in each SH0ES host
    #  - true distances of SH0ES hosts
    #  - true distances of supernovae
    true_abs_mag_c = np.zeros((n_ch, np.max(n_c_ch)))
    true_p_c = np.zeros((n_ch, np.max(n_c_ch)))
    true_z_c = np.zeros((n_ch, np.max(n_c_ch)))
    sig_app_mag_c = np.zeros((n_ch, np.max(n_c_ch)))
    io_c = np.zeros((n_ch, np.max(n_c_ch)), dtype=int)
    true_dis_ch = np.zeros(n_ch)
    true_dis_s = np.zeros(n_s)

    # seed random number generator if desired
    if constrain:
        npr.seed(0)

    # "sample" hyperparameters
    # LMC distance from http://www.nature.com/nature/journal/v495/n7439/full/nature11878.html
    dis_anc = np.array([7.54e6, 49.97e3])
    sig_dis_anc = np.array([np.sqrt(0.17e6 ** 2 + 0.10e6 ** 2), \
                            np.sqrt(0.19e3 ** 2 + 1.11e3 ** 2)])
    par_anc = np.array([2.03, 2.74, 3.26, 2.30, 3.17, \
                        2.13, 3.71, 2.64, 2.06, 2.31, \
                        2.57, 2.23, 2.19, 0.428, 0.348]) * 1.0e-3
    sig_par_anc = np.array([0.16, 0.12, 0.14, 0.19, 0.14, \
                            0.29, 0.12, 0.16, 0.22, 0.19, \
                            0.33, 0.30, 0.33, 0.054, 0.038]) * 1.0e-3
    par_anc_lkc = np.array([-0.05, -0.02, -0.02, -0.06, -0.02, \
                            -0.15, -0.01, -0.03, -0.09, -0.06, \
                            -0.13, -0.15, -0.18, -0.04, -0.04])
    mu_dis_anc = 5.0 * np.log10(dis_anc) - 5.0
    sig_mu_dis_anc = 5.0 / np.log(10.0) / dis_anc * sig_dis_anc
    mu_par_anc = -5.0 * np.log10(par_anc) - 5.0 - par_anc_lkc
    sig_mu_par_anc = 5.0 / np.log(10.0) / par_anc * sig_par_anc
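    # distance moduli: mu = 5 log10(d) - 5 (the parallax version also applies
    # the Lutz-Kelker correction); uncertainties follow from first-order error
    # propagation, sig_mu = (5 / ln 10) * (sig_d / d)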
    if n_ch_p == 0:
        dis_anc = dis_anc[0: n_ch_d] / 1.0e6
        sig_dis_anc = sig_dis_anc[0: n_ch_d] / 1.0e6
        mu_anc = mu_dis_anc[0: n_ch_d]
        sig_mu_anc = sig_mu_dis_anc[0: n_ch_d]
        par_anc_lkc = []
    else:
        dis_anc = np.concatenate([dis_anc[0: n_ch_d] / 1.0e6, \
                                  par_anc[0: n_ch_p] / 1.0e-3])
        sig_dis_anc = np.concatenate([sig_dis_anc[0: n_ch_d] / 1.0e6, \
                                      sig_par_anc[0: n_ch_p] / 1.0e-3])
        mu_anc = np.concatenate((mu_dis_anc[0: n_ch_d], \
                                 mu_par_anc[0: n_ch_p]))
        sig_mu_anc = np.concatenate((sig_mu_dis_anc[0: n_ch_d], \
                                     sig_mu_par_anc[0: n_ch_p]))
        par_anc_lkc = par_anc_lkc[0: n_ch_p]
    abs_mag_c_std = 26.3 - mu_anc[0]
    slope_p = -3.05
    slope_z = -0.25
    if "r16" in setup or setup == "not_so_simple" or setup == "d17":
        sig_app_mag_c_mean = 0.276
        sig_int_c = 0.065
        if setup == "d17":
            sig_app_mag_s_ch_mean = 0.02769
        else:
            sig_app_mag_s_ch_mean = 0.064
    else:
        sig_app_mag_c_mean = 0.3
        sig_int_c = 0.21
        sig_app_mag_s_ch_mean = 0.1
    sig_int_s = 0.1
    if setup == "d17":
        sig_z_s = 0.001
    else:
        sig_z_s = 0.00001
    sig_v_pec = 250.0 # km s^-1
    sig_z_s_tot = np.sqrt(sig_z_s ** 2 + (sig_v_pec / c) ** 2)
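    # total redshift error: measurement error and the peculiar-velocity term
    # (sig_v_pec / c) added in quadrature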
    if setup == "d17":
        abs_mag_s_std = -18.524
        sig_app_mag_s_mean = 0.05192
    else:
        abs_mag_s_std = -19.2
    alpha_s = -0.14
    beta_s = 3.1
    cov_s = np.array([[0.00396, 0.00186, 0.00163],
                      [0.00186, 0.06566, 0.00100],
                      [0.00163, 0.00100, 0.00123]])
    if model_outliers == "ht":
        st_nu_c = 2.0
        st_nu_s = 2.0
    elif model_outliers == "gmm":
        f_out_c = 0.3
        dmag_out_c = 0.0  # 0.7
        sig_out_c = 1.0
        f_out_s = 0.3
        dmag_out_s = 0.0  # 0.7
        sig_out_s = 1.0
    else:
        f_out_c = 0.0
        dmag_out_c = 0.0
        sig_out_c = 1.0
        f_out_s = 0.0
        dmag_out_s = 0.0
        sig_out_s = 1.0
    if setup == "d17":
        h_0 = 72.78
    else:
        h_0 = 71.10
    est_q_0 = -0.5575 # Betoule et al. 2014
    sig_q_0 =  0.0510 # Betoule et al. 2014
    j_0 = 1.0         # FIXED by assumption of flat LCDM universe
    zp_off = 0.01
    sig_zp_off = 0.03

    # distance-redshift conversion functions
    z2d_p_0 = -(1.0 - est_q_0 - 3.0 * est_q_0 ** 2 + j_0) * c / 6.0
    z2d_p_1 = (1.0 - est_q_0) * c / 2.0
    z2d_p_2 = c
    temp_0 = (3.0 * z2d_p_0 * z2d_p_2 - z2d_p_1 ** 2) / (3.0 * z2d_p_0 ** 2)
    temp_1 = (2.0 * z2d_p_1 ** 3 - 9.0 * z2d_p_0 * z2d_p_1 * z2d_p_2) / \
             (27.0 * z2d_p_0 ** 3)
    d2z_p_0 = 2.0 * np.sqrt(-temp_0 / 3.0)
    d2z_p_1 = -z2d_p_1 / (3.0 * z2d_p_0)
    d2z_p_2 = 3.0 * temp_1 / (d2z_p_0 * temp_0)
    d2z_p_3 = 3.0 / (d2z_p_0 * temp_0 * z2d_p_0)
    def d2z(d):
        phi = np.arccos(d2z_p_2 - d2z_p_3 * h_0 * d / 1.0e6)
        return d2z_p_0 * np.cos((phi + 4.0 * np.pi) / 3.0) + d2z_p_1
    def z2d(z):
        return (z2d_p_0 * z ** 3 + z2d_p_1 * z ** 2 + z2d_p_2 * z) / h_0 * 1.0e6
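    # z2d is the third-order luminosity-distance expansion
    # d(z) = (c / H0) [z + (1 - q0) z^2 / 2 - (1 - q0 - 3 q0^2 + j0) z^3 / 6];
    # d2z inverts that cubic analytically via the trigonometric method for
    # real roots (hence the arccos/cos pair above)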

    # "sample" distances
    r16_cal_m31_mu = np.array([24.36])
    r16_sh0es_mu = np.array([29.135, 32.497, 32.523, \
                             31.307, 31.311, 31.511, \
                             32.498, 32.072, 31.908, \
                             31.587, 31.737, 31.290, \
                             31.080, 30.906, 31.532, \
                             31.786, 32.263, 31.499, \
                             32.919])
    if setup == "r11":
        true_mu_ch = np.concatenate((mu_anc, 
                                     np.array([30.91, 31.67, 32.13, \
                                               31.70, 32.27, 32.59, \
                                               31.72, 31.66])))
    elif setup == "r16_one_anc":
        true_mu_ch = np.concatenate((mu_anc, r16_cal_m31_mu, \
                                     r16_sh0es_mu[0: n_ch - n_ch_g]))
    elif setup == "r16":
        true_mu_ch = np.concatenate((mu_anc, r16_cal_m31_mu, r16_sh0es_mu))
    elif setup == "d17":
        ordering = [0, 1, 3, 4, 6, 7, 8, 9, 11, 12] + \
                   [10, 2, 15, 13, 5, 16, 18, 14, 17]
        true_mu_ch = np.concatenate((mu_anc, r16_cal_m31_mu, \
                                     r16_sh0es_mu[ordering]))
    else:
        #true_mu_ch = np.concatenate((mu_anc, np.array([31.83])))
        true_mu_ch = np.concatenate((mu_anc, \
                                     r16_sh0es_mu[0: n_ch - n_ch_g]))
    true_dis_ch = 10.0 ** ((true_mu_ch + 5.0) / 5.0)
    print('simulating {:d} Cepheids'.format(np.sum(n_c_ch)))

    # loop over SH0ES hosts
    i_res = 0
    res_to_plot = np.zeros(np.sum(n_c_ch))
    for i in range(0, n_ch):

        # optionally include outliers
        if model_outliers != "ht":
            outliers = npr.uniform(0.0, 1.0, n_c_ch[i]) < f_out_c
            io_c[i, 0: n_c_ch[i]] = np.array(outliers, dtype=int)
            sig_extra = np.ones(n_c_ch[i]) * sig_int_c
            sig_extra[outliers] = sig_out_c
            offset = np.zeros(n_c_ch[i])
            offset[outliers] = dmag_out_c

        # simulate Cepheids: uniformly distributed periods within limits of
        # P-L relation, plus intrinsic Gaussian scatter following Niccolo,
        # draw measurement errors from Gamma distribution with appropriate
        # parameters. include metallicity if desired; note that true 
        # metallicity dependence is (at least) bimodal and asymmetric
        #true_p_c[i, 0: n_c_ch[i]] = npr.uniform(p_c_min, p_c_max, n_c_ch[i])
        true_p_c[i, 0: n_c_ch[i]] = 10.0 ** npr.uniform(np.log10(p_c_min), \
                                                        np.log10(p_c_max), \
                                                        n_c_ch[i])
        true_abs_mag_c[i, 0: n_c_ch[i]] = abs_mag_c_std + \
                                          slope_p * np.log10(true_p_c[i, 0: n_c_ch[i]])
        if inc_met_dep:
            true_z_c[i, 0: n_c_ch[i]] = npr.normal(z_c_mean, z_c_sigma, \
                                                   n_c_ch[i])
            true_abs_mag_c[i, 0: n_c_ch[i]] += slope_z * \
                                               true_z_c[i, 0: n_c_ch[i]]
            est_z_c = true_z_c
        if model_outliers == "ht":
            true_abs_mag_c[i, 0: n_c_ch[i]] += npr.standard_t(st_nu_c, n_c_ch[i]) * \
                                               sig_int_c
        else:
            true_abs_mag_c[i, 0: n_c_ch[i]] += npr.normal(0.0, 1.0, n_c_ch[i]) * \
                                               sig_extra + offset
        #sig_app_mag_c[i, 0: n_c_ch[i]] = npr.gamma(sig_app_mag_c_shape, \
        #                                           sig_app_mag_c_scale, n_c_ch[i])
        sig_app_mag_c[i, 0: n_c_ch[i]] = sig_app_mag_c_mean
        est_p_c = true_p_c
        est_app_mag_c[i, 0: n_c_ch[i]] = true_abs_mag_c[i, 0: n_c_ch[i]] + \
                                         true_mu_ch[i] + \
                                         npr.normal(0.0, 1.0, n_c_ch[i]) * \
                                         sig_app_mag_c[i, 0: n_c_ch[i]]

        # plots
        '''colors = ['r' if int(j) else 'g' for j in outliers]
        mp.scatter(true_p_c[i, 0: n_c_ch[i]], \
                   true_abs_mag_c[i, 0: n_c_ch[i]] - \
                   (abs_mag_c_std + \
                    slope_p * \
                    np.log10(true_p_c[i, 0: n_c_ch[i]])), \
                   c = colors)'''
        res_to_plot[i_res: i_res + n_c_ch[i]] = true_abs_mag_c[i, 0: n_c_ch[i]] - \
                                                (abs_mag_c_std + \
                                                 slope_p * \
                                                 np.log10(true_p_c[i, 0: n_c_ch[i]]))
        i_res += n_c_ch[i]
    mp.hist(res_to_plot, bins = 30)
    mp.show()

    # simulate SH0ES SNe: already have their true distances. no 
    # intrinsic scatter in r16 sims, though there probably should be
    print('simulating {:d} supernovae'.format(n_ch_s + n_s))
    true_app_mag_s_ch = abs_mag_s_std + \
                        true_mu_ch[n_ch_g + n_ch_c:]
    if setup == "d17":
        if model_outliers == "ht":
            true_app_mag_s_ch += npr.standard_t(st_nu_s, n_ch_s) * \
                                 sig_int_s
        else:
            outliers = npr.uniform(0.0, 1.0, n_ch_s) < f_out_s
            io_ch_s = np.array(outliers, dtype=int)
            sig_extra = np.ones(n_ch_s) * sig_int_s
            sig_extra[outliers] = sig_out_s
            offset = np.zeros(n_ch_s)
            offset[outliers] = dmag_out_s
            true_app_mag_s_ch += npr.normal(0.0, 1.0, n_ch_s) * \
                                 sig_extra + offset
    est_app_mag_s_ch = true_app_mag_s_ch + \
                       npr.normal(0.0, sig_app_mag_s_ch_mean, n_ch_s)
    sig_app_mag_s_ch = np.ones(n_ch_s) * sig_app_mag_s_ch_mean

    # add in zero-point offset if desired
    if inc_zp_off:
        est_app_mag_s_ch += zp_off_mask[n_ch_g + n_ch_c:] * zp_off
        for i in range(0, n_ch):
            est_app_mag_c[i, 0: n_c_ch[i]] += zp_off_mask[i] * zp_off

    # simulate high-z SNe. need to sample true distances,
    # then generate observed apparent magnitudes and redshifts
    # optionally include SNe outliers
    true_z_s = npr.uniform(z_s_min, z_s_max, n_s)
    true_dis_s = z2d(true_z_s)
    true_app_mag_s = abs_mag_s_std + \
                     5.0 * np.log10(true_dis_s) - 5.0
    if setup != "d17":
        true_ff_s = npr.multivariate_normal(ff_s_mean, \
                                            np.diag(ff_s_sigma ** 2), \
                                            n_s)
        true_app_mag_s += alpha_s * true_ff_s[:, 0] + \
                          beta_s * true_ff_s[:, 1]
    if model_outliers == "ht":
        true_app_mag_s += npr.standard_t(st_nu_s, n_s) * sig_int_s
    else:
        outliers = npr.uniform(0.0, 1.0, n_s) < f_out_s
        io_s = np.array(outliers, dtype=int)
        sig_extra = np.ones(n_s) * sig_int_s
        sig_extra[outliers] = sig_out_s
        offset = np.zeros(n_s)
        offset[outliers] = dmag_out_s
        true_app_mag_s += npr.normal(0.0, 1.0, n_s) * sig_extra + offset
    if not fix_redshifts:
        est_z_s = true_z_s + npr.normal(0.0, sig_z_s_tot, n_s)
    else:
        est_z_s = true_z_s
    if setup == "d17":
        est_app_mag_s = true_app_mag_s + npr.normal(0.0, 1.0, n_s) * \
                        sig_app_mag_s_mean
        sig_app_mag_s = np.ones(n_s) * sig_app_mag_s_mean
        est_ff_s = np.array([[0.0, 0.0, 0.0],
                             [0.0, 0.0, 0.0],
                             [0.0, 0.0, 0.0]])
        sig_x_1_s = None
        sig_c_s = None
        cov_x_1_app_mag_s = None
        cov_c_app_mag_s = None
        cov_x_1_c_s = None
    else:
        corr_noise_s = npr.multivariate_normal([0, 0, 0], cov_s, n_s)
        est_ff_s = true_ff_s + corr_noise_s[:, 1:]
        est_app_mag_s = true_app_mag_s + corr_noise_s[:, 0]
        sig_app_mag_s = np.ones(n_s) * np.sqrt(cov_s[0, 0])
        sig_x_1_s = np.ones(n_s) * np.sqrt(cov_s[1, 1])
        sig_c_s = np.ones(n_s) * np.sqrt(cov_s[2, 2])
        cov_x_1_app_mag_s = np.ones(n_s) * cov_s[0, 1]
        cov_c_app_mag_s = np.ones(n_s) * cov_s[0, 2]
        cov_x_1_c_s = np.ones(n_s) * cov_s[1, 2]
        print('input high-z SN parameter observation covariance:')
        print(cov_s)
        print('sample high-z SN parameter observation covariance:')
        print(np.cov(corr_noise_s.transpose()))
    res_to_plot = est_app_mag_s - \
                  (abs_mag_s_std + 5.0 * np.log10(true_dis_s) - 5.0)
    fig, axes = mp.subplots(1, 2)
    axes[0].hist(res_to_plot, bins=30, density=True)
    axes[1].plot(5.0 * np.log10(true_dis_s) - 5.0, est_app_mag_s, 'ro')
    if model_outliers != "ht":
        axes[1].plot(5.0 * np.log10(true_dis_s[io_s == 0]) - 5.0, \
                     est_app_mag_s[io_s == 0], 'go')
    mp.suptitle("Supernovae")
    mp.show()

    # simulate writing to R16 file and reading in
    if round_data:
        for i in range(0, n_ch):
            for j in range(0, n_c_ch[i]):
                # NB: only Cepheid data rounded for now. app mags
                #     really drawn from colour and H mag so rounding
                #     could be a little worse
                est_p_c[i, j] = float('{:4.4g}'.format(est_p_c[i, j]))
                est_app_mag_c[i, j] = float('{:4.4g}'.format(est_app_mag_c[i, j]))
                if inc_met_dep:
                    est_z_c[i, j] = float('{:4.4g}'.format(est_z_c[i, j]))
        for i in range(0, n_ch_s):
            est_app_mag_s_ch[i] = float('{:5.5g}'.format(est_app_mag_s_ch[i]))

    # sim info
    print('true abs_mag_c_std: ', abs_mag_c_std)
    print('true slope_p:       ', slope_p)
    if inc_met_dep:
        print('true slope_z:       ', slope_z)
    print('true sig_int:       ', sig_int_c)
    if model_outliers == "gmm":
        print('true f_out_c:       ', f_out_c)
        print('true dmag_out_c:    ', dmag_out_c)
        print('true sig_out_c:     ', sig_out_c)
        print('true f_out_s:       ', f_out_s)
        print('true dmag_out_s:    ', dmag_out_s)
        print('true sig_out_s:     ', sig_out_s)
    print('true abs_mag_s_std: ', abs_mag_s_std)
    print('true true_mu_h:     ', np.array_str(true_mu_ch, precision=2))
    print('true h_0:           ', h_0)
    sim_info = {'abs_mag_c_std': abs_mag_c_std, \
                'slope_p': slope_p, 'sig_int_c': sig_int_c, \
                'abs_mag_s_std': abs_mag_s_std, \
                'true_mu_ch': true_mu_ch, 'h_0': h_0}
    if inc_met_dep:
        sim_info['slope_z'] = slope_z
    if model_outliers == "gmm":
        sim_info['f_out_c'] = f_out_c
        sim_info['dmag_out_c'] = dmag_out_c
        sim_info['sig_out_c'] = sig_out_c
        sim_info['f_out_s'] = f_out_s
        sim_info['dmag_out_s'] = dmag_out_s
        sim_info['sig_out_s'] = sig_out_s
    elif model_outliers == "ht":
        sim_info['st_nu_c'] = st_nu_c
        sim_info['st_nu_s'] = st_nu_s
    if inc_zp_off:
        sim_info['zp_off'] = zp_off

    # return simulated data
    to_return = [n_ch_d, n_ch_p, n_ch_c, n_ch_s, n_c_ch, n_s, \
                 dis_anc, sig_dis_anc, est_app_mag_c, \
                 sig_app_mag_c, est_p_c, sig_int_c, \
                 est_app_mag_s_ch, sig_app_mag_s_ch, est_app_mag_s, \
                 sig_app_mag_s, est_z_s, est_ff_s[:, 0], sig_x_1_s, \
                 est_ff_s[:, 1], sig_c_s, cov_x_1_app_mag_s, \
                 cov_c_app_mag_s, cov_x_1_c_s, sig_int_s, \
                 est_q_0, sig_q_0, sig_zp_off, zp_off_mask, \
                 par_anc_lkc, sim_info]
    if not fix_redshifts:
        to_return.append(np.ones(n_s) * sig_z_s)
        to_return.append(sig_v_pec)
    if inc_met_dep:
        to_return.append(est_z_c)
    return to_return
    output_node = [
        model.get_layer(name=layer_name).output for layer_name in layers_name
    ]
    model1 = Model(inputs=input_node, outputs=output_node)
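    # model1 acts as a feature extractor: it maps the network input to the
    # activations of every requested layer (Keras functional API)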
    outputs = model1.predict(img)
    for index, feature_map in enumerate(outputs):
        # [N, H, W, C] -> [H, W, C]
        im = np.squeeze(feature_map)

        # show top 12 feature maps
        plt.figure()
        for i in range(12):
            ax = plt.subplot(3, 4, i + 1)
            # [H, W, C]
            plt.imshow(im[:, :, i], cmap='gray')
        plt.suptitle(layers_name[index])
        plt.show()
except Exception as e:
    print(e)

# subclasses API
# outputs = model.receive_feature_map(img, layers_name)
# for index, feature_maps in enumerate(outputs):
#     # [N, H, W, C] -> [H, W, C]
#     im = np.squeeze(feature_maps)
#
#     # show top 12 feature maps
#     plt.figure()
#     for i in range(12):
#         ax = plt.subplot(3, 4, i + 1)
#         # [H, W, C]
Beispiel #42
0
def plot_audio(s,
               sr=None,
               win=0.02,
               shift=0.01,
               nb_melfilters=40,
               nb_ceps=12,
               get_qspec=False,
               get_vad=True,
               fmin=64,
               fmax=None,
               sr_new=None,
               preemphasis=0.97,
               pitch_threshold=0.8,
               pitch_fmax=1200,
               vad_smooth=3,
               vad_minlen=0.1,
               cqt_bins=96,
               center=True,
               title=""):
    from matplotlib import pyplot as plt
    from odin.preprocessing import speech

    # ====== helper ====== #
    def spectrogram(spec, vad, title):
        plt.figure()
        if spec.shape[0] / spec.shape[1] >= 8.:
            nb_samples = len(spec)
            n1, n2, n3 = nb_samples // 4, nb_samples // 2, 3 * nb_samples // 4
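            # long recordings (>= 8x more frames than frequency bins) get a
            # full-length view on the top row plus four quarter-length views below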
            plt.subplot2grid((3, 4), (0, 0), rowspan=1, colspan=4)
            plot_spectrogram(spec.T, vad=vad)
            plt.subplot2grid((3, 4), (1, 0), rowspan=1, colspan=2)
            plot_spectrogram(spec[:n1].T, vad=vad[:n1])
            plt.subplot2grid((3, 4), (1, 2), rowspan=1, colspan=2)
            plot_spectrogram(spec[n1:n2].T, vad=vad[n1:n2])
            plt.subplot2grid((3, 4), (2, 0), rowspan=1, colspan=2)
            plot_spectrogram(spec[n2:n3].T, vad=vad[n2:n3])
            plt.subplot2grid((3, 4), (2, 2), rowspan=1, colspan=2)
            plot_spectrogram(spec[n3:].T, vad=vad[n3:])
        else:
            plot_spectrogram(spec.T, vad=vad)
        plt.suptitle(str(title))
        plt.tight_layout()

    # ====== load signal ====== #
    if isinstance(s, string_types):
        name = os.path.basename(s)
        s, _ = speech.read(s)
        if sr is None:
            sr = _
    else:
        name = "-".join([str(s.shape), str(sr)])
    title = str(title) + ":" + name
    # ====== processing ====== #
    get_vad = get_vad or True  # force VAD on while keeping any truthy user value
    y = speech.speech_features(s,
                               sr,
                               win=win,
                               shift=shift,
                               nb_melfilters=nb_melfilters,
                               nb_ceps=nb_ceps,
                               get_spec=True,
                               get_mspec=True,
                               get_mfcc=True,
                               get_qspec=get_qspec,
                               get_phase=False,
                               get_pitch=False,
                               get_vad=get_vad,
                               get_energy=True,
                               get_delta=False,
                               fmin=fmin,
                               fmax=fmax,
                               sr_new=sr_new,
                               preemphasis=preemphasis,
                               pitch_threshold=pitch_threshold,
                               pitch_fmax=pitch_fmax,
                               vad_smooth=vad_smooth,
                               vad_minlen=vad_minlen,
                               cqt_bins=cqt_bins,
                               center=center)
    # ====== plot raw signals ====== #
    if sr > 16000:
        s = speech.resample(s, sr, 16000)
        sr = 16000
    plt.figure()
    plt.subplot(2, 1, 1)
    plt.plot(s)
    plt.subplot(2, 1, 2)
    plt.plot(y['energy'].ravel())
    plt.tight_layout()
    plt.suptitle(title)
    # ====== plot spectrogram ====== #
    spectrogram(y['spec'], y['vad'], title='STFT power spectrum')
    if get_qspec:
        spectrogram(y['qspec'], y['vad'], title='CQT power spectrum')
    return y
Beispiel #43
0
    def feature_pair_comparision(self, X, y, feature_names, fitted_model):
        """ F-value, mutual Information, for two feature and partial dependence
        """
        cr = ''
        delim = '<br/>'
        #TODO: currently only partial dependence is adapted.
        f_test, _ = f_regression(X, y)  # f_regression returns (F-statistics, p-values)
        f_test /= np.max(f_test)

        mi = mutual_info_regression(X, y)
        mi /= np.max(mi)
        if self.native_flag == True:
            print("f-value : ", f_test)
            print("mutual info : ", mi)

            print('Convenience plot with ``partial_dependence_plots``')
        else:
            cr += "f-value : {}".format(f_test) + delim
            cr += "mutual info : ".format(mi) + delim
            cr += 'Convenience plot with ``partial_dependence_plots``' + delim

        features = [0, 5, 1, 2, (5, 1)]
        fig, axs = plot_partial_dependence(fitted_model,
                                           X,
                                           features,
                                           feature_names=feature_names,
                                           n_jobs=3,
                                           grid_resolution=50)
        fig.suptitle(
            'Partial dependence of house value on nonlocation features\n'
            'for the California housing dataset')
        plt.subplots_adjust(
            top=0.9)  # tight_layout causes overlap with suptitle

        if self.native_flag == True:
            print('Custom 3d plot via ``partial_dependence``')
        else:
            cr += 'Custom 3d plot via ``partial_dependence``' + delim
        #fig = plt.figure()

        target_feature = (1, 5)
        pdp, axes = partial_dependence(fitted_model,
                                       target_feature,
                                       X=X,
                                       grid_resolution=50)
        XX, YY = np.meshgrid(axes[0], axes[1])
        Z = pdp[0].reshape(list(map(np.size, axes))).T
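        # reshape the flat partial-dependence values onto the 2-D grid and
        # transpose so Z matches the meshgrid orientation of XX and YY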
        print("Z shape : ", Z.shape)

        if self.native_flag == True:
            ax = Axes3D(fig)
            surf = ax.plot_surface(XX,
                                   YY,
                                   Z,
                                   rstride=1,
                                   cstride=1,
                                   cmap=plt.cm.BuPu)
            ax.set_xlabel(feature_names[target_feature[0]])
            ax.set_ylabel(feature_names[target_feature[1]])
            ax.set_zlabel('Partial dependence')
            #  pretty init view
            ax.view_init(elev=22, azim=122)
            plt.colorbar(surf)
            plt.suptitle('Partial dependence plot')
            plt.subplots_adjust(top=0.9)

            plt.show()
        else:
            return [
                cr,
                mpld3.fig_to_html(fig),
                Z.tolist(), feature_names[target_feature[0]],
                feature_names[target_feature[1]], 'partial dependence'
            ]
Beispiel #44
0
def makeMap(lons, lats, hr, m, m2, df, deltaDay):
    output_dir = os.path.join(dir_path, 'images', grib.basin,
                              grib.date.strftime("%Y%m%d"))
    imgtype = None
    if imgtype == 'cumulative':
        raster = np.sum(grib.data[0:grib.hours.index(hr)], axis=0)  #cumulative
    else:
        raster = grib.data  # 1 hr forecast (not cumulative)

    if grib.displayunits == 'US' and grib.units == '[kg/(m^2)]':
        raster = raster * 0.03937
        grib.units = 'inches'

    #YOU CAN NOT PUT NAN VALUES IN BEFORE DOING scipy.ndimage.zoom
    raster[raster == 0] = np.nan  #prevent values of zero from being plotted

    maxVal = int(np.nanpercentile(raster, 99, interpolation='linear'))
    minVal = int(np.nanpercentile(raster, 1, interpolation='linear'))
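    # clip the colour scale to the 1st-99th percentile so a few extreme cells
    # do not wash out the rest of the map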

    im = m.pcolormesh(
        lons, lats, raster.T, cmap=plt.cm.jet, vmin=minVal,
        vmax=maxVal)  # return 50th percentile, e.g median., latlon=True)
    im2 = m2.pcolormesh(
        lons, lats, raster.T, cmap=plt.cm.jet, vmin=minVal,
        vmax=maxVal)  # return 50th percentile, e.g median., latlon=True)
    cb = m.colorbar(mappable=im, location='right', label='SWE (in.)')
    #Show user defined points on map.
    if df is not None:
        for index, row in df.iterrows():
            m.plot(row['Longitude'], row['Latitude'], 'ro')
            plt.text(
                row['Longitude'], row['Latitude'],
                str(round(row['Model_Value'] * 0.03937, 1)) + ' / ' +
                str(row['SWE']))
            print("Modeled Value: " +
                  str(round(row['Model_Value'] * 0.03937, 1)) +
                  ' / Actual Value: ' + str(row['SWE']))

    #plot shapefile
    m.readshapefile(dir_path + '/Shapefiles/' + grib.basin + '/' + grib.basin +
                    '_EPSG4326',
                    grib.basin + '_EPSG4326',
                    linewidth=1)
    m2.readshapefile(dir_path + '/Shapefiles/' + grib.basin + '/' +
                     grib.basin + '_EPSG4326',
                     grib.basin + '_EPSG4326',
                     linewidth=1)
    if grib.basin == 'Hell_Hole':
        m.readshapefile(dir_path +
                        '/Shapefiles/Hell_Hole_SMUD/Hell_Hole_SMUD' +
                        '_EPSG4326',
                        'Hell_Hole_SMUD' + '_EPSG4326',
                        linewidth=1)
        m2.readshapefile(dir_path +
                         '/Shapefiles/Hell_Hole_SMUD/Hell_Hole_SMUD' +
                         '_EPSG4326',
                         'Hell_Hole_SMUD' + '_EPSG4326',
                         linewidth=1)

    # annotate
    m.drawcountries()
    m.drawstates()
    #m.drawrivers()
    m.drawcounties(color='darkred')
    if inputArgs.date2 is not None:
        plt.suptitle(
            grib.basin.replace('_', " ") + ' Difference in SWE between ' +
            grib.date.strftime("%m/%d/%Y") + ' and ' +
            grib.date2.strftime("%m/%d/%Y") +
            '\n Total Difference in AF (calculated from SWE): ' +
            str("{:,}".format(int(grib.basinTotal))) + ' acre feet')
        img = Image.open(output_dir + "/" + grib.date.strftime("%Y%m%d") +
                         "_0_" + grib.basin + '.png')
        w, h = img.size
        draw = ImageDraw.Draw(img)
        font = ImageFont.truetype("micross.ttf",
                                  120)  #Avail in C:\\Windows\Fonts
        plus_sign = ''
        if grib.basinTotal > 0:
            plus_sign = "+"
        draw.text((1000, h - 400),
                  '7 Day Change from ' + grib.date2.strftime("%#m/%d") +
                  ' to ' + grib.date.strftime("%#m/%d") + ': ' + plus_sign +
                  str("{:,}".format(int(grib.basinTotal))) + ' acre feet',
                  (0, 0, 0),
                  font=font)
        img.save(output_dir + "/" + grib.date.strftime("%Y%m%d") + "_0_" +
                 grib.basin + '.png')
    else:
        plt.suptitle(
            grib.basin.replace('_', " ") + ' ' +
            grib.date.strftime("%m/%d/%Y") + '\n Total AF from SWE: ' +
            str("{:,}".format(int(grib.basinTotal))) + ' acre feet')
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    plt.savefig(output_dir + "/" + grib.date.strftime("%Y%m%d") + "_" +
                str(-deltaDay) + "_" + grib.basin + '.png',
                dpi=775)
    print("Saved to " + dir_path + '/images/' + grib.basin + '.png')
    #plt.show()
    plt.close()
def annual_cycle_zt_plot(parameter):
    """Prepare annual cycle and diurnal cycle plots of cloud fraction fro set 3 and set 5 diag"""
    variables = parameter.variables
    seasons = parameter.season
    output_path = parameter.output_path
    sites = parameter.sites

    if not os.path.exists(os.path.join(output_path, 'figures', sites[0])):
        os.makedirs(os.path.join(output_path, 'figures', sites[0]))

    # check test file
    test_findex = 0  #preset of test model indicator
    test_file = glob.glob(output_path + '/metrics/' + sites[0] + '/' +
                          'cl_p_test_diurnal_climo_' + sites[0] + '.csv')
    if len(test_file) == 0:
        print('No test model plotted for cl_p: ' + sites[0])

    if len(test_file) > 0:
        test_findex = 1  #test model exist

    month = seasons  #['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
    month_legend = [
        'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct',
        'Nov', 'Dec'
    ]

    for j, variable in enumerate(variables):

        # process test model data
        if test_findex == 1:
            test_data = np.loadtxt(output_path + '/metrics/' + sites[0] + '/' +
                                   variable + '_test_diurnal_climo_' +
                                   sites[0] + '.csv')
            test_data = test_data.reshape((12, 8, 37))

            cl_p = np.nanmean(test_data, axis=1)
            cl_p_diurnal = np.nanmean(test_data, axis=0)
            cl_p_ann = np.nanmean(cl_p, axis=0)

        # process observation data
        obs_data = np.loadtxt(output_path + '/metrics/' + sites[0] + '/' +
                              variable + '_obs_diurnal_climo_' + sites[0] +
                              '.csv')
        obs_data = obs_data.reshape((12, 24, 37))

        cl_ob = np.nanmean(obs_data, axis=1)
        cl_ob_diurnal = np.nanmean(obs_data, axis=0)
        cl_ob_ann = np.nanmean(cl_ob, axis=0)

        #################### Monthly Mean Diurnal Cycle Contours
        # define plotting controller [XZ]
        if test_findex == 0: index_list = np.arange(1)
        if test_findex == 1: index_list = np.arange(2)
        #----------------------------------------------
        # define site-dependent contour levels [XZ]
        site_ct_up = {'sgpc1': 25.5, 'nsac1': 50.5, 'enac1': 35.5,
                      'twpc1': 70.5, 'twpc2': 70.5, 'twpc3': 60.5,
                      'maom1': 80.5}
        ct_lo = 0
        ct_up = site_ct_up[sites[0]]
        rlevel = np.arange(ct_lo, ct_up, 0.5)
        #----------------------------------------------
        #plotting contours
        for iid, index in enumerate(index_list):
            fig1, axs = plt.subplots(4,
                                     3,
                                     figsize=(15, 12),
                                     facecolor='w',
                                     edgecolor='k',
                                     sharex=True,
                                     sharey=True)
            fig1.subplots_adjust(hspace=.3, wspace=.1)
            axs = axs.ravel()
            for imon in range(12):
                if index == 0:
                    title = variable + '_obs_mon_diurnal_clim'
                    yy = np.linspace(0, 23, 24)
                    xx = np.linspace(100, 1000, 37)
                    x, y = np.meshgrid(xx, yy)
                    obs_data_con = np.concatenate(
                        (obs_data[imon, :, :], obs_data[imon, :, :]),
                        axis=0)  #6 hour GMT to Local time
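                    # duplicating the cycle lets the 6-h GMT-to-local shift be
                    # taken as the simple slice [6:30] below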
                    im = axs[imon].contourf(y,
                                            x,
                                            obs_data_con[6:30, ::-1],
                                            cmap='jet',
                                            levels=rlevel)
                    #im=axs[imon].pcolormesh(y,x,obs_data_con[6:30,::-1], vmin=0, vmax=25)
                    #im=axs[imon].pcolormesh(y,x,obs_data_con[:24,::-1], vmin=0, vmax=25)
                    plt.xlim([0, 23])
                    xax = np.arange(0, 24, 3)
                    my_xticks = ['0', '3', '6', '9', '12', '15', '18', '21']

                else:
                    title = variable + '_mod_mon_diurnal_clim'
                    yy = np.linspace(0, 7, 8)
                    xx = np.linspace(100, 1000, 37)
                    x, y = np.meshgrid(xx, yy)
                    test_data_con = np.concatenate(
                        (test_data[imon, :, :], test_data[imon, :, :]), axis=0)
                    #Starting time is 3:00:00 GMT; +3 hour shift from GMT to local time
                    #NEED TO SWITCH model data TO LOCAL TIME!
                    im = axs[imon].contourf(y,
                                            x,
                                            test_data_con[1:9, ::-1],
                                            cmap='jet',
                                            levels=rlevel)
                    #im=axs[imon].pcolormesh(y,x,test_data_con[1:9,::-1], vmin=0, vmax=25)
                    plt.xlim([0, 7])
                    xax = np.arange(0, 8, 1)
                    my_xticks = ['0', '3', '6', '9', '12', '15', '18', '21']

                axs[imon].set_title(month_legend[imon], fontsize=17)
                plt.xticks(xax, my_xticks)
                plt.setp(axs[imon].get_xticklabels(), visible=True)

            for ax in axs[9:12]:
                ax.set_xlabel('Local time (hr)', fontsize=17)
            for ax in axs[::3]:
                ax.set_ylabel('Pressure (mb)', fontsize=17)
            axs[0].invert_yaxis()
            plt.suptitle(title, fontsize=30)
            fig1.subplots_adjust(right=0.8)
            cbar_ax = fig1.add_axes([0.85, 0.15, 0.05, 0.7])
            cb = fig1.colorbar(im, cax=cbar_ax)
            cb.set_ticks(np.arange(ct_lo, ct_up + 4.5, 5), update_ticks=True)
            cb.ax.tick_params(labelsize=15)
            plt.title('cl (%)', fontsize=15)
            fig1.savefig(output_path + '/figures/' + sites[0] + '/' + title +
                         '_' + sites[0] + '.png')
            plt.close('all')

        ########################## Diurnal Cycle Contours
        #----------------------------------------------
        # define site-dependent contour levels [XZ]
        site_ct_up = {'sgpc1': 25.5, 'nsac1': 30.5, 'enac1': 25.5,
                      'twpc1': 70.5, 'twpc2': 40.5, 'twpc3': 40.5,
                      'maom1': 60.5}
        ct_lo = 0
        ct_up = site_ct_up[sites[0]]
        rlevel = np.arange(ct_lo, ct_up, 0.5)
        #----------------------------------------------
        #plotting contours
        for iid, index in enumerate(index_list):
            fig2 = plt.figure()  # Create figure
            ax = fig2.add_axes([0.15, 0.15, 0.65, 0.75])  # Create axes
            if index == 0:
                title = variable + '_obs_diurnal_clim'
                yy = np.linspace(0, 23, 24)
                xx = np.linspace(100, 1000, 37)
                x, y = np.meshgrid(xx, yy)
                obs_data_con = np.concatenate(
                    (cl_ob_diurnal, cl_ob_diurnal),
                    axis=0)  #6 hour GMT to Local time
                im = ax.contourf(y,
                                 x,
                                 obs_data_con[6:30, ::-1],
                                 cmap='jet',
                                 levels=rlevel)
                #im=ax.pcolormesh(y,x,obs_data_con[6:30,::-1], vmin=0, vmax=25)
                plt.xlim([0, 23])
                xax = np.arange(0, 24, 3)
                my_xticks = ['0', '3', '6', '9', '12', '15', '18', '21']

            else:
                title = variable + '_mod_diurnal_clim'
                yy = np.linspace(0, 7, 8)
                xx = np.linspace(100, 1000, 37)
                x, y = np.meshgrid(xx, yy)
                test_data_con = np.concatenate((cl_p_diurnal, cl_p_diurnal),
                                               axis=0)
                #Starting time is 3:00:00 GMT; +3 hour shift from GMT to local time
                im = ax.contourf(y,
                                 x,
                                 test_data_con[1:9, ::-1],
                                 cmap='jet',
                                 levels=rlevel)
                #im=ax.pcolormesh(y,x,test_data_con[1:9,::-1], vmin=0, vmax=25)
                plt.xlim([0, 7])
                xax = np.arange(0, 8, 1)
                my_xticks = ['0', '3', '6', '9', '12', '15', '18', '21']

            plt.xticks(xax, my_xticks)
            plt.ylabel('Pressure (mb)')
            plt.xlabel('Local time (hr)')
            plt.gca().invert_yaxis()
            plt.suptitle(title)
            cbar_ax = fig2.add_axes([0.85, 0.15, 0.05, 0.75])
            cb = fig2.colorbar(im, cax=cbar_ax)
            cb.set_ticks(np.arange(ct_lo, ct_up + 4.5, 5), update_ticks=True)
            plt.title('cl (%)')
            fig2.savefig(output_path + '/figures/' + sites[0] + '/' + title +
                         '_' + sites[0] + '.png')
            #
            plt.close('all')
    ########################## Annual Cycle Contours

    # define plotting controller [XZ]
    if test_findex == 0: aindex_list = np.arange(1)
    if test_findex == 1: aindex_list = np.arange(3)

    yy = np.linspace(0, 11, 12)
    xx = np.linspace(100, 1000, 37)
    x, y = np.meshgrid(xx, yy)
    #----------------------------------------------
    # define site-dependent contour levels [XZ]
    site_levels = {'sgpc1': (0, 25.5, -10, 10.5),
                   'nsac1': (0, 60.5, -35, 40.5),
                   'enac1': (0, 35.5, -15, 20.5),
                   'twpc1': (0, 70.5, -10, 40.5),
                   'twpc2': (0, 60.5, -10, 50.5),
                   'twpc3': (0, 40.5, -20, 15.5),
                   'maom1': (0, 70.5, -15, 60.5)}
    ct_lo, ct_up, ct_lo_diff, ct_up_diff = site_levels[sites[0]]
    rlevel = np.arange(ct_lo, ct_up, 0.5)  #original
    drlevel = np.arange(ct_lo_diff, ct_up_diff, 0.5)  #difference
    #----------------------------------------------

    # start plotting
    for iid, index in enumerate(aindex_list):
        fig = plt.figure()  # Create figure
        ax = fig.add_axes([0.15, 0.15, 0.65, 0.75])  # Create axes
        if index == 0:  #observation
            im = ax.contourf(y, x, cl_ob[:, ::-1], cmap='jet', levels=rlevel)
            title = variable + '_obs_annual_cycle_clim_' + sites[0]
        elif index == 1:  #test model
            title = variable + '_mod_annual_cycle_clim_' + sites[0]
            im = ax.contourf(y, x, cl_p[:, ::-1], cmap='jet', levels=rlevel)
            #im=ax.pcolormesh(y,x,cl_p[:,::-1], vmin=0, vmax=25)
        elif index == 2:  #difference
            im = ax.contourf(y,
                             x,
                             cl_p[:, ::-1] - cl_ob[:, ::-1],
                             cmap='coolwarm',
                             levels=drlevel)
            title = variable + '_diff_annual_cycle_clim_' + sites[0]
        xax = np.arange(0, 12, 1)
        my_xticks = [
            'J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'
        ]
        plt.xticks(xax, my_xticks)
        plt.xlim(0, 11)
        plt.ylabel('Pressure (mb)')
        plt.xlabel('Month')
        plt.gca().invert_yaxis()
        plt.suptitle(title)
        cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.75])
        cb = fig.colorbar(im, cax=cbar_ax)
        if index < 2:
            cb.set_ticks(np.arange(ct_lo, ct_up + 4.5, 5),
                         update_ticks=True)  #original plot colorbar
        if index == 2:
            cb.set_ticks(np.arange(ct_lo_diff, ct_up_diff + 4.5, 5),
                         update_ticks=True)  #difference plot colorbar
        plt.title('cl (%)')
        fig.savefig(output_path + '/figures/' + sites[0] + '/' + title +
                    '.png')
        #
        plt.close('all')
    ########################### Seasonal Mean Vertical Line Plot
    levels = xx
    seasons = ['MAM', 'JJA', 'SON', 'DJF']
    if test_findex == 1: cl_p2 = np.concatenate((cl_p, cl_p), axis=0)
    cl_ob2 = np.concatenate((cl_ob, cl_ob), axis=0)

    #----------------------------------------------
    # define site-dependent contour levels [XZ]
    site_xtup = {'sgpc1': 25, 'nsac1': 50, 'enac1': 35, 'twpc1': 50,
                 'twpc2': 45, 'twpc3': 40, 'maom1': 60}
    xtup = site_xtup[sites[0]]
    #----------------------------------------------
    # start plotting
    for index in range(len(seasons)):
        fig3 = plt.figure(figsize=(10, 15))  # Create figure
        ax = fig3.add_axes([0.15, 0.07, 0.80, 0.88])  # Create axes
        if test_findex == 1:
            ax.plot(np.nanmean(cl_p2[index * 3 + 2:(index + 1) * 3 + 2, ::-1],
                               axis=0),
                    levels,
                    'r',
                    lw=3,
                    label='MOD')
        ax.plot(np.nanmean(cl_ob2[index * 3 + 2:(index + 1) * 3 + 2, ::-1],
                           axis=0),
                levels,
                'k',
                lw=3,
                label='OBS')
        plt.gca().invert_yaxis()
        plt.ylabel('Pressure (mb)', fontsize=20)
        plt.xlabel('Cloud Fraction (%)', fontsize=20)
        plt.xlim([0, xtup])
        plt.legend(loc='best', prop={'size': 25})
        ax.tick_params(labelsize=20,
                       length=5,
                       width=1,
                       direction='out',
                       which='major')
        plt.title(seasons[index] + ' Mean Cloud Fraction Vertical Profile',
                  fontsize=20)
        fig3.savefig(output_path + '/figures/' + sites[0] + '/' + variable +
                     '_diff_' + seasons[index] + '_' + sites[0] + '.png')
        plt.close('all')

    ########################### Annual Mean Vertical Line Plot

    fig0 = plt.figure(figsize=(10, 15))  # Create figure
    ax = fig0.add_axes([0.15, 0.07, 0.80, 0.88])  # Create axes
    if test_findex == 1:
        ax.plot(cl_p_ann[::-1], levels, 'r', lw=3, label='MOD')
    ax.plot(cl_ob_ann[::-1], levels, 'k', lw=3, label='OBS')
    plt.gca().invert_yaxis()
    plt.ylabel('Pressure (mb)', fontsize=20)
    plt.xlabel('Cloud Fraction (%)', fontsize=20)
    plt.xlim([0, xtup])
    plt.legend(loc='best', prop={'size': 25})
    ax.tick_params(labelsize=20,
                   length=5,
                   width=1,
                   direction='out',
                   which='major')
    plt.title('Annual Mean Cloud Fraction Vertical Profile', fontsize=20)
    fig0.savefig(output_path + '/figures/' + sites[0] + '/' + variable +
                 '_diff_' + 'ANN_' + sites[0] + '.png')
    plt.close('all')
Beispiel #46
0
def main(sta1, sta2, filterid, components, day, mov_stack=1, show=True, outfile=None):
    db = connect()
    dtt_lag = get_config(db, "dtt_lag")
    dtt_v = float(get_config(db, "dtt_v"))
    dtt_minlag = float(get_config(db, "dtt_minlag"))
    dtt_width = float(get_config(db, "dtt_width"))
    dtt_sides = get_config(db, "dtt_sides")
    dbmaxlag = int(float(get_config(db, "maxlag")))
    sta1 = sta1.replace('.','_')
    sta2 = sta2.replace('.','_')
    if sta2 >= sta1: # alphabetical order filtering!
        pair = "%s_%s"%(sta1,sta2)
        station1 = sta1.split("_")
        station2 = sta2.split("_")

        station1 = get_station(db, station1[0], station1[1])
        station2 = get_station(db, station2[0], station2[1])

        if dtt_lag == "static":
            minlag = dtt_minlag
            maxlag = minlag + dtt_width
        else:
            minlag = get_interstation_distance(station1, station2, station1.coordinates) / dtt_v
            maxlag = minlag + dtt_width
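            # "dynamic" lag: the coda window starts after the ballistic arrival,
            # estimated as interstation distance / dtt_v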

        fname = os.path.join('MWCS', "%02i" % filterid, "%03i_DAYS" %mov_stack, components, pair, '%s.txt' % day)
        print(fname)
        t = []
        dt = []
        err = []
        if os.path.isfile(fname):
            #df = pd.read_csv(fname, delimiter=' ', header=None, index_col=0, names=['t', 'dt', 'err', 'coh'])
            df = pd.read_csv(fname, delimiter=' ', header=None, names=['t', 'dt', 'err', 'coh'])
            t = df["t"].tolist()
            dt = df["dt"].tolist()
            err = df["err"].tolist()
            del df

        alldf = []
        fname = os.path.join('DTT', "%02i" % filterid, "%03i_DAYS" %mov_stack, components, '%s.txt' % day)
        if not os.path.isfile(fname):
            return
        df = pd.read_csv(fname, delimiter=',')
        alldf.append(df)
        alldf = pd.concat(alldf)
        line = alldf[alldf['Pairs'] == pair].copy()
        print(line)
        M = float(line["M"])
        M0 = float(line["M0"])
        A = float(line["A"])
        EA = float(line["EA"])
        EM = float(line["EM"])
        EM0 = float(line["EM0"])

        plt.scatter(t, dt)
        plt.errorbar(t, dt, yerr=err, linestyle="None")
        plt.xlabel("Time (s)")
        plt.ylabel("Delay time (s)")
        plt.axvspan(-maxlag, -minlag, 0,1, color='b', alpha=0.5)
        plt.axvspan(minlag, maxlag, 0,1, color='b', alpha=0.5)
        xlineM0 = range(-dbmaxlag, dbmaxlag + 1, 5)
        ylineM0 = []
        ylineEM0min = []
        ylineEM0max = []
        for i in range(-dbmaxlag, dbmaxlag + 1, 5):
            ylineM0.append(M0 * i)
            ylineEM0min.append((M0-EM0) * i)
            ylineEM0max.append((M0+EM0) * i)
        plt.plot(xlineM0, ylineM0, 'r', label='M0')
        plt.plot(xlineM0, ylineEM0min, 'r', alpha=0.3)
        plt.plot(xlineM0, ylineEM0max, 'r', alpha=0.3)
        xlineM = range(-dbmaxlag, dbmaxlag + 1, 5)
        ylineM = []
        ylineEMmax = []
        ylineEMmin = []
        for i in range(-dbmaxlag, dbmaxlag + 1, 5):
            ylineM.append((M * i) + A)
            ylineEMmin.append(((M-EM) * i) + A)
            ylineEMmax.append(((M+EM) * i) + A)
        plt.plot(xlineM, ylineM, 'k', label='M')
        plt.plot(xlineM, ylineEMmin, 'k', alpha=0.3)
        plt.plot(xlineM, ylineEMmax, 'k', alpha=0.3)
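        # in the MWCS doublet method, the fitted slope of delay time vs. lag time
        # estimates the relative velocity change (dv/v = -M); M0 is the same fit
        # forced through the origin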
        name = '%s-%s f%i m%i %s' % (sta1, sta2, filterid, mov_stack, day)
        name = name.replace('_', '.')
        plt.suptitle(name)
        plt.legend()
        plt.grid(True, ls="-",lw=0.2)
        
        ax = plt.gca()
        ax.set_xlim((-dbmaxlag, dbmaxlag))
        if outfile:
            if outfile.startswith("?"):
                basename = '%s-%s-f%i-m%i-%s' % (sta1, sta2, filterid,
                                                 mov_stack,day)
                outfile = outfile.replace('?', basename)
            outfile = "dtt_" + outfile
            print("output to: %s" % outfile)
            plt.savefig(outfile)

        if show:
            plt.show()
def main(out_dir,
         confs,
         plot_fname='metrics',
         metrics_fname='metrics.csv',
         logger=None):

    logger = logging.getLogger('plot_results_ksp')

    out_dirs = [c.dataOutDir for c in confs]
    logger.info('--------')
    logger.info('Self-learning on: ')
    logger.info(out_dirs)
    logger.info('out_dir: ')
    logger.info(out_dir)
    logger.info('--------')

    l_dataset = learning_dataset.LearningDataset(confs[0], pos_thr=0.5)

    plot_curves(out_dir, confs, plot_fname, metrics_fname, logger)

    l_ksp_scores = list()
    l_ksp_ss_scores = list()
    l_ksp_ss_thr_scores = list()

    for i in range(len(confs)):

        file_ = os.path.join(confs[i].dataOutDir, 'metrics.npz')
        logger.info('Loading ' + file_)
        npzfile = np.load(file_)

        l_ksp_scores.append(npzfile['ksp_scores'])
        l_ksp_ss_scores.append(npzfile['ksp_ss_scores'])
        l_ksp_ss_thr_scores.append(npzfile['ksp_ss_thr_scores'])

    # Make plots
    mean_ksp_scores = np.mean(np.asarray(l_ksp_scores), axis=0)
    mean_ksp_ss_scores = np.mean(np.asarray(l_ksp_ss_scores), axis=0)
    mean_ksp_ss_thr_scores = np.mean(np.asarray(l_ksp_ss_thr_scores), axis=0)

    std_ksp_scores = np.std(np.asarray(l_ksp_scores), axis=0)
    std_ksp_ss_scores = np.std(np.asarray(l_ksp_ss_scores), axis=0)
    std_ksp_ss_thr_scores = np.std(np.asarray(l_ksp_ss_thr_scores), axis=0)

    path_ = os.path.join(out_dir, 'dataset.npz')
    data = dict()
    data['mean_ksp_scores'] = mean_ksp_scores
    data['mean_ksp_ss_scores'] = mean_ksp_ss_scores
    data['mean_ksp_ss_thr_scores'] = mean_ksp_ss_thr_scores
    data['std_ksp_scores'] = std_ksp_scores
    data['std_ksp_ss_scores'] = std_ksp_ss_scores
    data['std_ksp_ss_thr_scores'] = std_ksp_ss_thr_scores

    np.savez(path_, **data)

    logger.info('Saving KSP, PM and SS merged frames...')
    gt = l_dataset.gt
    frame_dir = 'ksp_pm_frames'
    frame_path = os.path.join(out_dir, frame_dir)
    if (os.path.exists(frame_path)):
        logger.info('[!!!] Frame dir: ' + frame_path +
                    ' exists. Delete to rerun.')
    else:
        os.mkdir(frame_path)
        c0 = confs[0]
        with progressbar.ProgressBar(maxval=len(c0.frameFileNames)) as bar:
            for f in range(len(c0.frameFileNames)):
                cont_gt = segmentation.find_boundaries(gt[..., f],
                                                       mode='thick')
                idx_cont_gt = np.where(cont_gt)
                im = utls.imread(c0.frameFileNames[f])
                im[idx_cont_gt[0], idx_cont_gt[1], :] = (255, 0, 0)
                for c in confs:
                    im = gaze.drawGazePoint(c.myGaze_fg, f, im, radius=7)

                bar.update(f)
                plt.subplot(241)
                plt.imshow(mean_ksp_scores[..., f])
                plt.title('mean KSP')
                plt.subplot(242)
                plt.imshow(std_ksp_scores[..., f])
                plt.title('std KSP')
                plt.subplot(243)
                plt.imshow(mean_ksp_ss_scores[..., f])
                plt.title('mean KSP+SS')
                plt.subplot(244)
                plt.imshow(std_ksp_ss_scores[..., f])
                plt.title('std KSP+SS')
                plt.subplot(245)
                plt.imshow(mean_ksp_ss_thr_scores[..., f])
                plt.title('mean KSP+SS -> PM -> (thr = %0.2f)' % c0.pm_thr)
                plt.subplot(246)
                plt.imshow(std_ksp_ss_thr_scores[..., f])
                plt.title('std KSP+SS -> PM -> (thr = %0.2f)' % c0.pm_thr)
                plt.subplot(247)
                plt.imshow(im)
                plt.title('image')
                plt.suptitle('frame: ' + str(f))
                plt.savefig(os.path.join(frame_path, 'f_' + str(f) + '.png'),
                            dpi=200)
                plt.clf()  # start each frame from a clean figure
Beispiel #48
0
def plot_network(tmp_result):
    #with warnings.catch_warnings():
    #    warnings.filterwarnings("ignore",category=DeprecationWarning)

    g_strip = tmp_result['g_strip']
    layout = tmp_result['layout']

    simulation_data['G'] = simulation_data['network_n']  # genes
    simulation_data['S'] = simulation_data['P']
    simulation_data['patients_number'] = 2 * simulation_data['S']

    disease_params = tmp_result['disease_params']

    plt.figure(figsize=(12, 8))
    plt.subplot(1, 2, 1)

    g_over = g_strip.subgraph(tmp_result['extracted_genes'])

    solitary_over = len([n for n, d in g_over.degree() if d == 0])

    n = len(g_over.nodes())
    if n > 0:
        c = float(n - solitary_over) / float(n)
    else:
        c = 0

    c_G = 1.0 - np.power(1.0 - nx.density(g_strip), n - 1)
    delta_c = c - c_G
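    # c is the measured coherence of the extracted subgraph (fraction of
    # non-isolated nodes); c_G is its expectation for a random n-node
    # subgraph, where a node stays connected with probability
    # 1 - (1 - density)^(n-1). delta_c is the excess over this null model.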

    F_s = len(tmp_result['extracted_genes'])
    V_G = len(tmp_result['disease_genes'])
    Vs_G = len(tmp_result['true_poositive_genes'])

    try:
        PPV = float(Vs_G) / float(F_s)
    except ZeroDivisionError:
        PPV = 0

    try:
        TPR = float(Vs_G) / float(V_G)
    except ZeroDivisionError:
        TPR = 0

    Q = PPV * TPR
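    # Standard retrieval metrics: PPV (precision) = true positives / extracted
    # genes, TPR (recall) = true positives / disease genes, and Q = PPV * TPR
    # combines the two into a single score.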

    #######################################################################

    nx.draw_networkx(g_strip,
                     pos=layout,
                     with_labels=False,
                     node_size=10,
                     node_color='k',
                     alpha=0.2)

    #######################################################################

    # false negative
    nx.draw_networkx(g_strip.subgraph(tmp_result['disease_genes']),
                     pos=layout,
                     with_labels=False,
                     node_size=100,
                     node_color='orange',
                     edge_color='orange',
                     width=2.0,
                     alpha=1.0)

    nx.draw_networkx_nodes(g_strip.subgraph(tmp_result['disease_genes']),
                           pos=layout,
                           with_labels=False,
                           node_size=100,
                           node_color='orange',
                           alpha=1.0,
                           label='false negative')

    #######################################################################

    # false positive
    nx.draw_networkx(g_strip.subgraph(tmp_result['extracted_genes']),
                     pos=layout,
                     with_labels=False,
                     node_size=100,
                     node_color='lime',
                     edge_color='lime',
                     width=2.0,
                     alpha=1.0)

    nx.draw_networkx_nodes(g_strip.subgraph(tmp_result['extracted_genes']),
                           pos=layout,
                           with_labels=False,
                           node_size=100,
                           node_color='lime',
                           alpha=1.0,
                           label='false positive')

    #######################################################################

    # true positive
    nx.draw_networkx(g_strip.subgraph(tmp_result['true_poositive_genes']),
                     pos=layout,
                     with_labels=False,
                     node_size=150,
                     node_color='r',
                     edge_color='r',
                     width=6.0,
                     alpha=1.0)

    nx.draw_networkx_nodes(g_strip.subgraph(
        tmp_result['true_poositive_genes']),
                           pos=layout,
                           with_labels=False,
                           node_size=150,
                           node_color='r',
                           alpha=1.0,
                           label='true positive')

    #######################################################################

    suffix = '_dc_' + str(round(delta_c, 2)) + '_Q_' + str(round(Q, 2))

    # (PPV, TPR and Q were already computed above)

    plt.xticks([])
    plt.yticks([])
    xylim = [-1.1, 1.1]

    plt.xlim(xylim)
    plt.ylim(xylim)
    #plt.title(title, size = 22)
    plt.legend()
    """
    title =  'G = '  + str(disease_params['G'])
    title += ',   D = '  + str(disease_params['D']) 
    title += ',   S = ' + str(disease_params['S'])
    title += ',   A = '   + str(disease_params['A'])
    title += ',   p = '   + str(disease_params['p'])  + ' \n'
    """

    title = 'f = ' + str(tmp_result['flt'])
    title += ',    $\\rho_0$ = ' + str(disease_params['rho_0'])  # + ' \n'

    #c_text = 'c = ' +  str(round(c,2)) + ' \n'
    #c_text += 'c$_G$ = ' + str(round(c_G,2)) + ' \n'
    #c_text += '$\Delta$c = ' + str(round(delta_c,2)) # + ' \n'
    c_text = r'$\Delta$c = ' + str(round(delta_c, 2))  # + ' \n'

    f_text = 'PPV = ' + str(round(PPV, 2)) + ' \n'
    f_text += 'TPR = ' + str(round(TPR, 2)) + ' \n'
    f_text += 'Q = ' + str(round(Q, 2))

    plt.text(-0.2,
             -0.95,
             c_text,
             horizontalalignment='left',
             verticalalignment='top',
             size=18)

    #plt.text(0.0, 1.0, c_text, horizontalalignment='left',  verticalalignment='top', size = 20)
    #plt.text(0.8, 1.0, f_text, horizontalalignment='left',  verticalalignment='top', size = 20)
    #plt.text(0.325, 0.0, title, horizontalalignment='left',  verticalalignment='top', size = 20)
    #plt.title(title, horizontalalignment='left',  verticalalignment='top', size = 20)
    ###########################################################################
    plt.subplot(1, 2, 2)

    res = 0.025
    #E, G = add.simulate_ind(whole_params['simulation_data'], whole_params['disease_params'])

    coherence_C = []
    coherence_S = []


    for i, expressed_genes in enumerate(tmp_result['expressed_genes_healthy']):
        g = nx.Graph(tmp_result['g']).copy()
        g = g.subgraph(expressed_genes)
        g = nx.Graph(g)

        N = len(g.nodes())
        #g.remove_nodes_from([ n for n,d in g.degree_iter() if d == 0 ])
        g.remove_nodes_from([n for n, d in g.degree() if d == 0])

        K = len(g.nodes())

        if N > 0:
            coherence_C.append(float(K) / float(N))
        else:
            coherence_C.append(0)

    for i, expressed_genes in enumerate(tmp_result['expressed_genes_sick']):
        g = nx.Graph(tmp_result['g']).copy()
        g = g.subgraph(expressed_genes)
        g = nx.Graph(g)

        N = len(g.nodes())
        g.remove_nodes_from([n for n, d in g.degree() if d == 0])

        K = len(g.nodes())

        if N > 0:
            coherence_S.append(float(K) / float(N))
        else:
            coherence_S.append(0)

    sns.kdeplot(np.array(coherence_C),
                shade=True,
                shade_lowest=False,
                alpha=0.66,
                color="g",
                bw=res)
    sns.kdeplot(np.array(coherence_S),
                shade=True,
                shade_lowest=False,
                alpha=0.66,
                color="r",
                bw=res)
    #plt.title('A = ' +str(A)+'  $\\rho_0$ = ' +str(rho_0))

    plt.xlabel('c', size=16)
    plt.yticks([])
    plt.legend(['C - controls', 'P - patients'])

    ###########################################################################
    #if row_number == 1:
    plt.suptitle('$\\rho_0$ = ' + str(tmp_result['rho_0']) + ',   f = ' +
                 str(tmp_result['flt']),
                 size=18)

    #if col_number == 1:
    #plt.ylabel('f = ' + str(tmp_result['flt']) , size = 32)

    #filename = simulation_data['img'] + prefix + '_flt_' + str(flt) + '_rho_0_' + str(disease_params['rho_0']) + suffix
    #filename = simulation_data['img'] + prefix
    #filename = filename.replace('.','_')

    plt.tight_layout()
    #plt.savefig(filename + '_network_spring.png')
    #plt.savefig(filename + '_network_spring.pdf')
    plt.show()
    plt.close()
Beispiel #49
0
def quadrilinear(droof,Vb,flag,linew,fontsize,units, blg):
    droof = np.array(droof)
    Vb = np.asarray(Vb, dtype=float)  # ensure the array slicing/normalisation below also works on plain lists
    Fmax = np.max(Vb)
    for index, item in enumerate(Vb):
        if item >= Fmax:
            break
    fmax = index
    dmax = droof[index]
 
#   Yielding point (vulnerability-guidelines method):
#   find the yielding displacement with the equal-energy principle in the
#   interval from 0 to Dmax.
    Areas = np.array([(Vb[i+1]+Vb[i])/2 for i in range(0,fmax)])
    dd = np.array([droof[i+1]-droof[i] for i in range(0,fmax)])    
    Edmax = np.sum(dd*Areas) #Area under the pushover curve in the interval from 0 to Dmax         
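    # Equal-energy idealisation: Edmax must equal the area under an
    # elastic-perfectly-plastic curve with Fy = Fmax, i.e.
    # Edmax = Fy*dmax - 0.5*Fy*dy, which rearranges to dy = 2*(dmax - Edmax/Fmax).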
    dy = 2*(dmax-Edmax/Fmax)
    Fy = Fmax
    
    #   Onset of plateau:
    #   find where the slope of the pushover curve drops off into the plateau
    Vb_norm = Vb[fmax:]/Fy
    d_norm = droof[fmax:]/dy
    slp = [(Vb_norm[i]-Vb_norm[i-1])/(d_norm[i]-d_norm[i-1]) for i in range(1,len(Vb_norm))]
    indy_soft = np.nonzero(abs(np.array(slp))>0.3)
    if len(indy_soft[0])>1:
        fmin = indy_soft[0][-1]+fmax
        Fmin = Vb[fmin]
        dmin = droof[fmin]
        #   Onset of softening:
        #   find the softening displacement ds with the equal-energy principle in the interval from Dmax to Dmin (onset of the plateau)
        Areas = np.array([(Vb[i+1]+Vb[i])/2 for i in range(fmax,fmin)])
        dd = np.array([droof[i+1]-droof[i] for i in range(fmax,fmin)])
        Edmin = np.sum(dd*Areas)
        ds = 2/(Fmax-Fmin)*(Edmin - (dmin-dmax)*Fmax + 0.5*dmin*(Fmax-Fmin))
        du = np.max(droof)   
        if ds < dy: ds = dy
        if ds > du: ds = du
        #   Residual plateau
        if len(indy_soft[0])>0:
            Areas = np.array([(Vb[i+1]+Vb[i])/2 for i in range(fmin,len(Vb)-1)])
            dd = np.array([droof[i+1]-droof[i] for i in range(fmin,len(Vb)-1)])
            Edplat = np.sum(dd*Areas)
            Fres = Edplat/(droof[-1]-dmin)
            slp_soft = abs((Fmax-Fmin)/(ds-dmin))
            dmin = dmin+(Fmin-Fres)/slp_soft
            if dmin>du:
                dmin = du
                Fmin = Vb[-1]
            else:
                Fmin = Fres        
    else:
        fmin = len(Vb)-1
        Fmin = Fmax
        dmin = droof[fmin]
        ds = dmin
        du = dmin
    
    if flag:
        # Plot the pushover curve and its quadrilinear idealisation
        plt.plot(droof, Vb, color='b', linewidth=linew, label='pushover input')
        x = np.array([0, dy, ds, dmin, du])
        y = np.array([0, Fy, Fmax, Fmin, Fmin])
        plt.plot(x, y, color='r', marker='o', linewidth=linew,
                 label='quadrilinear idealisation')
        plt.xlabel('roof displacement, droof ' + units[0], fontsize=fontsize)
        plt.ylabel('base shear, Vb ' + units[1], fontsize=fontsize)
        plt.suptitle('Pushover curve - blg n.' + str(blg), fontsize=fontsize)
        plt.legend(loc='lower right', frameon=False)
        plt.show()
    return [dy,ds,dmin,du,Fy,Fmax,Fmin]
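
A minimal call sketch for quadrilinear; the synthetic pushover curve and the units strings below are assumptions for illustration only:

import numpy as np
import matplotlib.pyplot as plt

# synthetic pushover: elastic branch, plateau, softening branch, residual plateau
droof_demo = np.linspace(0.0, 0.5, 251)
Vb_demo = np.interp(droof_demo,
                    [0.0, 0.05, 0.25, 0.30, 0.50],
                    [0.0, 1000.0, 1000.0, 600.0, 600.0])
dy, ds, dmin, du, Fy, Fmax, Fmin = quadrilinear(
    droof_demo, Vb_demo, flag=True, linew=1.5, fontsize=10,
    units=['(m)', '(kN)'], blg=1)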
Beispiel #50
0
                     feed_dict={
                         x: new_test_set,
                         keep_prob: 1.
                     })

    # compute softmax probabilities
    softmax_probs = sess.run(tf.nn.softmax(logits),
                             feed_dict={
                                 x: new_test_set,
                                 keep_prob: 1.
                             })

# plot softmax probs along with traffic sign examples
n_images = new_test_set.shape[0]
fig, axarray = plt.subplots(n_images, 2)
plt.suptitle('Visualization of softmax probabilities for each example',
             fontweight='bold')
for r in range(0, n_images):
    axarray[r, 0].imshow(np.squeeze(new_test_images[r]))
    axarray[r, 0].set_xticks([]), axarray[r, 0].set_yticks([])
    plt.setp(axarray[r, 0].get_xticklabels(), visible=False)
    plt.setp(axarray[r, 0].get_yticklabels(), visible=False)
    axarray[r, 1].bar(np.arange(n_classes), softmax_probs[r])
    axarray[r, 1].set_ylim([0, 1])

# print top K predictions of the model for each example, along with confidence (softmax score)
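# (top_3 is presumably the (values, indices) pair returned by evaluating
#  tf.nn.top_k with k=K: top_3[0] holds the softmax scores, top_3[1] the
#  predicted class indices)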
for i in range(len(new_test_images)):
    print('Top {} model predictions for image {} (Target is {:02d})'.format(
        K, i, new_targets[i]))
    for k in range(K):
        top_c = top_3[1][i][k]
        print('   Prediction = {:02d} with confidence {:.2f}'.format(
            top_c, top_3[0][i][k]))
Beispiel #51
0
        grid_hat = gmm.predict(grid_test)

        change = np.empty((n_types, grid_hat.size), dtype=bool)
        for i in range(n_types):
            change[i] = grid_hat == order[i]
        for i in range(n_types):
            grid_hat[change[i]] = i

        grid_hat = grid_hat.reshape(x1.shape)
        plt.subplot(3, 2, k + 1)
        plt.pcolormesh(x1, x2, grid_hat, cmap=cm_light)
        plt.scatter(x[pair[0]],
                    x[pair[1]],
                    s=30,
                    c=y,
                    marker='o',
                    cmap=cm_dark,
                    edgecolors='k')
        xx = 0.95 * x1_min + 0.05 * x1_max
        yy = 0.1 * x2_min + 0.9 * x2_max
        plt.text(xx, yy, acc, fontsize=14)
        plt.xlim((x1_min, x1_max))
        plt.ylim((x2_min, x2_max))
        plt.xlabel(iris_feature[pair[0]], fontsize=14)
        plt.ylabel(iris_feature[pair[1]], fontsize=14)
        plt.grid()
    plt.tight_layout(pad=2)
    plt.suptitle(u'Unsupervised EM classification of the iris data', fontsize=20)
    plt.subplots_adjust(top=0.92)
    plt.show()
Beispiel #52
0
def plot_curves(out_dir, confs, plot_fname, metrics_fname, logger):
    l_fpr_pm_ss = list()
    l_tpr_pm_ss = list()
    l_pr_pm_ss = list()
    l_rc_pm_ss = list()
    l_f1_pm_ss = list()

    l_fpr_ksp_ss_thr = list()
    l_tpr_ksp_ss_thr = list()
    l_pr_ksp_ss_thr = list()
    l_rc_ksp_ss_thr = list()
    l_f1_ksp_ss_thr = list()

    l_fpr_pm = list()
    l_tpr_pm = list()
    l_pr_pm = list()
    l_rc_pm = list()
    l_f1_pm = list()

    l_pm_ksp_ss = list()
    l_pm_ksp = list()

    l_fpr_pm_thr = list()
    l_tpr_pm_thr = list()
    l_rc_pm_thr = list()
    l_pr_pm_thr = list()
    l_f1_pm_thr = list()
    l_pm_ksp_ss_thr = list()

    l_ksp_scores = list()
    l_ksp_ss_scores = list()
    l_ksp_ss_thr_scores = list()

    met_auc_pm = list()
    met_f1_pm = list()
    met_auc_pm_ss = list()
    met_f1_pm_ss = list()
    met_auc_pm_thr = list()
    met_f1_pm_thr = list()

    for i in range(len(confs)):

        file_ = os.path.join(confs[i].dataOutDir, 'metrics.npz')
        if (not os.path.exists(file_)):
            logger.info(file_ + ' does not exist. Calculating metrics')
            pksp.main(confs[i])
        logger.info('Loading ' + file_)
        npzfile = np.load(file_)

        l_fpr_pm_ss.append(npzfile['fpr_pm_ss'])
        l_tpr_pm_ss.append(npzfile['tpr_pm_ss'])
        l_pr_pm_ss.append(npzfile['pr_pm_ss'])
        l_rc_pm_ss.append(npzfile['rc_pm_ss'])
        l_f1_pm_ss.append(npzfile['f1_pm_ss'])

        l_fpr_ksp_ss_thr.append(npzfile['fpr_ksp_ss_thr'])
        l_tpr_ksp_ss_thr.append(npzfile['tpr_ksp_ss_thr'])
        l_pr_ksp_ss_thr.append(npzfile['pr_ksp_ss_thr'])
        l_rc_ksp_ss_thr.append(npzfile['rc_ksp_ss_thr'])
        l_f1_ksp_ss_thr.append(npzfile['f1_ksp_ss_thr'])

        l_fpr_pm.append(npzfile['fpr_pm'])
        l_tpr_pm.append(npzfile['tpr_pm'])
        l_pr_pm.append(npzfile['pr_pm'])
        l_rc_pm.append(npzfile['rc_pm'])
        l_f1_pm.append(npzfile['f1_pm'])

        l_pm_ksp_ss.append(npzfile['pm_ksp_ss'])
        l_pm_ksp.append(npzfile['pm_ksp'])

        l_fpr_pm_thr.append(npzfile['fpr_pm_thr'])
        l_tpr_pm_thr.append(npzfile['tpr_pm_thr'])
        l_rc_pm_thr.append(npzfile['rc_pm_thr'])
        l_pr_pm_thr.append(npzfile['pr_pm_thr'])
        l_f1_pm_thr.append(npzfile['f1_pm_thr'])
        l_pm_ksp_ss_thr.append(npzfile['pm_ksp_ss_thr'])

    # Make ROC curves
    # Plot all iterations of PM
    plt.clf()
    confs[0].roc_xlim = [0, 0.4]
    confs[0].pr_rc_xlim = [0.6, 1.]
    alpha = 0.3

    lw = 1
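    # Every curve family below follows the same recipe: average the per-run
    # AUC and max-F1 scores, interpolate all runs onto a common 2000-point
    # grid with utls.concat_interp, then draw the mean curve with a +/- 1 std
    # shaded band.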
    # PM curves
    aucs = [
        auc(np.asarray(l_fpr_pm[i]).ravel(),
            np.asarray(l_tpr_pm[i]).ravel(),
            reorder=True) for i in range(len(l_fpr_pm))
    ]
    auc_roc = np.mean(aucs)
    met_auc_pm.append((np.mean(aucs), np.std(aucs)))
    auc_pr_rc = np.mean([
        auc(np.asarray(l_pr_pm[i]).ravel(),
            np.asarray(l_rc_pm[i]).ravel(),
            reorder=True) for i in range(len(l_pr_pm))
    ])
    max_f1s = [np.max(l_f1_pm[i].ravel()) for i in range(len(l_f1_pm))]
    max_f1 = np.mean(max_f1s)
    met_f1_pm.append((np.mean(max_f1s), np.std(max_f1s)))

    fpr, tpr = utls.concat_interp(l_fpr_pm, l_tpr_pm, 2000)
    rc, pr = utls.concat_interp(l_rc_pm, l_pr_pm, 2000)

    plt.subplot(121)
    plt.plot(fpr,
             tpr.mean(axis=0),
             'r-',
             lw=lw,
             label='KSP/PM (area = %0.4f, max(F1) = %0.4f)' %
             (auc_roc, max_f1))
    plt.fill_between(fpr,
                     tpr.mean(axis=0) + tpr.std(axis=0),
                     tpr.mean(axis=0) - tpr.std(axis=0),
                     facecolor='r',
                     alpha=alpha)

    plt.subplot(122)
    plt.plot(rc,
             pr.mean(axis=0),
             'r-',
             lw=lw,
             label='KSP/PM (area = %0.4f, max(F1) = %0.4f)' %
             (auc_pr_rc, max_f1))
    plt.fill_between(rc,
                     pr.mean(axis=0) + pr.std(axis=0),
                     pr.mean(axis=0) - pr.std(axis=0),
                     facecolor='r',
                     alpha=alpha)

    # Plot KSP+SS PM
    aucs = [
        auc(np.asarray(l_fpr_pm_ss[i]).ravel(),
            np.asarray(l_tpr_pm_ss[i]).ravel(),
            reorder=True) for i in range(len(l_fpr_pm_ss))
    ]
    auc_roc = np.mean(aucs)
    met_auc_pm_ss.append((np.mean(aucs), np.std(aucs)))
    auc_pr_rc = np.mean([
        auc(np.asarray(l_pr_pm_ss[i]).ravel(),
            np.asarray(l_rc_pm_ss[i]).ravel(),
            reorder=True) for i in range(len(l_pr_pm_ss))
    ])
    max_f1s = [np.max(l_f1_pm_ss[i].ravel()) for i in range(len(l_f1_pm_ss))]
    max_f1 = np.mean(max_f1s)
    met_f1_pm_ss.append((np.mean(max_f1s), np.std(max_f1s)))

    fpr, tpr = utls.concat_interp(l_fpr_pm_ss, l_tpr_pm_ss, 2000)
    rc, pr = utls.concat_interp(l_rc_pm_ss, l_pr_pm_ss, 2000)
    plt.subplot(121)
    plt.plot(fpr,
             tpr.mean(axis=0),
             'g-',
             lw=lw,
             label='KSP+SS/PM (area = %0.4f, max(F1) = %0.4f)' %
             (auc_roc, max_f1))
    plt.fill_between(fpr,
                     tpr.mean(axis=0) + tpr.std(axis=0),
                     tpr.mean(axis=0) - tpr.std(axis=0),
                     facecolor='g',
                     alpha=alpha)

    plt.subplot(122)
    plt.plot(rc,
             pr.mean(axis=0),
             'g-',
             lw=lw,
             label='KSP+SS/PM (area = %0.4f, max(F1) = %0.4f)' %
             (auc_pr_rc, max_f1))
    plt.fill_between(rc,
                     pr.mean(axis=0) + pr.std(axis=0),
                     pr.mean(axis=0) - pr.std(axis=0),
                     facecolor='g',
                     alpha=alpha)

    # Plot KSP+SS PM thresholded
    aucs = [
        auc(np.asarray(l_fpr_pm_thr[i]).ravel(),
            np.asarray(l_tpr_pm_thr[i]).ravel(),
            reorder=True) for i in range(len(l_fpr_pm_thr))
    ]
    auc_roc = np.mean(aucs)
    met_auc_pm_thr.append((np.mean(aucs), np.std(aucs)))
    auc_pr_rc = np.mean([
        auc(np.asarray(l_pr_pm_thr[i]).ravel(),
            np.asarray(l_rc_pm_thr[i]).ravel(),
            reorder=True) for i in range(len(l_pr_pm_thr))
    ])
    max_f1s = [np.max(l_f1_pm_thr[i].ravel()) for i in range(len(l_f1_pm_thr))]
    max_f1 = np.mean(max_f1s)
    met_f1_pm_thr.append((np.mean(max_f1s), np.std(max_f1s)))

    fpr, tpr = utls.concat_interp(l_fpr_pm_thr, l_tpr_pm_thr, 2000)
    rc, pr = utls.concat_interp(l_rc_pm_thr, l_pr_pm_thr, 2000)
    plt.subplot(121)
    plt.plot(fpr,
             tpr.mean(axis=0),
             'b-',
             lw=lw,
             label='KSP+SS/PM (thr: %0.1f) (area = %0.4f, max(F1) = %0.4f)' %
             (confs[0].pm_thr, auc_roc, max_f1))
    plt.fill_between(fpr,
                     tpr.mean(axis=0) + tpr.std(axis=0),
                     tpr.mean(axis=0) - tpr.std(axis=0),
                     facecolor='b',
                     alpha=alpha)

    plt.subplot(122)
    plt.plot(rc,
             pr.mean(axis=0),
             'b-',
             lw=lw,
             label='KSP+SS/PM (thr: %0.1f) (area = %0.4f, max(F1) = %0.4f)' %
             (confs[0].pm_thr, auc_pr_rc, max_f1))
    plt.fill_between(rc,
                     pr.mean(axis=0) + pr.std(axis=0),
                     pr.mean(axis=0) - pr.std(axis=0),
                     facecolor='b',
                     alpha=alpha)

    plt.subplot(121)
    plt.legend()
    plt.xlim(confs[0].roc_xlim)
    plt.xlabel('false positive rate')
    plt.ylabel('true positive rate')
    plt.subplot(122)
    plt.legend()
    plt.xlim(confs[0].pr_rc_xlim)
    plt.xlabel('recall')
    plt.ylabel('precision')
    plt.suptitle(confs[0].seq_type + '(' + confs[0].ds_dir + ')' +
                 ' Num. gaze sets: ' + str(len(confs)))
    fig = plt.gcf()
    fig.set_size_inches(18.5, 10.5)
    plt.savefig(os.path.join(out_dir, plot_fname + '_all.pdf'), dpi=200)

    # standalone precision-recall figure
    plt.clf()
    rc, pr = utls.concat_interp(l_rc_pm, l_pr_pm, 2000)

    plt.plot(rc,
             pr.mean(axis=0),
             'r-',
             lw=lw,
             label='KSP/PM (area = %0.4f, max(F1) = %0.4f)' %
             (auc_pr_rc, max_f1))
    plt.fill_between(rc,
                     pr.mean(axis=0) + pr.std(axis=0),
                     pr.mean(axis=0) - pr.std(axis=0),
                     facecolor='r',
                     alpha=alpha)

    plt.savefig(os.path.join(out_dir, plot_fname + '_pr.pdf'), dpi=200)

    logger.info('Done generating curves')

    file_out = os.path.join(out_dir, metrics_fname)
    logger.info('Writing mean and std of metrics to: ' + file_out)

    I = pd.Index(["mean AUC", "mean F1", "std AUC", "std F1"], name="rows")
    C = pd.Index(["ksp", "ksp+ss", "ksp+ss+thr"], name="columns")
    data = np.asarray([
        np.asarray([np.asarray(met_auc_pm),
                    np.asarray(met_f1_pm)]),
        np.asarray([np.asarray(met_auc_pm_ss),
                    np.asarray(met_f1_pm_ss)]),
        np.asarray([np.asarray(met_auc_pm_thr),
                    np.asarray(met_f1_pm_thr)])
    ]).T
    data = data.reshape(4, 3)
    df = pd.DataFrame(data=data, index=I, columns=C)
    df.to_csv(path_or_buf=file_out)

    return True
Beispiel #53
0
	w_2_1.append(model.get_weights()[2][0,0])
	w_2_2.append(model.get_weights()[2][1,0])
	w_2_3.append(model.get_weights()[2][2,0])
	w_2_4.append(model.get_weights()[2][3,0])
	print("save each and every loss and accuracy")
	losses.append(hist.history['loss'][0])
	accuracies.append(hist.history['acc'][0])


#######################################################
# plot all weights and losses, accuracies
#######################################################

plt.figure()

plt.suptitle("xor_h_4nodes_no_sigmoid")


ax1 = plt.subplot2grid((6, 4), (0, 0), colspan=2, rowspan=2)  # stands for axes
ax1.plot(losses, c='blue', label='losses') # change index name
ax1.legend(loc='best')
ax1.set_title("end value: %.04f" % losses[-1], fontsize = 5)


ax2 = plt.subplot2grid((6, 4), (0, 2), colspan=2, rowspan=2)
ax2.plot(accuracies, c='red', label='acc')
ax2.legend(loc='best')
ax2.set_title("end value: %.04f" % accuracies[-1], fontsize=5)

ax3 = plt.subplot2grid((6, 4), (2, 0), colspan=1, rowspan=2)
ax3.plot(w_2_1, c='green', label='w_2_1')
Beispiel #54
0
dataset = pd.read_csv(r"Appdata10.csv")

dataset.describe()

#Data cleaning

dataset["hour"] = dataset.hour.str.slice(1, 3).astype(int)

#Plotting
dataset2 = dataset.copy().drop(
    columns=["user", "first_open", "enrolled", "screen_list", "enrolled_date"])
dataset2.head()

#Histograms
plt.suptitle("Histograms of features", fontsize=20)
for i in range(1, dataset2.shape[1] + 1):
    plt.subplot(3, 3, i)
    f = plt.gca()
    f.set_title(dataset2.columns.values[i - 1])

    vals = np.size(dataset2.iloc[:, i - 1].unique())

    plt.hist(dataset2.iloc[:, i - 1], bins=vals, color="green")
plt.show()  # show once, after the loop has drawn all subplots

#Correlation with response

dataset2.corrwith(dataset.enrolled).plot.bar(figsize=(20, 10),
                                             title="Co-Relation",
                                             grid=True,
Beispiel #55
0
def plot_regression_continuous(X,
                               target_col,
                               types=None,
                               scatter_alpha='auto',
                               scatter_size='auto',
                               drop_outliers=True,
                               **kwargs):
    """Plots for continuous features in regression.

    Creates plots of all the continuous features vs the target.
    Relevant features are determined using F statistics.

    Parameters
    ----------
    X : dataframe
        Input data including features and target.
    target_col : str or int
        Identifier of the target column in X.
    types : dataframe of types, optional
        Output of detect_types on X. Can be used to avoid recomputing the
        types.
    scatter_alpha : float, default='auto'
        Alpha values for scatter plots. 'auto' chooses a value heuristically.
    scatter_size : float, default='auto'
        Marker size for scatter plots. 'auto' chooses a value heuristically.
    drop_outliers : bool, default=True
        Whether to drop outliers when plotting.
    """
    types = _check_X_target_col(X, target_col, types, task="regression")

    if np.isnan(X[target_col]).any():
        X = X.dropna(subset=[target_col])
        warn("Missing values in target_col have been removed for regression",
             UserWarning)

    features = X.loc[:, types.continuous]
    if target_col in features.columns:
        features = features.drop(target_col, axis=1)
    if features.shape[1] == 0:
        return

    show_top = _get_n_top(features, "continuous")

    target = X[target_col]
    # HACK we should drop them per column before feeding them into f_regression
    # FIXME
    features_imp = SimpleImputer().fit_transform(features)
    f, p = f_regression(features_imp, target)
    top_k = np.argsort(f)[-show_top:][::-1]
    # we could do better lol
    fig, axes = _make_subplots(n_plots=show_top)

    # FIXME this could be a function or maybe using seaborn
    plt.suptitle("Continuous Feature vs Target")
    for i, (col_idx, ax) in enumerate(zip(top_k, axes.ravel())):
        if i % axes.shape[1] == 0:
            ax.set_ylabel(target_col)
        col = features.columns[col_idx]
        if drop_outliers:
            inliers = _find_inliers(features.loc[:, col])
            ax.scatter(features.loc[inliers, col],
                       target[inliers],
                       alpha=scatter_alpha,
                       s=scatter_size)
        else:
            ax.scatter(features.loc[:, col],
                       target,
                       alpha=scatter_alpha,
                       s=scatter_size)
        ax.set_xlabel(_shortname(col))
        ax.set_title("F={:.2E}".format(f[col_idx]))

    for j in range(i + 1, axes.size):
        # turn off axis if we didn't fill last row
        axes.ravel()[j].set_axis_off()
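
A minimal call sketch for plot_regression_continuous, assuming the helpers it relies on (_check_X_target_col, _get_n_top, _make_subplots, _find_inliers, _shortname) are importable; the toy frame and column names are hypothetical:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

rng = np.random.RandomState(0)
df = pd.DataFrame({'size': rng.rand(200), 'age': rng.rand(200)})
df['price'] = 3 * df['size'] - df['age'] + 0.1 * rng.randn(200)
plot_regression_continuous(df, target_col='price')
plt.show()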
Beispiel #56
0
t1 = np.array([94.62,95.72,95.80,95.97])
t2 = np.array([95.20,95.59,96.03,96.48])
t3 = np.array([92.35,92.97,93.23,96.48])
t4 = np.array([94.66,95.08,96.07,96.10])

#legend = []
fig = plt.figure(1)

plt.subplot(211)
plt.plot(n,t1,color="blue")
plt.plot(n,t2,color="red")
plt.plot(n,t3,color="green")
plt.plot(n,t4,color="black")
plt.legend(['Hidden Layers: 2', 'Hidden Layers: 3', 'Hidden Layers: 4', 'Hidden Layers: 5'])
plt.title('Accuracy of different layer counts as epochs increase (500 neurons)')
plt.xlabel('No. of epochs')
plt.ylabel('Accuracy in %')

n = np.array([1000, 500, 100])
t1 = np.array([83.77,87.08,92.07])
t2 = np.array([90.97,93.13,94.62])
t3 = np.array([82.26,85.16,92.35])
t4 = np.array([78.12,87.50,94.66])
plt.subplot(212)
plt.plot(n,t1,color="blue")
plt.plot(n,t2,color="red")
plt.plot(n,t3,color="green")
plt.plot(n,t4,color="black")
plt.legend(['Hidden Layers: 2', 'Hidden Layers: 3', 'Hidden Layers: 4', 'Hidden Layers: 5'])
plt.title('Accuracy of different layer counts as batch size decreases (100 neurons)')
Beispiel #57
0
def plot_regression_categorical(X, target_col, types=None, **kwargs):
    """Plots for categorical features in regression.

    Creates box plots of target distribution for important categorical
    features. Relevant features are identified using mutual information.

    For high cardinality categorical variables (variables with many categories)
    only the most frequent categories are shown.

    Parameters
    ----------
    X : dataframe
        Input data including features and target.
    target_col : str or int
        Identifier of the target column in X.
    types : dataframe of types, optional
        Output of detect_types on X. Can be used to avoid recomputing the
        types.
    """
    types = _check_X_target_col(X, target_col, types, task="regression")

    # drop nans from target column
    if np.isnan(X[target_col]).any():
        X = X.dropna(subset=[target_col])
        warn("Missing values in target_col have been removed for regression",
             UserWarning)

    if types is None:
        types = detect_types(X)
    features = X.loc[:, types.categorical]
    if target_col in features.columns:
        features = features.drop(target_col, axis=1)
    if features.shape[1] == 0:
        return
    features = features.astype('category')
    show_top = _get_n_top(features, "categorical")

    # can't use OrdinalEncoder because we might have mix of int and string
    ordinal_encoded = features.apply(lambda x: x.cat.codes)
    target = X[target_col]
    f = mutual_info_regression(ordinal_encoded,
                               target,
                               discrete_features=np.ones(
                                   ordinal_encoded.shape[1], dtype=bool))
    top_k = np.argsort(f)[-show_top:][::-1]

    # large number of categories -> taller plot
    row_height = 3 if X.nunique().max() <= 5 else 5
    fig, axes = _make_subplots(n_plots=show_top, row_height=row_height)
    plt.suptitle("Categorical Feature vs Target")
    for i, (col_ind, ax) in enumerate(zip(top_k, axes.ravel())):
        col = features.columns[col_ind]

        # count frequency for each categorical including NaN rows
        vc = X[col].value_counts(dropna=False)
        # convert vc index to string to match mpl's string labels
        vc.index = vc.index.astype('str')
        # assume the series is ordinal unless a float conversion fails
        # ordinals are numeric like 2.0, np.NaN but not "somelabel"
        is_ordinal = True
        try:
            _ = [float(s) for s in vc.index.values]
        except ValueError:
            is_ordinal = False

        X_new = _prune_category_make_X(X, col, target_col)
        if is_ordinal:
            # alphanumeric label sort for ordinal items
            order = sorted(np.unique(X[col].dropna()))
        else:
            # median sort for non-ordinal labels
            medians = X_new.groupby(col)[target_col].median()
            order = medians.sort_values().index

        sns.boxplot(x=target_col, y=col, data=X_new, order=order, ax=ax)
        plt.draw()
        ax.set_title("F={:.2E}".format(f[col_ind]))
        add_counts_to_yticklabels(ax, vc)
        # shorten long ticks and labels
        _short_tick_names(ax)

    for j in range(i + 1, axes.size):
        # turn off axis if we didn't fill last row
        axes.ravel()[j].set_axis_off()
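
A corresponding sketch for plot_regression_categorical; the toy frame and the availability of the helpers are again assumptions:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

rng = np.random.RandomState(0)
df = pd.DataFrame({'area': rng.choice(['north', 'south', 'east'], 200)})
df['price'] = df['area'].map({'north': 3.0, 'south': 1.0, 'east': 2.0}) + rng.randn(200)
plot_regression_categorical(df, target_col='price')
plt.show()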
Beispiel #58
0
def plot_classification_categorical(X,
                                    target_col,
                                    types=None,
                                    kind='auto',
                                    hue_order=None,
                                    **kwargs):
    """Plots for categorical features in classification.

    Creates plots of categorical variable distributions for each target class.
    Relevant features are identified via mutual information.

    For high cardinality categorical variables (variables with many categories)
    only the most frequent categories are shown.

    Parameters
    ----------
    X : dataframe
        Input data including features and target
    target_col : str or int
        Identifier of the target column in X
    types : dataframe of types, optional.
        Output of detect_types on X. Can be used to avoid recomputing the
        types.
    kind : string, default 'auto'
        Kind of plot to show. Options are 'count', 'proportion',
        'mosaic' and 'auto'.
        Count shows raw class counts within categories
        (can be hard to read with imbalanced classes)
        Proportion shows class proportions within categories
        (can be misleading with imbalanced categories)
        Mosaic shows both aspects, but can be a bit busy.
        Auto uses mosaic plots for binary classification and counts otherwise.

    """
    types = _check_X_target_col(X, target_col, types, task="classification")
    if kind == "auto":
        if X[target_col].nunique() > 5:
            kind = 'count'
        else:
            kind = 'mosaic'

    features = X.loc[:, types.categorical]
    if target_col in features.columns:
        features = features.drop(target_col, axis=1)

    if features.shape[1] == 0:
        return

    features = features.astype('category')

    show_top = _get_n_top(features, "categorical")

    # can't use OrdinalEncoder because we might have mix of int and string
    ordinal_encoded = features.apply(lambda x: x.cat.codes)
    target = X[target_col]
    f = mutual_info_classif(ordinal_encoded,
                            target,
                            discrete_features=np.ones(ordinal_encoded.shape[1],
                                                      dtype=bool))
    top_k = np.argsort(f)[-show_top:][::-1]
    # large number of categories -> taller plot
    row_height = 3 if features.nunique().max() <= 5 else 5
    fig, axes = _make_subplots(n_plots=show_top, row_height=row_height)
    plt.suptitle("Categorical Features vs Target", y=1.02)
    for i, (col_ind, ax) in enumerate(zip(top_k, axes.ravel())):
        col = features.columns[col_ind]
        if kind == 'proportion':
            X_new = _prune_category_make_X(X, col, target_col)

            df = (X_new.groupby(col)[target_col].value_counts(
                normalize=True).unstack().sort_values(by=target[0])
                  )  # hacky way to get a class name
            df.plot(kind='barh', stacked=True, ax=ax, legend=i == 0)
            ax.set_title(col)
            ax.set_ylabel(None)
        elif kind == 'mosaic':
            # how many categories make up at least 1% of data:
            n_cats = (X[col].value_counts() / len(X) > 0.01).sum()
            n_cats = np.minimum(n_cats, 20)
            X_new = _prune_category_make_X(X,
                                           col,
                                           target_col,
                                           max_categories=n_cats)
            mosaic_plot(X_new, col, target_col, ax=ax, legend=i == 0)
            ax.set_title(col)
        elif kind == 'count':
            X_new = _prune_category_make_X(X, col, target_col)

            # absolute counts
            # FIXME show f value
            # FIXME shorten titles?
            props = {}
            if X[target_col].nunique() > 15:
                props['font.size'] = 6
            with mpl.rc_context(props):
                sns.countplot(y=col,
                              data=X_new,
                              ax=ax,
                              hue=target_col,
                              hue_order=hue_order)
            if i > 0:
                ax.legend(())
        else:
            raise ValueError("Unknown plot kind {}".format(kind))
        _short_tick_names(ax)

    for j in range(i + 1, axes.size):
        # turn off axis if we didn't fill last row
        axes.ravel()[j].set_axis_off()
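
A hedged usage sketch for plot_classification_categorical; the toy frame and the helper functions above are assumptions:

import pandas as pd
import matplotlib.pyplot as plt

df = pd.DataFrame({'sex': ['m', 'f'] * 50,
                   'pclass': ['1st', '2nd', '3rd', '3rd'] * 25,
                   'survived': [0, 1] * 50})
# a binary target would default to mosaic plots; force raw counts instead
plot_classification_categorical(df, target_col='survived', kind='count')
plt.show()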
Beispiel #59
0
m.drawstates()
m.drawcountries(linewidth=1.0)
m.drawcoastlines()
map_lon, map_lat = m(*(x,y))
        
# target grid to interpolate to
xi = np.linspace(map_lon.min(), map_lon.max(), numcols)
yi = np.linspace(map_lat.min(), map_lat.max(), numrows)
xi,yi = np.meshgrid(xi,yi)

# interpolate
zi = griddata((map_lon,map_lat),z,(xi,yi),method='linear')
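# (griddata here is presumably scipy.interpolate.griddata: the scattered
#  station values z are linearly interpolated onto the regular xi, yi grid)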

# plot
bounds = np.arange(0,110,10)
m.contourf(xi,yi,zi, cmap=cm.Blues, levels=bounds)
cbar = m.colorbar()
degree_sign = u'\N{DEGREE SIGN}'
# cbar.set_label("Temperature %sC" % degree_sign, fontsize=14)
cbar.set_label("Relative Humidity (%)")
m.plot(map_lon,map_lat,'.k',ms=1)

# plt.title('Contour Map Test - Temperature at %s feet' % ALT_txt, fontsize=20)
if ALT_txt == 0:
    plt.suptitle('Relative Humidity at Ground Level', fontsize=20)
else:
    plt.suptitle('Relative Humidity at %s feet' % ALT_txt, fontsize=20)
plt.title('06/12/18 at 12z', fontsize=12)
plt.show()

Beispiel #60
0
def plot_classification_continuous(X,
                                   target_col,
                                   types=None,
                                   hue_order=None,
                                   scatter_alpha='auto',
                                   scatter_size="auto",
                                   univariate_plot='histogram',
                                   drop_outliers=True,
                                   plot_pairwise=True,
                                   top_k_interactions=10,
                                   random_state=None,
                                   **kwargs):
    """Plots for continuous features in classification.

    Selects important continuous features according to F statistics.
    Creates univariate distribution plots for these, as well as scatterplots
    for selected pairs of features, and scatterplots for selected pairs of
    PCA directions.
    If there are more than 2 classes, scatter plots from Linear Discriminant
    Analysis are also shown.
    Scatter plots are deemed "interesting" if a decision tree on the
    two-dimensional projection performs well. The cross-validated macro-average
    recall of a decision tree is shown in the title for each scatterplot.

    Parameters
    ----------
    X : dataframe
        Input data including features and target.
    target_col : str or int
        Identifier of the target column in X.
    types : dataframe of types, optional.
        Output of detect_types on X. Can be used to avoid recomputing the
        types.
    scatter_alpha : float, default='auto'
        Alpha values for scatter plots. 'auto' chooses a value heuristically.
    scatter_size : float, default='auto'
        Marker size for scatter plots. 'auto' chooses a value heuristically.
    univariate_plot : string, default="histogram"
        Supported: 'histogram' and 'kde'.
    drop_outliers : bool, default=True
        Whether to drop outliers when plotting.
    plot_pairwise : bool, default=True
        Whether to create pairwise plots. Can be a bit slow.
    top_k_interactions : int, default=10
        How many pairwise interactions to consider
        (ranked by univariate f scores).
        Runtime is quadratic in this, but higher numbers might find more
        interesting interactions.
    random_state : int, None or numpy RandomState
        Random state used for subsampling for determining pairwise features
        to show.

    Notes
    -----
    Important kwargs parameters are: scatter_size and scatter_alpha.
    """

    types = _check_X_target_col(X, target_col, types, task='classification')

    features = X.loc[:, types.continuous]
    if target_col in features.columns:
        features = features.drop(target_col, axis=1)
    if features.shape[1] == 0:
        return

    features_imp = SimpleImputer().fit_transform(features)
    target = X[target_col]
    figures = []
    if features.shape[1] <= 5:
        pairplot(X,
                 target_col=target_col,
                 columns=features.columns,
                 scatter_alpha=scatter_alpha,
                 scatter_size=scatter_size)
        title = "Continuous features"
        if features.shape[1] > 1:
            title = title + " pairplot"
        plt.suptitle(title, y=1.02)

        fig = plt.gcf()
    else:
        # univariate plots
        f = _plot_univariate_classification(features, features_imp, target,
                                            drop_outliers, target_col,
                                            univariate_plot, hue_order)
        figures.append(plt.gcf())

        # FIXME remove "variable = " from title, add f score
        # pairwise plots
        if not plot_pairwise:
            return figures
        top_k = np.argsort(f)[-top_k_interactions:][::-1]
        fig, axes = _plot_top_pairs(features_imp[:, top_k],
                                    target,
                                    scatter_alpha,
                                    scatter_size,
                                    feature_names=features.columns[top_k],
                                    how_many=4,
                                    random_state=random_state)
        fig.suptitle("Top feature interactions")
    figures.append(fig)
    if not plot_pairwise:
        return figures
    # get some PCA directions
    # we're using all features here, not only most informative
    # should we use only those?
    n_components = min(top_k_interactions, features.shape[0],
                       features.shape[1])
    if n_components < 2:
        return figures
    features_scaled = _plot_pca_classification(n_components,
                                               features_imp,
                                               target,
                                               scatter_alpha,
                                               scatter_size,
                                               random_state=random_state)
    figures.append(plt.gcf())
    # LDA
    _plot_lda_classification(features_scaled,
                             target,
                             top_k_interactions,
                             scatter_alpha,
                             scatter_size,
                             random_state=random_state)
    figures.append(plt.gcf())
    return figures
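
Finally, a minimal call sketch for plot_classification_continuous; the iris frame comes from scikit-learn, and the helper functions above are assumed importable:

import matplotlib.pyplot as plt
from sklearn.datasets import load_iris

iris = load_iris(as_frame=True)
df = iris.frame  # four continuous features plus the integer 'target' column
figures = plot_classification_continuous(df, target_col='target',
                                         random_state=0)
plt.show()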