def plotting(plot_data):
    # accessing dictionary data and making it into numpy
    # arrays so that it can be used for mathematical operations
    # and plotting values
    ref_range = np.array(plot_data["ref_range"])
    rho_range = np.array(plot_data["rho_range"])
    ref = np.array(plot_data["ref"])
    rho = np.array(plot_data["rho"])
    az = np.array(plot_data["az"])
    fig, axes = plt.subplots(1, 2, figsize=(15, 8))
    for var_data, var_range, ax in zip((ref, rho), (ref_range, rho_range),
                                       axes):
        # Turn into an array, then mask
        data = ma.array(var_data)
        data[np.isnan(data)] = ma.masked
        # Convert az,range to x,y
        xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
        ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))
        # Plot the data
        # 'viridis' is a matplotlib colormap, so pass the name straight to
        # pcolormesh rather than looking it up in MetPy's color-table registry
        ax.pcolormesh(xlocs, ylocs, data, cmap='viridis')
        ax.set_aspect('equal', 'datalim')
        ax.set_xlim(-40, 20)
        ax.set_ylim(-30, 30)
        add_timestamp(ax, datetime.now(), y=0.02, high_contrast=True)
    # Labelling plot
    fig.suptitle('Minimum and Maximum range of Reflectivity')
    # save the figure for later use
    plt.savefig("Reflectivity_Correlation.png")
    return hosting()
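
# A minimal sketch (hypothetical helper) of how the plot_data dictionary read by
# plotting() might be assembled from a MetPy Level2File; the original project's
# construction code is not shown in this excerpt.
def build_plot_data(f, sweep=0):
    ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
    rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
    return {
        'az': [ray[0].az_angle for ray in f.sweeps[sweep]],
        'ref_range': (np.arange(ref_hdr.num_gates) * ref_hdr.gate_width
                      + ref_hdr.first_gate).tolist(),
        'rho_range': (np.arange(rho_hdr.num_gates) * rho_hdr.gate_width
                      + rho_hdr.first_gate).tolist(),
        'ref': [ray[4][b'REF'][1] for ray in f.sweeps[sweep]],
        'rho': [ray[4][b'RHO'][1] for ray in f.sweeps[sweep]],
    }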
Example #2
def generate_plot(site, date=None):

    if date:
        request_time = datetime.strptime(date, '%Y%m%d%H')
    else:
        now = datetime.now(timezone.utc) - timedelta(hours=2)
        request_time = now.replace(hour=(now.hour // 12) * 12,
                                   minute=0,
                                   second=0)

    # Request the data and plot
    df = WyomingUpperAir.request_data(request_time, site)
    skewt = plot_skewt(df)

    # Add the timestamp for the data to the plot
    add_timestamp(skewt.ax,
                  request_time,
                  y=1.02,
                  x=0,
                  ha='left',
                  fontsize='large')
    skewt.ax.set_title(site)
    # skewt.ax.figure.savefig(make_name(site, date, request_time))

    bio = io.BytesIO()
    skewt.ax.figure.savefig(bio, format='svg')
    bio.seek(0)
    b64 = base64.b64encode(bio.read())
    message = {}
    message['station_id'] = site
    message['sounding'] = b64
    db.soundings.replace_one({'station_id': site}, message, upsert=True)
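
# A brief sketch of reading the stored sounding back out; assumes the same
# pymongo `db` handle used above, and 'OUN' is only an example station id.
doc = db.soundings.find_one({'station_id': 'OUN'})
svg_bytes = base64.b64decode(doc['sounding'])
with open('OUN_sounding.svg', 'wb') as out:
    out.write(svg_bytes)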
Example #3
File: test_util.py  Project: ytao579/MetPy
def test_add_timestamp_custom_format():
    """Test adding a timestamp to an axes object with custom time formatting."""
    fig = plt.figure(figsize=(9, 9))
    ax = plt.subplot(1, 1, 1)
    add_timestamp(ax,
                  time=datetime(2017, 1, 1),
                  time_format='%H:%M:%S %Y/%m/%d')
    return fig
Example #4
def save_as_image(d, nexrad):
    LAYER1 = b"REF"
    LAYER2 = b"VEL"
    f = Level2File(str(nexrad))

    # Pull data out of the file
    for sweep in range(0, 21):
        try:
            print(f"rendering sweep {sweep}")
            # First item in ray is header, which has azimuth angle
            az = np.array([ray[0].az_angle for ray in f.sweeps[sweep]])

            # 5th item is a dict mapping a var name (byte string) to a tuple
            # of (header, data array)
            ref_hdr = f.sweeps[sweep][0][4][LAYER1][0]
            ref_range = np.arange(
                ref_hdr.num_gates) * ref_hdr.gate_width + ref_hdr.first_gate
            ref = np.array([ray[4][LAYER1][1] for ray in f.sweeps[sweep]])
            try:
                rho_hdr = f.sweeps[sweep][0][4][LAYER2][0]
                rho_range = (np.arange(rho_hdr.num_gates + 1) -
                             0.5) * rho_hdr.gate_width + rho_hdr.first_gate
                rho = np.array([ray[4][LAYER2][1] for ray in f.sweeps[sweep]])
            except KeyError:
                # fall back to RHO when the sweep lacks a VEL moment
                rho_hdr = f.sweeps[sweep][0][4][b"RHO"][0]
                rho_range = np.arange(
                    rho_hdr.num_gates) * rho_hdr.gate_width + rho_hdr.first_gate
                rho = np.array([ray[4][b"RHO"][1] for ray in f.sweeps[sweep]])

            fig, axes = plt.subplots(1, 2, figsize=(15, 8))
            for var_data, var_range, ax in zip((ref, rho),
                                               (ref_range, rho_range), axes):
                # Turn into an array, then mask
                data = np.ma.array(var_data)
                data[np.isnan(data)] = np.ma.masked

                # Convert az,range to x,y
                xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
                ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))

                # Plot the data
                ax.pcolormesh(xlocs, ylocs, data, cmap='viridis')
                ax.set_aspect('equal', 'datalim')
                ax.set_xlim(-275, 275)
                ax.set_ylim(-275, 275)
                add_timestamp(ax, f.dt, y=0.02, high_contrast=True)

            plt.savefig(str(d / OUT_PREFIX.format(f.dt.timestamp(), sweep)))
        except Exception:
            print(f"sweep {sweep} failed, skipping")
Example #5
def main():
    manager = mp.Manager()
    results = manager.dict()
    pool = TPool(12)
    jobs = []

    startDateTime = datetime.datetime.strptime(args.convTime, '%Y%m%d%H%M')
    intervalDateTime = datetime.timedelta(
        hours=2, minutes=0
    )  #hours = int(args.convInterval[:2]), minutes=int([args.convInterval[2:]]))

    station = args.sensor

    # Query all L2 files for the sensor
    totalRadarObjects = []
    totalSweepDateTimes = []
    hrIter = datetime.timedelta(hours=0)
    while True:  # grab a specific interval of files
        radarObjects, sweepDateTimes = pull_data(startDateTime=(startDateTime + hrIter),
                                                 station=station)
        totalRadarObjects.extend(radarObjects[:-1])
        totalSweepDateTimes.extend(
            sweepDateTimes[:-1])  # remove trailing *_MDM file
        if totalSweepDateTimes[-1] - startDateTime >= intervalDateTime:
            break
        else:
            hrIter += datetime.timedelta(hours=1)
    fileDict = {'L2File': totalRadarObjects, 'Time': totalSweepDateTimes}
    fileDF = pd.DataFrame(fileDict)
    print(
        f'Start time: {startDateTime}, Interval: {intervalDateTime}, End Time: {startDateTime + intervalDateTime}'
    )
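    # pull_data() is not defined in this excerpt. Judging from the key slicing
    # (L2FileStream.key[20:35]) used below, a minimal sketch against the public
    # noaa-nexrad-level2 S3 bucket might look like:
    #
    #     def pull_data(startDateTime, station):
    #         s3 = boto3.resource('s3', config=Config(signature_version=botocore.UNSIGNED))
    #         bucket = s3.Bucket('noaa-nexrad-level2')
    #         prefix = f'{startDateTime:%Y/%m/%d}/{station}/{station}{startDateTime:%Y%m%d_%H}'
    #         objs = list(bucket.objects.filter(Prefix=prefix))
    #         times = [datetime.datetime.strptime(o.key[20:35], '%Y%m%d_%H%M%S') for o in objs]
    #         return objs, times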

    filesToStream = fileDF[((fileDF['Time'] >= startDateTime) \
        & (fileDF['Time'] <= startDateTime + \
        intervalDateTime))]['L2File'].tolist()       # bitwise operators; conditions double-wrapped in parentheses to handle precedence
    logging.info(f'files: {[obj.key for obj in filesToStream]}')
    if len(filesToStream) < 8:
        warnings.warn("n of radar inputs is not sufficent for curve smoothing",
                      UserWarning)

    # --- Stream files ahead of time to avoid error with multiprocessing and file handles ---
    filesToWorkers = []

    for L2FileStream in tqdm(filesToStream, desc="Streaming L2 Files"):
        try:
            if datetime.datetime.strptime(
                    L2FileStream.key[20:35],
                    '%Y%m%d_%H%M%S') >= datetime.datetime(2016, 1, 1):
                filesToWorkers.append(Level2File(L2FileStream.get()['Body']))
            else:
                bytestream = BytesIO(L2FileStream.get()['Body'].read())
                with gzip.open(bytestream, 'rb') as f:
                    filesToWorkers.append(Level2File(f))
        except ValueError:
            print("ValueError, most likely in parsing header")

    # --- Create pool for workers ---
    for file in filesToWorkers:
        job = pool.apply_async(calculate_radar_stats, (results, file))
        jobs.append(job)

    # --- Commit pool to workers ---
    for job in tqdm(jobs, desc="Bounding & Searching Data"):
        job.get()

    pool.close()
    pool.join()

    columns = [
        'sweepDateTime', 'metadata', 'sensorData', 'indices', 'xlocs', 'ylocs',
        'data', 'polyVerts', 'offset', 'areaValue', 'refValue', 'varRefValue'
    ]
    print(
        'Creating Dataframe... (This may take a while if plotting significant data)'
    )
    resultsDF = pd.DataFrame.from_dict(results,
                                       orient='index',
                                       columns=columns)  #SUPER slow
    print('Converting datetimes...')
    resultsDF['sweepDateTime'] = pd.to_datetime(resultsDF.sweepDateTime)
    print('Sorting...')
    resultsDF.sort_values(by='sweepDateTime', inplace=True)
    #resultsDF.to_csv(args.output + '.csv', index = False)
    print(resultsDF[['areaValue', 'refValue']].head(5))

    # --- Plot time series---
    fig, axes = plt.subplots(8, 8, figsize=(30, 30))
    date_format = mpl_dates.DateFormatter('%H:%Mz')

    for i, (dt, record) in tqdm(enumerate(resultsDF.iterrows()),
                                desc='Plotting Slices'):
        plotx = i % 8
        ploty = int(i / 8)

        negXLim = -.5
        posXLim = 1.5
        negYLim = -1.0
        posYLim = 1.0
        norm, cmap = ctables.registry.get_with_steps('NWSReflectivity', 5, 5)
        tempdata = record[
            'data'].copy()  # copy the data so masking for plotting doesn't mutate the DataFrame
        tempdata[tempdata == 0] = np.ma.masked  # mask out 0s for plotting

        axes[ploty][plotx].pcolormesh(record['xlocs'],
                                      record['ylocs'],
                                      tempdata,
                                      norm=norm,
                                      cmap=cmap,
                                      shading='auto')
        axes[ploty][plotx].set_aspect(aspect='equal')
        axes[ploty][plotx].set_xlim(negXLim, posXLim)
        axes[ploty][plotx].set_ylim(negYLim, posYLim)
        pVXs, pVYs = zip(
            *record['polyVerts']
        )  # create lists of x and y values for transformed polyVerts
        axes[ploty][plotx].plot(pVXs, pVYs)
        if negXLim < record['offset'][1] < posXLim and \
        negYLim < record['offset'][0] < posYLim:
            axes[ploty][plotx].plot(record['offset'][1], record['offset'][0],
                                    'o')  # Location of the radar
            axes[ploty][plotx].text(record['offset'][1], record['offset'][0],
                                    record['sensorData']['siteID'])

        axes[ploty][plotx].plot(0.0, 0.0, 'bx')  # Location of the convection
        axes[ploty][plotx].text(0.0, 0.0, str(args.convLatLon))
        add_timestamp(axes[ploty][plotx],
                      record['sweepDateTime'],
                      y=0.02,
                      high_contrast=True)
        axes[ploty][plotx].tick_params(axis='both', which='both')

    print('Calculating Statistics...')

    # pull data out of DF to make code cleaner
    datetimes = resultsDF['sweepDateTime'].tolist()
    #elapsedtimes = list(map(lambda x: x - min(datetimes), datetimes))						# not currently used, need to get this working
    areaValues = resultsDF['areaValue'].tolist()  # area ≥ 35 dBZ within ROI
    refValues = np.array(
        resultsDF['refValue'].tolist()
    )  # mean reflectivity ≥ 35 dBZ within ROI (conversion: (val-65)*0.5) [https://mesonet.agron.iastate.edu/GIS/rasters.php?rid=2]
    if np.isnan(refValues).any():  # `np.nan in refValues` never matches, since nan != nan
        warnings.warn(
            "Radar inputs contain an instance with no ref values >= thresh",
            UserWarning)
    varValues = resultsDF['varRefValue'].tolist(
    )  # variance of mean reflectivity ≥ 35 dBZ within ROI
    cvValues = np.array([
        a / b for a, b in zip(varValues, refValues)
    ]) * 0.5  # coeff. of variation for mean reflectivity ≥ 35 dBZ within ROI

    # Frequency
    N = len(refValues)
    T = 1.0 / N
    yf = fft(refValues)
    w = blackman(N)
    ywf = fft(refValues * w)
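    # Note: with T = 1.0/N the frequency axis built below (xf) is in cycles per
    # record rather than physical Hz, since the real scan spacing is not used;
    # the Blackman window tapers the series to reduce spectral leakage.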

    # Normalization
    areaNorm = areaValues / np.max(areaValues)
    xf = np.linspace(0, 1.0 / (2.0 * T), N // 2)
    cvNorm = cvValues / np.max(cvValues)
    areaCVValuesNormalized = np.multiply(areaNorm, cvNorm)

    # Curve Smoothing
    window = len(
        resultsDF.index
    ) // 8  # ~2 hours/8 = ~15 mins ----> number of samples in moving average ( helps counteract more visible noise in higher temporal resolution data)
    yAreaAvg = movingaverage(
        areaValues, window)[window // 2:-window //
                            2]  # create moving averages for time series'
    yRefAvg = movingaverage(refValues, window)[window // 2:-window // 2]
    yCVAvg = movingaverage(cvValues, window)[window // 2:-window // 2]
    yAreaCVNormAvg = movingaverage(areaCVValuesNormalized,
                                   window)[window // 2:-window // 2]
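    # movingaverage() is not defined in this excerpt; a common implementation is
    # a simple box filter via convolution, e.g.:
    #
    #     def movingaverage(values, window):
    #         weights = np.repeat(1.0, window) / window
    #         return np.convolve(values, weights, 'same')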

    # local minima & maxima on smoothed curves
    minTemporalWindow = window * 2

    areaLocalMax = argrelmax(yAreaAvg)
    areaLocalMin = argrelmin(yAreaAvg)
    endpoints = []
    if np.all(yAreaAvg[0] <= yAreaAvg[1:window + 1]) or \
       np.all(yAreaAvg[0] >= yAreaAvg[1:window + 1]):
        endpoints.append(0)
    if np.all(yAreaAvg[-1] <= yAreaAvg[len(yAreaAvg) - window:-2]) or \
       np.all(yAreaAvg[-1] >= yAreaAvg[len(yAreaAvg) - window:-2]):
        endpoints.append(len(yAreaAvg) - 1)
    #print(f'Area: Endpoints: {yAreaAvg[endpoints]}, Local Maxes: {yAreaAvg[areaLocalMax]}, Local Mins: {yAreaAvg[areaLocalMin]}')
    areaExtremaRaw = sorted(
        areaLocalMax[0].tolist() + areaLocalMin[0].tolist() + endpoints
    )  # combine mins, maxes, and endpoints (if endpoints are an extreme) then sort
    areaExtrema = [
        x for x in areaExtremaRaw[1:]
        if x - areaExtremaRaw[0] >= minTemporalWindow
    ]  # remove maxima that are within threshold of first one
    areaExtrema = [areaExtremaRaw[0]] + areaExtrema  # add the first one back in at the beginning
    logging.info(f'Area Values: {yAreaAvg}')
    logging.info(f'Area Extrema: {yAreaAvg[areaExtrema]}')

    refLocalMax = argrelmax(yRefAvg)
    refLocalMin = argrelmin(yRefAvg)
    endpoints = []
    if np.all(yRefAvg[0] <= yRefAvg[1:window + 1]) or \
       np.all(yRefAvg[0] >= yRefAvg[1:window + 1]):
        endpoints.append(0)
    if np.all(yRefAvg[-1] <= yRefAvg[len(yRefAvg) - window:-2]) or \
       np.all(yRefAvg[-1] >= yRefAvg[len(yRefAvg) - window:-2]):
        endpoints.append(len(yRefAvg) - 1)
    refExtremaRaw = sorted(refLocalMax[0].tolist() + refLocalMin[0].tolist() +
                           endpoints)
    refExtrema = [
        x for x in refExtremaRaw[1:]
        if x - refExtremaRaw[0] >= minTemporalWindow
    ]
    refExtrema = [refExtremaRaw[0]] + refExtrema
    logging.info(f'Ref Values: {yRefAvg}')
    logging.info(f'Ref Extrema: {yRefAvg[refExtrema]}')

    #cvLocalMax = argrelmax(yCVAvg)
    #cvLocalMin = argrelmin(yCVAvg)
    #endpoints = []
    #if yCVAvg[0] <= np.all(yCVAvg[1:window+1]) or\
    # yCVAvg[0] >= np.all(yCVAvg[1:window+1]):
    #	endpoints.append(0)
    #if yCVAvg[-1] <= np.all(yCVAvg[len(yCVAvg-1)-window+1:-2]) or\
    # yCVAvg[-1] >= np.all(yCVAvg[len(yCVAvg-1)-window+1:-2]):
    #	endpoints.append(len(yCVAvg)-1)
    #cvExtremaRaw = sorted(cvLocalMax[0].tolist()+cvLocalMin[0].tolist()+endpoints)
    #cvExtrema = [x for x in cvExtremaRaw[1:] if x-cvExtremaRaw[0]>=minTemporalWindow]
    #cvExtrema = [cvExtremaRaw[0]]+cvExtrema
    #logging.info(f'CV Values: {yCVAvg}')
    #logging.info(f'CV Extrema: {yCVAvg[cvExtrema]}')

    yAreaCVNormLocalMax = argrelmax(yAreaCVNormAvg)
    yAreaCVNormLocalMin = argrelmin(yAreaCVNormAvg)
    endpoints = []
    if np.all(yAreaCVNormAvg[0] <= yAreaCVNormAvg[1:window + 1]) or \
       np.all(yAreaCVNormAvg[0] >= yAreaCVNormAvg[1:window + 1]):
        endpoints.append(0)
    if np.all(yAreaCVNormAvg[-1] <= yAreaCVNormAvg[len(yAreaCVNormAvg) - window:-2]) or \
       np.all(yAreaCVNormAvg[-1] >= yAreaCVNormAvg[len(yAreaCVNormAvg) - window:-2]):
        endpoints.append(len(yAreaCVNormAvg) - 1)
    yAreaCVNormExtremaRaw = sorted(yAreaCVNormLocalMax[0].tolist() +
                                   yAreaCVNormLocalMin[0].tolist() + endpoints)
    yAreaCVNormExtrema = [
        x for x in yAreaCVNormExtremaRaw[1:]
        if x - yAreaCVNormExtremaRaw[0] >= minTemporalWindow
    ]
    yAreaCVNormExtrema = [yAreaCVNormExtremaRaw[0]] + yAreaCVNormExtrema
    logging.info(f'AreaCVNorm Extrema: {yAreaCVNormAvg[yAreaCVNormExtrema]}')

    # Find slopes of Build-up Lines
    # 	Area
    xArea = np.array(datetimes[window // 2:-window // 2])[np.array(
        [areaExtrema[0], areaExtrema[1]]
    )]  # grab datetime (x component) of the leftmost bounds (determined by window size), and the first extreme on the smoothed curve (sm curve is already bound by window, we need to apply bounds to datetimes)
    xAreaDiff = xArea[1] - xArea[
        0]  # subtract the later value from the former to get our delta x
    yArea = yAreaAvg[np.array(
        [areaExtrema[0], areaExtrema[1]]
    )]  # grab the values (y component) of the sm curve at the beginning and at the first extreme
    yAreaDiff = yArea[1] - yArea[0]  # subtract to find delta y
    slopeArea = np.arctan(yAreaDiff /
                          xAreaDiff.seconds)  # calc the slope angle
    logging.info(f'Slope of Area: {slopeArea}')

    #   Reflectivity
    xRef = np.array(datetimes[window // 2:-window // 2])[np.array(
        [refExtrema[0], refExtrema[1]])]
    xRefDiff = xRef[1] - xRef[0]
    yRef = yRefAvg[np.array([refExtrema[0], refExtrema[1]])]
    yRefDiff = yRef[1] - yRef[0]
    slopeRef = np.arctan(yRefDiff / xRefDiff.seconds)
    print(f'Slope of Reflectivity: {slopeRef}')

    #   Product of Area and Coefficient of Variation of Reflectivity
    xProduct = np.array(datetimes[window // 2:-window // 2])[np.array(
        [yAreaCVNormExtrema[0], yAreaCVNormExtrema[1]])]
    xProductDiff = xProduct[1] - xProduct[0]
    yProduct = yAreaCVNormAvg[np.array(
        [yAreaCVNormExtrema[0], yAreaCVNormExtrema[1]])]
    yProductDiff = yProduct[1] - yProduct[0]
    slopeProduct = np.arctan(yProductDiff / xProductDiff.seconds)
    print(f'Slope of Product: {slopeProduct}')
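    # Note: each slope above is np.arctan(delta-value / delta-time-in-seconds),
    # i.e. the build-up line's angle in radians between the first two extrema
    # of the corresponding smoothed curve.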

    print('Plotting Additional Data and Saving Output...')
    # Area for Reflectivity ≥ 35 dBZ
    axes[-1][-5].plot_date(datetimes, areaValues, linestyle='solid', ms=2)
    axes[-1][-5].plot_date(datetimes[window // 2:-window // 2],
                           yAreaAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-5].plot_date(
        np.array(datetimes[window // 2:-window // 2])[np.array(
            [areaExtrema[0], areaExtrema[1]])],
        yAreaAvg[np.array([areaExtrema[0], areaExtrema[1]])],
        linestyle="solid",
        ms=2)
    axes[-1][-5].legend(['Area Delta', 'Sm. Area Delta', 'Build-up Rate'])
    axes[-1][-5].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-5].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-5].set_title('Area of Reflectivity ≥ 35 dBZ (km^2)')

    # Mean of Reflectivity ≥ 35 dBZ
    axes[-1][-4].plot_date(datetimes, refValues, linestyle='solid', ms=2)
    #axes[-1][-4].plot_date(datetimes[window//2:-window//2], yRefAvg, linestyle='solid', ms=2)
    #axes[-1][-4].plot_date(np.array(datetimes[window//2:-window//2])[np.array([0,refLocalMax[0][0]])], yRefAvg[np.array([0,refLocalMax[0][0]])], linestyle="solid", ms=2)
    axes[-1][-4].plot_date(datetimes[window // 2:-window // 2],
                           yRefAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-4].plot_date(np.array(
        datetimes[window // 2:-window // 2])[np.array(
            [refExtrema[0], refExtrema[1]])],
                           yRefAvg[np.array([refExtrema[0], refExtrema[1]])],
                           linestyle="solid",
                           ms=2)
    axes[-1][-4].legend(['Ref Delta', 'Sm. Ref Delta', 'Build-up Rate'])
    axes[-1][-4].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-4].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-4].set_title('Mean of Reflectivity ≥ 35 dBZ')

    # Product of cv reflectivity and area
    axes[-1][-3].plot_date(datetimes,
                           areaCVValuesNormalized,
                           linestyle='solid',
                           ms=2)
    axes[-1][-3].plot_date(datetimes[window // 2:-window // 2],
                           yAreaCVNormAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-3].plot_date(
        np.array(datetimes[window // 2:-window // 2])[np.array(
            [yAreaCVNormExtrema[0], yAreaCVNormExtrema[1]])],
        yAreaCVNormAvg[np.array([yAreaCVNormExtrema[0],
                                 yAreaCVNormExtrema[1]])],
        linestyle="solid",
        ms=2)
    axes[-1][-3].legend(
        ['Area*cv_Ref Delta', 'Sm. Area*cv_Ref Delta', 'Build-up Rate'])
    axes[-1][-3].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-3].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-3].set_title('Norm Product:\nCV Reflectivity * Area ≥ 35 dBZ')

    # Coeff. of Variation of Reflectivity ≥ 35 dBZ
    axes[-1][-2].plot_date(datetimes, cvValues, linestyle='solid', ms=2)
    axes[-1][-2].plot_date(datetimes[window // 2:-window // 2],
                           yCVAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-2].legend(['CV Delta', 'Sm. CV Delta'])
    axes[-1][-2].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-2].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-2].set_title('CV of Reflectivity ≥ 35 dBZ')

    # Testing plot
    axes[-1][-1].semilogy(xf[1:N // 2], 2.0 / N * np.abs(yf[1:N // 2]), '-b')
    axes[-1][-1].semilogy(xf[1:N // 2], 2.0 / N * np.abs(ywf[1:N // 2]), '-r')
    axes[-1][-1].legend(['FFT', 'FFT w. Window'])
    #axes[-1][-1].plot(xf, 2.0/N * np.abs(yf[0:N//2]),linestyle='solid', ms=2)
    #axes[-1][-1].plot_date(datetimes, yCVAvg, linestyle='solid')
    #axes[-1][-1].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-1].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-1].set_title('Testing Plot (Frequency)')

    plt.tight_layout()
    plt.savefig(args.output + 'Nexrad.png')  # Set the output file name
    #plt.show()

    f_o = open(args.output + 'log_stats_area_nexrad.txt', 'a')
    f_o.write(datetimes[0].strftime("%Y%m%d%H%M%S") + '\t' +
              str(args.convLatLon) + '\t' + str(args.convBearing) + '\t' +
              str(args.scaleFactor) + '\t' + str(np.max(areaValues)) + '\t' +
              str(np.max(refValues)) + '\t' +
              str(slopeArea)  # std dev of LIS aligned data
              + '\t' + str(slopeRef) + '\t' + str(slopeProduct) + '\n')
    f_o.close()
Example #6
          rightside_up=True,
          use_clabeltext=True)

# Contour the temperature
cf = ax.contourf(lon,
                 lat,
                 temp[FH, 0, :, :],
                 range(-20, 20, 1),
                 cmap=plt.cm.RdBu_r,
                 transform=ccrs.PlateCarree())
cb = fig.colorbar(cf,
                  orientation='horizontal',
                  aspect=65,
                  shrink=0.5,
                  pad=0.05,
                  extendrect=True)
cb.set_label('Celsius', size='x-large')

ax.set_extent([-106.5, -90.4, 34.5, 46.75], crs=ccrs.PlateCarree())

# Make the axis title
ax.set_title(f'{plevs[0]:~.0f} Heights (m) and Temperature (C)',
             loc='center',
             fontsize=10)

# Set the figure title
fig.suptitle(f'WRF-ARW Forecast VALID: {vtimes[FH]} UTC', fontsize=14)
add_timestamp(ax, vtimes[FH], y=0.02, high_contrast=True)

plt.show()
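# Note: the '~.0f' in the title's format spec is pint's abbreviated-unit format,
# so the quantity prints with a short unit (e.g. '700 hPa') without hard-coding
# the unit string, unlike the '{:.0f} hPa' variant of this example further below.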
Example #7
# of (header, data array)
ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
ref_range = np.arange(ref_hdr.num_gates) * ref_hdr.gate_width + ref_hdr.first_gate
ref = np.array([ray[4][b'REF'][1] for ray in f.sweeps[sweep]])

rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
rho_range = (np.arange(rho_hdr.num_gates + 1) - 0.5) * rho_hdr.gate_width + rho_hdr.first_gate
rho = np.array([ray[4][b'RHO'][1] for ray in f.sweeps[sweep]])

###########################################
fig, axes = plt.subplots(1, 2, figsize=(15, 8))
add_metpy_logo(fig, 190, 85, size='large')
for var_data, var_range, ax in zip((ref, rho), (ref_range, rho_range), axes):
    # Turn into an array, then mask
    data = np.ma.array(var_data)
    data[np.isnan(data)] = np.ma.masked

    # Convert az,range to x,y
    xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
    ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))

    # Plot the data
    # 'viridis' is a matplotlib colormap; pass the name directly to pcolormesh
    ax.pcolormesh(xlocs, ylocs, data, cmap='viridis')
    ax.set_aspect('equal', 'datalim')
    ax.set_xlim(-40, 20)
    ax.set_ylim(-30, 30)
    add_timestamp(ax, f.dt, y=0.02, high_contrast=True)

plt.show()
Example #8
File: test_util.py  Project: zhatin/MetPy
def test_add_timestamp_pretext():
    """Test adding a timestamp to an axes object with custom pre-text."""
    fig = plt.figure(figsize=(9, 9))
    ax = plt.subplot(1, 1, 1)
    add_timestamp(ax, time=datetime(2017, 1, 1), pretext='Valid: ')
    return fig
Example #9
def read_nexRad(filename):

    # Open the file
    # name = get_test_data('PHWA20201031_000332_V06.gz', as_file_obj=False)
    f = Level2File(filename)

    # print(f.sweeps[0][0])
    # Pull data out of the file
    sweep = 0

    # First item in ray is header, which has azimuth angle
    az = np.array([ray[0].az_angle for ray in f.sweeps[sweep]])

    # 5th item is a dict mapping a var name (byte string) to a tuple
    # of (header, data array)
    ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
    ref_range = np.arange(
        ref_hdr.num_gates) * ref_hdr.gate_width + ref_hdr.first_gate
    ref = np.array([ray[4][b'REF'][1] for ray in f.sweeps[sweep]])

    # rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
    # rho_range = (np.arange(rho_hdr.num_gates + 1) - 0.5) * rho_hdr.gate_width + rho_hdr.first_gate
    # rho = np.array([ray[4][b'RHO'][1] for ray in f.sweeps[sweep]])

    fig, axes = plt.subplots(1, 1, figsize=(15, 8))

    # reflexivity plot
    data = np.ma.array(ref)
    data[np.isnan(data)] = np.ma.masked

    # Convert az,range to x,y
    xlocs = ref_range * np.sin(np.deg2rad(az[:, np.newaxis]))
    ylocs = ref_range * np.cos(np.deg2rad(az[:, np.newaxis]))

    # Plot the data
    axes.pcolormesh(xlocs, ylocs, data, cmap='viridis')
    axes.set_aspect('equal', 'datalim')
    axes.set_xlim(-150, 150)
    axes.set_ylim(-150, 150)
    add_timestamp(axes, f.dt, y=0.02, high_contrast=True)
    axes.axis('off')
    # fig.show()

    # redraw the plot
    fig.canvas.draw()

    # Now we can save it to a numpy array.
    width, height = fig.get_size_inches() * fig.get_dpi()
    data = np.frombuffer(fig.canvas.tostring_rgb(),
                         dtype=np.uint8).reshape(int(height), int(width), 3)
    data = cv2.cvtColor(data[200:600, 600:1000], cv2.COLOR_RGB2GRAY)  # canvas buffer is RGB
    data = cv2.resize(data, (200, 200), interpolation=cv2.INTER_NEAREST)

    plt.close()
    # data = cv2.blur(data, (3, 3))
    # print(data.shape)
    # plt.show()
    # plt.imshow(data, cmap='gray')
    # plt.show()
    #plt.savefig('test.png', cmap='gray')

    # save into a file
    return data
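
    # Example use (hypothetical file name, matching the commented test data above):
    #
    #     img = read_nexRad('PHWA20201031_000332_V06.gz')
    #     cv2.imwrite('reflectivity_200x200.png', img)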
Example #10
    else:
        # Figure out the most recent sounding, 00 or 12. Subtracting two hours
        # helps ensure that we choose a time with data available.
        now = datetime.utcnow() - timedelta(hours=2)
        request_time = now.replace(hour=(now.hour // 12) * 12,
                                   minute=0,
                                   second=0)
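        # Worked example of the rounding: at 13:45 UTC, `now` becomes 11:45
        # after subtracting two hours, and (11 // 12) * 12 == 0, so the 00Z
        # sounding is requested; from 14:00 UTC onward it switches to 12Z.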

    # Request the data and plot
    df = WyomingUpperAir.request_data(request_time, args.site)
    skewt = plot_skewt(df)

    # Add the timestamp for the data to the plot
    add_timestamp(skewt.ax,
                  request_time,
                  y=1.02,
                  x=0,
                  ha='left',
                  fontsize='large')
    skewt.ax.set_title(args.site)

    if args.show:
        plt.show()
    else:
        fname = args.filename if args.filename else make_name(
            args.site, request_time)
        if args.gdrive:
            uploader = DriveUploader()
            with tempfile.NamedTemporaryFile(suffix='.png') as f:
                skewt.ax.figure.savefig(f.name)
                uploader.upload_to(f.name, posixpath.join(args.gdrive, fname))
        else:
Example #11
###########################################

# Open the GINI file from the test data
f = GiniFile(get_test_data('WEST-CONUS_4km_WV_20151208_2200.gini'))
print(f)

###########################################
# Get a Dataset view of the data (essentially a NetCDF-like interface to the
# underlying data). Pull out the data and (x, y) coordinates. We use `metpy.parse_cf` to
# handle parsing some netCDF Climate and Forecasting (CF) metadata to simplify working with
# projections.
ds = xr.open_dataset(f)
x = ds.variables['x'][:]
y = ds.variables['y'][:]
dat = ds.metpy.parse_cf('WV')

###########################################
# Plot the image. We use MetPy's xarray/cartopy integration to automatically handle parsing
# the projection information.
fig = plt.figure(figsize=(10, 12))
add_metpy_logo(fig, 125, 145)
ax = fig.add_subplot(1, 1, 1, projection=dat.metpy.cartopy_crs)
wv_norm, wv_cmap = colortables.get_with_range('WVCIMSS', 100, 260)
wv_cmap.set_under('k')
im = ax.imshow(dat[:], cmap=wv_cmap, norm=wv_norm,
               extent=(x.min(), x.max(), y.min(), y.max()), origin='upper')
ax.add_feature(cfeature.COASTLINE.with_scale('50m'))
add_timestamp(ax, f.prod_desc.datetime, y=0.02, high_contrast=True)

plt.show()
Example #12
    print(f'{len(df)} stations with variable {args.var}\nPlotting...')
    # Make an LCC map projection
    proj = ccrs.LambertConformal()

    # Plot the map
    fig = plt.figure(figsize=(12, 7))
    ax = plt.axes(projection=proj)
    ax.add_feature(cfeature.COASTLINE.with_scale('50m'))
    ax.add_feature(cfeature.OCEAN.with_scale('50m'))
    ax.add_feature(cfeature.LAND.with_scale('50m'))
    ax.add_feature(cfeature.BORDERS.with_scale('50m'), linestyle=':')
    ax.add_feature(cfeature.STATES.with_scale('50m'), linestyle=':')
    ax.add_feature(cfeature.LAKES.with_scale('50m'), alpha=0.5)
    ax.add_feature(cfeature.RIVERS.with_scale('50m'), alpha=0.5)

    add_timestamp(ax)
    add_metpy_logo(fig, x=300, y=350)

    scatter = ax.scatter(df.longitude, df.latitude,
                         c=df[args.var], transform=ccrs.PlateCarree(),
                         cmap=plt.get_cmap(args.cmap), vmin=args.min, vmax=args.max,
                         s=args.msize) # cm.Oranges or Use plt.get_cmap(str)

    plt.colorbar(scatter, orientation='horizontal',
                 label=args.var.replace('_', ' ').title(),
                 shrink=0.6, pad=0.05)

    #u, v = mpcalc.wind_components(df.wind_speed.values * units('m/s'), df.wind_direction.values * units.degrees)
    #x = df.longitude.values
    #y = df.latitude.values
    #ax.quiver(x, y, u.m, v.m, transform=ccrs.PlateCarree(), units='dots')
Example #13
add_metpy_logo(fig, 190, 85, size='large')
for v, ctable, ax in zip(('N0Q', 'N0U'), ('NWSReflectivity', 'NWSVelocity'), axes):
    # Open the file
    name = get_test_data('nids/KOUN_SDUS54_{}TLX_201305202016'.format(v), as_file_obj=False)
    f = Level3File(name)

    # Pull the data out of the file object
    datadict = f.sym_block[0][0]

    # Turn into an array, then mask
    data = np.ma.array(datadict['data'])
    data[data == 0] = np.ma.masked

    # Grab azimuths and calculate a range based on number of gates
    az = np.array(datadict['start_az'] + [datadict['end_az'][-1]])
    rng = np.linspace(0, f.max_range, data.shape[-1] + 1)
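    # Note: az carries one extra trailing azimuth and rng has
    # data.shape[-1] + 1 points, so the xlocs/ylocs computed below describe the
    # cell *edges* that pcolormesh expects rather than cell centers.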

    # Convert az,range to x,y
    xlocs = rng * np.sin(np.deg2rad(az[:, np.newaxis]))
    ylocs = rng * np.cos(np.deg2rad(az[:, np.newaxis]))

    # Plot the data
    norm, cmap = colortables.get_with_steps(ctable, 16, 16)
    ax.pcolormesh(xlocs, ylocs, data, norm=norm, cmap=cmap)
    ax.set_aspect('equal', 'datalim')
    ax.set_xlim(-40, 20)
    ax.set_ylim(-30, 30)
    add_timestamp(ax, f.metadata['prod_time'], y=0.02, high_contrast=True)

plt.show()
Example #14
def produce(data, conn, client):
    Year = data['inputData']['Year']
    Month = data['inputData']['Month']
    Day = data['inputData']['Day']
    Radar = data['inputData']['Radar']
    uid = data['uid']
    inputData = data['inputData']
    userID = data["userID"]

    numberOfPlots = 1
    scans = conn.get_avail_scans(Year, Month, Day,
                                 Radar)  # year, month and day
    results = conn.download(scans[numberOfPlots - 1], 'templocation')

    # fig = plt.figure(figsize=(16,12))
    for i, scan in enumerate(results.iter_success(), start=1):
        #         ax = fig.add_subplot(1,1,i)
        #         radar = scan.open_pyart()

        #         display = pyart.graph.RadarDisplay(radar)
        #         display.plot('reflectivity',0,ax=ax,title="{} {}".format(scan.radar_id,scan.scan_time))
        #         display.set_limits((-150, 150), (-150, 150), ax=ax)

        sweep = 0
        name = scan.open()
        f = Level2File(name)
        # First item in ray is header, which has azimuth angle
        az = np.array([ray[0].az_angle for ray in f.sweeps[sweep]])

        # 5th item is a dict mapping a var name (byte string) to a tuple
        # of (header, data array)
        ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
        ref_range = np.arange(
            ref_hdr.num_gates) * ref_hdr.gate_width + ref_hdr.first_gate
        ref = np.array([ray[4][b'REF'][1] for ray in f.sweeps[sweep]])

        rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
        rho_range = (np.arange(rho_hdr.num_gates + 1) -
                     0.5) * rho_hdr.gate_width + rho_hdr.first_gate
        rho = np.array([ray[4][b'RHO'][1] for ray in f.sweeps[sweep]])

        fig, axes = plt.subplots(1, 2, figsize=(15, 8))
        add_metpy_logo(fig, 190, 85, size='large')
        for var_data, var_range, ax in zip((ref, rho), (ref_range, rho_range),
                                           axes):
            # Turn into an array, then mask
            data = np.ma.array(var_data)
            data[np.isnan(data)] = np.ma.masked

            # Convert az,range to x,y
            xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
            ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))

            # Plot the data
            ax.pcolormesh(xlocs, ylocs, data, cmap='viridis')
            ax.set_aspect('equal', 'datalim')
            ax.set_xlim(-40, 20)
            ax.set_ylim(-30, 30)
            add_timestamp(ax, f.dt, y=0.02, high_contrast=True)

        pltName = 'images/' + str(uid + str(i) + '.png')
        plt.savefig(pltName)
        plt.close(fig)
        uploadImage = im.upload_image(pltName, title="Uploaded with PyImgur")
        link = str(uploadImage.link)

        body = {
            "inputData": inputData,
            "outputData": link,
            "uid": uid,
            "userID": userID
        }

        with (client.topics['dataAnalysisConsumerF']
              ).get_sync_producer() as producer:
            producer.produce(bytes(json.dumps(body), 'utf-8'))
Example #15
                         subplot_kw={'projection': ccrs.PlateCarree()})
add_metpy_logo(fig, 575, 55, size='small')

plt.rcParams['axes.labelsize'] = 8
plt.tick_params(labelsize=8)

for y, a in zip([0, 1], axes.ravel()):
    this_sweep = sweeps_dict[y]
    a.set_extent(extent, crs=ccrs.PlateCarree())
    a.tick_params(axis='both', labelsize=8)
    a.pcolormesh(this_sweep['lons'], this_sweep['lats'],
                 this_sweep['data'], cmap=this_sweep['cmap'],
                 vmin=this_sweep['vmin'],
                 vmax=this_sweep['vmax'],
                 transform=ccrs.PlateCarree())
    a.add_feature(cfeature.STATES, linewidth=0.5)

    a.set_aspect(1.25)
    subplot_title = '{} {}'.format(rda_name, this_sweep['sweep_type'])
    a.set_title(subplot_title, fontsize=11)
    gl = a.gridlines(color='gray', alpha=0.5, draw_labels=True)
    gl.xlabels_top, gl.ylabels_right = False, False
    # the lon/lat formatters belong on the Gridliner, not the GeoAxes
    gl.xformatter, gl.yformatter = LONGITUDE_FORMATTER, LATITUDE_FORMATTER
    gl.xlabel_style, gl.ylabel_style = {'fontsize': 7}, {'fontsize': 7}
    gl.xlocator = mticker.FixedLocator(x_ticks)
    gl.ylocator = mticker.FixedLocator(y_ticks)
    add_timestamp(a, radar_file.dt, y=0.02, high_contrast=True)

plt.show()
Example #16
# Plot 700 hPa
ax = plt.subplot(111, projection=crs)
ax.add_feature(cfeature.COASTLINE.with_scale('50m'), linewidth=0.75)
ax.add_feature(cfeature.STATES, linewidth=0.5)

# Plot the heights
cs = ax.contour(lon, lat, height[FH, 0, :, :], transform=ccrs.PlateCarree(),
                colors='k', linewidths=1.0, linestyles='solid')
ax.clabel(cs, fontsize=10, inline=1, inline_spacing=7,
          fmt='%i', rightside_up=True, use_clabeltext=True)

# Contour the temperature
cf = ax.contourf(lon, lat, temp[FH, 0, :, :], range(-20, 20, 1), cmap=plt.cm.RdBu_r,
                 transform=ccrs.PlateCarree())
cb = fig.colorbar(cf, orientation='horizontal', extend='max', aspect=65, shrink=0.5,
                  pad=0.05, extendrect=True)
cb.set_label('Celsius', size='x-large')

ax.set_extent([-106.5, -90.4, 34.5, 46.75], crs=ccrs.PlateCarree())

# Make the axis title
ax.set_title('{:.0f} hPa Heights (m) and Temperature (C)'.format(plevs[0].m), loc='center',
             fontsize=10)

# Set the figure title
fig.suptitle('WRF-ARW Forecast VALID: {:s} UTC'.format(str(vtimes[FH])), fontsize=14)
add_timestamp(ax, vtimes[FH], y=0.02, high_contrast=True)

plt.show()
Example #17
# Import for the bonus exercise
from metpy.plots import add_timestamp

# Make the image plot
img = ImagePlot()
img.data = ds
img.field = 'Sectorized_CMI'
img.colormap = 'WVCIMSS_r'

# Make the map panel and add the image to it
panel = MapPanel()
panel.plots = [img]

# Make the panel container and add the panel to it
pc = PanelContainer()
pc.panels = [panel]

# Bonus
start_time = datetime.strptime(ds.start_date_time, '%Y%j%H%M%S')
add_timestamp(panel.ax, time=start_time)

# Show the plot
pc.show()
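
# To write the figure to disk instead of displaying it, PanelContainer also has
# a save() method that forwards to Figure.savefig, e.g. (hypothetical file name):
#
#     pc.save('satellite_wv.png', dpi=150)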
Example #18
# Plot wind barbs
ax.barbs(lon.values,
         lat.values,
         isent_data['u_wind'].isel(isentropic_level=level).values,
         isent_data['v_wind'].isel(isentropic_level=level).values,
         length=6,
         regrid_shape=20,
         transform=ccrs.PlateCarree())

# Make some titles
ax.set_title(
    f'{isentlevs[level]:~.0f} Isentropic Pressure (hPa), Wind (kt), '
    'Relative Humidity (percent)',
    loc='left')
add_timestamp(ax,
              isent_data['time'].values.astype('datetime64[ms]').astype('O'),
              y=0.02,
              high_contrast=True)
fig.tight_layout()

######################################
# **Montgomery Streamfunction**
#
# The Montgomery Streamfunction, :math:`\psi = g z + c_p T`, is often desired because its
# gradient is proportional to the geostrophic wind in isentropic space. This can be easily
# calculated with `mpcalc.montgomery_streamfunction`.

# Calculate Montgomery Streamfunction and scale by 10^-2 for plotting
msf = mpcalc.montgomery_streamfunction(isent_data['Geopotential_height'],
                                       isent_data['temperature']).values / 100.

# Choose a level to plot, in this case 296 K
Example #19
radar_data = np.ma.array(radar_data, mask=np.isnan(radar_data))

proj = cartopy.crs.LambertConformal(central_longitude=data.RadarLongitude,
                                    central_latitude=data.RadarLatitude)

print(data.time_coverage_start)
data_time = datetime.strptime(data.time_coverage_start,
                              '%Y-%m-%dT%H:%M:%SZ')  # parse the time string
print(data_time)
from metpy.plots import ctables, add_timestamp

state_borders = cartopy.feature.NaturalEarthFeature(
    category='cultural',
    name='admin_1_states_provinces_lakes',
    scale='50m',
    facecolor='none')

fig = plt.figure(figsize=(10, 10))
ax = plt.subplot(1, 1, 1, projection=proj)
# colormap for the map
norm, cmap = ctables.registry.get_with_steps('NWSReflectivity', 16, 16)
mesh = ax.pcolormesh(x, y, radar_data, norm=norm, cmap=cmap, zorder=0)
add_timestamp(ax, time=data_time)
ax.add_feature(state_borders, edgecolor='black', linewidth=2, zorder=2)

distance_in_degrees = 1.8
#ax.set_extent([data.RadarLongitude-distance_in_degrees,data.RadarLongitude+distance_in_degrees,data.RadarLatitude-distance_in_degrees,data.RadarLatitude+distance_in_degrees])
# to zoom out
# Colorado map
Example #20
# Plot wind barbs
ax.barbs(lon.values,
         lat.values,
         isentu[level, :, :].m,
         isentv[level, :, :].m,
         length=6,
         regrid_shape=20,
         transform=ccrs.PlateCarree())

# Make some titles
ax.set_title(
    '{:.0f} K Isentropic Pressure (hPa), Wind (kt), Relative Humidity (percent)'
    .format(isentlevs[level].m),
    loc='left')
add_timestamp(ax, times[0].dt, y=0.02, high_contrast=True)
fig.tight_layout()

######################################
# **Montgomery Streamfunction**
#
# The Montgomery Streamfunction, :math:`\psi = g z + c_p T`, is often desired because its
# gradient is proportional to the geostrophic wind in isentropic space. This can be easily
# calculated with `mpcalc.montgomery_streamfunction`.

# Calculate Montgomery Streamfunction and scale by 10^-2 for plotting
msf = mpcalc.montgomery_streamfunction(isenthgt, isenttmp) / 100.

# Choose a level to plot, in this case 296 K
level = 0
Example #21
def subset(s3_bucket, prefix):
    s3 = boto3.resource('s3',
                        config=Config(signature_version=botocore.UNSIGNED,
                                      user_agent_extra='Resource'))
    bucket = s3.Bucket(s3_bucket)
    for obj in bucket.objects.filter(Prefix=prefix):
        print(obj.key)

        # Use MetPy to read the file
        f = Level2File(obj.get()['Body'])

        sweep = 0
        # First item in ray is header, which has azimuth angle
        az = np.array([ray[0].az_angle for ray in f.sweeps[sweep]])

        # ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
        # ref_range = np.arange(ref_hdr.num_gates) * ref_hdr.gate_width + ref_hdr.first_gate
        # ref = np.array([ray[4][b'REF'][1] for ray in f.sweeps[sweep]])

        rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
        rho_range = (np.arange(rho_hdr.num_gates + 1) -
                     0.5) * rho_hdr.gate_width + rho_hdr.first_gate
        rho = np.array([ray[4][b'RHO'][1] for ray in f.sweeps[sweep]])

        # phi_hdr = f.sweeps[sweep][0][4][b'PHI'][0]
        # phi_range = (np.arange(phi_hdr.num_gates + 1) - 0.5) * phi_hdr.gate_width + phi_hdr.first_gate
        # phi = np.array([ray[4][b'PHI'][1] for ray in f.sweeps[sweep]])

        zdr_hdr = f.sweeps[sweep][0][4][b'ZDR'][0]
        zdr_range = (np.arange(zdr_hdr.num_gates + 1) -
                     0.5) * zdr_hdr.gate_width + zdr_hdr.first_gate
        zdr = np.array([ray[4][b'ZDR'][1] for ray in f.sweeps[sweep]])
        ref_norm, ref_cmap = ctables.registry.get_with_steps(
            'NWSReflectivity', 5, 5)

        # Plot the data!
        fig, axes = plt.subplots(1, 2, figsize=(15, 15))
        for var_data, var_range, colors, lbl, ax in zip(
            (rho, zdr), (rho_range, zdr_range), ('plasma', 'viridis'),
            ('RHO', 'ZDR (dBZ)'), axes.flatten()):
            # Turn into an array, then mask
            data = np.ma.array(var_data)
            data[np.isnan(data)] = np.ma.masked

            # Convert az,range to x,y
            xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
            ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))

            # Define norm for reflectivity (always None here, since neither
            # panel's colormap is ref_cmap)
            norm = ref_norm if colors == ref_cmap else None

            # Plot the data
            a = ax.pcolormesh(xlocs, ylocs, data, cmap=colors, norm=norm)

            divider = make_axes_locatable(ax)
            cax = divider.append_axes('right', size='5%', pad=0.05)
            fig.colorbar(a, cax=cax, orientation='vertical', label=lbl)

            ax.set_aspect('equal', 'datalim')
            ax.set_xlim(-100, 100)
            ax.set_ylim(-100, 100)
            add_timestamp(ax, f.dt, y=0.02, high_contrast=False)
        plt.suptitle('KVWX Level 2 Data', fontsize=20)
        plt.tight_layout()
        file_name = 'foo_' + str(int(time.time())) + '.png'
        plt.savefig(file_name)
        return send_imgur(file_name)
        # with open("foo.png", "rb") as imageFile:
        # 	str = base64.b64encode(imageFile.read())
        # 	return str
        # return bytes(az.tostring()+b'@'+rho_range.tostring()
        # +b'@'+rho.tostring())
        # return bytes(az.tostring())
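        # send_imgur() is not defined in this excerpt; a minimal sketch using
        # PyImgur, as in the produce() example above (with a pyimgur.Imgur
        # client `im`), might be:
        #
        #     def send_imgur(file_name):
        #         return str(im.upload_image(file_name, title='radar subset').link)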
Example #22
# Plot RH
cf = ax.contourf(lon, lat, isentrh[level, :, :], range(10, 106, 5),
                 cmap=plt.cm.gist_earth_r, transform=ccrs.PlateCarree())
cb = fig.colorbar(cf, orientation='horizontal', extend='max', aspect=65, shrink=0.5, pad=0.05,
                  extendrect=True)
cb.set_label('Relative Humidity', size='x-large')

# Plot wind barbs
ax.barbs(lon.values, lat.values, isentu[level, :, :].m, isentv[level, :, :].m, length=6,
         regrid_shape=20, transform=ccrs.PlateCarree())

# Make some titles
ax.set_title('{:.0f} K Isentropic Pressure (hPa), Wind (kt), Relative Humidity (percent)'
             .format(isentlevs[level].m), loc='left')
add_timestamp(ax, times[0].dt, y=0.02, high_contrast=True)
fig.tight_layout()

######################################
# **Montgomery Streamfunction**
#
# The Montgomery Streamfunction, :math:`\psi = g z + c_p T`, is often desired because its
# gradient is proportional to the geostrophic wind in isentropic space. This can be easily
# calculated with `mpcalc.montgomery_streamfunction`.


# Calculate Montgomery Streamfunction and scale by 10^-2 for plotting
msf = mpcalc.montgomery_streamfunction(isenthgt, isenttmp) / 100.

# Choose a level to plot, in this case 296 K
level = 0
Example #23
    ('NWS8bitVel', -100, 1.0))  # m/s
for v, ctable, ax in zip(('N0Q', 'N0U'), ctables, axes):
    # Open the file
    name = get_test_data('nids/KOUN_SDUS54_{}TLX_201305202016'.format(v),
                         as_file_obj=False)
    f = Level3File(name)

    # Pull the data out of the file object
    datadict = f.sym_block[0][0]

    # Turn into an array using the scale specified by the file
    data = f.map_data(datadict['data'])

    # Grab azimuths and calculate a range based on number of gates
    az = np.array(datadict['start_az'] + [datadict['end_az'][-1]])
    rng = np.linspace(0, f.max_range, data.shape[-1] + 1)

    # Convert az,range to x,y
    xlocs = rng * np.sin(np.deg2rad(az[:, np.newaxis]))
    ylocs = rng * np.cos(np.deg2rad(az[:, np.newaxis]))

    # Plot the data
    norm, cmap = colortables.get_with_steps(*ctable)
    ax.pcolormesh(xlocs, ylocs, data, norm=norm, cmap=cmap)
    ax.set_aspect('equal', 'datalim')
    ax.set_xlim(-40, 20)
    ax.set_ylim(-30, 30)
    add_timestamp(ax, f.metadata['prod_time'], y=0.02, high_contrast=True)

plt.show()
Example #24
File: test_util.py  Project: akrherz/MetPy
def test_add_timestamp_custom_format():
    """Test adding a timestamp to an axes object with custom time formatting."""
    fig = plt.figure(figsize=(9, 9))
    ax = plt.subplot(1, 1, 1)
    add_timestamp(ax, time=datetime(2017, 1, 1), time_format='%H:%M:%S %Y/%m/%d')
    return fig
Example #25
    print('{} stations with variable {}\nPlotting...'.format(len(df), args.var))
    # Make an LCC map projection
    proj = ccrs.LambertConformal()

    # Plot the map
    fig = plt.figure(figsize=(12, 7))
    ax = plt.axes(projection=proj)
    ax.add_feature(cfeature.COASTLINE.with_scale('50m'))
    ax.add_feature(cfeature.OCEAN.with_scale('50m'))
    ax.add_feature(cfeature.LAND.with_scale('50m'))
    ax.add_feature(cfeature.BORDERS.with_scale('50m'), linestyle=':')
    ax.add_feature(cfeature.STATES.with_scale('50m'), linestyle=':')
    ax.add_feature(cfeature.LAKES.with_scale('50m'), alpha=0.5)
    ax.add_feature(cfeature.RIVERS.with_scale('50m'), alpha=0.5)

    add_timestamp(ax)
    add_metpy_logo(fig, x=300, y=350)

    scatter = ax.scatter(df.longitude, df.latitude,
                         c=df[args.var], transform=ccrs.PlateCarree(),
                         cmap=plt.get_cmap(args.cmap), vmin=args.min, vmax=args.max,
                         s=args.msize) # cm.Oranges or Use plt.get_cmap(str)

    plt.colorbar(scatter, orientation='horizontal',
                 label=args.var.replace('_', ' ').title(),
                 shrink=0.6, pad=0.05)

    #u, v = mpcalc.wind_components(df.wind_speed.values * units('m/s'), df.wind_direction.values * units.degrees)
    #x = df.longitude.values
    #y = df.latitude.values
    #ax.quiver(x, y, u.m, v.m, transform=ccrs.PlateCarree(), units='dots')
Example #26
ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
ref_range = np.arange(
    ref_hdr.num_gates) * ref_hdr.gate_width + ref_hdr.first_gate
ref = np.array([ray[4][b'REF'][1] for ray in f.sweeps[sweep]])

rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
rho_range = (np.arange(rho_hdr.num_gates + 1) -
             0.5) * rho_hdr.gate_width + rho_hdr.first_gate
rho = np.array([ray[4][b'RHO'][1] for ray in f.sweeps[sweep]])

###########################################
fig, axes = plt.subplots(1, 2, figsize=(15, 8))
add_metpy_logo(fig, 190, 85, size='large')
for var_data, var_range, ax in zip((ref, rho), (ref_range, rho_range), axes):
    # Turn into an array, then mask
    data = np.ma.array(var_data)
    data[np.isnan(data)] = np.ma.masked

    # Convert az,range to x,y
    xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
    ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))

    # Plot the data
    ax.pcolormesh(xlocs, ylocs, data, cmap='viridis')
    ax.set_aspect('equal', 'datalim')
    ax.set_xlim(-40, 20)
    ax.set_ylim(-30, 30)
    add_timestamp(ax, f.dt, y=0.02, high_contrast=True)

plt.show()
Example #27
File: test_util.py  Project: akrherz/MetPy
def test_add_timestamp_pretext():
    """Test adding a timestamp to an axes object with custom pre-text."""
    fig = plt.figure(figsize=(9, 9))
    ax = plt.subplot(1, 1, 1)
    add_timestamp(ax, time=datetime(2017, 1, 1), pretext='Valid: ')
    return fig
Example #28
###########################################

# Create CartoPy projection information for the file
globe = ccrs.Globe(ellipse='sphere',
                   semimajor_axis=proj_var.earth_radius,
                   semiminor_axis=proj_var.earth_radius)
proj = ccrs.LambertConformal(
    central_longitude=proj_var.longitude_of_central_meridian,
    central_latitude=proj_var.latitude_of_projection_origin,
    standard_parallels=[proj_var.standard_parallel],
    globe=globe)

###########################################

# Plot the image
fig = plt.figure(figsize=(10, 12))
add_metpy_logo(fig, 125, 145)
ax = fig.add_subplot(1, 1, 1, projection=proj)
wv_norm, wv_cmap = ctables.registry.get_with_range('WVCIMSS', 100, 260)
wv_cmap.set_under('k')
im = ax.imshow(dat[:],
               cmap=wv_cmap,
               norm=wv_norm,
               extent=ds.img_extent,
               origin='upper')
ax.add_feature(cfeature.COASTLINE.with_scale('50m'))
add_timestamp(ax, f.prod_desc.datetime, y=0.02, high_contrast=True)

plt.show()
Example #29
File: test_util.py  Project: akrherz/MetPy
def test_add_timestamp_high_contrast():
    """Test adding a timestamp to an axes object."""
    fig = plt.figure(figsize=(9, 9))
    ax = plt.subplot(1, 1, 1)
    add_timestamp(ax, time=datetime(2017, 1, 1), high_contrast=True)
    return fig
Example #30
fig, axes = plt.subplots(2, 2, figsize=(15, 15))
for var_data, var_range, colors, lbl, ax in zip(
    (ref, rho, zdr, phi), (ref_range, rho_range, zdr_range, phi_range),
    (ref_cmap, 'plasma', 'viridis', 'viridis'),
    ('REF (dBZ)', 'RHO', 'ZDR (dBZ)', 'PHI'), axes.flatten()):
    # Turn into an array, then mask
    data = np.ma.array(var_data)
    data[np.isnan(data)] = np.ma.masked

    # Convert az,range to x,y
    xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
    ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))

    # Define norm for reflectivity
    norm = ref_norm if colors == ref_cmap else None

    # Plot the data
    a = ax.pcolormesh(xlocs, ylocs, data, cmap=colors, norm=norm)

    divider = make_axes_locatable(ax)
    cax = divider.append_axes('right', size='5%', pad=0.05)
    fig.colorbar(a, cax=cax, orientation='vertical', label=lbl)

    ax.set_aspect('equal', 'datalim')
    ax.set_xlim(-100, 100)
    ax.set_ylim(-100, 100)
    add_timestamp(ax, f.dt, y=0.02, high_contrast=False)
plt.suptitle('KVWX Level 2 Data', fontsize=20)
plt.tight_layout()
plt.show()
Example #31
def main():
    manager = mp.Manager()
    results = manager.dict()
    pool = mp.Pool(12)

    jobs = []

    for filepath in glob.glob(join(args["NEXRADL3"], '*')):
        job = pool.apply_async(calculate_radar_stats, (results, filepath))
        jobs.append(job)

    for job in tqdm(jobs, desc="Bounding & Searching Data"):
        job.get()

    pool.close()
    pool.join()

    print('Sorting...')
    columns = [
        'datetime', 'metadata', 'sensorData', 'indices', 'xlocs', 'ylocs',
        'data', 'polyVerts', 'offset', 'areaValue', 'refValue', 'varRefValue'
    ]
    resultsDF = pd.DataFrame.from_dict(results,
                                       orient='index',
                                       columns=columns)
    resultsDF['datetime'] = pd.to_datetime(resultsDF.datetime)
    resultsDF.sort_values(by='datetime', inplace=True)
    #resultsDF.to_csv(args["output"] + '.csv', index = False)
    print(resultsDF[['areaValue', 'refValue']].head(5))

    # --- Plot time series---
    print('Plotting Slices...')
    fig, axes = plt.subplots(8, 8, figsize=(30, 30))
    date_format = mpl_dates.DateFormatter('%H:%Mz')

    for i, (dt, record) in tqdm(enumerate(resultsDF.iterrows()),
                                desc='Plotting Slices'):
        plotx = i % 8
        ploty = int(i / 8)

        negXLim = -.5
        posXLim = 1.5
        negYLim = -1.0
        posYLim = 1.0
        norm, cmap = colortables.get_with_steps('NWSReflectivity', 18, 16)
        tempdata = record[
            'data'].copy()  # copy the data so masking for plotting doesn't mutate the DataFrame
        tempdata[tempdata == 0] = np.ma.masked  # mask out 0s for plotting

        axes[ploty][plotx].pcolormesh(record['xlocs'],
                                      record['ylocs'],
                                      tempdata,
                                      norm=norm,
                                      cmap=cmap)
        axes[ploty][plotx].set_aspect('equal', 'datalim')
        axes[ploty][plotx].set_xlim(negXLim, posXLim)
        axes[ploty][plotx].set_ylim(negYLim, posYLim)
        pVXs, pVYs = zip(
            *record['polyVerts']
        )  # create lists of x and y values for transformed polyVerts
        axes[ploty][plotx].plot(pVXs, pVYs)
        if negXLim < record['offset'][1] < posXLim and negYLim < record[
                'offset'][0] < posYLim:
            axes[ploty][plotx].plot(record['offset'][1], record['offset'][0],
                                    'o')  # Location of the radar
            axes[ploty][plotx].text(
                record['offset'][1], record['offset'][0],
                record['sensorData']['siteID']
            )  # will plot outside limits of subplot if site falls outside range

        axes[ploty][plotx].plot(0.0, 0.0, 'bx')  # Location of the convection
        axes[ploty][plotx].text(0.0, 0.0, str(args["convLatLon"]))
        add_timestamp(axes[ploty][plotx],
                      record['datetime'],
                      y=0.02,
                      high_contrast=True)
        axes[ploty][plotx].tick_params(axis='both', which='both')

    print('Calculating Statistics...')

    # pull data out of DF to make code cleaner
    datetimes = resultsDF['datetime'].tolist()
    #elapsedtimes = list(map(lambda x: x - min(datetimes), datetimes))						# not currently used, need to get this working
    areaValues = resultsDF['areaValue'].tolist()  # area ≥ 35 dBZ within ROI
    refValues = (
        np.array(resultsDF['refValue'].tolist()) - 65
    ) * 0.5  # mean reflectivity ≥ 35 dBZ within ROI (conversion: (val-65)*0.5) [https://mesonet.agron.iastate.edu/GIS/rasters.php?rid=2]
    if np.isnan(refValues).any():  # `np.nan in refValues` never matches, since nan != nan
        warnings.warn(
            "Radar inputs contain an instance with no ref values >= thresh",
            UserWarning)
    #areaRefValues = np.multiply(areaValues, refValues)										# product of area and reflectivity
    varValues = resultsDF['varRefValue'].tolist(
    )  # variance of mean reflectivity ≥ 35 dBZ within ROI
    cvValues = np.array([
        a / b for a, b in zip(varValues, refValues)
    ]) * 0.5  # coeff. of variation for mean reflectivity ≥ 35 dBZ within ROI

    # Frequency
    N = len(refValues)
    T = 1.0 / N
    yf = fft(refValues)
    w = blackman(N)
    ywf = fft(refValues * w)
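    # The Blackman window tapers the ends of the series before the FFT to cut
    # the spectral leakage that abrupt truncation causes. A minimal standalone
    # sketch (illustrative, not from the original source):
    #   from scipy.fft import fft
    #   from scipy.signal.windows import blackman
    #   n = 256
    #   tone = np.sin(2 * np.pi * 0.123 * np.arange(n))  # not bin-aligned, so it leaks
    #   plain = np.abs(fft(tone))[:n // 2]
    #   tapered = np.abs(fft(tone * blackman(n)))[:n // 2]
    #   # away from the tone, `tapered` falls off much faster than `plain`
    # Also note T is set to 1/N above, so xf below is in cycles per record
    # length rather than physical frequency units.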

    # Normalization
    areaNorm = np.array(areaValues) / np.max(areaValues)  # convert to an array; a plain list cannot be divided by a scalar
    xf = np.linspace(0, 1.0 / (2.0 * T), N // 2)
    cvNorm = cvValues / np.max(cvValues)
    areaCVValuesNormalized = np.multiply(areaNorm, cvNorm)

    # Curve Smoothing
    window = len(
        resultsDF.index
    ) // 8  # ~2 hours / 8 = ~15 min of samples per moving average; helps counteract the noise visible in higher-temporal-resolution data
    yAreaAvg = movingaverage(
        areaValues, window)[window // 2:-window //
                            2]  # moving averages for the time series, trimming half a window from each end
    yRefAvg = movingaverage(refValues, window)[window // 2:-window // 2]
    yCVAvg = movingaverage(cvValues, window)[window // 2:-window // 2]
    yAreaCVNormAvg = movingaverage(areaCVValuesNormalized,
                                   window)[window // 2:-window // 2]
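    # `movingaverage` is defined elsewhere in this project; a minimal
    # convolution-based implementation consistent with how it is called here
    # might look like this (an assumption, not necessarily the original helper):
    #   def movingaverage(values, window):
    #       weights = np.ones(window) / window
    #       return np.convolve(values, weights, mode='same')
    # The [window // 2:-window // 2] slices above trim the edge samples where
    # the average is contaminated by the series boundaries.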

    # local minima & maxima on smoothed curves
    minTemporalwindow = window * 2

    areaLocalMax = argrelmax(yAreaAvg)
    areaLocalMin = argrelmin(yAreaAvg)
    endpoints = []
    # treat an endpoint as an extreme if it bounds the first/last window of samples
    if np.all(yAreaAvg[0] <= yAreaAvg[1:window]) or np.all(
            yAreaAvg[0] >= yAreaAvg[1:window]):
        endpoints.append(0)
    if np.all(yAreaAvg[-1] <= yAreaAvg[len(yAreaAvg) - 1 - window:-2]) or np.all(
            yAreaAvg[-1] >= yAreaAvg[len(yAreaAvg) - 1 - window:-2]):
        endpoints.append(len(yAreaAvg) - 1)
    areaExtremaRaw = sorted(
        areaLocalMax[0].tolist() + areaLocalMin[0].tolist() + endpoints
    )  # combine mins, maxes, and endpoints (if endpoints are an extreme) then sort
    areaExtrema = [
        x for x in areaExtremaRaw[1:]
        if x - areaExtremaRaw[0] >= minTemporalwindow
    ]  # remove extrema that fall within the temporal threshold of the first one
    areaExtrema = [areaExtremaRaw[0]
                   ] + areaExtrema  # add the first one back at the beginning
    print(f'Area Extrema: {areaExtrema}')

    refLocalMax = argrelmax(yRefAvg)
    refLocalMin = argrelmin(yRefAvg)
    endpoints = []
    if np.all(yRefAvg[0] <= yRefAvg[1:window]) or np.all(
            yRefAvg[0] >= yRefAvg[1:window]):
        endpoints.append(0)
    if np.all(yRefAvg[-1] <= yRefAvg[len(yRefAvg) - 1 - window:-2]) or np.all(
            yRefAvg[-1] >= yRefAvg[len(yRefAvg) - 1 - window:-2]):
        endpoints.append(len(yRefAvg) - 1)
    refExtremaRaw = sorted(refLocalMax[0].tolist() + refLocalMin[0].tolist() +
                           endpoints)
    refExtrema = [
        x for x in refExtremaRaw[1:]
        if x - refExtremaRaw[0] >= minTemporalwindow
    ]
    refExtrema = [refExtremaRaw[0]] + refExtrema
    print(f'Ref Extrema: {refExtrema}')

    #cvLocalMax = argrelmax(yCVAvg)
    #cvLocalMin = argrelmin(yCVAvg)
    #endpoints = []
    #if np.all(yCVAvg[0] <= yCVAvg[1:window]) or np.all(yCVAvg[0] >= yCVAvg[1:window]):
    #	endpoints.append(0)
    #if np.all(yCVAvg[-1] <= yCVAvg[len(yCVAvg)-1-window:-2]) or np.all(yCVAvg[-1] >= yCVAvg[len(yCVAvg)-1-window:-2]):
    #	endpoints.append(len(yCVAvg)-1)
    #cvExtremaRaw = sorted(cvLocalMax[0].tolist()+cvLocalMin[0].tolist()+endpoints)
    #cvExtrema = [x for x in cvExtremaRaw[1:] if x-cvExtremaRaw[0]>=minTemporalwindow]
    #cvExtrema = [cvExtremaRaw[0]]+cvExtrema
    #print(f'CV Extrema: {cvExtrema}')

    yAreaCVNormLocalMax = argrelmax(yAreaCVNormAvg)
    yAreaCVNormLocalMin = argrelmin(yAreaCVNormAvg)
    endpoints = []
    if np.all(yAreaCVNormAvg[0] <= yAreaCVNormAvg[1:window]) or np.all(
            yAreaCVNormAvg[0] >= yAreaCVNormAvg[1:window]):
        endpoints.append(0)
    if np.all(yAreaCVNormAvg[-1] <= yAreaCVNormAvg[
            len(yAreaCVNormAvg) - 1 - window:-2]) or np.all(
                yAreaCVNormAvg[-1] >= yAreaCVNormAvg[
                    len(yAreaCVNormAvg) - 1 - window:-2]):
        endpoints.append(len(yAreaCVNormAvg) - 1)
    yAreaCVNormExtremaRaw = sorted(yAreaCVNormLocalMax[0].tolist() +
                                   yAreaCVNormLocalMin[0].tolist() + endpoints)
    yAreaCVNormExtrema = [
        x for x in yAreaCVNormExtremaRaw[1:]
        if x - yAreaCVNormExtremaRaw[0] >= minTemporalwindow
    ]
    yAreaCVNormExtrema = [yAreaCVNormExtremaRaw[0]] + yAreaCVNormExtrema
    print(f'AreaCVNorm Extrema: {yAreaCVNormExtrema}')
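    # The three blocks above repeat the same extrema-plus-endpoints logic; a
    # hypothetical helper (names illustrative, not from the original source)
    # could consolidate them:
    #   def find_extrema(smoothed, window, min_separation):
    #       candidates = sorted(argrelmax(smoothed)[0].tolist()
    #                           + argrelmin(smoothed)[0].tolist())
    #       if np.all(smoothed[0] <= smoothed[1:window]) or \
    #               np.all(smoothed[0] >= smoothed[1:window]):
    #           candidates.insert(0, 0)
    #       if np.all(smoothed[-1] <= smoothed[-window - 1:-2]) or \
    #               np.all(smoothed[-1] >= smoothed[-window - 1:-2]):
    #           candidates.append(len(smoothed) - 1)
    #       candidates = sorted(candidates)
    #       kept = [x for x in candidates[1:]
    #               if x - candidates[0] >= min_separation]
    #       return [candidates[0]] + kept
    # e.g. areaExtrema = find_extrema(yAreaAvg, window, minTemporalwindow)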

    # Find slopes of Build-up Lines
    # 	Area
    xArea = np.array(datetimes[window // 2:-window // 2])[np.array(
        [areaExtrema[0], areaExtrema[1]]
    )]  # grab the datetimes (x component) at the first two extrema; the smoothed curve is already trimmed by the window, so the same bounds are applied to the datetimes
    xAreaDiff = xArea[1] - xArea[
        0]  # subtract the earlier value from the later to get delta x
    yArea = yAreaAvg[np.array(
        [areaExtrema[0], areaExtrema[1]]
    )]  # grab the smoothed-curve values (y component) at the same two extrema
    yAreaDiff = yArea[1] - yArea[0]  # subtract to find delta y
    slopeArea = np.arctan(yAreaDiff /
                          xAreaDiff.total_seconds())  # calc the slope angle
    print(f'Slope Area: {slopeArea}')

    #   Reflectivity
    xRef = np.array(datetimes[window // 2:-window // 2])[np.array(
        [refExtrema[0], refExtrema[1]])]
    xRefDiff = xRef[1] - xRef[0]
    yRef = yRefAvg[np.array([refExtrema[0], refExtrema[1]])]
    yRefDiff = yRef[1] - yRef[0]
    slopeRef = np.arctan(yRefDiff / xRefDiff.total_seconds())
    print(f'Slope Reflectivity: {slopeRef}')

    # 	Product of Area and CV of ref
    xProduct = np.array(datetimes[window // 2:-window // 2])[np.array(
        [yAreaCVNormExtrema[0], yAreaCVNormExtrema[1]])]
    xProductDiff = xProduct[1] - xProduct[0]
    yProduct = yAreaCVNormAvg[np.array(
        [yAreaCVNormExtrema[0], yAreaCVNormExtrema[1]])]
    yProductDiff = yProduct[1] - yProduct[0]
    slopeProduct = np.arctan(yProductDiff / xProductDiff.total_seconds())
    print(f'Slope Product: {slopeProduct}')
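    # The three slope calculations above share the same shape; a hypothetical
    # helper (illustrative, not from the original source) could factor it out:
    #   def buildup_slope(times, smoothed, extrema):
    #       """Slope angle (radians) between the first two extrema."""
    #       x0, x1 = np.array(times)[[extrema[0], extrema[1]]]
    #       y0, y1 = smoothed[[extrema[0], extrema[1]]]
    #       return np.arctan((y1 - y0) / (x1 - x0).total_seconds())
    #   slopeArea = buildup_slope(datetimes[window // 2:-window // 2],
    #                             yAreaAvg, areaExtrema)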

    print('Plotting Additional Data and Saving Output...')
    # Area for Reflectivity ≥ 35 dBZ
    axes[-1][-5].plot_date(datetimes, areaValues, linestyle='solid', ms=2)
    axes[-1][-5].plot_date(datetimes[window // 2:-window // 2],
                           yAreaAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-5].plot_date(
        np.array(datetimes[window // 2:-window // 2])[np.array(
            [areaExtrema[0], areaExtrema[1]])],
        yAreaAvg[np.array([areaExtrema[0], areaExtrema[1]])],
        linestyle="solid",
        ms=2)
    axes[-1][-5].legend(['Area Delta', 'Sm. Area Delta', 'Build-up Rate'])
    axes[-1][-5].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-5].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-5].set_title('Area of Reflectivity ≥ 35 dBZ (km²)')

    # TODO: map y axis to dBZ for output
    # Mean of Reflectivity ≥ 35 dBZ
    axes[-1][-4].plot_date(datetimes, refValues, linestyle='solid', ms=2)
    #axes[-1][-4].plot_date(datetimes[window//2:-window//2], yRefAvg, linestyle='solid', ms=2)
    #axes[-1][-4].plot_date(np.array(datetimes[window//2:-window//2])[np.array([0,refLocalMax[0][0]])], yRefAvg[np.array([0,refLocalMax[0][0]])], linestyle="solid", ms=2)
    axes[-1][-4].plot_date(datetimes[window // 2:-window // 2],
                           yRefAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-4].plot_date(np.array(
        datetimes[window // 2:-window // 2])[np.array(
            [refExtrema[0], refExtrema[1]])],
                           yRefAvg[np.array([refExtrema[0], refExtrema[1]])],
                           linestyle="solid",
                           ms=2)
    axes[-1][-4].legend(['Ref Delta', 'Sm. Ref Delta', 'Build-up Rate'])
    axes[-1][-4].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-4].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-4].set_title('Mean of Reflectivity ≥ 35 dBZ')

    # Product of cv reflectivity and area
    axes[-1][-3].plot_date(datetimes,
                           areaCVValuesNormalized,
                           linestyle='solid',
                           ms=2)
    axes[-1][-3].plot_date(datetimes[window // 2:-window // 2],
                           yAreaCVNormAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-3].plot_date(
        np.array(datetimes[window // 2:-window // 2])[np.array(
            [yAreaCVNormExtrema[0], yAreaCVNormExtrema[1]])],
        yAreaCVNormAvg[np.array([yAreaCVNormExtrema[0],
                                 yAreaCVNormExtrema[1]])],
        linestyle="solid",
        ms=2)
    axes[-1][-3].legend(
        ['Area*cv_Ref Delta', 'Sm. Area*cv_Ref Delta', 'Build-up Rate'])
    axes[-1][-3].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-3].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-3].set_title('Norm Product: CV Reflectivity * Area ≥ 35 dBZ')

    # Coeff. of Variation of Reflectivity ≥ 35 dBZ
    axes[-1][-2].plot_date(datetimes, cvValues, linestyle='solid', ms=2)
    axes[-1][-2].plot_date(datetimes[window // 2:-window // 2],
                           yCVAvg,
                           linestyle='solid',
                           ms=2)
    axes[-1][-2].legend(['CV Delta', 'Sm. CV Delta'])
    axes[-1][-2].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-2].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-2].set_title('CV of Reflectivity ≥ 35 dBZ')

    # Testing plot
    axes[-1][-1].semilogy(xf[1:N // 2], 2.0 / N * np.abs(yf[1:N // 2]), '-b')
    axes[-1][-1].semilogy(xf[1:N // 2], 2.0 / N * np.abs(ywf[1:N // 2]), '-r')
    axes[-1][-1].legend(['FFT', 'FFT w. Window'])
    #axes[-1][-1].plot(xf, 2.0/N * np.abs(yf[0:N//2]),linestyle='solid', ms=2)
    #axes[-1][-1].plot_date(datetimes, yCVAvg, linestyle='solid')
    #axes[-1][-1].xaxis.set_major_formatter(date_format)
    plt.setp(axes[-1][-1].xaxis.get_majorticklabels(),
             rotation=45,
             ha="right",
             rotation_mode="anchor")
    axes[-1][-1].set_title('Testing Plot (Frequency)')

    plt.tight_layout()
    plt.savefig(args["output"] + 'Nexrad.png')  # Set the output file name
    #plt.show()

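    # Append one tab-separated line per run; columns, in order: start
    # datetime, convLatLon, convBearing, scaleFactor, max area, max mean
    # reflectivity, then the area, reflectivity, and product build-up slopes.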
    with open(args["output"] + 'log_stats_area_nexrad.txt', 'a') as f_o:
        f_o.write(datetimes[0].strftime("%Y%m%d%H%M%S") + '\t' +
                  str(args["convLatLon"]) + '\t' + str(args["convBearing"]) +
                  '\t' + str(args["scaleFactor"]) + '\t' +
                  str(np.max(areaValues)) + '\t' + str(np.max(refValues)) +
                  '\t' + str(slopeArea) + '\t' + str(slopeRef) + '\t' +
                  str(slopeProduct) + '\n')
Example #32
File: test_util.py Project: zhatin/MetPy
def test_add_timestamp_high_contrast():
    """Test adding a timestamp to an axes object."""
    fig = plt.figure(figsize=(9, 9))
    ax = plt.subplot(1, 1, 1)
    add_timestamp(ax, time=datetime(2017, 1, 1), high_contrast=True)
    return fig
Example #33
#Import for colortables
from metpy.plots import colortables

# Import for the bonus exercise
from metpy.plots import add_timestamp

# Assumed from earlier cells of this example (not shown here): matplotlib,
# cartopy, and datetime imports, plus the variables proj, dat, x, y, ds,
# and channel
import matplotlib.pyplot as plt
import cartopy.feature as cfeature
from datetime import datetime

fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1, projection=proj)
ax.add_feature(cfeature.COASTLINE.with_scale('50m'), linewidth=2)
ax.add_feature(cfeature.STATES.with_scale('50m'), linestyle=':', edgecolor='black')
ax.add_feature(cfeature.BORDERS.with_scale('50m'), linewidth=2, edgecolor='black')

im = ax.imshow(dat, extent=(x.min(), x.max(), y.min(), y.max()), origin='upper')
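# The extent argument above maps the image onto the projection's x/y
# coordinates, and origin='upper' keeps the first array row at the top.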

wv_cmap = colortables.get_colortable('WVCIMSS_r')
im.set_cmap(wv_cmap)

#Bonus
start_time = datetime.strptime(ds.start_date_time, '%Y%j%H%M%S')
add_timestamp(ax, time=start_time, pretext=f'GOES-16 Ch. {channel} ',
              high_contrast=True, fontsize=16, y=0.01)

plt.show()