def setUp(self):
    self.time = Time(timeFrom=TIME_FROM, timeTo=TIME_TO)
    self.targets = [Target(format = Target.DEFAULT_FORMAT, rawQuery = True, rawSql = Target.DEFAULT_RAW_SQL, refId = Target.DEFAULT_REFID)]
    self.gridPos = Grid_Position()
    self.panels = [Panel(targets = self.targets, gridPos = self.gridPos)]
    self.properties = Dashboard_Properties(time = self.time)
    self.dashboard = Dashboard(panels = self.panels, properties = self.properties)
Example #2
def get_PhenomenonTime(grbs, deep_check=False):
    """Get the Phenomenon Times for a collection of related grbs"""
    if deep_check:
        # Search all grbs to ensure the stride is consistent and dates are in
        # order
        pass
    start_date = str(grbs[0].dataDate)
    start_hour = str(grbs[0].hour).zfill(2)
    end_date = str(grbs[-1].dataDate)
    end_hour = str(grbs[-1].hour).zfill(2)

    ftime = timedelta(hours=grbs[0].forecastTime)

    start = Time.str_to_datetime(start_date + start_hour)
    end = Time.str_to_datetime(end_date + end_hour)
    start = start + ftime
    end = end + ftime
    stride = Time.ONE_DAY
    return Time.PhenomenonTime(start_time=start, end_time=end, stride=stride)
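These GRIB helpers all build a "YYYYMMDDHH" string from the first and last record before parsing it. A minimal sketch of that parsing step, assuming Time.str_to_datetime simply wraps datetime.strptime (the real implementation lives in the project's core Time module):

from datetime import datetime

def str_to_datetime(stamp):
    # Hypothetical stand-in for Time.str_to_datetime:
    # "2023010112" -> datetime(2023, 1, 1, 12, 0)
    return datetime.strptime(stamp, "%Y%m%d%H")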
Example #3
def get_ResultTime(grbs, deep_check=False):
    """Get the Result Times for a collection of related grbs"""
    start_date = str(grbs[0].dataDate)
    start_hour = str(grbs[0].hour).zfill(2)
    end_date = str(grbs[-1].dataDate)
    end_hour = str(grbs[-1].hour).zfill(2)

    start = start_date + start_hour
    end = end_date + end_hour
    stride = Time.ONE_DAY
    return Time.ResultTime(start_time=start, end_time=end, stride=stride)
Example #4
def get_ValidTime(grbs, deep_check=False):
    """Get the Valid Times for a collection of related grbs"""
    # refer to grb attribute: validityDate and validityTime
    start_date = str(grbs[0].dataDate)
    start_hour = str(grbs[0].hour).zfill(2)
    end_date = str(grbs[-1].dataDate)
    end_hour = str(grbs[-1].hour).zfill(2)

    start = start_date + start_hour
    end = end_date + end_hour
    stride = Time.ONE_DAY
    offset = timedelta(seconds=(grbs[0].forecastTime * Time.ONE_HOUR))
    return Time.ValidTime(start_time=start,
                          end_time=end,
                          stride=stride,
                          offset=offset)
Example #5
def get_ForecastReferenceTime(grbs, deep_check=False):
    """Get the ForecastReference Times for a collection of related grbs"""
    if deep_check:
        # Search all grbs to ensure the stride is consistent and dates are in
        # order
        pass
    start_date = str(grbs[0].dataDate)
    start_hour = str(grbs[0].hour).zfill(2)
    end_date = str(grbs[-1].dataDate)
    end_hour = str(grbs[-1].hour).zfill(2)

    start = start_date + start_hour
    end = end_date + end_hour
    stride = Time.ONE_DAY
    return Time.ForecastReferenceTime(start_time=start,
                                      end_time=end,
                                      stride=stride)
Example #6
def work_time(self):
    return str(Time(self.start_time, ago=True))
Example #7
    def test_time_generation(self):

        #Seconds in a year
        YEAR_SEC = 31556926

        mysql_manager = MySQL_Manager(host = HOST)

        for query in CLEANUP_QUERIES:
            mysql_manager.execute_query(query)
        
        # time_from and time_to come back as lists of tuples, e.g. [(time in seconds, 0)]
        time_from_seconds = mysql_manager.execute_query('select min(time_in) from packetrecords')[1][0]
        time_to_seconds = mysql_manager.execute_query('select max(time_in) from packetrecords')[1][0]

        #Convert to date format
        year_from = 1970 + (time_from_seconds // YEAR_SEC)
        year_to = 1970 + 1 + (time_to_seconds // YEAR_SEC)

        assert year_from < year_to

        time_from = self.get_formatted_time(year_from)
        time_to = self.get_formatted_time(year_to)

        print(time_from, "\n", time_to)
        
        TEST_QUERY = TEST_QUERY2 if year_to < 2038 else TEST_QUERY1
        OTHER_TEST_QUERY = TEST_QUERY4 if year_to < 2038 else TEST_QUERY3

        dashboard = Dashboard(
            properties=Dashboard_Properties(
                title="Packet Capture Microburst Sync Incast 1",
                time=Time(timeFrom=time_from, timeTo=time_to)),
            panels=[Panel(title="Queue Depth", targets=[Target(rawSql=TEST_QUERY)]),
                    Panel(title="Link Utilization", targets=[Target(rawSql=OTHER_TEST_QUERY)])])
        payload = get_final_payload(dashboard)
        print(payload)
        response = requests.request("POST", url=URL, headers=headers, data = payload)
        json_response = str(response.text.encode('utf8'))
        print(json_response)
        self.assertTrue("success" in json_response)
Example #8
    def test_dashboard(self):

        dashboard = Dashboard(
            properties=Dashboard_Properties(
                title="Test Dashboard",
                time=Time(timeFrom="2039-10-01", timeTo="2042-01-01")),
            panels=[Panel(title="My sample panel", targets=[Target(rawSql='select * from links')])])
        payload = get_final_payload(dashboard)
        print(payload)
        response = requests.request("POST", url=URL, headers=headers, data = payload)
        json_response = str(response.text.encode('utf8'))
        print(json_response)
        self.assertTrue("success" in json_response)
Example #9
class Test_Core(unittest.TestCase):

    def setUp(self):
        self.time = Time(timeFrom=TIME_FROM, timeTo=TIME_TO)
        self.targets = [Target(format = Target.DEFAULT_FORMAT, rawQuery = True, rawSql = Target.DEFAULT_RAW_SQL, refId = Target.DEFAULT_REFID)]
        self.gridPos = Grid_Position()
        self.panels = [Panel(targets = self.targets, gridPos = self.gridPos)]    
        self.properties = Dashboard_Properties(time = self.time)
        self.dashboard = Dashboard(panels = self.panels, properties = self.properties)

    def tearDown(self):
        pass

    def test_dashboard(self):

        dashboard = Dashboard(
            properties=Dashboard_Properties(
                title="Test Dashboard",
                time=Time(timeFrom="2039-10-01", timeTo="2042-01-01")),
            panels=[Panel(title="My sample panel", targets=[Target(rawSql='select * from links')])])
        payload = get_final_payload(dashboard)
        print(payload)
        response = requests.request("POST", url=URL, headers=headers, data = payload)
        json_response = str(response.text.encode('utf8'))
        print(json_response)
        self.assertTrue("success" in json_response)

    def test_time_generation(self):

        #Seconds in a year
        YEAR_SEC = 31556926

        mysql_manager = MySQL_Manager(host = HOST)

        for query in CLEANUP_QUERIES:
            mysql_manager.execute_query(query)
        
        # time_from and time_to come back as lists of tuples, e.g. [(time in seconds, 0)]
        time_from_seconds = mysql_manager.execute_query('select min(time_in) from packetrecords')[1][0]
        time_to_seconds = mysql_manager.execute_query('select max(time_in) from packetrecords')[1][0]

        #Convert to date format
        year_from = 1970 + (time_from_seconds // YEAR_SEC)
        year_to = 1970 + 1 + (time_to_seconds // YEAR_SEC)

        assert year_from < year_to

        time_from = self.get_formatted_time(year_from)
        time_to = self.get_formatted_time(year_to)

        print(time_from, "\n", time_to)
        
        TEST_QUERY = TEST_QUERY2 if year_to < 2038 else TEST_QUERY1
        OTHER_TEST_QUERY = TEST_QUERY4 if year_to < 2038 else TEST_QUERY3

        dashboard = Dashboard(
            properties=Dashboard_Properties(
                title="Packet Capture Microburst Sync Incast 1",
                time=Time(timeFrom=time_from, timeTo=time_to)),
            panels=[Panel(title="Queue Depth", targets=[Target(rawSql=TEST_QUERY)]),
                    Panel(title="Link Utilization", targets=[Target(rawSql=OTHER_TEST_QUERY)])])
        payload = get_final_payload(dashboard)
        print(payload)
        response = requests.request("POST", url=URL, headers=headers, data = payload)
        json_response = str(response.text.encode('utf8'))
        print(json_response)
        self.assertTrue("success" in json_response)

    def get_formatted_time(self, year):
        return "{}-{}-{}".format(year, "01", "01")

    def test_get_json_string(self):
        self.assertTrue(is_json("{" + self.time.get_json_string() + "}"))
        for target in self.targets:
            self.assertTrue(is_json("{" + target.get_json_string() + "}"))
        self.assertTrue(is_json("{" + self.gridPos.get_json_string() + "}"))
        for panel in self.panels:
            self.assertTrue(is_json("{" + panel.get_json_string() + "}"))
            self.assertTrue(is_json("{" + panel.xaxis.get_json_string() + "}"))
        print(self.dashboard.get_json_string())
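A minimal entry point for running this suite directly, assuming HOST, URL, headers, and the TEST_QUERY* constants are defined elsewhere in the module and that a Grafana endpoint is reachable at URL:

if __name__ == '__main__':
    # Requires a reachable MySQL host and Grafana API endpoint.
    unittest.main()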
Example #10
def get_LeadTime(lead_time_data, deep_check=False):
    """Get the Lead Times from a collection of related grbs"""
    return Time.LeadTime(data=lead_time_data)
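Elsewhere in this file (see convert_grib2 below) the lead-time data is passed in seconds. A hedged usage sketch, assuming lead_times is a list of forecast hours and Time.ONE_HOUR equals 3600:

import numpy as np

# Convert forecast hours to seconds before wrapping them in a LeadTime object.
ltime = get_LeadTime(np.array(lead_times) * Time.ONE_HOUR)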
Example #11
def convert_grib2(control, grb2):
    """
    Converts a list of ncepgrib2.Grib2Message instances (i.e. GRIB2 records) into Wisps
    Data and writes to NetCDF.
    """

    # Get lead times
    lead_time_range = control['lead_time']
    print(lead_time_range)
    lead_times = range(lead_time_range[0], lead_time_range[1] + 1,
                       lead_time_range[2])
    print(lead_times)

    all_objs = []  # Collection of Wisps_data objects
    logging.info(
        "Creating Wisps-data objects for variables at each projection")

    # Read dimensions
    dimensions = yamlutil.read_dimensions()

    # Loop through each forecast hour.
    # Will typically only do something every third forecast hour, but is not
    # limited.
    data_dict = {}
    for nlead, hour in enumerate(lead_times):

        # Loop through each variable in every forecast hour
        print "Creating variables for lead time:", hour
        for n, grb in enumerate(grb2):

            # At the first lead time, first GRIB2 Message, get the lats and lons and grid x, y
            if nlead == 0 and n == 0:
                lats, lons = grb.grid()
                x_proj_data, y_proj_data = get_projection_data(grb)

                # Add latitude to WISPS data object
                latobj = Wisps_data('latitude')
                latobj.data = lats
                latobj.dimensions = ['y', 'x']
                all_objs.append(latobj)

                # Add longitude to WISPS data object
                lonobj = Wisps_data('longitude')
                lonobj.data = lons
                lonobj.dimensions = ['y', 'x']
                all_objs.append(lonobj)

                # Add x to WISPS data object
                x_obj = Wisps_data('x')
                x_obj.dimensions = ['x']
                x_obj.data = x_proj_data
                all_objs.append(x_obj)

                # Add y to WISPS data object
                y_obj = Wisps_data('y')
                y_obj.dimensions = ['y']
                y_obj.data = y_proj_data
                all_objs.append(y_obj)

                # Add lead_time to WISPS data object
                tobj = Time.LeadTime(data=np.array(lead_times) * 60 * 60)
                all_objs.append(tobj)

                #logging.info("Writing Latitude and Longitude variables to "+control['output'])

            # Get the Init date in YYYYMMDDHH
            grib_init_date = (grb.identification_section[5]*1000000)+\
                             (grb.identification_section[6]*10000)+\
                             (grb.identification_section[7]*100)+\
                             (grb.identification_section[8]*1)

            # Get the lead time in hours.
            if grb.product_definition_template_number == 0:
                grib_lead_time = int(
                    _grib2_table_4_4[grb.product_definition_template[7]] *
                    grb.product_definition_template[8])
            elif grb.product_definition_template_number == 8:
                grib_lead_time = int(
                    (_grib2_table_4_4[grb.product_definition_template[7]] *
                     grb.product_definition_template[8]) +
                    (_grib2_table_4_4[grb.product_definition_template[25]] *
                     grb.product_definition_template[26]))
            elif grb.product_definition_template_number == 11:
                grib_lead_time = int(
                    (_grib2_table_4_4[grb.product_definition_template[7]] *
                     grb.product_definition_template[8]) +
                    (_grib2_table_4_4[grb.product_definition_template[28]] *
                     grb.product_definition_template[29]))

            # Check if the lead time for this GRIB2 record is what we want to archive.
            # If not, move to the next iteration.
            if grib_lead_time != hour: continue

            #convert lead time to seconds
            fcst_time = grib_lead_time * 60 * 60

            # Calculate the ValidTime
            epochtime = Time.epoch_time(str(grib_init_date))
            valid_time = epochtime + fcst_time

            # Add phenomenon time
            phenom_time = valid_time
            ptime = Time.PhenomenonTime(data=phenom_time)

            # Add result time
            rtime = Time.ResultTime(data=epochtime)

            # Create a reduced GUMI string (reduced GUMI has no model or lead time info).
            # Then pass the reduced GUMI to Wisp_data for matching for a valid WISPS
            # data object.
            wisps_gumi = reduce_gumi(construct_gumi(grb))
            if wisps_gumi in yamlutil.read_variables():
                obj = Wisps_data(wisps_gumi)
                obj.data = grb.data(fill_value=9999., masked_array=False)
                obj.dimensions.append('y')
                obj.dimensions.append('x')
                obj.dimensions.append('lead_times')
                obj.add_fcstTime(fcst_time)
                obj.time.append(ptime)
                obj.time.append(rtime)
                if control.processes:
                    [obj.add_process(p) for p in control.processes]
                #pdb.set_trace()
                #NEWobj.add_dimensions('lead_time','lon','lat')
                print(obj.standard_name)

                # If len all_objs is zero, then we have our first WISPS data object so
                # just append; else we need to try to match the data to an existing
                # WISPS data object.
                if len(all_objs) == 0:
                    all_objs.append(obj)
                else:
                    for nn, o in enumerate(all_objs):
                        if wisps_gumi == o.name:
                            if len(o.data.shape) == 2:
                                #pdb.set_trace()
                                o.data = np.expand_dims(o.data, axis=2)
                                o.data = np.dstack((o.data,
                                                    np.expand_dims(grb.data(
                                                        fill_value=9999.,
                                                        masked_array=False),
                                                                   axis=2)))
                            else:
                                o.data = np.dstack((o.data,
                                                    np.expand_dims(grb.data(
                                                        fill_value=9999.,
                                                        masked_array=False),
                                                                   axis=2)))
                            break
                    else:
                        all_objs.append(obj)

    # IMPORTANT: At this point, we have a list of WISPS Data objects where each object
    # should have a data attribute array that is 3 dimensional (y,x,lead_time).

    print "SIZE OF ALL_OBJS = ", len(all_objs)
    for ob in all_objs:
        print ob

    # Test NetCDF file output
    writer.write(all_objs, control['output'])

    exit(0)

    # ----------------------------------------------------------------------------------------
    # ----------------------------------------------------------------------------------------
    # RILEY'S CODE BELOW...SOME USEABLE...SOME NOT
    # ----------------------------------------------------------------------------------------
    # ----------------------------------------------------------------------------------------

    ##
    # Format and Write values to NetCDF file
    ##

    # Get standard dimension names
    dimensions = yamlutil.read_dimensions()
    lat = dimensions['lat']
    lon = dimensions['lon']
    lead_time_dim = dimensions['lead_time']
    time = dimensions['time']
    x_proj = dimensions['x_proj']
    y_proj = dimensions['y_proj']

    x_proj_data, y_proj_data = get_projection_data(tmp_grb)

    #convert to seconds
    fcst_time = run_time
    fcst_time = fcst_time * 60 * 60
    values = lead_times[0].values()[0]
    for name, grb_dict in data_dict.items():
        stacked = grb_dict['data']
        stacked = np.array(stacked)
        stacked = np.swapaxes(stacked, 0, 2)
        stacked = np.swapaxes(stacked, 0, 1)
        lead_time = grb_dict['lead_time']
        lead_time = np.array([x * Time.ONE_HOUR for x in lead_time])
        valid_time = np.vstack(grb_dict['valid_time'])
        for i, arr in enumerate(valid_time):
            for j, val in enumerate(arr):
                valid_time[i, j] = Time.epoch_time(val)

        valid_time = valid_time.astype(int)
        phenom_time = valid_time

        # Now get a generic name
        name = get_levelless_forecast_hash(grb_dict['example_grb'])
        logging.info(name)
        dtype = grb_dict['dtype']
        obj = Wisps_data(name)
        #obj.add_source('GFS')
        obj.add_process('GFSModProcStep1')
        obj.add_process('GFSModProcStep2')
        obj.add_fcstTime(fcst_time)
        obj.dimensions = [y_proj, x_proj, lead_time_dim, time]

        # Add Vertical coordinate(s)
        vert_coords = grb_dict['level']
        vert_units = grb_dict['level_units']
        if 'Pa' in vert_units:
            vert_type = 'plev'
        else:
            vert_type = 'elev'
        ### TODO: Find which key codes for the 'cell_method' of the vertical level and add below back
        #if len(vert_coords) > 1:
        #    obj.add_coord(vert_coords[0], vert_coords[1], vert_type)
        #elif len(vert_coords) == 1:
        obj.add_coord(vert_coords[0], vert_type=vert_type)

        # Add units
        obj.metadata['units'] = grb_dict['units']

        # Add data
        try:
            obj.add_data(stacked)
            obj.change_data_type(dtype)
            all_objs.append(obj)
        except Exception:
            logging.warning('not a numpy array')

        if example_grb.startStep != example_grb.endStep:
            # Then we know it's time bounded
            pass

        # Add PhenomononTime
        #ptime = get_PhenomenonTime(values)
        #obj.time.append(ptime)
        ptime = Time.PhenomenonTime(data=phenom_time)
        obj.time.append(ptime)

        # Add ResultTime
        rtime = get_ResultTime(values)
        obj.time.append(rtime)

        # Add ValidTime
        vstart = valid_time.copy()
        vend = valid_time.copy()
        for i, j in enumerate(vstart[0]):
            vstart[:, i] = rtime.data[i]
        valid_time = np.dstack((vstart, vend))
        vtime = Time.ValidTime(data=valid_time)
        #vtime = get_ValidTime(values)
        obj.time.append(vtime)

        # Add ForecastReferenceTime
        ftime = get_ForecastReferenceTime(values)
        obj.time.append(ftime)

        # Add LeadTime
        ltime = get_LeadTime(lead_time)
        obj.time.append(ltime)

    all_objs = write_projection_data(all_objs)

    # Make longitude and latitude variables
    lat = Wisps_data('latitude')
    lon = Wisps_data('longitude')
    lat.dimensions = ['y', 'x']
    lon.dimensions = ['y', 'x']
    lat_lon_data = tmp_grb.latlons()
    lat.data = lat_lon_data[0]
    lon.data = lat_lon_data[1]
    all_objs.append(lat)
    all_objs.append(lon)

    # Make x and y projection variables
    x_obj = Wisps_data('x')
    x_obj.dimensions = ['x']
    x_obj.data = x_proj_data
    all_objs.append(x_obj)

    y_obj = Wisps_data('y')
    y_obj.dimensions = ['y']
    y_obj.data = y_proj_data
    all_objs.append(y_obj)

    outfile = outpath + get_output_filename(year, month)
    writer.write(all_objs, outfile, write_to_db=True)
Example #12
import os
import sys

from loc_dataframe import *

file_dir = os.path.dirname(os.path.realpath(__file__))
relative_path = '/..'
path = os.path.abspath(file_dir + relative_path)
sys.path.insert(0, path)

import registry.util as cfg
from mospred import read_pred as read_pred
from core import Time as Time

#read graphs control file
ctrl = cfg.read_yaml('../registry/graphs.yaml')
date_range = ctrl.date_range
start, end, stride = read_pred.parse_range(date_range)
start = Time.str_to_datetime(start)
end = Time.str_to_datetime(end)

#--------------------------------------------------------------------------
# Check whether the user is subsetting geographically.
# If so, look for an existing file; if it isn't found, build a subsetted dataframe.
# If not, just look for the file; if it isn't found, build the full dataframe.
# Create a suffix that will appear in the names of all files created by this run.
#--------------------------------------------------------------------------
# Runs when the user specifies a region based on lat/lon
if ctrl.input_loc:
    data_name = 'loc_' + str(start)[0:4] + str(start)[5:7] + str(
        start)[8:10] + str(start)[11:13] + '_' + str(end)[0:4] + str(
            end)[5:7] + str(end)[8:10] + str(end)[11:13] + '_' + str(
                ctrl.lead_time) + 'hrs_' + str(ctrl.LCOlat) + '_' + str(
                    ctrl.UCOlat) + '_' + str(ctrl.LCOlon) + '_' + str(