Example #1
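This method belongs to a larger collector class: it queries the NDBC SOS service for one site, maps the returned CSV columns onto xenia observation types, and writes multi_obs records through the db_obj wrapper. The module-level context is not part of the excerpt; below is a hedged sketch of the imports it appears to rely on (the xenia pieces are an assumption).

# Assumed imports for this excerpt; the xenia module path is a guess, not
# confirmed by the example itself.
import csv
import time
import logging
from datetime import datetime, timedelta

from pytz import timezone
from pyoos.collectors.ndbc.ndbc_sos import NdbcSos
# from xeniaSQLAlchemy import multi_obs  # assumption: xenia ORM record type
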
    def get_ndbc_data(self, site, observations, begin_date, units_coverter,
                      db_obj):
        start_time = time.time()
        logger = logging.getLogger(self.__class__.__name__)
        logger.debug("Starting get_ndbc_data")

        row_entry_date = datetime.now()
        utc_tz = timezone('UTC')
        eastern_tz = timezone('US/Eastern')

        platform_handle = 'ndbc.%s.met' % (site)
        #if db_obj.platformExists(platform_handle) == -1:
        if db_obj.platformExists(platform_handle) is None:
            obs_list = []
            for obs_setup in observations:
                if site in obs_setup['sites']:
                    for xenia_obs in obs_setup['xenia_obs']:
                        obs_list.append({
                            'obs_name': xenia_obs['xenia_name'],
                            'uom_name': xenia_obs['xenia_units'],
                            's_order': 1
                        })
            db_obj.buildMinimalPlatform(platform_handle, obs_list)
        #Build sensor_id and m_type_id list.
        for obs_setup in observations:
            if site in obs_setup['sites']:
                for xenia_obs in obs_setup['xenia_obs']:
                    m_type_id = db_obj.mTypeExists(xenia_obs['xenia_name'],
                                                   xenia_obs['xenia_units'])
                    sensor_id = db_obj.sensorExists(xenia_obs['xenia_name'],
                                                    xenia_obs['xenia_units'],
                                                    platform_handle, 1)
                    xenia_obs['m_type_id'] = m_type_id
                    xenia_obs['sensor_id'] = sensor_id
        sos_query = NdbcSos()
        # dates.sort(reverse=True)

        logger.debug("Query site: %s for date: %s" % (site, begin_date))
        sos_query.clear()
        #utc_end_date = begin_date.astimezone(utc_tz) + timedelta(hours=24)
        utc_end_date = begin_date.astimezone(utc_tz)
        start_date = begin_date.astimezone(utc_tz) - timedelta(hours=24)

        for obs_setup in observations:
            if site in obs_setup['sites']:
                date_ndx = None
                value_ndx = None
                lat_ndx = None
                lon_ndx = None
                depth_ndx = None

                sos_query.filter(features=[site],
                                 start=start_date,
                                 end=utc_end_date,
                                 variables=[obs_setup['sos_obs_query']])
                try:
                    # results = nos_query.collect()
                    response = sos_query.raw(responseFormat="text/csv")
                except Exception as e:
                    logger.exception(e)
                else:
                    csv_reader = csv.reader(
                        response.decode('utf-8').split('\n'), delimiter=',')
                    line_cnt = 0

                    for row in csv_reader:
                        for xenia_obs_setup in obs_setup['xenia_obs']:
                            obs_type = xenia_obs_setup['xenia_name']
                            uom_type = xenia_obs_setup['xenia_units']
                            s_order = 1

                            if line_cnt > 0 and len(row):
                                obs_date = datetime.strptime(
                                    row[date_ndx], '%Y-%m-%dT%H:%M:%SZ')
                                try:
                                    obs_val = float(row[value_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    obs_val = 0.0
                                # Default position/depth in case the columns
                                # are missing or cannot be parsed.
                                depth = 0
                                latitude = 0.0
                                longitude = 0.0
                                try:
                                    if depth_ndx is not None:
                                        depth = float(row[depth_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                try:
                                    if lat_ndx is not None:
                                        latitude = float(row[lat_ndx])
                                    if lon_ndx is not None:
                                        longitude = float(row[lon_ndx])
                                except ValueError as e:
                                    logger.exception(e)

                                obs_rec = multi_obs(
                                    row_entry_date=row_entry_date,
                                    platform_handle=platform_handle,
                                    sensor_id=xenia_obs_setup['sensor_id'],
                                    m_type_id=xenia_obs_setup['m_type_id'],
                                    m_date=obs_date.strftime(
                                        '%Y-%m-%dT%H:%M:%S'),
                                    m_lon=longitude,
                                    m_lat=latitude,
                                    m_z=depth,
                                    m_value=obs_val,
                                )

                                rec_id = db_obj.addRec(obs_rec, True)
                                if rec_id is not None:
                                    logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                                else:
                                    logger.error("Failed adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                            else:
                                if value_ndx is None:
                                    for ndx, val in enumerate(row):
                                        if val.lower().find(
                                                xenia_obs_setup['sos_obs_name']
                                        ) != -1:
                                            value_ndx = ndx
                                        if val.lower().find('date_time') != -1:
                                            date_ndx = ndx
                                        if val.lower().find('latitude') != -1:
                                            lat_ndx = ndx
                                        if val.lower().find('longitude') != -1:
                                            lon_ndx = ndx
                                        if val.lower().find('depth') != -1:
                                            depth_ndx = ndx
                        line_cnt += 1

        logger.debug("Finished get_ndbc_data in %f seconds" %
                     (time.time() - start_time))
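A minimal, hypothetical invocation sketch for the method above, assuming the owning collector instance, the ndbc_obs observation mapping, and an already-opened xenia database wrapper, none of which are shown in the excerpt:

from datetime import datetime
from pytz import timezone

# Hypothetical call; `collector`, `ndbc_obs` and `xenia_db` are assumptions.
# The units converter argument is unused in the excerpt, so None is passed.
begin_date = timezone('US/Eastern').localize(datetime(2017, 6, 1))
collector.get_ndbc_data('44013', ndbc_obs, begin_date, None, xenia_db)
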
Example #2

def get_ndbc_data(site, dates, units_coverter, db_obj):
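    # Assumed module-level context (not shown in this snippet): imports for
    # time, logging, csv, datetime/timedelta and pytz.timezone, the pyoos
    # NdbcSos collector, and an `ndbc_obs` configuration list that maps SOS
    # observation names to xenia observation and unit names.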
    start_time = time.time()
    logger = logging.getLogger(__name__)
    logger.debug("Starting get_ndbc_data")

    row_entry_date = datetime.now()
    utc_tz = timezone('UTC')
    eastern_tz = timezone('US/Eastern')

    platform_handle = 'ndbc.%s.met' % (site)
    if db_obj.platformExists(platform_handle) == -1:
        obs_list = []
        for obs_setup in ndbc_obs:
            if site in obs_setup['sites']:
                for xenia_obs in obs_setup['xenia_obs']:
                    obs_list.append({
                        'obs_name': xenia_obs['xenia_name'],
                        'uom_name': xenia_obs['xenia_units'],
                        's_order': 1
                    })
        db_obj.buildMinimalPlatform(platform_handle, obs_list)

    sos_query = NdbcSos()
    #dates.sort(reverse=True)
    dates.sort(reverse=True)
    for rec_date in dates:
        logger.debug("Query site: %s for date: %s" % (site, rec_date))
        sos_query.clear()
        utc_end_date = rec_date.astimezone(utc_tz) + timedelta(hours=24)
        start_date = rec_date.astimezone(utc_tz) - timedelta(hours=24)

        for obs_setup in ndbc_obs:
            if site in obs_setup['sites']:
                date_ndx = None
                value_ndx = None
                lat_ndx = None
                lon_ndx = None
                depth_ndx = None

                sos_query.filter(features=[site],
                                 start=start_date,
                                 end=utc_end_date,
                                 variables=[obs_setup['sos_obs_query']])
                try:
                    #results = nos_query.collect()
                    response = sos_query.raw(responseFormat="text/csv")
                except Exception as e:
                    logger.exception(e)
                else:
                    csv_reader = csv.reader(response.split('\n'),
                                            delimiter=',')
                    line_cnt = 0

                    for row in csv_reader:
                        for xenia_obs_setup in obs_setup['xenia_obs']:
                            obs_type = xenia_obs_setup['xenia_name']
                            uom_type = xenia_obs_setup['xenia_units']
                            s_order = 1

                            if line_cnt > 0 and len(row):
                                obs_date = datetime.strptime(
                                    row[date_ndx], '%Y-%m-%dT%H:%M:%SZ')
                                try:
                                    obs_val = float(row[value_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    obs_val = 0.0
                                logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                             (obs_type, uom_type, obs_date, obs_val, s_order))
                                depth = 0
                                if depth_ndx is not None:
                                    depth = float(row[depth_ndx])
                                if not db_obj.addMeasurement(
                                        obs_type,
                                        uom_type,
                                        platform_handle,
                                        obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                        float(row[lat_ndx]),
                                        float(row[lon_ndx]),
                                        depth, [obs_val],
                                        sOrder=s_order,
                                        autoCommit=True,
                                        rowEntryDate=row_entry_date):
                                    logger.error(db_obj.lastErrorMsg)
                            else:
                                if value_ndx is None:
                                    for ndx, val in enumerate(row):
                                        if val.lower().find(
                                                xenia_obs_setup['sos_obs_name']
                                        ) != -1:
                                            value_ndx = ndx
                                        if val.lower().find('date_time') != -1:
                                            date_ndx = ndx
                                        if val.lower().find('latitude') != -1:
                                            lat_ndx = ndx
                                        if val.lower().find('longitude') != -1:
                                            lon_ndx = ndx
                                        if val.lower().find('depth') != -1:
                                            depth_ndx = ndx
                        line_cnt += 1

    logger.debug("Finished get_ndbc_data in %f seconds" %
                 (time.time() - start_time))
Example #3
# <codecell>

start_time = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date, '%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# Define the SOS collector
collector = NdbcSos()
print(collector.server.identification.title)
collector.variables = data_dict["waves"]["sos_name"]
collector.server.identification.title

# If no start or end date is given in the filter, the most recent observation is returned
collector.filter(bbox=bounding_box, variables=data_dict["waves"]["sos_name"])

response = collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Now let's specify start and end times
collector.start_time = start_time
collector.end_time = end_time

ofrs = collector.server.offerings

# <codecell>

obs_loc_df.head()
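
The cell above sets start_time and end_time on the collector but stops before issuing the bounded query. Below is a follow-up sketch, assuming the same collector, bounding_box and data_dict, that reuses the filter/raw pattern shown in the other examples:

# Hypothetical follow-up cell; not part of the original notebook excerpt.
collector.filter(bbox=bounding_box,
                 start=start_time,
                 end=end_time,
                 variables=data_dict["waves"]["sos_name"])
response = collector.raw(responseFormat="text/csv")
waves_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                       parse_dates=True,
                       index_col='date_time')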
Example #4
ofrs = coops_collector.server.offerings

# Print the first 5 rows of the DataFrame
obs_loc_df.head()

# <markdowncell>

# #### Get NDBC Station Data

# <codecell>

ndbc_collector = NdbcSos()
ndbc_collector.variables = data_dict["winds"]["sos_name"]
ndbc_collector.server.identification.title
# If no start or end date is given in the filter, the most recent observation is returned
ndbc_collector.filter(bbox=bounding_box,
                      variables=data_dict["winds"]["sos_name"])

response = ndbc_collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Save the station info in a larger global dict
st_list = gather_station_info(obs_loc_df, st_list, "ndbc")

# Now let's specify start and end times
ndbc_collector.start_time = start_time
ndbc_collector.end_time = end_time

ofrs = ndbc_collector.server.offerings
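
The per-station time-series fetch is not shown in this excerpt. A rough sketch follows, assuming the observation DataFrame carries a station_id column (typical of NDBC SOS CSV responses) and reusing the filter/raw pattern from the examples above:

# Hypothetical per-station loop; the `station_id` column name is an assumption.
for station in obs_loc_df['station_id'].unique():
    ndbc_collector.filter(features=[station],
                          start=start_time,
                          end=end_time,
                          variables=data_dict["winds"]["sos_name"])
    station_csv = ndbc_collector.raw(responseFormat="text/csv")
    station_df = pd.read_csv(BytesIO(station_csv.encode('utf-8')),
                             parse_dates=True,
                             index_col='date_time')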
Example #5
ofrs = coops_collector.server.offerings

# Print the first 5 rows of the DataFrame
obs_loc_df.head()

# <markdowncell>

# #### Get NDBC Station Data

# <codecell>

ndbc_collector = NdbcSos()
ndbc_collector.variables = data_dict["winds"]["sos_name"]
ndbc_collector.server.identification.title
# If no start or end date is given in the filter, the most recent observation is returned
ndbc_collector.filter(bbox=bounding_box,
                      variables=data_dict["winds"]["sos_name"])

response = ndbc_collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Save the station info in a larger global dict
st_list = gather_station_info(obs_loc_df, st_list, "ndbc")

# Now let's specify start and end times
ndbc_collector.start_time = start_time
ndbc_collector.end_time = end_time

ofrs = ndbc_collector.server.offerings
Example #6
# <codecell>

start_time = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date, '%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# Define the SOS collector
collector = NdbcSos()
print(collector.server.identification.title)
collector.variables = data_dict["waves"]["sos_name"]
collector.server.identification.title

# If no start or end date is given in the filter, the most recent observation is returned
collector.filter(bbox=bounding_box,
                 variables=data_dict["waves"]["sos_name"])

response = collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Now let's specify start and end times
collector.start_time = start_time
collector.end_time = end_time

ofrs = collector.server.offerings

# <codecell>

obs_loc_df.head()