Example 1
def read_rs_values(self, tag_id: int, start_local: penddt,
                       end_local: penddt) -> List[DataDouble]:
        """
        Выполняет чтение всех точек учета
        :return: массив точек учета
        """
        d = Table('DataDouble')
        utc_start = start_local.astimezone(pytz.UTC).replace(tzinfo=None)
        utc_end = end_local.astimezone(pytz.UTC).replace(tzinfo=None)

        ret_val: List[DataDouble] = []
        q = (Q.from_(d).where(
            (d.IdTagDef == Parameter('%s'))
            & (d.TimeWrite[ToTicks(self._schema, Parameter('%s')
                                   ):ToTicks(self._schema, Parameter('%s'))])).
             orderby(d.TimeWrite, order=Order.asc).select(
                 d.IdTagDef, d.Data,
                 ToDateTime2(self._schema, d.TimeWrite, alias='TimeWrite'),
                 d.QSlim))
        sql = q.get_sql()
        self._logger.debug(
            f'SQL: {sql} PARAMS: tag_id:{tag_id}, utc_start={utc_start}, utc_end={utc_end}'
        )
        try:
            result = self._engine_data.execute(sql, tag_id, utc_start, utc_end)
            for row in result:
                data = DataDouble(
                    tag_id, row['Data'],
                    row['TimeWrite'].replace(tzinfo=pytz.UTC).astimezone(
                        settings.TIME_ZONE), row['QSlim'])
                ret_val.append(data)
        except Exception as e:
            self._logger.error(e)
        return ret_val
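A minimal usage sketch (not from the original source): "storage" stands in for whatever object defines read_rs_values, and the tag id and time window are made up for illustration.

import datetime
import pytz

tz = pytz.timezone("Europe/Moscow")
start_local = tz.localize(datetime.datetime(2021, 1, 1))
end_local = tz.localize(datetime.datetime(2021, 1, 2))
points = storage.read_rs_values(42, start_local, end_local)  # hypothetical instance and tag id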
Example 2
    def get_mean_number_of_hles_days(self, start_year: int, end_year: int, season_to_months: dict, hles_vname: str):
        result = defaultdict(dict)

        cache_dir = Path(self.base_folder) / "cache"
        cache_dir.mkdir(exist_ok=True)
        seasons_str = "-".join(season_to_months)
        cache_file = cache_dir / f"get_mean_number_of_hles_days_{start_year}-{end_year}_m{seasons_str}_{hles_vname}.bin"

        if cache_file.exists():
            return pickle.load(cache_file.open("rb"))

        for season, months in season_to_months.items():

            for y in range(start_year, end_year + 1):
                d1 = Pendulum(y, months[0], 1)
                d2 = d1.add(months=len(months)).subtract(seconds=1)

                if d2.year > end_year:
                    continue

                current_period = Period(d1, d2)
                logger.info("calculating mean for [{}, {}]".format(current_period.start, current_period.end))
                data = self.read_data_for_period(current_period, hles_vname)

                # count HLES days: a day qualifies when its daily mean is at or above the 0.1 threshold
                data_daily = data.resample(t="1D", keep_attrs=True).mean(dim="t")
                result[season][y] = (data_daily.values >= 0.1).sum(axis=0)

        pickle.dump(result, cache_file.open("wb"))
        return result
Example 3
    def get_seasonal_maxima(self, start_year: int, end_year: int, season_to_months: dict, varname_internal: str):

        """
        returns a dictionary {season:{year: field of maxima}}
        :param start_year:
        :param end_year:
        :param season_to_months:

        (order of months in the list of months is important, i.e. for DJF the order should be [12, 1, 2])
        """
        result = defaultdict(dict)

        for season, months in season_to_months.items():

            for y in range(start_year, end_year + 1):
                d1 = Pendulum(y, months[0], 1)
                d2 = d1.add(months=len(months)).subtract(seconds=1)

                if d2.year > end_year:
                    continue

                current_period = Period(d1, d2)
                ("calculating mean for [{}, {}]".format(current_period.start, current_period.end))
                data = self.read_data_for_period(current_period, varname_internal)

                if varname_internal == LAKE_ICE_FRACTION:
                    result[season][y] = np.ma.masked_where(data.values > 1, data.values).max(axis=0)
                else:
                    result[season][y] = data.max(dim="t").values

        return result
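A hedged usage sketch: "mgr" is a hypothetical instance of the class defining get_seasonal_maxima, and the variable name "TT" is made up. Note the DJF month ordering the docstring calls for.

season_to_months = {"DJF": [12, 1, 2], "JJA": [6, 7, 8]}
maxima = mgr.get_seasonal_maxima(1980, 2010, season_to_months, varname_internal="TT")
djf_1990_max_field = maxima["DJF"][1990]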
Example 4
    def get_date_params(stream_slice: Mapping[str, Any], cursor_field: str, end_date: pendulum.datetime = None, time_unit: str = "months"):
        end_date = end_date or pendulum.yesterday()
        start_date = pendulum.parse(stream_slice.get(cursor_field))
        if start_date > pendulum.now():
            return start_date.to_date_string(), start_date.add(days=1).to_date_string()

        end_date = min(end_date, pendulum.parse(stream_slice.get(cursor_field)).add(**{time_unit: 1}))

        # Fix issue #4806, start date should always be lower than end date.
        if start_date.add(days=1).date() >= end_date.date():
            return start_date.add(days=1).to_date_string(), start_date.add(days=2).to_date_string()
        return start_date.add(days=1).to_date_string(), end_date.to_date_string()
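A usage sketch, treating the method as a standalone helper for illustration; the cursor field name is a made-up example.

stream_slice = {"segments.date": "2021-01-01"}
start, end = get_date_params(stream_slice, cursor_field="segments.date", time_unit="months")
# start is one day after the cursor value; end is capped at one month past the cursor value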
Example 5
def execute_webhook(dt: pendulum.datetime, uq: str, title: str) -> None:
    """Execute webhook given a UQ's name, datetime, and title of the
    event page it belongs.

    Args:
        dt (pendulum.datetime): the datetime of the UQ
        uq (str): name of the UQ
        title (str): title of the page that had the UQ on schedule

    """
    payload = {
        "embeds": [{
            "title": f"**{uq}**",
            "description": MESSAGE.format(
                dt.to_day_datetime_string(),
                title,
            ),
        }]
    }
    response = requests.post(ID, json=payload)
    out = {
        'ID': ID,
        'LAST': str(dt),
    }
    with open('webhook.yaml', 'w') as f:
        yaml.safe_dump(out, stream=f)

    config.LOGGER.info(f'Executed webhook @ {config.NOW}: {response}')
    config.LOGGER.info(f'UQ: {uq}, DT: {dt}')
Example 6
    def get_date_params(stream_slice: Mapping[str, Any], cursor_field: str, end_date: pendulum.datetime = None):
        end_date = end_date or pendulum.yesterday()
        start_date = pendulum.parse(stream_slice.get(cursor_field))
        if start_date > pendulum.now():
            return start_date.to_date_string(), start_date.add(days=1).to_date_string()

        end_date = min(end_date, pendulum.parse(stream_slice.get(cursor_field)).add(months=1))
        return start_date.add(days=1).to_date_string(), end_date.to_date_string()
Example 7
def do_diagnostics(host: str, status: int, uptime: str,
                   timestamp: pendulum.datetime) -> str:
    local_time = timestamp.in_timezone("America/Denver").to_datetime_string()

    diagnostics = f"{host} @ {timestamp} ({local_time} MT):\n"
    diagnostics += f"Uptime: {uptime}\n"

    if status == Config.UP:
        diagnostics += "Is now OK\n"

    if status & Config.assertions["SERVER_DOWN"]:
        diagnostics += "SERVER DOWN\n"

    if status & Config.assertions["JETTY_DOWN"]:
        diagnostics += "JETTY DOWN\n"

    if status & Config.assertions["TOMCAT_DOWN"]:
        diagnostics += "TOMCAT DOWN\n"

    if status & Config.assertions["SOLR_DOWN"]:
        diagnostics += "SOLR DOWN\n"

    if status & Config.assertions["LDAP_DOWN"]:
        diagnostics += "LDAP DOWN\n"

    if status & Config.assertions["APACHE_DOWN"]:
        diagnostics += "APACHE DOWN\n"

    if status & Config.assertions["GMN_DOWN"]:
        diagnostics += "GMN DOWN\n"

    if status & Config.assertions["PORTAL_DOWN"]:
        diagnostics += "PORTAL DOWN\n"

    if status & Config.assertions["PACKAGE_DOWN"]:
        diagnostics += "PACKAGE DOWN\n"

    if status & Config.assertions["GATEKEEPER_DOWN"]:
        diagnostics += "GATEKEEPER DOWN\n"

    if status & Config.assertions["AUDIT_DOWN"]:
        diagnostics += "AUDIT DOWN\n"

    if status & Config.assertions["AUTH_DOWN"]:
        diagnostics += "AUTH DOWN\n"

    if status & Config.assertions["SYNC_DOWN"]:
        diagnostics += "SYNC DOWN\n"

    if status & Config.assertions["LOAD_HIGH"]:
        diagnostics += "LOAD HIGH\n"

    if status & Config.assertions["READ_ONLY"]:
        diagnostics += "FILESYSTEM READ ONLY\n"

    return diagnostics
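The chain of flag checks above could also be written table-driven. A sketch under two assumptions not confirmed by the source: Config.assertions is an ordered dict of flag name to bit, and every message is the flag name with underscores replaced (READ_ONLY being the one special case).

def format_down_flags(status: int) -> str:
    # message texts that differ from the flag name itself
    overrides = {"READ_ONLY": "FILESYSTEM READ ONLY"}
    lines = []
    for name, bit in Config.assertions.items():
        if status & bit:
            lines.append(overrides.get(name, name.replace("_", " ")))
    return "".join(line + "\n" for line in lines)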
Example 8
def iter_date(start_date: pendulum.datetime,
              end_date: pendulum.datetime,
              chunk_size=59):
    if end_date < start_date:
        raise ValueError(
            f"start_date {start_date} should not be later than end_date {end_date}")
    while start_date <= end_date:
        new_end_date = min(start_date.add(days=chunk_size), end_date)
        yield start_date, new_end_date
        start_date = new_end_date.add(days=1)
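A quick usage sketch (assuming pendulum 2.x, where pendulum.datetime is a factory function):

import pendulum

start = pendulum.datetime(2021, 1, 1)
end = pendulum.datetime(2021, 6, 30)
for chunk_start, chunk_end in iter_date(start, end):
    # each chunk covers at most 60 calendar days (chunk_size=59 plus the start day)
    print(chunk_start.to_date_string(), chunk_end.to_date_string())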
Example 9
def get_test_ti(session, execution_date: pendulum.datetime, state: str) -> TI:
    # the dagrun gets the requested state; the task instance itself is marked SUCCESS
    dag.create_dagrun(
        run_id='scheduled__{}'.format(execution_date.to_iso8601_string()),
        state=state,
        execution_date=execution_date,
        start_date=pendulum.utcnow(),
        session=session
    )
    ti = TI(task=task, execution_date=execution_date)
    ti.set_state(state=State.SUCCESS, session=session)
    return ti
Example 11
def get_date_params(start_date: str,
                    time_zone=None,
                    range_days: int = None,
                    end_date: pendulum.datetime = None) -> Tuple[str, str]:
    """
    Returns `start_date` and `end_date` for the given stream_slice.
    If (end_date - start_date) is a big date range (>= 1 month), it can take more than 2 hours to process all the records from the given slice.
    After 2 hours next page tokens will be expired, finally resulting in page token expired error
    Currently this method returns `start_date` and `end_date` with 15 days difference.
    """

    end_date = end_date or pendulum.yesterday(tz=time_zone)
    start_date = pendulum.parse(start_date)
    if start_date > pendulum.now():
        return start_date.to_date_string(), start_date.add(
            days=1).to_date_string()
    end_date = min(end_date, start_date.add(days=range_days))

    # Fix issue #4806, start date should always be lower than end date.
    if start_date.add(days=1).date() >= end_date.date():
        return start_date.add(days=1).to_date_string(), start_date.add(
            days=2).to_date_string()
    return start_date.add(days=1).to_date_string(), end_date.to_date_string()
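An illustrative call, with made-up values for the time zone and range:

start, end = get_date_params("2021-01-01", time_zone="UTC", range_days=15)
# start == "2021-01-02"; end is at most 15 days after the parsed start date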
Example 12
    def get_seasonal_means(self, start_year: int, end_year: int, season_to_months: dict, varname_internal: str):

        """
        returns a dictionary {season:{year: mean_field}}
        :param start_year:
        :param end_year:
        :param season_to_months:

        (order of months in the list of months is important, i.e. for DJF the order should be [12, 1, 2])
        """
        result = defaultdict(dict)

        cache_dir = Path(self.base_folder) / "cache"
        cache_dir.mkdir(exist_ok=True)
        seasons_str = "-".join(season_to_months)
        cache_file = cache_dir / f"get_seasonal_means_{start_year}-{end_year}_m{seasons_str}_{varname_internal}.bin"

        if cache_file.exists():
            return pickle.load(cache_file.open("rb"))

        for season, months in season_to_months.items():

            for y in range(start_year, end_year + 1):
                d1 = Pendulum(y, months[0], 1)
                d2 = d1.add(months=len(months)).subtract(seconds=1)

                if d2.year > end_year:
                    continue

                current_period = Period(d1, d2)
                logger.info("calculating mean for [{}, {}]".format(current_period.start, current_period.end))
                data = self.read_data_for_period(current_period, varname_internal)

                result[season][y] = data.mean(dim="t").values

        pickle.dump(result, cache_file.open("wb"))
        return result
Example 13
def add_entry(title: str, datetime: pendulum.datetime) -> None:
    """Adds an entry to the database.

    Args:
        title (str): the entry's title
        datetime (pendulum.datetime): the date of the entry

    """
    datetime = datetime.in_tz('UTC')
    conn = sqlite3.connect('rss.db')
    c = conn.cursor()
    c.execute("""insert into entries values
        (?, ?, ?, ?)""", (title, datetime.year, datetime.month, datetime.day))
    conn.commit()
    conn.close()
Example 14
def add_entry(url: str, datetime: pendulum.datetime):
    """Adds an entry to the database.

    Args:
        url (str): the entry's URL
        datetime (pendulum.datetime): the date of the entry

    Returns:
        None

    """
    datetime = datetime.in_tz('UTC')
    conn = sqlite3.connect('rss.db')
    c = conn.cursor()
    c.execute(
        """insert into entries values
        (?, ?, ?, ?, ?)""",
        (url, datetime.year, datetime.month, datetime.day, datetime.hour))
    conn.commit()
    conn.close()
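A usage sketch; the feed URL is a placeholder, and the entries table is assumed to already exist in rss.db.

import pendulum

add_entry("https://example.com/feed/item-1", pendulum.now())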
Example 15
    def get_mean_number_of_cao_days(self, start_year: int, end_year: int, season_to_months: dict,
                                    temperature_vname: str, min_cao_width_cells=5):
        """
        calculate mean number of CAO days for each season and year {season: {year: field}}
        Calculation following Wheeler et al 2011
        :param self:
        :param start_year:
        :param end_year:
        :param season_to_months:
        :param temperature_vname:
        :param min_cao_width_cells:
        """
        season_to_year_to_std = defaultdict(dict)
        season_to_year_to_data = defaultdict(dict)
        season_to_year_to_rolling_mean = defaultdict(dict)
        season_to_year_to_n_cao_days = defaultdict(dict)

        cache_dir = Path(self.base_folder) / "cache"
        cache_dir.mkdir(exist_ok=True)
        seasons_str = "-".join(season_to_months)
        cache_file = cache_dir / f"get_mean_number_of_cao_days_{start_year}-{end_year}_m{seasons_str}_{temperature_vname}.bin"

        # NOTE: the cache read is short-circuited by "and False", so results are always recomputed
        if cache_file.exists() and False:
            return pickle.load(cache_file.open("rb"))

        for season, months in season_to_months.items():

            for y in range(start_year, end_year + 1):
                d1 = Pendulum(y, months[0], 1)
                d2 = d1.add(months=len(months)).subtract(seconds=1)

                if d2.year > end_year:
                    continue

                current_period = Period(d1, d2)
                logger.info("calculating mean for [{}, {}]".format(current_period.start, current_period.end))
                data = self.read_data_for_period(current_period, temperature_vname)

                # calculate daily means
                data_daily = data.resample(t="1D", keep_attrs=True).mean(dim="t").dropna(dim="t")

                assert isinstance(data_daily, xarray.DataArray)

                # save the data for reuse below
                season_to_year_to_data[season][y] = data_daily.values
                season_to_year_to_std[season][y] = data_daily.std(dim="t").values
                season_to_year_to_rolling_mean[season][y] = data_daily.rolling(center=True, t=31).mean(dim="t").values

        #  Calculate climatological std and rolling mean
        season_to_std_clim = {
            s: np.mean([f for f in y_to_std.values()], axis=0) for s, y_to_std in season_to_year_to_std.items()
        }

        season_to_rolling_clim = {
            s: np.mean([f for f in y_to_rolling.values()], axis=0) for s, y_to_rolling in season_to_year_to_rolling_mean.items()
        }

        #  calculate number of CAO days
        for season, std_clim in season_to_std_clim.items():
            for y in range(start_year, end_year + 1):

                t31_rolling = season_to_rolling_clim[season]

                cao_suspect = (np.array(season_to_year_to_data[season][y]) <= t31_rolling - 1.5 * std_clim) & (std_clim > 2)

                n_cao_days = cao_suspect.sum(axis=0)

                season_to_year_to_n_cao_days[season][y] = n_cao_days

        pickle.dump(season_to_year_to_n_cao_days, cache_file.open("wb"))
        return season_to_year_to_n_cao_days
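In words, the criterion implemented above: a grid cell has a CAO day when its daily-mean temperature falls at least 1.5 climatological standard deviations below the 31-day climatological rolling mean, and only where that standard deviation exceeds 2 degrees:

cao_day[d] = (T_daily[d] <= T_clim_31day_rolling[d] - 1.5 * sigma_clim) and (sigma_clim > 2)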
Example 16
def _get_period_for_ym(year, month):
    start = Pendulum(year, month, 1)
    end = start.add(months=1).subtract(microseconds=1)
    return Period(start, end)
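A usage sketch, assuming pendulum 1.x where Pendulum and Period are importable:

p = _get_period_for_ym(2020, 2)
# p.start -> 2020-02-01 00:00:00, p.end -> 2020-02-29 23:59:59.999999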