Example #1
def parse_date(
    date_str: Union[str, datetime],
    network: Optional[NetworkSchema] = None,
    dayfirst: bool = True,
    yearfirst: bool = False,
    is_utc: bool = False,
    timezone: Optional[timezone] = None,
) -> Optional[datetime]:
    dt_return = None

    if isinstance(date_str, datetime):
        dt_return = date_str

    elif isinstance(date_str, str):
        try:
            dt_return = parse(date_str, dayfirst=dayfirst, yearfirst=yearfirst)
        except ParserError:
            raise ValueError("Invalid date string passed")

    else:
        raise ValueError("Require a datetime or string object to parse date")

    if network:
        tz = network.get_timezone()

        if tz:
            dt_return = make_aware(dt_return, timezone=tz)

    if is_utc:
        dt_return = make_aware(dt_return, timezone=UTC)

    if timezone:
        dt_return = make_aware(dt_return, timezone=timezone)

    return dt_return
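
A minimal usage sketch for the helper above, assuming the imports implied by its signature (datetime, timezone and timedelta from the datetime module, parse and ParserError from dateutil.parser) plus the project-specific make_aware and NetworkSchema; the sample values are illustrative only.

# Parse a plain string; dayfirst=True, so this is 1 February 2021
dt = parse_date("01/02/2021 10:30:00")

# Pass a datetime through and stamp it as UTC
dt_utc = parse_date(datetime(2021, 2, 1, 10, 30), is_utc=True)

# Attach an explicit fixed offset instead (UTC+10)
dt_aest = parse_date("2021-02-01 10:30:00", timezone=timezone(timedelta(hours=10)))
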
Example #2
def stats_factory(
    stats: List[DataQueryResult],
    units: UnitDefinition,
    interval: TimeInterval,
    period: Optional[TimePeriod] = None,
    network: Optional[NetworkSchema] = None,
    timezone: Optional[Union[timezone, str]] = None,
    code: Optional[str] = None,
    region: Optional[str] = None,
    include_group_code: bool = False,
    fueltech_group: Optional[bool] = False,
    group_field: Optional[str] = None,
    data_id: Optional[str] = None,
    localize: Optional[bool] = True,
    include_code: Optional[bool] = True,
) -> Optional[OpennemDataSet]:
    """
    Takes a list of data query results and returns an OpennemDataSet

    @TODO optional groupby field
    @TODO multiple groupings / slight refactor

    """

    if network:
        timezone = network.get_timezone()

    group_codes = list(set([i.group_by for i in stats if i.group_by]))

    stats_grouped = []

    for group_code in group_codes:

        data_grouped: Dict[datetime, Any] = dict()

        for stat in stats:
            if stat.group_by != group_code:
                continue

            if stat.interval not in data_grouped:
                data_grouped[stat.interval] = None

            # if stat.result:
            data_grouped[stat.interval] = stat.result

        data_sorted = OrderedDict(sorted(data_grouped.items()))

        data_value = list(data_sorted.values())

        # Skip null series
        if len([i for i in data_value if i]) == 0:
            continue

        # @TODO possible bring this back
        # Skip zero series
        # if sum([i for i in data_value if i]) == 0:
        # continue

        # Cast trailing nulls
        if not units.name.startswith("temperature") or units.cast_nulls:
            data_value = cast_trailing_nulls(data_value)

        # Find start/end dates
        dates = list(data_grouped.keys())

        if not dates:
            return None

        start = min(dates)
        end = max(dates)

        # should probably make sure these are the same TZ
        if localize:
            if timezone and not is_aware(start):
                start = make_aware(start, timezone)

            if timezone and not is_aware(end):
                end = make_aware(end, timezone)

            if timezone and localize and network and network.offset:
                tz = pytz.FixedOffset(int(network.offset))

                start = start.astimezone(tz)
                end = end.astimezone(tz)

        # Everything needs a timezone even flat dates
        if network and timezone and not is_aware(start):
            start = start.replace(tzinfo=network.get_fixed_offset())

        if network and timezone and not is_aware(end):
            end = end.replace(tzinfo=network.get_fixed_offset())

        # free
        dates = []

        history = OpennemDataHistory(
            start=start,
            last=end,
            interval=interval.interval_human,
            data=data_value,
        )

        data = OpennemData(
            data_type=units.unit_type,
            units=units.unit,
            # interval=interval,
            # period=period,
            history=history,
        )

        if include_code:
            data.code = group_code

        if network:
            data.network = network.code.lower()

        # *sigh* - not the most flexible model
        # @TODO fix this schema and make it more flexible
        if fueltech_group:
            data.fuel_tech = group_code

            data_comps = [
                # @NOTE disable for now since FE doesn't
                # support it
                network.country if network else None,
                network.code.lower() if network else None,
                region.lower()
                if region and (not network or region.lower() != network.code.lower())
                else None,
                "fuel_tech",
                group_code,
                units.unit_type,
            ]

            data.id = ".".join(i for i in data_comps if i)
            # @TODO make this an alias
            data.type = units.unit_type

        if group_field:
            group_fields = []

            # setattr(data, group_field, group_code)

            if network:
                group_fields.append(network.country.lower())
                group_fields.append(network.code.lower())

            if region and (not network or region.lower() != network.code.lower()):
                group_fields.append(region.lower())

            if units.name_alias:
                group_fields.append(units.name_alias)

            elif units.unit_type:
                group_fields.append(units.unit_type)

            if group_code and include_group_code:
                group_fields.append(group_code)
                group_fields.append(group_field)

            data.id = ".".join([f for f in group_fields if f])
            data.type = units.unit_type

        if data_id:
            data.id = data_id

        if not data.id:
            _id_list = []

            # @NOTE disable for now since FE doesn't
            # support it
            # network.country if network else None,

            if network:
                _id_list.append(network.code.lower())

            if region and (not network or region.lower() != network.code.lower()):
                _id_list.append(region.lower())

            if group_code:
                _id_list.append(group_code.lower())

            if units and units.name_alias:
                _id_list.append(units.name_alias)
            elif units and units.name:
                _id_list.append(units.name)

            data.id = ".".join([f for f in _id_list if f])
            data.type = units.unit_type

        if region:
            data.region = region

        stats_grouped.append(data)

    dt_now = datetime.now()

    if network:
        dt_now = dt_now.astimezone(network.get_timezone())

    # @NOTE this should probably be
    # country.network.region
    if not code:
        if network:
            code = network.code

        if region:
            code = region

    stat_set = OpennemDataSet(
        type=units.unit_type,
        data=stats_grouped,
        created_at=dt_now,
        version=get_version(),
    )

    if include_code:
        stat_set.code = code

    if network:
        stat_set.network = network.code

    if region:
        stat_set.region = region

    return stat_set
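
The grouping step at the top of the loop is the core of stats_factory. Below is a condensed, self-contained sketch of that pattern using a plain namedtuple as a stand-in for the project's DataQueryResult: rows are grouped by group_by, each series is ordered by interval, and series with no truthy values are dropped, mirroring the loop above.

from collections import OrderedDict, namedtuple
from datetime import datetime

Row = namedtuple("Row", ["interval", "group_by", "result"])

rows = [
    Row(datetime(2021, 1, 2), "wind", 12.0),
    Row(datetime(2021, 1, 1), "wind", 10.0),
    Row(datetime(2021, 1, 1), "coal", None),
    Row(datetime(2021, 1, 2), "coal", None),
]

series = {}

for group_code in {r.group_by for r in rows}:
    grouped = {r.interval: r.result for r in rows if r.group_by == group_code}
    values = list(OrderedDict(sorted(grouped.items())).values())

    # Skip null series, as stats_factory does
    if not any(values):
        continue

    series[group_code] = values

# series == {"wind": [10.0, 12.0]}
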
Example #3
def parse_date(
    date_str: Union[str, datetime],
    date_format: Optional[str] = None,
    network: Optional[NetworkSchema] = None,
    dayfirst: bool = True,
    yearfirst: bool = False,
    is_utc: bool = False,
    timezone: Optional[pytimezone] = None,
    use_optimized: bool = True,
) -> Optional[datetime]:
    dt_return = None

    if isinstance(date_str, datetime):
        dt_return = date_str

    elif isinstance(date_str, str):
        # avoid strptime if we can
        try:
            dt_return = datetime.fromisoformat(date_str.replace("/", "-"))
        except ValueError:
            pass

        if not dt_return and date_format:
            dt_return = datetime.strptime(date_str, date_format)

        if not dt_return and use_optimized:
            dt_return = optimized_data_parser(date_str)

        if not dt_return:
            try:
                dt_return = parse(date_str,
                                  dayfirst=dayfirst,
                                  yearfirst=yearfirst)
            except ParserError:
                raise ValueError("Invalid date string passed")

    else:
        raise ValueError("Require a datetime or string object to parse date")

    if network:
        tz = network.get_timezone()

        if tz:
            if is_aware(dt_return):
                if hasattr(tz, "localize"):
                    dt_return = tz.localize(dt_return.replace(tzinfo=None))  # type: ignore
                else:
                    dt_return = dt_return.replace(tzinfo=tz)
            else:
                dt_return = make_aware(dt_return, timezone=tz)

    if is_utc:
        tz = pytimezone.utc

        if dt_return and is_aware(dt_return):
            if tz and hasattr(tz, "localize"):
                dt_return = tz.localize(dt_return.replace(tzinfo=None))  # type: ignore
            else:
                dt_return = dt_return.replace(tzinfo=tz)
        else:
            dt_return = make_aware(dt_return, timezone=tz)  # type: ignore

    if timezone:
        dt_return = make_aware(dt_return, timezone=timezone)  # type: ignore

    return dt_return
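
A hedged usage sketch for this optimised variant. It exercises the parsing order in the string branch: the ISO fast path via datetime.fromisoformat, an explicit date_format via strptime, then the project-specific optimized_data_parser (assumed here to return None when it cannot match), and finally the dateutil fallback.

# Slash-separated ISO-like strings hit the fromisoformat fast path
dt_iso = parse_date("2021/02/01 10:30:00")

# An explicit format string goes through strptime
dt_fmt = parse_date("20210201 10:30", date_format="%Y%m%d %H:%M")

# Anything else falls back to dateutil.parser.parse (dayfirst by default)
dt_free = parse_date("1 Feb 2021 10:30am", is_utc=True)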