Example #1
def trading_days(
        time_end: datetime, time_dur: str=None, time_start: datetime=None):
    """determine start and end trading days covering time_dur or time_start.
       So far use NYSE trading days calendar for all exchanges.
    """
    xchg_tz = time_end.tzinfo
    end_idx = bisect.bisect_left(NYSE_CAL, time_end.replace(tzinfo=None))
    if time_start is not None:
        # ignore time_dur, use time_start, time_end as boundary.
        start_idx = bisect.bisect_left(
            NYSE_CAL, time_start.replace(tzinfo=None))
        trading_days = NYSE_CAL[start_idx:end_idx]
    else:
        tdur = timedur_to_timedelta(time_dur)
        # If tdur's remaining h/m/s component is > 0, round up by one more day.
        n_trading_days = tdur.days
        if xchg_tz.normalize(
                time_end - relativedelta(seconds=tdur.seconds)
        ) < tzmin(time_end, tz=xchg_tz):
            n_trading_days += 1
        # time_dur in days, and time_end is not beginning of day.
        if time_end.time() != datetime.min.time():
            n_trading_days += 1
        # Slicing from trading day calendar.
        trading_days = NYSE_CAL[end_idx-n_trading_days:end_idx]
    return trading_days
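A minimal, self-contained sketch of the calendar-slicing idea above, with a toy sorted list standing in for NYSE_CAL (which, like timedur_to_timedelta and tzmin, is assumed to be defined elsewhere in the original module):

import bisect
from datetime import datetime

# Toy stand-in for NYSE_CAL: a sorted list of naive midnight datetimes.
TOY_CAL = [datetime(2024, 1, d) for d in (2, 3, 4, 5, 8, 9, 10)]

def toy_trading_days(time_start: datetime, time_end: datetime) -> list:
    """Slice the calendar to the trading days in [time_start, time_end)."""
    start_idx = bisect.bisect_left(TOY_CAL, time_start)
    end_idx = bisect.bisect_left(TOY_CAL, time_end)
    return TOY_CAL[start_idx:end_idx]

print(toy_trading_days(datetime(2024, 1, 3), datetime(2024, 1, 9)))
# [datetime(2024, 1, 3, 0, 0), datetime(2024, 1, 4, 0, 0),
#  datetime(2024, 1, 5, 0, 0), datetime(2024, 1, 8, 0, 0)]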
Example #2
def next_502(now: datetime=None):
    if now is None:
        now = datetime.utcnow()

    now = now.replace(tzinfo=timezone('UTC')) if now.utcoffset() is None else now
    now_replaced = now.replace(second=0, microsecond=0)

    candidates = []  # dt, tz, delta

    for tz in ['Europe/Berlin', 'US/Pacific']:
        for day_offset in [-1, 0, 1]:
            for hour in [5, 17]:
                candidate = now_replaced.astimezone(timezone(tz)).replace(hour=hour, minute=2) \
                            + timedelta(days=day_offset)

                if candidate >= now_replaced:
                    candidates.append((candidate, tz, candidate - now))

    # min() over the list itself; unpacking with * would fail for a single candidate
    dt, tz, delta = min(candidates, key=lambda x: x[2])

    if now.hour in (5, 17) and now.minute == 2:
        return "Holy shit, it's 502."
    else:
        delta_str = str(delta)

        if time_start_regex.match(delta_str) is None:
            delta_str = '0' + delta_str

        return '{tz} 502: T-{delta}'.format(tz=tz, delta=delta_str)
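Note that str(timedelta) renders single-digit hours without a leading zero ('5:02:00'), which is what the time_start_regex padding above appears to compensate for. A standalone sketch under that assumption (two_digit_hour is a hypothetical stand-in for time_start_regex):

import re
from datetime import timedelta

two_digit_hour = re.compile(r'^\d{2}:')

for delta in (timedelta(hours=5, minutes=2), timedelta(hours=12)):
    delta_str = str(delta)
    if two_digit_hour.match(delta_str) is None:
        delta_str = '0' + delta_str
    print(delta_str)  # 05:02:00, then 12:00:00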
Example #3
    def __init__(self, id: str, name: str, created: datetime, modified: datetime, md5: str, size: int, status: Enum):
        self.id = id
        self.name = name
        self.created = created.replace(tzinfo=None)
        self.modified = modified.replace(tzinfo=None)
        self.md5 = md5
        self.size = size
        self.status = status
Example #4
    def get(self, limit: int = -1,
            starttime: datetime=None, endtime: datetime=None) -> List[Event]:
        """Returns events sorted in descending order by timestamp"""
        # Resolution is rounded down since not all datastores like microsecond precision
        if starttime:
            starttime = starttime.replace(microsecond=1000 * int(starttime.microsecond / 1000))
        if endtime:
            # Rounding up here in order to ensure events aren't missed
            # second_offset and the microseconds modulo are required since replace()
            # only accepts microseconds up to 999999 (it doesn't handle overflow)
            milliseconds = 1 + int(endtime.microsecond / 1000)
            second_offset = int(milliseconds / 1000)  # usually 0, rarely 1
            microseconds = (1000 * milliseconds) % 1000000  # will likely just be 1000 * milliseconds, if it overflows it would become zero
            endtime = endtime.replace(microsecond=microseconds) + timedelta(seconds=second_offset)

        return self.ds.storage_strategy.get_events(self.bucket_id, limit, starttime, endtime)
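A standard-library-only sketch of the two roundings used above: floor to the millisecond for starttime, ceil (with second carry) for endtime:

from datetime import datetime, timedelta

def floor_ms(dt: datetime) -> datetime:
    # drop sub-millisecond precision (round down)
    return dt.replace(microsecond=1000 * (dt.microsecond // 1000))

def ceil_ms(dt: datetime) -> datetime:
    # round up to the next millisecond; the timedelta handles the second
    # overflow that replace() alone cannot
    milliseconds = 1 + dt.microsecond // 1000
    second_offset = milliseconds // 1000  # usually 0, rarely 1
    microseconds = (1000 * milliseconds) % 1000000
    return dt.replace(microsecond=microseconds) + timedelta(seconds=second_offset)

dt = datetime(2024, 1, 1, 12, 0, 0, 999500)
print(floor_ms(dt))  # 2024-01-01 12:00:00.999000
print(ceil_ms(dt))   # 2024-01-01 12:00:01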
Example #5
def daily_stat(day: datetime, collection) -> dict:
    """
    create day statistics based on hourly data
    currently put in stat data for business hours
    :param day: datetime, date for aggregation hour statistic
    :param collection:
    :return: {'buy': 0.41, 'sell': 0.41, 'sell_rates': [0.41, 0.41], 'buy_rates': [0.41],
            'sell_val': 1030000, 'buy_val': 375000, 'source': 'd_int_stat',
            'time': datetime.datetime(2018, 6, 17, 0, 0),
            'currency': 'RUB'}
    """
    business_time = (day.replace(hour=9, minute=0), day.replace(hour=17, minute=59))
    full_time = (day.replace(hour=0, minute=0), day.replace(hour=23, minute=59))
    time = business_time
    source = 'd_int_stat'
    pipeline = [{'$match': {'source': 'h_int_stat',
                            '$and': [{'time': {'$gte': time[0]}}, {'time': {'$lt': time[1]}}]}},
                {'$group': {'_id': None, 'sell': {'$avg': '$sell'}, 'sell_rates': {'$push':  '$sell'},
                                         'buy': {'$avg': '$buy'}, 'buy_rates': {'$push':  '$buy'},
                            'sell_val': {'$sum': '$sell_val'}, 'buy_val': {'$sum': '$buy_val'}}},
                {'$project': {'_id': False, 'buy': '$buy', 'sell': '$sell', 'sell_rates': '$sell_rates',
                              'buy_rates': '$buy_rates', 'sell_val': '$sell_val', 'buy_val': '$buy_val'}}]
    command_cursor = collection.aggregate(pipeline)

    def form_output_doc(document):
        if collection.name == 'RUB':
            round_dig = 4
        else:
            round_dig = 2
        logger.debug('document= {}'.format(document))
        if not (any(document['sell_rates']) and any(document['buy_rates'])):
            logger.warning('rates missing in stat; len(document[\'sell_rates\'])= {}, '
                           'len(document[\'buy_rates\'])= {}'.format(len(document['sell_rates']),
                                                                     len(document['buy_rates'])))
            return {}
        document['sell'] = round(document['sell'], round_dig)
        document['buy'] = round(document['buy'], round_dig)
        document['source'] = source
        document['time'] = time[1].replace(hour=0, minute=0)
        document['currency'] = collection.name
        return document
    # actually one document
    try:
        result_doc = [form_output_doc(doc) for doc in command_cursor][0]
    except (IndexError, TypeError):  # a bare 'or' here would only catch IndexError
        return {}
    logger.debug('result_doc= {}'.format(result_doc))
    return result_doc
Example #6
    def get_booked(self, date: datetime, is_after: bool, is_now: bool = False):
        order_func = None
        compare_op = None
        if is_after:
            compare_op = operator.ge
            if not is_now:
                date += timedelta(days=1)
            order_func = stub_func
        else:
            compare_op = operator.lt
            order_func = desc

        date = date.replace(hour=0, minute=0, second=0, microsecond=0)
        start_date = dataentities.BookedRange.start_date
        next_date = session.query(dataentities.BookedRange) \
            .filter(dataentities.BookedRange.start_date > datetime.now()) \
            .filter(compare_op(dataentities.BookedRange.start_date, date)) \
            .order_by(order_func(dataentities.BookedRange.start_date)) \
            .limit(1).first()

        if not next_date:
            return None

        return session.query(dataentities.BookedRange) \
            .filter(extract('day', start_date) == next_date.start_date.day) \
            .filter(extract('month', start_date) == next_date.start_date.month) \
            .filter(extract('year', start_date) == next_date.start_date.year) \
            .order_by(dataentities.BookedRange.start_date).all()
Example #7
from typing import Optional, Tuple

def apply_shift(start: datetime, end: datetime, delta: Optional[timedelta],
                abs_start: Optional[int]) -> Optional[Tuple[datetime, datetime]]:

    if delta:
        pass  # nothing to be done

    elif abs_start:
        # delta must be calculated
        same_day_start = start.replace(hour=abs(abs_start), minute=0, second=0, microsecond=0)
        is_start_before_same_day_start = (start - same_day_start).total_seconds() <= 0
        shift_earlier = abs_start < 0

        if shift_earlier:
            if is_start_before_same_day_start:
                prev_day_start = same_day_start - timedelta(days=1)
                delta = prev_day_start - start
            else:
                delta = same_day_start - start

        else:  # shift to later point in time
            if is_start_before_same_day_start:
                delta = same_day_start - start
            else:
                next_day_start = same_day_start + timedelta(days=1)
                delta = next_day_start - start  # positive shift to the next day's start

    else:
        return None  # should not happen

    return start + delta, end + delta
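For example, with the corrected logic above, shifting a window back to the same day's 05:00 (a sketch that assumes apply_shift as defined here):

from datetime import datetime

start = datetime(2024, 3, 10, 8, 30)
end = datetime(2024, 3, 10, 9, 30)

# abs_start=-5 requests a shift to the earlier 05:00; start is already past
# 05:00, so the window moves back 3.5 hours within the same day.
print(apply_shift(start, end, None, -5))
# (datetime(2024, 3, 10, 5, 0), datetime(2024, 3, 10, 6, 0))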
Example #8
    def __init__(self, id_client: Identifier, z_0: bytes, r: bytes, t_0: datetime, id_server: Identifier,
                 l: int=2 ** 16):
        """
        Create a ksi client's certificate.
        :param id_client: The Identifier of the client
        :type id_client: Identifier
        :param z_0: The last computed hash, it is _not_ part of the hash tree taken from ksi.Keys.keys[0].hash
        :type z_0: bytes
        :param r: The root of the hash tree taken from ksi.Keys.hash_tree_root.hash
        :type r: bytes
        :param t_0: The time at which the certificate becomes valid
        :type t_0: datetime
        :param id_server: The Identifier of the server
        :type id_server: Identifier
        :return:
        """
        assert isinstance(id_client, Identifier) and isinstance(id_server, Identifier)
        assert isinstance(z_0, bytes) and isinstance(r, bytes)
        assert isinstance(t_0, datetime)
        assert isinstance(l, int) and is_power_of_2(l)

        self.id_client = id_client
        self.z_0 = z_0
        self.r = r
        self.t_0 = t_0.replace(microsecond=0)
        self.id_server = id_server
        self.l = l
Example #9
def datetime_to_utc(dt: datetime):
    """Convert a given datetime to UTC time for timezone compatibility"""
    if dt.strftime(DiscordClient.DATE_FORMAT) == "1900-01-01":
        dt = datetime.combine(datetime.now().date(), dt.time())
    to_zone = tz.tzutc()
    from_zone = tz.tzlocal()
    local = dt.replace(tzinfo=from_zone)
    return local.astimezone(to_zone)
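The conversion core as a standalone sketch (the DATE_FORMAT check above only guards against time-only inputs, which parse with a default 1900-01-01 date):

from datetime import datetime
from dateutil import tz

local_dt = datetime(2024, 6, 1, 12, 0)         # naive local wall time
aware = local_dt.replace(tzinfo=tz.tzlocal())  # attach the local zone
print(aware.astimezone(tz.tzutc()))            # the same instant in UTC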
Example #10
def start_and_end_of_the_month(dt: datetime):
    """Get first of month and first of next month for a given datetime.
    """
    start = dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    if start.month == 12:
        end = start.replace(year=start.year + 1, month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
    else:
        end = start.replace(month=start.month + 1)
    return start, end
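A quick check of the December rollover:

from datetime import datetime

print(start_and_end_of_the_month(datetime(2024, 12, 15, 9, 30)))
# (datetime(2024, 12, 1, 0, 0), datetime(2025, 1, 1, 0, 0))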
Example #11
def convertor_finance_ua(id: int, current_date: datetime, data) -> dict:
    out_dic = {conv_dict_orig[key]: data[key][id] for key in conv_dict_orig}
    out_dic['operation'] = conv_operation_orig[out_dic['operation']]
    # out_dic['id'] = id
    out_dic['location'] = location_dict_orig.get(out_dic['location'], 'None')
    out_dic['source'] = 'f'
    out_dic['rate'] = float(out_dic['rate'])
    time = out_dic['time'].split(':')
    out_dic['time'] = current_date.replace(hour=int(time[0]), minute=int(time[1]), second=0, microsecond=0)
    return out_dic
Example #12
def convertor_finance_ua(id: int, current_date: datetime, data) -> dict:
    out_dic = {conv_dict_orig[key]: data[key][id] for key in conv_dict_orig}
    out_dic['operation'] = conv_operation_orig[out_dic['operation']]
    out_dic['bid'] = 'f' + str(out_dic['bid'])
    out_dic['location'] = location_dict_orig.get(out_dic['location'], 'None')
    out_dic['source'] = 'f'
    out_dic['rate'] = float(out_dic['rate'])
    out_dic['amount'] = int(''.join(filter(lambda x: x.isdigit(), out_dic['amount'])))
    time = out_dic['time'].split(':')
    out_dic['time'] = current_date.replace(hour=int(time[0]), minute=int(time[1]), second=0, microsecond=0)
    return out_dic
Example #13
def daily_stat(day: datetime, collection) -> dict:
    """
    create day statistics based on hourly data
    currently put in stat data for business hours
    :param day: datetime, date for aggregation hour statistic
    :param collection:
    :return:
    """
    business_time = (day.replace(hour=9, minute=0), day.replace(hour=17, minute=59))
    full_time = (day.replace(hour=0, minute=0), day.replace(hour=23, minute=59))
    time = business_time
    source = 'd_int_stat'
    pipeline = [{'$match': {'source': 'h_int_stat',
                            '$and': [{'time': {'$gte': time[0]}}, {'time': {'$lt': time[1]}}]}},
                {'$group': {'_id': None, 'sell': {'$avg': '$sell'}, 'sell_rates': {'$push':  '$sell'},
                                         'buy': {'$avg': '$buy'}, 'buy_rates': {'$push':  '$buy'}}},
                {'$project': {'_id': False, 'buy': '$buy', 'sell': '$sell', 'sell_rates': '$sell_rates',
                              'buy_rates': '$buy_rates'}}]
    command_cursor = collection.aggregate(pipeline)

    def form_output_doc(document):
        if collection.name == 'RUB':
            round_dig = 4
        else:
            round_dig = 2
        document['sell'] = round(document['sell'], round_dig)
        document['buy'] = round(document['buy'], round_dig)
        document['source'] = source
        document['time'] = time[1].replace(hour=0, minute=0)
        return document
    # actually one document
    try:
        result_doc = [form_output_doc(doc) for doc in command_cursor][0]
    except IndexError:
        return {}
    document = dict(result_doc)
    time = document.pop('time')
    collection.update_one({'time': time}, {'$set': document}, upsert=True)
    return result_doc
Example #14
def add_months(dt: datetime, months: int) -> datetime:
    assert(months >= 0)
    # It's fine that the max day in Feb is 28 for leap years.
    MAX_DAY_FOR_MONTH = {1: 31, 2: 28, 3: 31, 4: 30, 5: 31, 6: 30,
                         7: 31, 8: 31, 9: 30, 10: 31, 11: 30, 12: 31}
    year = dt.year
    month = dt.month + months
    while month > 12:
        year += 1
        month -= 12
    day = min(dt.day, MAX_DAY_FOR_MONTH[month])
    # datetimes don't support leap seconds, so don't need to worry about those
    return dt.replace(year=year, month=month, day=day)
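Usage, showing the day clamping (as the comment concedes, Feb 29 also clamps to 28 in leap years):

from datetime import datetime

print(add_months(datetime(2024, 1, 31), 1))   # datetime(2024, 2, 28, 0, 0)
print(add_months(datetime(2024, 11, 15), 3))  # datetime(2025, 2, 15, 0, 0), crossing the year boundary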
Example #15
def convertor_minfin(dic: dict, current_date: datetime, id: int) -> dict:
    dic['bid'] = dic['id']
    del dic['id']
    # dic['id'] = id
    dic['currency'] = dic['currency'].upper()
    dic['location'] = location_orig
    dic['source'] = 'm'
    dic['session'] = False
    time = dic['time'].split(':')
    dic['time'] = current_date.replace(hour=int(time[0]), minute=int(time[1]), second=0, microsecond=0)
    dic['rate'] = float(dic['rate'].replace(',', '.'))
    if dic['time'] > current_date:
        dic['time'] = dic['time'] - timedelta(days=1)
    return dic
Example #16
def convertor_minfin(dic: dict, current_date: datetime, id: int) -> dict:
    dic['bid'] = dic['id']
    del dic['id']
    # dic['id'] = id
    dic['currency'] = dic['currency'].upper()
    dic['location'] = location_orig
    dic['source'] = 'm'
    dic['session'] = False
    time = dic['time'].split(':')
    dic['time'] = current_date.replace(hour=int(time[0]), minute=int(time[1]), second=0, microsecond=0)
    dic['rate'] = float(dic['rate'])
    dic['amount'] = int(''.join(filter(lambda x: x.isdigit(), dic['amount'])))
    # TODO: remove the commented lines below
    # if dic['time'] > current_date:
    #     dic['time'] = dic['time'] - timedelta(days=1)
    return dic
Example #17
def human_timestamp(__timestamp: datetime.datetime) -> str:
    """Format a relative time.

    Args:
        __timestamp: Event to generate relative timestamp against
    Returns:
        Human readable date and time offset
    """
    numstr = '. a two three four five six seven eight nine ten'.split()

    matches = [
        60 * 60 * 24 * 365,
        60 * 60 * 24 * 28,
        60 * 60 * 24 * 7,
        60 * 60 * 24,
        60 * 60,
        60,
        1,
    ]
    match_names = ['year', 'month', 'week', 'day', 'hour', 'minute', 'second']

    if __timestamp.tzinfo is None:
        __timestamp = __timestamp.replace(tzinfo=datetime.timezone.utc)

    now = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)

    delta = int((now - __timestamp).total_seconds())
    for scale in matches:
        i = delta // scale
        if i:
            name = match_names[matches.index(scale)]
            break
    else:
        i = 0  # Too small

    if i == 0:
        result = 'right now'
    elif i == 1 and name in ('year', 'month', 'week'):
        result = 'last {}'.format(name)
    elif i == 1 and name == 'day':
        result = 'yesterday'
    elif i == 1 and name == 'hour':
        result = 'about an hour ago'
    else:
        result = 'about {} {}{} ago'.format(i if i > 10 else numstr[i], name,
                                            's' if i > 1 else '')
    return result
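Sample outputs, relative to the current UTC moment:

import datetime

now = datetime.datetime.now(datetime.timezone.utc)
print(human_timestamp(now))                                  # 'right now'
print(human_timestamp(now - datetime.timedelta(minutes=5)))  # 'about five minutes ago'
print(human_timestamp(now - datetime.timedelta(days=1)))     # 'yesterday'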
Example #18
def query_4(date: datetime, region: str):
    date2 = str(date.replace(year=date.year + 1))
    date = str(date)
    query = """
    MATCH (c:Customer)<-[:ORDERED_BY]-(o:Order)-[:CONTAINS]->(l:lineItem)-[:SUPPLIED_BY]->(s:Supplier)
    WHERE
            o.O_ORDERDATE >= '""" + date + """' AND
            o.O_ORDERDATE < '""" + date2 + """' AND
            c.REGION_NAME = '""" + region + """' AND
            c.NATION_NAME = s.NATION_NAME
    RETURN
        s.NATION_NAME as n_name,
        sum((l.L_EXTENDEDPRICE)*(1-l.L_DISCOUNT)) as revenue
    ORDER BY
        revenue DESC
    """
    result = graph.cypher.execute(query)
    return result
Example #19
def get_synchronization_layer_1_data(logger=None):
	try:
		year = str(Datetime.now().year)
		months = _Utilities.get_current_fiscal_declaration_period(_Constants.TWO_MONTHS_PERIOD)
		begin_date = Datetime(int(year), int(months[0]), 1)
		end_date = Datetime(int(year), int(months[-1]), _Utilities.get_month_days(months[-1]))
		end_date = end_date.replace(hour=23, minute=59,second=59)
		sl1_data = {
			'year' : year,
			'months' : months,
			'begin_date' : begin_date,
			'end_date' : end_date
		}#End of sl1_data
		logger.info(LOG_INDENT + 'Year:      ' + str(sl1_data['year']))
		logger.info(LOG_INDENT + 'Months:    ' + str(sl1_data['months']))
		logger.info(LOG_INDENT + 'From ' + str(sl1_data['begin_date']) + ' to ' + str(sl1_data['end_date']))
		return sl1_data
	except Exception as e:
		logger.critical(str(e))
		already_handled_exception = Already_Handled_Exception(str(e))
		raise already_handled_exception
Example #20
        def calc_coupon_dates(open_date: datetime, close_date: datetime, period=0.5) -> set:
            """

            :param open_date: datetime: should be min date in auction dates
            :param close_date: datetime
            :param period: curently only 0.5
            :return: set of datetime oblects
            """
            pay_dates = set()
            pay_dates.add(close_date)
            if (close_date - open_date).days > 170:
                while close_date > open_date:
                    # calc month and year in half year
                    if period == 0.5:
                        pay_month = (close_date.month - 6) % 12
                    elif period == 1:
                        pay_month = (close_date.month - 12) % 12
                    if pay_month == 0:
                        pay_month = 1
                    if pay_month >= close_date.month:
                        pay_year = close_date.year - 1
                    else:
                        pay_year = close_date.year
                    # --- To set exists date ----
                    while True:
                        try:
                            pay_date = close_date.replace(month=pay_month, year=pay_year)
                        except ValueError:
                            close_date += timedelta(days=1)
                            continue
                        break
                    # --------------------------
                    if (pay_date - open_date).days > 150:
                        pay_dates.add(pay_date)
                        close_date = pay_date
                    else:
                        return pay_dates
            else:
                return pay_dates
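Assuming the helper is lifted to module level, a semi-annual example (traced against the logic above):

from datetime import datetime

dates = calc_coupon_dates(datetime(2020, 1, 15), datetime(2021, 1, 15))
print(sorted(dates))
# [datetime(2020, 7, 15, 0, 0), datetime(2021, 1, 15, 0, 0)]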
Example #21
    def _increment_field_value(self, dateval: datetime, fieldnum: int) -> Tuple[datetime, int]:
        """
        Increment the designated field and reset all less significant fields to their minimum
        values.

        :return: a tuple containing the new date, and the number of the field that was actually
            incremented

        """
        values = {}
        i = 0
        while i < len(self.fields):
            field = self.fields[i]
            if not field.real:
                if i == fieldnum:
                    fieldnum -= 1
                    i -= 1
                else:
                    i += 1
                continue

            if i < fieldnum:
                values[field.name] = field.get_value(dateval)
                i += 1
            elif i > fieldnum:
                values[field.name] = field.get_min(dateval)
                i += 1
            else:
                value = field.get_value(dateval)
                maxval = field.get_max(dateval)
                if value == maxval:
                    fieldnum -= 1
                    i -= 1
                else:
                    values[field.name] = value + 1
                    i += 1

        difference = datetime(**values) - dateval.replace(tzinfo=None)
        return self.timezone.normalize(dateval + difference), fieldnum
Example #22
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user):
    now = Datetime.now(pytz.utc)
    report_context = {}
    future_funcs = {}
    report_context['future_funcs'] = future_funcs

    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)

        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue

            try:
                rate_start = props['start_date']
            except KeyError:
                raise BadRequest(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")

            if rate_start is not None:
                rate_start = rate_start.replace(tzinfo=pytz.utc)

            lib = importlib.import_module('chellow.' + contract.name)

            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}

        start_date = scenario_props['scenario_start']
        if start_date is None:
            start_date = Datetime(
                now.year, now.month, 1, tzinfo=pytz.utc)
        else:
            start_date = start_date.replace(tzinfo=pytz.utc)

        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None

        if kwh_start is None:
            kwh_start = chellow.computer.forecast_date()
        else:
            kwh_start = kwh_start.replace(tzinfo=pytz.utc)

        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            Era.start_date <= finish_date,
            or_(
                Era.finish_date == null(),
                Era.finish_date >= start_date)).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.filter(Era.supply == supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)

        rf = open(running_name, "wb")
        f = odswriter.writer(rf, '1.1')
        group_tab = f.new_sheet("Site Level")
        sup_tab = f.new_sheet("Supply Level")
        changes = defaultdict(list, {})

        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''

        for row in csv.reader(io.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise BadRequest(
                    "Can't interpret the row " + str(row) + " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = Datetime.strptime(date_str.strip(), "%Y-%m-%d").replace(
                tzinfo=pytz.utc)
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})

        sup_header_titles = [
            'imp-mpan-core', 'exp-mpan-core', 'metering-type', 'source',
            'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'site-id', 'site-name', 'associated-site-ids', 'month',
            'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh',
            'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh',
            'import-net-gbp', 'export-net-gbp', 'import-gen-gbp',
            'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp',
            'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp',
            'billed-import-net-kwh', 'billed-import-net-gbp']

        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr) \
                .join(Era.supply).join(Source).filter(
                    Era.start_date <= start_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles', None)
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        sup_tab.writerow(
            sup_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] +
            [None] + ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        group_tab.writerow(site_header_titles + summary_titles)

        sites = sites.all()
        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]
                site_associates = set()
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                for group in site.groups(
                        sess, month_start, month_finish, False):
                    site_associates.update(
                        set(
                            s.code for s in group.sites
                            if s.code != site.code))
                    for cand_supply in group.supplies:
                        site_sources.add(cand_supply.source.code)
                        if cand_supply.generator_type is not None:
                            site_gen_types.add(cand_supply.generator_type.code)
                        for cand_era in sess.query(Era).filter(
                                Era.supply == cand_supply,
                                Era.start_date <= group.finish_date, or_(
                                    Era.finish_date == null(),
                                    Era.finish_date >= group.start_date)). \
                                options(
                                    joinedload(Era.channels),
                                    joinedload(Era.pc),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):
                            if site_category != 'hh':
                                if cand_era.pc.code == '00':
                                    site_category = 'hh'
                                elif site_category != 'amr':
                                    if len(cand_era.channels) > 0:
                                        site_category = 'amr'
                                    elif site_category != 'nhh':
                                        if cand_era.mtc.meter_type.code \
                                                not in ['UM', 'PH']:
                                            site_category = 'nhh'
                                        else:
                                            site_category = 'unmetered'

                for group in site.groups(
                        sess, month_start, month_finish, True):
                    calcs = []
                    deltas = defaultdict(int)
                    group_associates = set(
                        s.code for s in group.sites if s.code != site.code)
                    for supply in group.supplies:
                        if supply_id is not None and supply.id != supply_id:
                            continue
                        for era in sess.query(Era).join(Supply) \
                                .join(Source).filter(
                                    Era.supply == supply,
                                    Era.start_date <= group.finish_date, or_(
                                        Era.finish_date == null(),
                                        Era.finish_date >= group.start_date)) \
                                .options(
                                    joinedload(Era.ssc),
                                    joinedload(Era.hhdc_contract),
                                    joinedload(Era.mop_contract),
                                    joinedload(Era.imp_supplier_contract),
                                    joinedload(Era.exp_supplier_contract),
                                    joinedload(Era.channels),
                                    joinedload(Era.imp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.exp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.cop),
                                    joinedload(Era.supply).joinedload(
                                        Supply.dno_contract),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):

                            if era.start_date > group.start_date:
                                ss_start = era.start_date
                            else:
                                ss_start = group.start_date

                            if hh_before(era.finish_date, group.finish_date):
                                ss_finish = era.finish_date
                            else:
                                ss_finish = group.finish_date

                            if era.imp_mpan_core is None:
                                imp_ss = None
                            else:
                                imp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start, era,
                                    True, None, report_context)

                            if era.exp_mpan_core is None:
                                exp_ss = None
                                measurement_type = imp_ss.measurement_type
                            else:
                                exp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start, era,
                                    False, None, report_context)
                                measurement_type = exp_ss.measurement_type

                            order = meter_order[measurement_type]
                            calcs.append(
                                (
                                    order, era.imp_mpan_core,
                                    era.exp_mpan_core, imp_ss, exp_ss))

                            if imp_ss is not None and len(era.channels) == 0:
                                for hh in imp_ss.hh_data:
                                    deltas[hh['start-date']] += hh['msp-kwh']

                    imp_net_delts = defaultdict(int)
                    exp_net_delts = defaultdict(int)
                    imp_gen_delts = defaultdict(int)

                    displaced_era = chellow.computer.displaced_era(
                        sess, group, group.start_date, group.finish_date)
                    site_ds = chellow.computer.SiteSource(
                        sess, site, group.start_date, group.finish_date,
                        kwh_start, None, report_context, displaced_era)

                    for hh in site_ds.hh_data:
                        try:
                            delta = deltas[hh['start-date']]
                            hh['import-net-kwh'] += delta
                            hh['used-kwh'] += delta
                        except KeyError:
                            pass

                    for hh in site_ds.hh_data:
                        for change in site_changes:
                            if change['type'] == 'used' and \
                                    change['date'] <= hh['start-date']:
                                used = change['multiplier'] * hh['used-kwh']
                                exp_net = max(
                                    0, hh['import-gen-kwh'] -
                                    hh['export-gen-kwh'] -
                                    used)
                                exp_net_delt = exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += exp_net_delt
                                displaced = hh['import-gen-kwh'] - \
                                    hh['export-gen-kwh'] - exp_net
                                imp_net = used - displaced
                                imp_delt = imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += imp_delt

                                hh['import-net-kwh'] = imp_net
                                hh['used-kwh'] = used
                                hh['export-net-kwh'] = exp_net
                                hh['msp-kwh'] = displaced
                            elif change['type'] == 'generated' and \
                                    change['date'] <= hh['start-date']:
                                imp_gen = change['multiplier'] * \
                                    hh['import-gen-kwh']
                                imp_gen_delt = imp_gen - hh['import-gen-kwh']
                                exp_net = max(
                                    0, imp_gen - hh['export-gen-kwh'] -
                                    hh['used-kwh'])
                                exp_net_delt = exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += exp_net_delt

                                displaced = imp_gen - hh['export-gen-kwh'] - \
                                    exp_net

                                imp_net = hh['used-kwh'] - displaced
                                imp_net_delt = imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += imp_net_delt

                                imp_gen_delts[hh['start-date']] += imp_gen_delt

                                hh['import-net-kwh'] = imp_net
                                hh['export-net-kwh'] = exp_net
                                hh['import-gen-kwh'] = imp_gen
                                hh['msp-kwh'] = displaced

                    if displaced_era is not None and supply_id is None:
                        month_data = {}
                        for sname in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'msp', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for xname in ('kwh', 'gbp'):
                                month_data[sname + '-' + xname] = 0

                        month_data['used-kwh'] = \
                            month_data['displaced-kwh'] = \
                            sum(hh['msp-kwh'] for hh in site_ds.hh_data)

                        disp_supplier_contract = \
                            displaced_era.imp_supplier_contract
                        disp_vb_function = chellow.computer.contract_func(
                            report_context, disp_supplier_contract,
                            'displaced_virtual_bill', None)
                        if disp_vb_function is None:
                            raise BadRequest(
                                "The supplier contract " +
                                disp_supplier_contract.name +
                                " doesn't have the displaced_virtual_bill() "
                                "function.")
                        disp_vb_function(site_ds)
                        disp_supplier_bill = site_ds.supplier_bill

                        try:
                            gbp = disp_supplier_bill['net-gbp']
                        except KeyError:
                            disp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                site_ds.mpan_core + \
                                ' the virtual bill ' + \
                                str(disp_supplier_bill) + \
                                ' from the contract ' + \
                                disp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        month_data['used-gbp'] = \
                            month_data['displaced-gbp'] = \
                            site_ds.supplier_bill['net-gbp']

                        out = [
                            None, None, displaced_era.make_meter_category(),
                            'displaced', None, None, None, None, site.code,
                            site.name,
                            ','.join(sorted(list(group_associates))),
                            month_finish] + \
                            [month_data[t] for t in summary_titles]

                        sup_tab.writerow(out)
                        for k, v in month_data.items():
                            site_month_data[k] += v
                    for i, (
                            order, imp_mpan_core, exp_mpan_core, imp_ss,
                            exp_ss) in enumerate(sorted(calcs, key=str)):
                        if imp_ss is None:
                            era = exp_ss.era
                        else:
                            era = imp_ss.era
                        supply = era.supply
                        source = supply.source
                        source_code = source.code
                        site_sources.add(source_code)
                        month_data = {}
                        for name in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'displaced', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for sname in ('kwh', 'gbp'):
                                month_data[name + '-' + sname] = 0

                        if source_code == 'net':
                            delts = imp_net_delts
                        elif source_code == 'gen':
                            delts = imp_gen_delts
                        else:
                            delts = []

                        if len(delts) > 0 and imp_ss is not None:
                            for hh in imp_ss.hh_data:
                                diff = hh['msp-kwh'] + delts[hh['start-date']]
                                if diff < 0:
                                    # keep the unapplied remainder before zeroing this hh
                                    delts[hh['start-date']] = diff
                                    hh['msp-kwh'] = 0
                                    hh['msp-kw'] = 0
                                else:
                                    hh['msp-kwh'] += delts[hh['start-date']]
                                    hh['msp-kw'] += hh['msp-kwh'] / 2
                                    del delts[hh['start-date']]

                            left_kwh = sum(delts.values())
                            if left_kwh > 0:
                                first_hh = imp_ss.hh_data[0]
                                first_hh['msp-kwh'] += left_kwh
                                first_hh['msp-kw'] += left_kwh / 2

                        imp_supplier_contract = era.imp_supplier_contract
                        if imp_supplier_contract is not None:
                            import_vb_function = contract_func(
                                report_context, imp_supplier_contract,
                                'virtual_bill', None)
                            if import_vb_function is None:
                                raise BadRequest(
                                    "The supplier contract " +
                                    imp_supplier_contract.name +
                                    " doesn't have the virtual_bill() "
                                    "function.")
                            import_vb_function(imp_ss)
                            imp_supplier_bill = imp_ss.supplier_bill

                            try:
                                gbp = imp_supplier_bill['net-gbp']
                            except KeyError:
                                imp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] -= gbp

                            kwh = sum(
                                hh['msp-kwh'] for hh in imp_ss.hh_data)

                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] -= kwh
                            elif source_code in ('gen', 'gen-net'):
                                month_data['import-gen-kwh'] += kwh

                        exp_supplier_contract = era.exp_supplier_contract
                        if exp_supplier_contract is None:
                            kwh = sess.query(
                                func.coalesce(
                                    func.sum(
                                        cast(HhDatum.value, Float)), 0)). \
                                join(Channel).filter(
                                    Channel.era == era,
                                    Channel.channel_type == 'ACTIVE',
                                    Channel.imp_related == false()).scalar()
                            if source_code == 'gen':
                                month_data['export-net-kwh'] += kwh
                        else:
                            export_vb_function = contract_func(
                                report_context, exp_supplier_contract,
                                'virtual_bill', None)
                            export_vb_function(exp_ss)

                            exp_supplier_bill = exp_ss.supplier_bill
                            try:
                                gbp = exp_supplier_bill['net-gbp']
                            except KeyError:
                                exp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    exp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(exp_supplier_bill) + \
                                    ' from the contract ' + \
                                    exp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'

                            kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)

                            if source_code in ('net', 'gen-net'):
                                month_data['export-net-kwh'] += kwh
                                month_data['export-net-gbp'] += gbp
                            elif source_code in \
                                    ('3rd-party', '3rd-party-reverse'):
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-kwh'] -= kwh
                                month_data['used-gbp'] -= gbp
                            elif source_code == 'gen':
                                month_data['export-gen-kwh'] += kwh

                        sss = exp_ss if imp_ss is None else imp_ss
                        dc_contract = era.hhdc_contract
                        sss.contract_func(
                            dc_contract, 'virtual_bill')(sss)
                        dc_bill = sss.dc_bill
                        gbp = dc_bill['net-gbp']

                        mop_contract = era.mop_contract
                        mop_bill_function = sss.contract_func(
                            mop_contract, 'virtual_bill')
                        mop_bill_function(sss)
                        mop_bill = sss.mop_bill
                        gbp += mop_bill['net-gbp']

                        if source_code in ('3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-gbp'] += gbp
                        else:
                            month_data['import-net-gbp'] += gbp
                        month_data['used-gbp'] += gbp

                        if source_code in ('gen', 'gen-net'):
                            generator_type = supply.generator_type.code
                            site_gen_types.add(generator_type)
                        else:
                            generator_type = None

                        sup_category = era.make_meter_category()
                        if CATEGORY_ORDER[site_category] < \
                                CATEGORY_ORDER[sup_category]:
                            site_category = sup_category

                        for bill in sess.query(Bill).filter(
                                Bill.supply == supply,
                                Bill.start_date <= sss.finish_date,
                                Bill.finish_date >= sss.start_date):
                            bill_start = bill.start_date
                            bill_finish = bill.finish_date
                            bill_duration = (
                                bill_finish - bill_start).total_seconds() + \
                                (30 * 60)
                            overlap_duration = (
                                min(bill_finish, sss.finish_date) -
                                max(bill_start, sss.start_date)
                                ).total_seconds() + (30 * 60)
                            overlap_proportion = \
                                float(overlap_duration) / bill_duration
                            month_data['billed-import-net-kwh'] += \
                                overlap_proportion * float(bill.kwh)
                            month_data['billed-import-net-gbp'] += \
                                overlap_proportion * float(bill.net)

                        out = [
                            era.imp_mpan_core, era.exp_mpan_core,
                            sup_category, source_code,
                            generator_type, supply.name, era.msn, era.pc.code,
                            site.code, site.name,
                            ','.join(sorted(list(site_associates))),
                            month_finish] + [
                            month_data[t] for t in summary_titles] + [None] + [
                            (mop_bill[t] if t in mop_bill else None)
                            for t in title_dict['mop']] + [None] + \
                            [(dc_bill[t] if t in dc_bill else None)
                                for t in title_dict['dc']]
                        if imp_supplier_contract is None:
                            out += [None] * \
                                (len(title_dict['imp-supplier']) + 1)
                        else:
                            out += [None] + [
                                (
                                    imp_supplier_bill[t]
                                    if t in imp_supplier_bill else None)
                                for t in title_dict['imp-supplier']]
                        if exp_supplier_contract is not None:
                            out += [None] + [
                                (
                                    exp_supplier_bill[t]
                                    if t in exp_supplier_bill else None)
                                for t in title_dict['exp-supplier']]

                        for k, v in month_data.items():
                            site_month_data[k] += v
                        sup_tab.writerow(out)

                group_tab.writerow(
                    [
                        site.code, site.name,
                        ','.join(sorted(list(site_associates))),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
                sess.rollback()

            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    except Exception:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    finally:
        if sess is not None:
            sess.close()
        try:
            f.close()
            rf.close()
            os.rename(running_name, finished_name)
        except Exception:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
Example #23
def date_to_day_frame(date: datetime) -> (datetime, int):
    # This function is the inverse of `day_frame_to_date`
    day = date.replace(hour=0, minute=0, second=0, microsecond=0)  # zero seconds/microseconds too
    frame = (date - day).seconds // 300
    return day, frame
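A hypothetical day_frame_to_date counterpart (not shown in the source) that round-trips with the fixed version above for times on a 5-minute boundary:

from datetime import datetime, timedelta

def day_frame_to_date(day: datetime, frame: int) -> datetime:
    # each frame is 300 seconds (5 minutes) past midnight
    return day + timedelta(seconds=300 * frame)

dt = datetime(2024, 5, 1, 13, 25)
day, frame = date_to_day_frame(dt)  # (datetime(2024, 5, 1, 0, 0), 161)
assert day_frame_to_date(day, frame) == dt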
Example #24
def local_time(date: datetime) -> str:
    if date.tzinfo is None:
        return date.replace(tzinfo=tz.tzutc()).astimezone(
            tz.tzlocal()).strftime("%Y-%m-%dT%H:%M:%S")
    else:
        return date.astimezone(tz.tzlocal()).strftime("%Y-%m-%dT%H:%M:%S")
Example #25
File: zas.py Project: efenka/ZAS
    def _snapshot_name(self, snapshot_creation: datetime.datetime):
        return "%s%s" % (self.snapshot_prefix, snapshot_creation.replace(second=0, microsecond=0).isoformat())
Example #26
def single_backtest(
        vt_symbol: str,
        interval: str,
        capital: int,
        start_date: datetime,
        end_date: datetime,
        real_start: datetime,
        strategy_class: type,
        strategy_params: dict,
        is_last: bool = False) -> Tuple[pd.DataFrame, pd.DataFrame, datetime]:
    """"""
    commodity = strip_digt(vt_symbol)
    size = future_basic_data.loc[commodity]['size']
    pricetick = future_basic_data.loc[commodity]['pricetick']

    end_date = end_date.replace(hour=16)
    real_end_date = end_date if is_last else datetime.now()

    engine = SegBacktestingEngine()
    engine.set_parameters(
        vt_symbol=vt_symbol,
        interval=interval,
        start=start_date,
        end=real_end_date,
        rate=0,
        slippage=0,
        size=size,
        pricetick=pricetick,
        capital=capital,
    )
    engine.add_strategy(strategy_class, strategy_params)

    # print(engine.vt_symbol, engine.start, engine.end, type(engine.start), type(engine.end))
    engine.load_data()
    engine.run_backtesting(real_start)

    # from copy import copy
    # copy_trades = copy(engine.get_all_trades())

    # before calculating daily pnl, clear open trades after the end date
    pop_list = clear_open_trade_after_deadline(engine.get_all_trades(),
                                               end_date)
    if pop_list:
        [engine.trades.pop(trade_id) for trade_id in pop_list]

    # check the trade result after popping the cleanup trades
    if not engine.get_all_trades():
        print("Single-segment backtest produced no trades!")
        # print(copy_trades)
        return

    last_trade_dt = engine.get_all_trades()[-1].datetime
    trade_df = vt_trade_to_df(engine.get_all_trades())

    # check that the last trade is closed, except for the final segment's contract
    if not is_last and trade_df.iloc[-1].offset != '平':  # '平' means 'close'
        print("Single-segment backtest: trades could not be closed before contract expiry")
        # print('None closed:', last_trade_dt, vt_symbol)
        # for trade in copy_trades:
        #     print(trade.datetime, trade.direction, trade.vt_symbol, trade.offset, trade.price, trade.volume)
        return

    # calculate daily pnl
    pnl_df = engine.calculate_result()

    # remove daily pnl after the last trade if it happened after the end date
    # print('last trade:', last_trade_dt, 'end_date:', end_date)
    # print('processed last trade:', process_last_trade_dt(last_trade_dt))
    end_dt = process_last_trade_dt(
        last_trade_dt) if last_trade_dt > end_date else end_date
    # print('new start:', end_dt)
    pnl_df = pnl_df[:end_dt.date()].copy()

    return pnl_df, trade_df, end_dt
Example No. 27
 def __init__(self, when: datetime, raw: bytes = None) -> None:
     super().__init__()
     if when.tzinfo is None:
         when = when.replace(tzinfo=self.get_local_tzinfo())
     self.when = when
     self._raw = raw
Example No. 28
 def prepare_period(self, config: 'DocumentDigestConfig', dst_user: User, run_date: datetime.datetime) \
         -> Tuple[datetime.datetime, datetime.datetime]:
     day_start = run_date.replace(hour=0, minute=0, second=0, microsecond=0)
     return day_start.astimezone(
         dst_user.get_time_zone()), run_date.astimezone(
             dst_user.get_time_zone())
Example No. 29
def parse_time(date_time: datetime):
    # replace() returns a new object, so the result must be assigned
    date_time = date_time.replace(tzinfo=dateutil.tz.tzlocal())
    return date_time.strftime('%Y-%m-%d %H:%M:%S')
Example No. 30
def end_of_month(_time: datetime.datetime):
    if _time.month == 12:
        return _time.replace(day=31)
    return _time.replace(month=_time.month + 1,
                         day=1) - datetime.timedelta(days=1)
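A sanity check of both branches (plain standard-library usage; note the input's time-of-day is preserved):

import datetime

assert end_of_month(datetime.datetime(2020, 2, 10)) == datetime.datetime(2020, 2, 29)   # leap year
assert end_of_month(datetime.datetime(2021, 12, 25)) == datetime.datetime(2021, 12, 31)  # December branch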
Example No. 31
def round_date_to_seconds(
    date: datetime.datetime, ) -> datetime.datetime:  # pragma: no cover
    if date.microsecond >= 500000:
        date = date + datetime.timedelta(seconds=1)
    return date.replace(microsecond=0)
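Two cases around the half-second boundary, showing that the function rounds half up:

import datetime

assert round_date_to_seconds(datetime.datetime(2021, 1, 1, 0, 0, 10, 500000)) == datetime.datetime(2021, 1, 1, 0, 0, 11)
assert round_date_to_seconds(datetime.datetime(2021, 1, 1, 0, 0, 10, 499999)) == datetime.datetime(2021, 1, 1, 0, 0, 10)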
Example No. 32
def from_tz_to_tz(dt: datetime, from_tz: str = "UTC", to_tz: str = None):
    # pytz zones must be attached with localize(); replace(tzinfo=...) would
    # pick up the zone's raw LMT offset and skew the result by several minutes
    dt = pytz.timezone(from_tz).localize(dt)
    return dt.astimezone(pytz.timezone(to_tz))
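Usage sketch, assuming the pytz package is installed; 12:00 UTC falls at 14:00 in Paris during summer time:

import pytz
from datetime import datetime

paris = from_tz_to_tz(datetime(2021, 7, 1, 12, 0), from_tz="UTC", to_tz="Europe/Paris")
assert paris.hour == 14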
Example No. 33
async def test_browse_media_browse_whole_month_december(
        hass: HomeAssistant, ufp: MockUFPFixture, doorbell: Camera,
        fixed_now: datetime):
    """Test events for a specific day."""

    fixed_now = fixed_now.replace(month=12)
    last_month = fixed_now.replace(day=1) - timedelta(days=1)
    ufp.api.bootstrap._recording_start = last_month

    ufp.api.get_bootstrap = AsyncMock(return_value=ufp.api.bootstrap)
    await init_entry(hass, ufp, [doorbell], regenerate_ids=False)

    event1 = Event(
        id="test_event_id",
        type=EventType.SMART_DETECT,
        start=fixed_now - timedelta(seconds=3663),
        end=fixed_now,
        score=100,
        smart_detect_types=[SmartDetectObjectType.PERSON],
        smart_detect_event_ids=[],
        camera_id=doorbell.id,
    )
    event1._api = ufp.api
    event2 = Event(
        id="test_event_id2",
        type=EventType.MOTION,
        start=fixed_now - timedelta(seconds=20),
        end=fixed_now,
        score=100,
        smart_detect_types=[],
        smart_detect_event_ids=["test_event_id"],
        camera_id=doorbell.id,
    )
    event2._api = ufp.api
    event3 = Event(
        id="test_event_id3",
        type=EventType.MOTION,
        start=fixed_now - timedelta(seconds=20),
        end=fixed_now,
        score=100,
        smart_detect_types=[],
        smart_detect_event_ids=[],
        camera_id="other_camera",
    )
    event3._api = ufp.api
    event4 = Event(
        id="test_event_id4",
        type=EventType.MOTION,
        start=fixed_now - timedelta(seconds=20),
        end=None,
        score=100,
        smart_detect_types=[],
        smart_detect_event_ids=[],
        camera_id=doorbell.id,
    )
    event4._api = ufp.api

    ufp.api.get_events_raw = AsyncMock(return_value=[
        event1.unifi_dict(),
        event2.unifi_dict(),
        event3.unifi_dict(),
        event4.unifi_dict(),
    ])

    base_id = (
        f"test_id:browse:{doorbell.id}:all:range:{fixed_now.year}:{fixed_now.month}:all"
    )
    source = await async_get_media_source(hass)
    media_item = MediaSourceItem(hass, DOMAIN, base_id, None)

    browse = await source.async_browse_media(media_item)

    assert (
        browse.title ==
        f"UnifiProtect > {doorbell.name} > All Events > {fixed_now.strftime('%B %Y')} > Whole Month (1)"
    )
    assert browse.identifier == base_id
    assert len(browse.children) == 1
    assert browse.children[0].identifier == "test_id:event:test_event_id"
Example No. 34
def _to_utc_naive(date: datetime) -> datetime:
    """Переводит дату в UTC и делает ее наивной."""
    date = date.astimezone(timezone.utc)
    return date.replace(tzinfo=None)
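Example with a fixed-offset zone: 15:00 at UTC+3 is 12:00 UTC, returned naive (a naive input would be interpreted in the local zone by astimezone):

from datetime import datetime, timezone, timedelta

msk = timezone(timedelta(hours=3))
assert _to_utc_naive(datetime(2021, 1, 1, 15, 0, tzinfo=msk)) == datetime(2021, 1, 1, 12, 0)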
Example No. 35
def dday(value: datetime):
	today = datetime_now().replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=config.TIMEZONE)
	start = value.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=config.TIMEZONE)
	return (today - start).days
Example No. 36
    async def handle_buy_side(
        self,
        symbols_position: Dict[str, float],
        data_loader: DataLoader,
        now: datetime,
        trade_fee_precentage: float,
    ) -> Dict[str, Dict]:
        actions = {}

        for symbol, position in symbols_position.items():
            if position != 0:
                continue

            current_price = data_loader[symbol].close[now]
            tlog(f"{symbol} -> {current_price}")
            resampled_close = self.calc_close(symbol, data_loader, now)
            bband = BBANDS(
                resampled_close,
                timeperiod=7,
                nbdevdn=1,
                nbdevup=1,
                matype=MA_Type.EMA,
            )

            yesterday_lower_band = bband[2][-2]
            today_lower_band = bband[2][-1]
            yesterday_close = resampled_close[-2]

            today_open = self.data_loader[symbol].open[now.replace(
                hour=9, minute=30, second=0, microsecond=0)]

            print(
                f"\nyesterday_close < yesterday_lower_band : {yesterday_close < yesterday_lower_band}({yesterday_close} < {yesterday_lower_band})"
            )
            print(
                f"today_open > yesterday_close :{today_open > yesterday_close}({today_open} > {yesterday_close})"
            )
            print(
                f"current_price > today_lower_band :{current_price > today_lower_band}({current_price} > {today_lower_band})"
            )

            if (yesterday_close < yesterday_lower_band
                    and today_open > yesterday_close
                    and current_price > today_lower_band):
                yesterday_upper_band = bband[0][-2]
                if current_price > yesterday_upper_band:
                    return {}

                buy_indicators[symbol] = {
                    "lower_band": bband[2][-2:].tolist(),
                }
                shares_to_buy = await self.calc_qty(
                    current_price,
                    trade_fee_precentage,
                )
                tlog(
                    f"[{self.name}][{now}] Submitting buy for {shares_to_buy} shares of {symbol} at {current_price}"
                )
                tlog(f"indicators:{buy_indicators[symbol]}")
                actions[symbol] = {
                    "side": "buy",
                    "qty": str(shares_to_buy),
                    "type": "limit",
                    "limit_price": str(current_price),
                }

        return actions
Example No. 37
def add_utc_if_naive(timestamp: datetime.datetime):
    if timestamp.tzinfo is None:
        timestamp = timestamp.replace(tzinfo=datetime.timezone.utc)

    return timestamp
Example No. 38
 def _fmt_ts(ts: datetime):
     return ts.replace(tzinfo=None).isoformat(sep="T", timespec="seconds")
Example No. 39
def utc_to_local(utc_dt: datetime) -> datetime:
    return utc_dt.replace(tzinfo=timezone.utc).astimezone(tz=None)
Example No. 40
def utctimestamp(utcdt: datetime):
    return utcdt.replace(tzinfo=timezone.utc).timestamp()
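The epoch itself maps to zero, since the naive input is labelled UTC before the timestamp is taken:

from datetime import datetime

assert utctimestamp(datetime(1970, 1, 1)) == 0.0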
Example No. 41
def find_next_time_expression_time(now: dt.datetime, seconds: List[int],
                                   minutes: List[int],
                                   hours: List[int]) -> dt.datetime:
    """Find the next datetime from now for which the time expression matches.

    The algorithm looks at each time unit separately and tries to find the
    next one that matches for each. If any of them would roll over, all
    time units below that are reset to the first matching value.

    Timezones are also handled (the tzinfo of the now object is used),
    including daylight saving time.
    """
    if not seconds or not minutes or not hours:
        raise ValueError(
            "Cannot find a next time: Time expression never matches!")

    def _lower_bound(arr: List[int], cmp: int) -> Optional[int]:
        """Return the first value in arr greater or equal to cmp.

        Return None if no such value exists.
        """
        left = 0
        right = len(arr)
        while left < right:
            mid = (left + right) // 2
            if arr[mid] < cmp:
                left = mid + 1
            else:
                right = mid

        if left == len(arr):
            return None
        return arr[left]

    result = now.replace(microsecond=0)

    # Match next second
    next_second = _lower_bound(seconds, result.second)
    if next_second is None:
        # No second to match in this minute. Roll-over to next minute.
        next_second = seconds[0]
        result += dt.timedelta(minutes=1)

    result = result.replace(second=next_second)

    # Match next minute
    next_minute = _lower_bound(minutes, result.minute)
    if next_minute != result.minute:
        # We're in the next minute. Seconds need to be reset.
        result = result.replace(second=seconds[0])

    if next_minute is None:
        # No minute to match in this hour. Roll-over to next hour.
        next_minute = minutes[0]
        result += dt.timedelta(hours=1)

    result = result.replace(minute=next_minute)

    # Match next hour
    next_hour = _lower_bound(hours, result.hour)
    if next_hour != result.hour:
        # We're in the next hour. Seconds and minutes need to be reset.
        result = result.replace(second=seconds[0], minute=minutes[0])

    if next_hour is None:
        # No hour to match in this day. Roll-over to next day.
        next_hour = hours[0]
        result += dt.timedelta(days=1)

    result = result.replace(hour=next_hour)

    if result.tzinfo is None:
        return result

    # Now we need to handle timezones. We will make this datetime object
    # "naive" first and then re-convert it to the target timezone.
    # This is so that we can call pytz's localize and handle DST changes.
    tzinfo: pytzinfo.DstTzInfo = result.tzinfo
    result = result.replace(tzinfo=None)

    try:
        result = tzinfo.localize(result, is_dst=None)
    except pytzexceptions.AmbiguousTimeError:
        # This happens when we're leaving daylight saving time and local
        # clocks are rolled back. In this case, we want to trigger
        # on both the DST and non-DST time. So when "now" is in the DST
        # use the DST-on time, and if not, use the DST-off time.
        use_dst = bool(now.dst())
        result = tzinfo.localize(result, is_dst=use_dst)
    except pytzexceptions.NonExistentTimeError:
        # This happens when we're entering daylight saving time and local
        # clocks are rolled forward, thus there are local times that do
        # not exist. In this case, we want to trigger on the next time
        # that *does* exist.
        # In the worst case, this will run through all the seconds in the
        # time shift, but that's max 3600 operations for once per year
        result = result.replace(tzinfo=tzinfo) + dt.timedelta(seconds=1)
        return find_next_time_expression_time(result, seconds, minutes, hours)

    result_dst = cast(dt.timedelta, result.dst())
    now_dst = cast(dt.timedelta, now.dst())
    if result_dst >= now_dst:
        return result

    # Another edge-case when leaving DST:
    # When now is in DST and ambiguous *and* the next trigger time we *should*
    # trigger is ambiguous and outside DST, the excepts above won't catch it.
    # For example: if triggering on 2:30 and now is 28.10.2018 2:30 (in DST)
    # we should trigger next on 28.10.2018 2:30 (out of DST), but our
    # algorithm above would produce 29.10.2018 2:30 (out of DST)

    # Step 1: Check if now is ambiguous
    try:
        tzinfo.localize(now.replace(tzinfo=None), is_dst=None)
        return result
    except pytzexceptions.AmbiguousTimeError:
        pass

    # Step 2: Check if result of (now - DST) is ambiguous.
    check = now - now_dst
    check_result = find_next_time_expression_time(check, seconds, minutes,
                                                  hours)
    try:
        tzinfo.localize(check_result.replace(tzinfo=None), is_dst=None)
        return result
    except pytzexceptions.AmbiguousTimeError:
        pass

    # OK, edge case does apply. We must override the DST to DST-off
    check_result = tzinfo.localize(check_result.replace(tzinfo=None),
                                   is_dst=False)
    return check_result
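Usage sketch for the function above with a cron-like "every half hour" expression; the input is naive, so the timezone handling is skipped:

import datetime as dt

now = dt.datetime(2021, 5, 1, 10, 12, 34)
nxt = find_next_time_expression_time(now, seconds=[0], minutes=[0, 30], hours=list(range(24)))
assert nxt == dt.datetime(2021, 5, 1, 10, 30, 0)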
Example No. 42
 def unconvert(value: dt.datetime) -> bytes:
     return b"%a" % str(value.replace(microsecond=0))
Example No. 43
 def _dt_evo_to_aware(dt_naive: dt, utc_offset: timedelta) -> dt:
     dt_aware = dt_naive.replace(tzinfo=dt_util.UTC) - utc_offset
     return dt_util.as_local(dt_aware)
Example No. 44
def datetime_to_epoch_milliseconds(dt: datetime) -> int:
    """Convert datetime object to epoch milliseconds."""
    if not dt.tzinfo:
        dt = dt.replace(tzinfo=timezone.utc)
    return int(dt.timestamp() * 1000)
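Boundary checks; per the guard above, a naive input is assumed to already be in UTC:

from datetime import datetime, timezone

assert datetime_to_epoch_milliseconds(datetime(1970, 1, 1, tzinfo=timezone.utc)) == 0
assert datetime_to_epoch_milliseconds(datetime(1970, 1, 2)) == 86_400_000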
Example No. 45
def safe_aware(dt: datetime) -> datetime:
    # attach UTC only when dt is naive; aware datetimes pass through unchanged
    if dt.tzinfo is None:
        return dt.replace(tzinfo=pytz.utc)
    return dt
Example No. 47
    def _create_solar_radiation_cube(
        self,
        solar_radiation_data: ndarray,
        target_grid: Cube,
        time: datetime,
        accumulation_period: int,
        at_mean_sea_level: bool,
        new_title: Optional[str],
    ) -> Cube:
        """Create a cube of accumulated clearsky solar radiation.

        Args:
            solar_radiation_data:
                Solar radiation data.
            target_grid:
                Cube containing spatial grid over which the solar radiation
                has been calculated.
            time:
                Time corresponding to the solar radiation accumulation.
            accumulation_period:
                Time window over which solar radiation has been accumulated,
                specified in hours.
            at_mean_sea_level:
                Flag denoting whether solar radiation is defined at mean-sea-level
                or at the Earth's surface. The appropriate vertical coordinate will
                be assigned accordingly.
            new_title:
                New title for the output cube attributes. If None, this attribute is
                left out since it has no prescribed standard.

        Returns:
            Cube containing clearsky solar radiation.
        """
        x_coord = target_grid.coord(axis="X")
        y_coord = target_grid.coord(axis="Y")

        time_lower_bounds = np.array(
            (time - timedelta(hours=accumulation_period)).replace(
                tzinfo=timezone.utc).timestamp(),
            dtype=np.int64,
        )
        time_upper_bounds = np.array(
            time.replace(tzinfo=timezone.utc).timestamp(), dtype=np.int64)

        time_coord = AuxCoord(
            time_upper_bounds,
            bounds=np.array([time_lower_bounds, time_upper_bounds]),
            standard_name="time",
            units=cf_units.Unit(
                "seconds since 1970-01-01 00:00:00 UTC",
                calendar=cf_units.CALENDAR_STANDARD,
            ),
        )

        # Add vertical coordinate to indicate whether solar radiation is evaluated at mean_sea_level
        # or at altitude.
        if at_mean_sea_level:
            vertical_coord = "altitude"
        else:
            vertical_coord = "height"
        z_coord = AuxCoord(
            np.float32(0.0),
            standard_name=vertical_coord,
            units="m",
            attributes={"positive": "up"},
        )

        attrs = generate_mandatory_attributes([target_grid])
        attrs["source"] = "IMPROVER"
        if new_title is not None:
            attrs["title"] = new_title
        else:
            attrs.pop("title", None)

        solar_radiation_cube = Cube(
            solar_radiation_data.astype(np.float32),
            long_name=CLEARSKY_SOLAR_RADIATION_CF_NAME,
            units="W s m-2",
            dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)],
            aux_coords_and_dims=[(time_coord, None), (z_coord, None)],
            attributes=attrs,
        )

        return solar_radiation_cube
Example No. 48
def equalize_dbs_for_a_taxpayer(forcing_period=False,forcing_execution=None,taxpayer=None,process_logger=None,process_params=None):
	try:
		indent = '    '
		log = {
			'before' : {},
			'after' : {
				'stored' : 0,
				'errors' : 0
			}
		}# End of log
		# Get params:
		identifier = taxpayer['identifier']
		created_at = taxpayer['created_at']
		created_at_lower_limit = Datetime.now() - relativedelta(months=1)
		force_start_date = True
		# process_logger.info(2*LOG_INDENT + 'FORCING START DATE')
		if created_at > created_at_lower_limit:
		begin_date = taxpayer['start_date']  # since the taxpayer claims to be synchronized
		process_logger.info(2*LOG_INDENT + 'Choosing start date as begin date')
		else:
			current_date = Datetime.now()
			current_year = current_date.year
			begin_date = Datetime(current_year,1,1)# Jan 1st of the current year
			process_logger.info(2*LOG_INDENT + 'Choosing ' + str(begin_date) + ' as begin date')
		if forcing_period:
			year = int(process_params['year'])
			month = int(process_params['month'])
			begin_date = Datetime(year,month,1)
			begin_date = begin_date.replace(hour=0, minute=0)
			if 'all' in process_params:
				end_date = Datetime.now()# Until now
			else:
				end_date = begin_date + relativedelta(months=1)
			process_logger.info(2*LOG_INDENT + 'Forcing ' + str(begin_date) + ' as begin date')
			process_logger.info(2*LOG_INDENT + 'Forcing ' + str(end_date) + ' as end date')
		# year =  str(Datetime.now().year)
		# months = _Utilities.get_current_fiscal_declaration_period(_Constants.TWO_MONTHS_PERIOD)
		# begin_date = Datetime(int(year),int(months[0]),1)# Since previous month (optimization introduced on Sep 8, 2015)
		begin_date = begin_date.replace(hour=0, minute=0)
		if not forcing_period:
			end_date = Datetime.now()# Until now
		process_logger.info(2*LOG_INDENT + 'Equalizing dbs from ' + str(begin_date) + ' to ' + str(end_date))
		# Get CFDIs from Forest DB:
		cfdis_in_forest_db_count = _Utilities.get_cfdis_count_in_forest_for_this_taxpayer_at_period(taxpayer,begin_date,end_date)
		process_logger.info(2*LOG_INDENT + 'Retrieving ' + str(cfdis_in_forest_db_count) + ' from Forest DB ... ')
		cfdis_in_forest_db = _Utilities.get_cfdis_in_forest_for_this_taxpayer_at_period(taxpayer,begin_date,end_date,limit=None,logger=process_logger,dict_result=True)	
		# Get CFDIs from Corebook DB:
		try:
			cfdis_in_corebook_db_count = _Locals.get_cfdis_count_in_corebook_for_this_taxpayer_at_period(identifier,begin_date,end_date,logger=process_logger)
			process_logger.info(2*LOG_INDENT + 'Retrieving ' + str(cfdis_in_corebook_db_count) + ' from Corebook DB ... ')
			cfdis_in_corebook_db = _Locals.get_cfdis_in_corebook_for_this_taxpayer_at_period(identifier,begin_date,end_date,limit=None,logger=process_logger)	
		except Already_Handled_Exception as already_handled_exception:
			process_logger.info(2*LOG_INDENT + already_handled_exception.value)
			log['before'] = {
				'forest_db' : 0,
				'corebook_db' : 0,
				'f_but_not_in_cb' : 0,
				'cb_but_not_in_f' : 0
			}# End of log
			log['after']['errors'] = log['after']['errors'] + 1
			return log
		# Log data in db:
		log['before']['forest_db'] = len(cfdis_in_forest_db)
		log['before']['corebook_db'] = len(cfdis_in_corebook_db)
		# Get missing CFDIs in Corebook:
		process_logger.info(2*LOG_INDENT + 'Getting differences in dbs ... ')
		# _Utilities.log_cfdis_uuids(title='Forest CFDIs: ',indent=2*LOG_INDENT,cfdis=cfdis_in_forest_db,logger=process_logger,dict=True)
		# _Utilities.log_cfdis_uuids(title='Corebook CFDIs: ',indent=2*LOG_INDENT,cfdis=cfdis_in_corebook_db,logger=process_logger,dict=True)
		missing_cfdis = _Locals.get_missing_cfdis_in_each_db(cfdis_in_forest_db,cfdis_in_corebook_db,logger=process_logger)
		missing_cfdis_in_corebook_db = missing_cfdis['in_corebook_db']
		missing_cfdis_in_forest_db = missing_cfdis['in_forest_db']
		cfdis_with_different_status = missing_cfdis['cfdis_with_different_status']
		# General status:
		log['before']['f_but_not_in_cb'] = len(missing_cfdis_in_corebook_db)
		log['before']['cb_but_not_in_f'] = len(missing_cfdis_in_forest_db)
		log['before']['cfdis_with_different_status'] = cfdis_with_different_status
		process_logger.info(2*LOG_INDENT + 'DBs Status: ')
		process_logger.info(3*LOG_INDENT + 'Forest DB   -> ' + str(log['before']['forest_db']))
		process_logger.info(3*LOG_INDENT + 'Corebook DB -> ' + str(log['before']['corebook_db']))
		process_logger.info(3*LOG_INDENT + 'F not in CB -> ' + str(log['before']['f_but_not_in_cb'] - log['before']['cfdis_with_different_status']))
		process_logger.info(3*LOG_INDENT + 'CB not in F -> ' + str(log['before']['cb_but_not_in_f']))
		process_logger.info(3*LOG_INDENT + 'Diff Status -> ' + str(log['before']['cfdis_with_different_status']))
		if len(missing_cfdis_in_corebook_db) > 0:
			# _Utilities.log_cfdis_uuids(title='Missing CFDIs: ',indent=2*LOG_INDENT,cfdis=missing_cfdis_in_corebook_db,logger=process_logger)
			# log['after']['stored'] = len(missing_cfdis_in_corebook_db)
			# log['after']['errors'] = 0
			cb_summary = _Locals.store_missing_cfdis_in_corebook(missing_cfdis_in_corebook_db,identifier,logger=process_logger,limit=None)
			log['after']['stored'] = cb_summary['stored']
			log['after']['errors'] = cb_summary['errors']
			process_logger.info(2*LOG_INDENT + 'Equalization Summary: ')
			process_logger.info(3*LOG_INDENT + 'CFDIs stored in CB  -> ' + str(log['after']['stored']))
			process_logger.info(3*LOG_INDENT + 'Errors occurred     -> ' + str(log['after']['errors']))
		if forcing_execution:
			process_logger.info(3*LOG_INDENT + 'Sending telegram notification ... ')
			message = 'I already equalized this dude: ' + taxpayer['identifier']
			_Utilities.send_message_to_forest_telegram_contacts(message,logger=process_logger)
		return log
	except Already_Handled_Exception as already_handled_exception:
		raise already_handled_exception
	except Exception as e:
		equalization_logger.critical(str(e))  # Python 3 exceptions have no .message attribute
		already_handled_exception = Already_Handled_Exception(str(e))
		raise already_handled_exception
Example No. 49
def test_datetime_headers(header: str, value: datetime) -> None:
    response = Response(b'Body')
    value = value.replace(microsecond=0)
    setattr(response, header, value)
    assert response.headers.get(header.title().replace('_', '-'))
    assert getattr(response, header) == value
Example No. 50
    def get_historical_ticker_price(self, ticker_symbol: str, date: datetime):
        """
        Get historic ticker price of a specific coin
        """
        target_date = date.replace(second=0,
                                   microsecond=0).strftime("%d %b %Y %H:%M:%S")
        key = f"{ticker_symbol} - {target_date}"
        val = cache.get(key, None)
        if val == "Missing":
            return None
        if val is None:
            end_date = date.replace(second=0,
                                    microsecond=0) + timedelta(minutes=1000)
            if end_date > datetime.now(timezone.utc):
                end_date = datetime.now(timezone.utc)
            end_date_str = end_date.strftime("%d %b %Y %H:%M:%S")
            self.logger.info(
                f"Fetching prices for {ticker_symbol} between {date} and {end_date_str}",
                False)

            last_day = datetime.now(timezone.utc) - timedelta(days=1)
            if date >= last_day or end_date >= last_day:
                try:
                    data = self.client.get_historical_klines(ticker_symbol,
                                                             "1m",
                                                             target_date,
                                                             end_date_str,
                                                             limit=1000)
                    for kline in data:
                        kl_date = datetime.utcfromtimestamp(kline[0] / 1000)
                        kl_datestr = kl_date.strftime("%d %b %Y %H:%M:%S")
                        kl_price = float(kline[1])
                        cache[f"{ticker_symbol} - {kl_datestr}"] = kl_price
                except BinanceAPIException as e:
                    if e.code == -1121:  # invalid symbol
                        self.get_historical_klines_from_api(ticker_symbol,
                                                            "1m",
                                                            target_date,
                                                            end_date_str,
                                                            limit=1000)
                    else:
                        raise e
            else:
                self.get_historical_klines_from_api(ticker_symbol,
                                                    "1m",
                                                    target_date,
                                                    end_date_str,
                                                    limit=1000)
            val = cache.get(key, None)
            if val is None:
                cache.set(key, "Missing")
                current_date = date + timedelta(minutes=1)
                while current_date <= end_date:
                    current_date_str = current_date.strftime(
                        "%d %b %Y %H:%M:%S")
                    current_key = f"{ticker_symbol} - {current_date_str}"
                    current_val = cache.get(current_key, None)
                    if current_val is None:
                        cache.set(current_key, "Missing")
                    current_date = current_date + timedelta(minutes=1)
            if val == "Missing":
                val = None
        return val
Example No. 51
 def encode_key(ts: datetime, seq_no: int):
     int_ts = int(1000000 * ts.replace(tzinfo=timezone.utc).timestamp())
     int_ts = int_ts & KvStoreMetricsFormat.ts_mask
     seq_no = seq_no & KvStoreMetricsFormat.seq_mask
     return ((int_ts << KvStoreMetricsFormat.seq_bits) | seq_no).to_bytes(64, byteorder='big', signed=False)
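A hypothetical inverse, for illustration only, assuming KvStoreMetricsFormat defines seq_mask == (1 << seq_bits) - 1 and the same ts_mask/seq_bits constants used above:

from datetime import datetime, timezone

def decode_key(key: bytes):
    # unpack the 64-byte big-endian key back into (timestamp, sequence number)
    packed = int.from_bytes(key, byteorder='big', signed=False)
    seq_no = packed & KvStoreMetricsFormat.seq_mask  # assumed constant
    int_ts = (packed >> KvStoreMetricsFormat.seq_bits) & KvStoreMetricsFormat.ts_mask
    return datetime.fromtimestamp(int_ts / 1_000_000, tz=timezone.utc), seq_no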
Example No. 52
def utc(d: datetime):
    if d.tzinfo is None:
        return d.replace(tzinfo=tz.tzutc())
    return d.astimezone(tz.tzutc())
Example No. 53
    def __init__(
        self,
        model_date: datetime = None,
        source: str = '2ds',
        time_interval: str = 'daily',
        study_area_polygon_filename: PathLike = STUDY_AREA_POLYGON_FILENAME,
        source_url: str = None,
        use_defaults: bool = True,
    ):
        """
        Creates new observation object from datetime and given model parameters.
        :param model_date: model run date
        :param source: either '2ds' or '3dz'
        :param time_interval: time interval of model output
        :param study_area_polygon_filename: filename of vector file containing study area boundary
        :param source_url: directory containing NetCDF files
        :param use_defaults: whether to fall back to default source URLs if the provided one does not exist
        """

        if not isinstance(study_area_polygon_filename, Path):
            study_area_polygon_filename = Path(study_area_polygon_filename)

        if model_date is None:
            model_date = datetime.now()

        if type(model_date) is date:
            self.model_time = datetime.combine(model_date, datetime.min.time())
        else:
            self.model_time = model_date.replace(hour=0,
                                                 minute=0,
                                                 second=0,
                                                 microsecond=0)

        self.source = source
        self.time_interval = time_interval

        self.study_area_polygon_filename = study_area_polygon_filename
        self.study_area_geojson = utilities.get_first_record(
            self.study_area_polygon_filename)['geometry']

        self.datasets = {}
        self.dataset_locks = {}

        date_string = f'{self.model_time:%Y%m%d}'
        date_dir = f'rtofs_global{date_string}'

        source_urls = SOURCE_URLS.copy()

        if source_url is not None:
            source_url = {'priority': source_url}
            if use_defaults:
                source_urls = {**source_url, **source_urls}  # try the provided URL first

        self.source_names = []
        if self.time_interval in ('3hrly', 'hrly', 'daily'):
            # added due to the different hourly source for nowcast and forecast
            for self.time_interval in {'hrly', '3hrly'}:
                for source_name, source_url in source_urls.items():
                    for forecast_direction, datasets in DATASET_STRUCTURE[
                            self.source].items():
                        if (forecast_direction == 'nowcast'
                                and 'nowcast' in self.datasets
                                and len(self.datasets['nowcast']) > 0) or (
                                    forecast_direction == 'forecast'
                                    and 'forecast' in self.datasets
                                    and len(self.datasets['forecast']) > 0):
                            continue

                        self.datasets[forecast_direction] = {}
                        self.dataset_locks[forecast_direction] = {}

                        for dataset_name in datasets:
                            filename = f'rtofs_glo_{self.source}_{forecast_direction}_{self.time_interval}_{dataset_name}'
                            if filename not in [
                                    'rtofs_glo_2ds_nowcast_3hrly_prog',
                                    'rtofs_glo_2ds_nowcast_3hrly_diag',
                                    'rtofs_glo_2ds_forecast_hrly_prog',
                                    'rtofs_glo_2ds_forecast_hrly_diag'
                            ]:

                                url = f'{source_url}/{date_dir}/{filename}'
                                if source_name == 'local':
                                    url = f'{url}.nc'

                                try:
                                    dataset = xarray.open_dataset(url)
                                    self.datasets[forecast_direction][
                                        dataset_name] = dataset
                                    self.dataset_locks[forecast_direction][
                                        dataset_name] = threading.Lock()
                                    self.source_names.append(source_name)
                                except OSError as error:
                                    LOGGER.warning(
                                        f'{error.__class__.__name__}: {error}')

        if (len(self.datasets['nowcast']) +
                len(self.datasets['forecast'])) > 0:
            if len(self.datasets['nowcast']) > 0:
                sample_dataset = next(iter(self.datasets['nowcast'].values()))
            else:
                sample_dataset = next(iter(self.datasets['forecast'].values()))

            self.lat = sample_dataset['lat'].values
            if not any(source_name == 'NCEP'
                       for source_name in self.source_names):
                self.lon = sample_dataset['lon']
                self.raw_lon = self.lon
            else:
                # for some reason RTOFS from NCEP has longitude values shifted by 360
                self.raw_lon = sample_dataset['lon'].values
                self.lon = self.raw_lon - 180 - numpy.min(self.raw_lon)

            lat_pixel_size = numpy.mean(numpy.diff(sample_dataset['lat']))
            lon_pixel_size = numpy.mean(numpy.diff(sample_dataset['lon']))

            self.global_north = numpy.max(self.lat)
            self.global_west = numpy.min(self.lon)

            self.global_grid_transform = rasterio.transform.from_origin(
                self.global_west, self.global_north, lon_pixel_size,
                lat_pixel_size)

            (
                self.study_area_west,
                self.study_area_south,
                self.study_area_east,
                self.study_area_north,
            ) = geometry.shape(self.study_area_geojson).bounds

            self.study_area_transform = rasterio.transform.from_origin(
                self.study_area_west, self.study_area_north, lon_pixel_size,
                lat_pixel_size)
        else:
            raise PyOFS.NoDataError(
                f'No RTOFS datasets found for {self.model_time}.')
Example No. 54
    def data(self,
             variable: str,
             time: datetime,
             crop: bool = True) -> xarray.DataArray:
        """
        Get data of specified variable at specified hour.
        :param variable: name of variable to retrieve
        :param time: time from which to retrieve data
        :param crop: whether to crop to study area extent
        :return: array of data
        """

        if time >= self.model_time:
            direction = 'forecast'
        else:
            direction = 'nowcast'

        if self.time_interval == 'daily':
            time = time.replace(hour=0, minute=0, second=0, microsecond=0)

        if direction in DATASET_STRUCTURE[self.source]:
            if len(self.datasets[direction]) > 0:
                if variable in DATA_VARIABLES:
                    datasets = DATA_VARIABLES[variable][self.source]
                    dataset_name, variable_name = next(iter(datasets.items()))

                    with self.dataset_locks[direction][dataset_name]:
                        data_variable = self.datasets[direction][dataset_name][
                            DATA_VARIABLES[variable][
                                self.source][dataset_name]]

                        # TODO study areas that cross over longitude +74.16 may have problems here
                        if crop:
                            selection = data_variable.sel(
                                lon=slice(self.study_area_west + 360,
                                          self.study_area_east + 360),
                                lat=slice(self.study_area_south,
                                          self.study_area_north),
                            )
                        else:
                            western_selection = data_variable.sel(
                                lon=slice(180, numpy.max(self.raw_lon)),
                                lat=slice(numpy.min(self.lat),
                                          numpy.max(self.lat)),
                            )
                            eastern_selection = data_variable.sel(
                                lon=slice(numpy.min(self.raw_lon), 180),
                                lat=slice(numpy.min(self.lat),
                                          numpy.max(self.lat)),
                            )
                            selection = numpy.concatenate(
                                (western_selection, eastern_selection), axis=1)

                        # resample the 3-hourly forecast / 1-hourly nowcast files to daily means
                        selections = selection.resample(time='D').mean()
                        selections = selections.sel(time=time,
                                                    method='nearest')

                        # apply a fixed offset correction to sea surface height
                        if variable == 'ssh':
                            selections = selections + 0.25

                        selections = numpy.flip(selections.squeeze(), axis=0)

                        if selections.size > 0:
                            return selections
                        else:
                            raise PyOFS.NoDataError(
                                f'no RTOFS data for {time} within the cropped area ({self.study_area_west:.2f}, {self.study_area_south:.2f}), ({self.study_area_east:.2f}, {self.study_area_north:.2f})'
                            )
                else:
                    raise ValueError(
                        f'Variable must be one of {list(DATA_VARIABLES)}.')
            else:
                LOGGER.warning(
                    f'{direction} does not exist in RTOFS for {self.model_time:%Y%m%d}.'
                )
        else:
            raise ValueError(
                f'Direction must be one of {list(DATASET_STRUCTURE[self.source].keys())}.'
            )
Example No. 55
def reset_time(_datetime: datetime) -> datetime:
    return _datetime.replace(minute=0, hour=0, second=0, microsecond=0)
Example No. 56
    def write_rasters(
        self,
        output_dir: PathLike,
        variables: list,
        time: datetime,
        filename_prefix: str = None,
        filename_suffix: str = None,
        fill_value=LEAFLET_NODATA_VALUE,
        driver: str = 'GTiff',
        crop: bool = True,
    ):
        """
        Write averaged raster data of given variables to given output directory.
        :param output_dir: path to directory
        :param variables: variable names to use
        :param time: time from which to retrieve data
        :param filename_prefix: prefix for filenames
        :param filename_suffix: suffix for filenames
        :param fill_value: desired fill value of output
        :param driver: strings of valid GDAL driver (currently one of 'GTiff', 'GPKG', or 'AAIGrid')
        :param crop: whether to crop to study area extent
        """

        if not isinstance(output_dir, Path):
            output_dir = Path(output_dir)

        if variables is None:
            variables = DATA_VARIABLES[self.source]

        if filename_prefix is None:
            filename_prefix = 'rtofs'
        filename_suffix = f'_{filename_suffix}' if filename_suffix is not None else ''

        if self.time_interval == 'daily':
            time = time.replace(hour=0, minute=0, second=0, microsecond=0)

        time_delta = int((time - self.model_time) / timedelta(days=1))
        direction = 'forecast' if time_delta >= 0 else 'nowcast'
        time_delta_string = f'{direction[0]}{abs(time_delta) + 1 if direction == "forecast" else abs(time_delta):03}'

        variable_means = {}
        for variable in variables:
            if variable not in ['dir', 'mag']:
                try:
                    variable_means[variable] = self.data(variable, time, crop)
                except KeyError:
                    LOGGER.warning(
                        f'variable "{variable}" not found in RTOFS dataset')
                except Exception as error:
                    LOGGER.warning(error)

        variable_means = {
            variable: variable_mean.values
            for variable, variable_mean in variable_means.items()
            if variable_mean is not None
        }

        if 'dir' in variables or 'mag' in variables:
            u_name = 'ssu'
            v_name = 'ssv'

            if u_name not in variable_means:
                u_data = self.data(u_name, time, crop)
                u_data = u_data.values if u_data is not None else None
            else:
                u_data = variable_means[u_name]

            if v_name not in variable_means:
                v_data = self.data(v_name, time, crop)
                v_data = v_data.values if v_data is not None else None
            else:
                v_data = variable_means[v_name]

            if 'anim' in filename_suffix:
                variable_means['dir'] = u_data
                variable_means['mag'] = v_data

            else:
                # calculate direction and magnitude of vector in degrees (0-360) and in metres per second
                variable_means['dir'] = (numpy.arctan2(u_data, v_data) +
                                         numpy.pi) * (180 / numpy.pi)
                variable_means['mag'] = numpy.sqrt(u_data**2 + v_data**2)

        # write interpolated grids to raster files
        for variable, variable_mean in variable_means.items():
            if variable_mean is not None and variable_mean.size > 0:
                if crop:
                    transform = self.study_area_transform
                else:
                    transform = self.global_grid_transform

                if fill_value is not None:
                    variable_mean[numpy.isnan(variable_mean)] = fill_value

                gdal_args = {
                    'transform': transform,
                    'height': variable_mean.shape[0],
                    'width': variable_mean.shape[1],
                    'count': 1,
                    'dtype': rasterio.float32,
                    'crs': CRS.from_dict(OUTPUT_CRS),
                    'nodata': numpy.array([fill_value]).astype(variable_mean.dtype).item(),
                }

                if driver == 'AAIGrid':
                    file_extension = 'asc'
                    gdal_args.update({'FORCE_CELLSIZE': 'YES'})
                elif driver == 'GPKG':
                    file_extension = 'gpkg'
                else:
                    file_extension = 'tiff'
                    gdal_args.update(TIFF_CREATION_OPTIONS)

                output_filename = f'{filename_prefix}_{variable}_{self.model_time:%Y%m%d}_{time_delta_string}{filename_suffix}.{file_extension}'
                output_filename = output_dir / output_filename

                LOGGER.info(f'Writing {output_filename}')
                with rasterio.open(output_filename, 'w', driver,
                                   **gdal_args) as output_raster:
                    output_raster.write(variable_mean, 1)
                    if driver == 'GTiff':
                        output_raster.build_overviews(
                            PyOFS.overview_levels(variable_mean.shape),
                            Resampling['average'])
                        output_raster.update_tags(ns='rio_overview',
                                                  resampling='average')
Example No. 57
def localtime_format(value: datetime, fmt: str):
    return value.replace(tzinfo=config.TIMEZONE).strftime(fmt)
Example No. 58
 def trunc_datetime(dt: datetime.datetime):
     return dt.replace(microsecond=0, tzinfo=None)
Example No. 59
 def get_timestamp_delta(self, time: datetime):
     # days until the same calendar date next year (365 or 366; raises on Feb 29)
     return timedelta(days=(time.replace(year=time.year + 1) - time).days)
Example No. 60
def convert_to_UTC(dt: datetime.datetime) -> datetime.datetime:
    if dt.tzinfo is None:
        return dt.replace(tzinfo=datetime.timezone.utc)
    return dt.astimezone(datetime.timezone.utc)
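Contrast of the two branches: 07:00 at UTC-5 converts to 12:00 UTC, while a naive 07:00 is merely labelled as UTC:

import datetime

eastern = datetime.timezone(datetime.timedelta(hours=-5))
assert convert_to_UTC(datetime.datetime(2021, 1, 1, 7, 0, tzinfo=eastern)).hour == 12
assert convert_to_UTC(datetime.datetime(2021, 1, 1, 7, 0)).hour == 7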